Dataset schema (field: dtype, observed range):

status: stringclasses (1 value)
repo_name: stringclasses (31 values)
repo_url: stringclasses (31 values)
issue_id: int64 (1 to 104k)
title: stringlengths (4 to 369)
body: stringlengths (0 to 254k)
issue_url: stringlengths (37 to 56)
pull_url: stringlengths (37 to 54)
before_fix_sha: stringlengths (40 to 40)
after_fix_sha: stringlengths (40 to 40)
report_datetime: unknown
language: stringclasses (5 values)
commit_datetime: unknown
updated_file: stringlengths (4 to 188)
file_content: stringlengths (0 to 5.12M)
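Each row below repeats these fifteen fields in this order. For readers who want to hold a row programmatically, a minimal, hypothetical Java value class could mirror the schema like this; the class name, field names, and Java types are assumptions derived from the declared dtypes, and the dataset itself publishes no such type:

```java
// Hypothetical container mirroring the fifteen schema fields above.
// Names and types are assumptions, not a published API of the dataset.
public class BugFixRecord {
    public String status;          // stringclasses, 1 distinct value ("closed" in this dump)
    public String repoName;        // stringclasses, 31 distinct values
    public String repoUrl;         // stringclasses, 31 distinct values
    public long issueId;           // int64, observed range 1 .. 104k
    public String title;           // stringlengths 4 .. 369
    public String body;            // stringlengths 0 .. 254k
    public String issueUrl;        // stringlengths 37 .. 56
    public String pullUrl;         // stringlengths 37 .. 54
    public String beforeFixSha;    // stringlengths 40 .. 40 (full-length git SHA)
    public String afterFixSha;     // stringlengths 40 .. 40 (full-length git SHA)
    public String reportDatetime;  // dtype "unknown"; ISO-8601 strings in this dump
    public String language;        // stringclasses, 5 distinct values
    public String commitDatetime;  // dtype "unknown"; ISO-8601 strings in this dump
    public String updatedFile;     // stringlengths 4 .. 188
    public String fileContent;     // stringlengths 0 .. 5.12M
}
```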
--- record 1 of 3 ---
status: closed
repo_name: apache/dolphinscheduler
repo_url: https://github.com/apache/dolphinscheduler
issue_id: 5309
title: [Bug][Common] memoryUsage is `-33%`
body:
  **To Reproduce**
  `memoryUsage` is `-33%`
  **Expected behavior**
  Bug fixed
  **Screenshots**
  ![image](https://user-images.githubusercontent.com/4902714/115050583-779bf480-9f0e-11eb-8eed-4abcc8df3fc4.png)
  **Which version of Dolphin Scheduler:**
  - [1.3.x]
  - [dev]
issue_url: https://github.com/apache/dolphinscheduler/issues/5309
pull_url: https://github.com/apache/dolphinscheduler/pull/5312
before_fix_sha: 5ef1f731b7f3b48915859b7a9595ec42df614c48
after_fix_sha: e92e29ef9a126d1d5660011545056d4dd806d105
report_datetime: 2021-04-16T15:51:04Z
language: java
commit_datetime: 2021-04-19T02:24:54Z
updated_file: dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/OSUtilsTest.java
file_content:
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.common.utils;

import org.apache.dolphinscheduler.common.Constants;

import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;

import java.io.IOException;
import java.util.List;

import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class OSUtilsTest {
    private static final Logger logger = LoggerFactory.getLogger(OSUtilsTest.class);

    @Test
    public void getUserList() {
        List<String> userList = OSUtils.getUserList();
        Assert.assertNotEquals("System user list should not be empty", userList.size(), 0);
        logger.info("OS user list : {}", userList.toString());
    }

    @Test
    public void testOSMetric() {
        if (!OSUtils.isWindows()) {
            double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize();
            Assert.assertTrue(availablePhysicalMemorySize >= 0.0d);
            double totalMemorySize = OSUtils.totalMemorySize();
            Assert.assertTrue(totalMemorySize >= 0.0d);
            double loadAverage = OSUtils.loadAverage();
            logger.info("loadAverage {}", loadAverage);
            double memoryUsage = OSUtils.memoryUsage();
            Assert.assertTrue(memoryUsage >= 0.0d);
            double cpuUsage = OSUtils.cpuUsage();
            Assert.assertTrue(cpuUsage >= 0.0d || cpuUsage == -1.0d);
        } else {
            // TODO window ut
        }
    }

    @Test
    public void getGroup() {
        try {
            String group = OSUtils.getGroup();
            Assert.assertNotNull(group);
        } catch (IOException e) {
            Assert.fail("get group failed " + e.getMessage());
        }
    }

    @Test
    public void createUser() {
        boolean result = OSUtils.createUser("test123");
        if (result) {
            Assert.assertTrue("create user test123 success", true);
        } else {
            Assert.assertTrue("create user test123 fail", true);
        }
    }

    @Test
    public void createUserIfAbsent() {
        OSUtils.createUserIfAbsent("test123");
        Assert.assertTrue("create user test123 success", true);
    }

    @Test
    public void testGetSudoCmd() {
        String cmd = "kill -9 1234";
        String sudoCmd = OSUtils.getSudoCmd("test123", cmd);
        Assert.assertEquals("sudo -u test123 " + cmd, sudoCmd);
    }

    @Test
    public void exeCmd() {
        if (OSUtils.isMacOS() || !OSUtils.isWindows()) {
            try {
                String result = OSUtils.exeCmd("echo helloWorld");
                Assert.assertEquals("helloWorld\n", result);
            } catch (IOException e) {
                Assert.fail("exeCmd " + e.getMessage());
            }
        }
    }

    @Test
    public void getProcessID() {
        int processId = OSUtils.getProcessID();
        Assert.assertNotEquals(0, processId);
    }

    @Test
    public void checkResource() {
        boolean resource = OSUtils.checkResource(100, 0);
        Assert.assertTrue(resource);
        resource = OSUtils.checkResource(0, Double.MAX_VALUE);
        Assert.assertFalse(resource);

        Configuration configuration = new PropertiesConfiguration();
        configuration.setProperty(Constants.MASTER_MAX_CPULOAD_AVG, 100);
        configuration.setProperty(Constants.MASTER_RESERVED_MEMORY, 0);
        resource = OSUtils.checkResource(configuration, true);
        Assert.assertTrue(resource);

        configuration.setProperty(Constants.MASTER_MAX_CPULOAD_AVG, 0);
        configuration.setProperty(Constants.MASTER_RESERVED_MEMORY, Double.MAX_VALUE);
        resource = OSUtils.checkResource(configuration, true);
        Assert.assertFalse(resource);

        configuration.setProperty(Constants.WORKER_MAX_CPULOAD_AVG, 100);
        configuration.setProperty(Constants.WORKER_RESERVED_MEMORY, 0);
        resource = OSUtils.checkResource(configuration, false);
        Assert.assertTrue(resource);

        configuration.setProperty(Constants.WORKER_MAX_CPULOAD_AVG, 0);
        configuration.setProperty(Constants.WORKER_RESERVED_MEMORY, Double.MAX_VALUE);
        resource = OSUtils.checkResource(configuration, false);
        Assert.assertFalse(resource);
    }
}
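The test above pins down the regression: `memoryUsage` must be `>= 0.0d`, which is exactly the property the reported `-33%` reading violates. A negative percentage typically appears when the "available" figure the OS reports (which on Linux can include reclaimable cache) exceeds the "total" figure used in the ratio; DolphinScheduler reads these numbers through the bundled oshi-core library (see the dependency lists below). As a hedged illustration only, a sign-safe version of the computation could clamp the ratio; the class and method below are a hypothetical sketch, not the code merged in pull #5312:

```java
// Hypothetical sketch: a clamped memory-usage computation that cannot
// return a negative percentage. Names are illustrative assumptions,
// not the actual OSUtils implementation.
public final class MemoryUsageSketch {

    /**
     * Usage as a fraction in [0, 1], rounded to two decimals.
     * Returns -1.0 when total is unknown, mirroring the "metric
     * unavailable" convention the test already allows for cpuUsage.
     */
    public static double memoryUsage(long totalBytes, long availableBytes) {
        if (totalBytes <= 0) {
            return -1.0;
        }
        double used = (double) (totalBytes - availableBytes) / totalBytes;
        double clamped = Math.min(Math.max(used, 0.0), 1.0);
        return Math.round(clamped * 100.0) / 100.0;
    }

    public static void main(String[] args) {
        long total = 8L << 30;                  // 8 GiB physical memory
        long available = (long) (total * 1.33); // OS reports 133% "available"
        // Naive (total - available) / total would yield -0.33, i.e. the -33% bug.
        System.out.println(memoryUsage(total, available)); // prints 0.0
    }
}
```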
--- record 2 of 3 ---
status: closed
repo_name: apache/dolphinscheduler
repo_url: https://github.com/apache/dolphinscheduler
issue_id: 5309
title: [Bug][Common] memoryUsage is `-33%`
body:
  **To Reproduce**
  `memoryUsage` is `-33%`
  **Expected behavior**
  Bug fixed
  **Screenshots**
  ![image](https://user-images.githubusercontent.com/4902714/115050583-779bf480-9f0e-11eb-8eed-4abcc8df3fc4.png)
  **Which version of Dolphin Scheduler:**
  - [1.3.x]
  - [dev]
issue_url: https://github.com/apache/dolphinscheduler/issues/5309
pull_url: https://github.com/apache/dolphinscheduler/pull/5312
before_fix_sha: 5ef1f731b7f3b48915859b7a9595ec42df614c48
after_fix_sha: e92e29ef9a126d1d5660011545056d4dd806d105
report_datetime: 2021-04-16T15:51:04Z
language: java
commit_datetime: 2021-04-19T02:24:54Z
updated_file: dolphinscheduler-dist/release-docs/LICENSE
file_content:
Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ======================================================================= Apache DolphinScheduler Subcomponents: The Apache DolphinScheduler project contains subcomponents with separate copyright notices and license terms. 
Your use of the source code for the these subcomponents is subject to the terms and conditions of the following licenses. ======================================================================== Apache 2.0 licenses ======================================================================== The following components are provided under the Apache License. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. apacheds-i18n 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-i18n/2.0.0-M15, Apache 2.0 apacheds-kerberos-codec 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-kerberos-codec/2.0.0-M15, Apache 2.0 apache-el 8.5.35.1: https://mvnrepository.com/artifact/org.mortbay.jasper/apache-el/8.5.35.1, Apache 2.0 api-asn1-api 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-asn1-api/1.0.0-M20, Apache 2.0 api-util 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-util/1.0.0-M20, Apache 2.0 async-http-client 1.6.5: https://mvnrepository.com/artifact/com.ning/async-http-client, Apache 2.0 audience-annotations 0.5.0: https://mvnrepository.com/artifact/org.apache.yetus/audience-annotations/0.5.0, Apache 2.0 avro 1.7.4: https://github.com/apache/avro, Apache 2.0 aws-sdk-java 1.7.4: https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk/1.7.4, Apache 2.0 bonecp 0.8.0.RELEASE: https://github.com/wwadge/bonecp, Apache 2.0 byte-buddy 1.9.16: https://mvnrepository.com/artifact/net.bytebuddy/byte-buddy/1.9.16, Apache 2.0 classmate 1.4.0: https://mvnrepository.com/artifact/com.fasterxml/classmate/1.4.0, Apache 2.0 clickhouse-jdbc 0.1.52: https://mvnrepository.com/artifact/ru.yandex.clickhouse/clickhouse-jdbc/0.1.52, Apache 2.0 commons-beanutils 1.9.4 https://mvnrepository.com/artifact/commons-beanutils/commons-beanutils/1.9.4, Apache 2.0 commons-cli 1.2: https://mvnrepository.com/artifact/commons-cli/commons-cli/1.2, Apache 2.0 commons-codec 1.11: https://mvnrepository.com/artifact/commons-codec/commons-codec/1.11, Apache 2.0 commons-collections 3.2.2: https://mvnrepository.com/artifact/commons-collections/commons-collections/3.2.2, Apache 2.0 commons-collections4 4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-collections4/4.1, Apache 2.0 commons-compress 1.4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-compress/1.4.1, Apache 2.0 commons-configuration 1.10: https://mvnrepository.com/artifact/commons-configuration/commons-configuration/1.10, Apache 2.0 commons-daemon 1.0.13 https://mvnrepository.com/artifact/commons-daemon/commons-daemon/1.0.13, Apache 2.0 commons-dbcp 1.4: https://github.com/apache/commons-dbcp, Apache 2.0 commons-email 1.5: https://github.com/apache/commons-email, Apache 2.0 commons-httpclient 3.0.1: https://mvnrepository.com/artifact/commons-httpclient/commons-httpclient/3.0.1, Apache 2.0 commons-io 2.4: https://github.com/apache/commons-io, Apache 2.0 commons-lang 2.6: https://github.com/apache/commons-lang, Apache 2.0 commons-logging 1.1.1: https://github.com/apache/commons-logging, Apache 2.0 commons-math3 3.1.1: https://github.com/apache/commons-math, Apache 2.0 commons-net 3.1: https://github.com/apache/commons-net, Apache 2.0 commons-pool 1.6: https://github.com/apache/commons-pool, Apache 2.0 cron-utils 5.0.5: https://mvnrepository.com/artifact/com.cronutils/cron-utils/5.0.5, Apache 2.0 curator-client 4.3.0: 
https://mvnrepository.com/artifact/org.apache.curator/curator-client/4.3.0, Apache 2.0 curator-framework 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-framework/4.3.0, Apache 2.0 curator-recipes 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-recipes/4.3.0, Apache 2.0 datanucleus-api-jdo 4.2.1: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-api-jdo/4.2.1, Apache 2.0 datanucleus-core 4.1.6: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-core/4.1.6, Apache 2.0 datanucleus-rdbms 4.1.7: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-rdbms/4.1.7, Apache 2.0 derby 10.14.2.0: https://github.com/apache/derby, Apache 2.0 druid 1.1.14: https://mvnrepository.com/artifact/com.alibaba/druid/1.1.14, Apache 2.0 error_prone_annotations 2.1.3 https://mvnrepository.com/artifact/com.google.errorprone/error_prone_annotations/2.1.3, Apache 2.0 gson 2.8.5: https://github.com/google/gson, Apache 2.0 guava 24.1-jre: https://mvnrepository.com/artifact/com.google.guava/guava/24.1-jre, Apache 2.0 guice 3.0: https://mvnrepository.com/artifact/com.google.inject/guice/3.0, Apache 2.0 guice-servlet 3.0: https://mvnrepository.com/artifact/com.google.inject.extensions/guice-servlet/3.0, Apache 2.0 hadoop-annotations 2.7.3:https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-annotations/2.7.3, Apache 2.0 hadoop-auth 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-auth/2.7.3, Apache 2.0 hadoop-aws 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/2.7.3, Apache 2.0 hadoop-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client/2.7.3, Apache 2.0 hadoop-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common/2.7.3, Apache 2.0 hadoop-hdfs 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs/2.7.3, Apache 2.0 hadoop-mapreduce-client-app 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-app/2.7.3, Apache 2.0 hadoop-mapreduce-client-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-common/2.7.3, Apache 2.0 hadoop-mapreduce-client-core 2.7.3: https://mvnrepository.com/artifact/io.hops/hadoop-mapreduce-client-core/2.7.3, Apache 2.0 hadoop-mapreduce-client-jobclient 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-jobclient/2.7.3, Apache 2.0 hadoop-mapreduce-client-shuffle 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-shuffle/2.7.3, Apache 2.0 hadoop-yarn-api 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-api/2.7.3, Apache 2.0 hadoop-yarn-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-client/2.7.3, Apache 2.0 hadoop-yarn-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-common/2.7.3, Apache 2.0 hadoop-yarn-server-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-server-common/2.7.3, Apache 2.0 hibernate-validator 6.0.14.Final: https://github.com/hibernate/hibernate-validator, Apache 2.0 HikariCP 3.2.0: https://mvnrepository.com/artifact/com.zaxxer/HikariCP/3.2.0, Apache 2.0 hive-common 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-common/2.1.0, Apache 2.0 hive-jdbc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc/2.1.0, Apache 2.0 hive-metastore 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-metastore/2.1.0, Apache 2.0 
hive-orc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-orc/2.1.0, Apache 2.0 hive-serde 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-serde/2.1.0, Apache 2.0 hive-service 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-service/2.1.0, Apache 2.0 hive-service-rpc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-service-rpc/2.1.0, Apache 2.0 hive-storage-api 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-storage-api/2.1.0, Apache 2.0 htrace-core 3.1.0-incubating: https://mvnrepository.com/artifact/org.apache.htrace/htrace-core/3.1.0-incubating, Apache 2.0 httpclient 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient/4.4.1, Apache 2.0 httpcore 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore/4.4.1, Apache 2.0 httpmime 4.5.7: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpmime/4.5.7, Apache 2.0 jackson-annotations 2.10.5: https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations/2.10.5, Apache 2.0 jackson-core 2.10.5: https://github.com/FasterXML/jackson-core, Apache 2.0 jackson-core-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-core-asl/1.9.13, Apache 2.0 jackson-databind 2.10.5: https://github.com/FasterXML/jackson-databind, Apache 2.0 jackson-datatype-jdk8 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jdk8/2.9.10, Apache 2.0 jackson-datatype-jsr310 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310/2.9.10, Apache 2.0 jackson-jaxrs 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-jaxrs/1.9.13, Apache 2.0 and LGPL 2.1 jackson-mapper-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-mapper-asl/1.9.13, Apache 2.0 jackson-module-parameter-names 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-parameter-names/2.9.10, Apache 2.0 jackson-xc 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-xc/1.9.13, Apache 2.0 and LGPL 2.1 javax.inject 1: https://mvnrepository.com/artifact/javax.inject/javax.inject/1, Apache 2.0 javax.jdo-3.2.0-m3: https://mvnrepository.com/artifact/org.datanucleus/javax.jdo/3.2.0-m3, Apache 2.0 java-xmlbuilder 0.4 : https://mvnrepository.com/artifact/com.jamesmurty.utils/java-xmlbuilder/0.4, Apache 2.0 jboss-logging 3.3.2.Final: https://mvnrepository.com/artifact/org.jboss.logging/jboss-logging/3.3.2.Final, Apache 2.0 jdo-api 3.0.1: https://mvnrepository.com/artifact/javax.jdo/jdo-api/3.0.1, Apache 2.0 jets3t 0.9.0: https://mvnrepository.com/artifact/net.java.dev.jets3t/jets3t/0.9.0, Apache 2.0 jettison 1.1: https://github.com/jettison-json/jettison, Apache 2.0 jetty 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty/6.1.26, Apache 2.0 and EPL 1.0 jetty-continuation 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-continuation/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-http 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-http/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-io 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-io/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-security 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-security/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-server 9.4.14.v20181114: 
https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-server/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-servlet 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlet/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-servlets 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlets/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-util 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty-util/6.1.26, Apache 2.0 and EPL 1.0 jetty-util 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-util/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-webapp 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-webapp/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-xml 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-xml/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jna 4.5.2: https://mvnrepository.com/artifact/net.java.dev.jna/jna/4.5.2, Apache 2.0 and LGPL 2.1 jna-platform 4.5.2: https://mvnrepository.com/artifact/net.java.dev.jna/jna-platform/4.5.2, Apache 2.0 and LGPL 2.1 joda-time 2.10.1: https://github.com/JodaOrg/joda-time, Apache 2.0 jpam 1.1: https://mvnrepository.com/artifact/net.sf.jpam/jpam/1.1, Apache 2.0 jsqlparser 2.1: https://github.com/JSQLParser/JSqlParser, Apache 2.0 or LGPL 2.1 jsr305 3.0.0: https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305, Apache 2.0 jsr305 1.3.9: https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305, Apache 2.0 j2objc-annotations 1.1 https://mvnrepository.com/artifact/com.google.j2objc/j2objc-annotations/1.1, Apache 2.0 libfb303 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libfb303/0.9.3, Apache 2.0 libthrift 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libthrift/0.9.3, Apache 2.0 log4j-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api/2.11.2, Apache 2.0 log4j-core-2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core/2.11.2, Apache 2.0 log4j 1.2.17: https://mvnrepository.com/artifact/log4j/log4j/1.2.17, Apache 2.0 log4j-1.2-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-1.2-api/2.11.2, Apache 2.0 lz4 1.3.0: https://mvnrepository.com/artifact/net.jpountz.lz4/lz4/1.3.0, Apache 2.0 mapstruct 1.2.0.Final: https://github.com/mapstruct/mapstruct, Apache 2.0 maven-aether-provider 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-aether-provider/3.0.4, Apache 2.0 maven-artifact 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-artifact/3.0.4, Apache 2.0 maven-compat 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-compat/3.0.4, Apache 2.0 maven-core 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-core/3.0.4, Apache 2.0 maven-embedder 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-embedder/3.0.4, Apache 2.0 maven-model 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-model/3.0.4, Apache 2.0 maven-model-builder 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-model-builder/3.0.4, Apache 2.0 maven-plugin-api 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-plugin-api/3.0.4, Apache 2.0 maven-repository-metadata 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-repository-metadata/3.0.4, Apache 2.0 maven-settings 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-settings/3.0.4, Apache 2.0 maven-settings-builder 3.0.4: 
https://mvnrepository.com/artifact/org.apache.maven/maven-settings-builder/3.0.4, Apache 2.0 mybatis 3.5.2 https://mvnrepository.com/artifact/org.mybatis/mybatis/3.5.2, Apache 2.0 mybatis-plus 3.2.0: https://github.com/baomidou/mybatis-plus, Apache 2.0 mybatis-plus-annotation 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-annotation/3.2.0, Apache 2.0 mybatis-plus-boot-starter 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-boot-starter/3.2.0, Apache 2.0 mybatis-plus-core 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-core/3.2.0, Apache 2.0 mybatis-plus-extension 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-extension/3.2.0, Apache 2.0 mybatis-spring 2.0.2: https://mvnrepository.com/artifact/org.mybatis/mybatis-spring/2.0.2, Apache 2.0 netty 3.6.2.Final: https://github.com/netty/netty, Apache 2.0 netty-all 4.1.33.Final: https://github.com/netty/netty/blob/netty-4.1.33.Final/LICENSE.txt, Apache 2.0 opencsv 2.3: https://mvnrepository.com/artifact/net.sf.opencsv/opencsv/2.3, Apache 2.0 parquet-hadoop-bundle 1.8.1: https://mvnrepository.com/artifact/org.apache.parquet/parquet-hadoop-bundle/1.8.1, Apache 2.0 poi 3.17: https://mvnrepository.com/artifact/org.apache.poi/poi/3.17, Apache 2.0 plexus-cipher 1.7.0: https://mvnrepository.com/artifact/org.sonatype.plexus/plexus-cipher/1.7.0, Apache 2.0 plexus-classworlds 2.4: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-classworlds/2.4, Apache 2.0 plexus-component-annotations 1.5.5: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-component-annotations/1.5.5, Apache 2.0 plexus-container-default 1.5.5: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-container-default/1.5.5, Apache 2.0 plexus-interpolation 1.14: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-interpolation/1.14, Apache 2.0 plexus-sec-dispatcher 1.3: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-sec-dispatcher/1.3, Apache 2.0 plexus-utils 2.0.6: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-utils/2.0.6, Apache 2.0 quartz 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz/2.3.0, Apache 2.0 quartz-jobs 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz-jobs/2.3.0, Apache 2.0 resolver 1.5: https://mvnrepository.com/artifact/io.airlift.resolver/resolver/1.5 Apache 2.0 snakeyaml 1.23: https://mvnrepository.com/artifact/org.yaml/snakeyaml/1.23, Apache 2.0 snappy 0.2: https://mvnrepository.com/artifact/org.iq80.snappy/snappy/0.2, Apache 2.0 snappy-java 1.0.4.1: https://github.com/xerial/snappy-java, Apache 2.0 spring-aop 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-aop/5.1.18.RELEASE, Apache 2.0 spring-beans 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-beans/5.1.18.RELEASE, Apache 2.0 spring-boot 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot/2.1.17.RELEASE, Apache 2.0 spring-boot-autoconfigure 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-autoconfigure/2.1.17.RELEASE, Apache 2.0 spring-boot-starter 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-aop 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-aop/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-jdbc 2.1.17.RELEASE: 
https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-jetty 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jetty/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-json 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-json/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-logging 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-logging/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-web 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-web/2.1.17.RELEASE, Apache 2.0 spring-context 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-context/5.1.18.RELEASE, Apache 2.0 spring-core 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-core/5.1.18.RELEASE, Apache 2.0 spring-expression 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-expression/5.1.18.RELEASE, Apache 2.0 springfox-core 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-core, Apache 2.0 springfox-schema 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-schema, Apache 2.0 springfox-spi 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spi, Apache 2.0 springfox-spring-web 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spring-web/2.9.2, Apache 2.0 springfox-swagger2 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger2/2.9.2, Apache 2.0 springfox-swagger-common 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-common/2.9.2, Apache 2.0 springfox-swagger-ui 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-ui/2.9.2, Apache 2.0 spring-jcl 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jcl/5.1.18.RELEASE, Apache 2.0 spring-jdbc 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jdbc/5.1.18.RELEASE, Apache 2.0 spring-plugin-core 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-core/1.2.0.RELEASE, Apache 2.0 spring-plugin-metadata 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-metadata/1.2.0.RELEASE, Apache 2.0 spring-tx 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-tx/5.1.18.RELEASE, Apache 2.0 spring-web 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-web/5.1.18.RELEASE, Apache 2.0 spring-webmvc 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-webmvc/5.1.18.RELEASE, Apache 2.0 swagger-annotations 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-annotations/1.5.20, Apache 2.0 swagger-bootstrap-ui 1.9.3: https://mvnrepository.com/artifact/com.github.xiaoymin/swagger-bootstrap-ui/1.9.3, Apache 2.0 swagger-models 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-models/1.5.20, Apache 2.0 tephra-api 0.6.0: https://mvnrepository.com/artifact/co.cask.tephra/tephra-api/0.6.0, Apache 2.0 validation-api 2.0.1.Final: https://mvnrepository.com/artifact/javax.validation/validation-api/2.0.1.Final, Apache 2.0 wagon-provider-api 2.2: https://mvnrepository.com/artifact/org.apache.maven.wagon/wagon-provider-api/2.2, Apache 2.0 xbean-reflect 3.4: https://mvnrepository.com/artifact/org.apache.xbean/xbean-reflect/3.4, Apache 2.0 xercesImpl 
2.9.1: https://mvnrepository.com/artifact/xerces/xercesImpl/2.9.1, Apache 2.0 xml-apis 1.4.01: https://mvnrepository.com/artifact/xml-apis/xml-apis/1.4.01, Apache 2.0 and W3C zookeeper 3.4.14: https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper/3.4.14, Apache 2.0 presto-jdbc 0.238.1 https://mvnrepository.com/artifact/com.facebook.presto/presto-jdbc/0.238.1 protostuff-core 1.7.2: https://github.com/protostuff/protostuff/protostuff-core Apache-2.0 protostuff-runtime 1.7.2: https://github.com/protostuff/protostuff/protostuff-core Apache-2.0 protostuff-api 1.7.2: https://github.com/protostuff/protostuff/protostuff-api Apache-2.0 protostuff-collectionschema 1.7.2: https://github.com/protostuff/protostuff/protostuff-collectionschema Apache-2.0 ======================================================================== BSD licenses ======================================================================== The following components are provided under a BSD license. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. asm 3.1: https://github.com/jdf/javalin/tree/master/lib/asm-3.1, BSD javolution 5.5.1: https://mvnrepository.com/artifact/javolution/javolution/5.5.1, BSD jline 0.9.94: https://github.com/jline/jline3, BSD jsch 0.1.42: https://mvnrepository.com/artifact/com.jcraft/jsch/0.1.42, BSD leveldbjni-all 1.8: https://github.com/fusesource/leveldbjni, BSD-3-Clause postgresql 42.1.4: https://mvnrepository.com/artifact/org.postgresql/postgresql/42.1.4, BSD 2-clause protobuf-java 2.5.0: https://mvnrepository.com/artifact/com.google.protobuf/protobuf-java/2.5.0, BSD 2-clause paranamer 2.3: https://mvnrepository.com/artifact/com.thoughtworks.paranamer/paranamer/2.3, BSD threetenbp 1.3.6: https://mvnrepository.com/artifact/org.threeten/threetenbp/1.3.6, BSD 3-clause xmlenc 0.52: https://mvnrepository.com/artifact/xmlenc/xmlenc/0.52, BSD hamcrest-core 1.3: https://mvnrepository.com/artifact/org.hamcrest/hamcrest-core/1.3, BSD 2-Clause ======================================================================== CDDL licenses ======================================================================== The following components are provided under the CDDL License. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. 
activation 1.1: https://mvnrepository.com/artifact/javax.activation/activation/1.1 CDDL 1.0 javax.activation-api 1.2.0: https://mvnrepository.com/artifact/javax.activation/javax.activation-api/1.2.0, CDDL and LGPL 2.0 javax.annotation-api 1.3.2: https://mvnrepository.com/artifact/javax.annotation/javax.annotation-api/1.3.2, CDDL + GPLv2 javax.mail 1.6.2: https://mvnrepository.com/artifact/com.sun.mail/javax.mail/1.6.2, CDDL/GPLv2 javax.servlet-api 3.1.0: https://mvnrepository.com/artifact/javax.servlet/javax.servlet-api/3.1.0, CDDL + GPLv2 jaxb-api 2.3.1: https://mvnrepository.com/artifact/javax.xml.bind/jaxb-api/2.3.1, CDDL 1.1 jaxb-impl 2.2.3-1: https://mvnrepository.com/artifact/com.sun.xml.bind/jaxb-impl/2.2.3-1, CDDL and GPL 1.1 jersey-client 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-client/1.9, CDDL 1.1 and GPL 1.1 jersey-core 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-core/1.9, CDDL 1.1 and GPL 1.1 jersey-guice 1.9: https://mvnrepository.com/artifact/com.sun.jersey.contribs/jersey-guice/1.9, CDDL 1.1 and GPL 1.1 jersey-json 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-json/1.9, CDDL 1.1 and GPL 1.1 jersey-server 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-server/1.9, CDDL 1.1 and GPL 1.1 jta 1.1: https://mvnrepository.com/artifact/javax.transaction/jta/1.1, CDDL 1.0 transaction-api 1.1: https://mvnrepository.com/artifact/javax.transaction/transaction-api/1.1, CDDL 1.0 ======================================================================== EPL licenses ======================================================================== The following components are provided under the EPL License. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. aether-api 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-api/1.13.1, EPL 1.0 aether-connector-asynchttpclient 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-connector-asynchttpclient/1.13.1, EPL 1.0 aether-connector-file 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-connector-file/1.13.1, EPL 1.0 aether-impl 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-impl/1.13.1, EPL 1.0 aether-spi 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-spi/1.13.1, EPL 1.0 aether-util 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-util/1.13.1, EPL 1.0 aspectjweaver 1.9.2:https://mvnrepository.com/artifact/org.aspectj/aspectjweaver/1.9.2, EPL 1.0 logback-classic 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-classic/1.2.3, EPL 1.0 and LGPL 2.1 logback-core 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-core/1.2.3, EPL 1.0 and LGPL 2.1 oshi-core 3.5.0: https://mvnrepository.com/artifact/com.github.oshi/oshi-core/3.5.0, EPL 1.0 junit 4.12: https://mvnrepository.com/artifact/junit/junit/4.12, EPL 1.0 h2-1.4.200 https://github.com/h2database/h2database/blob/master/LICENSE.txt, MPL 2.0 or EPL 1.0 ======================================================================== MIT licenses ======================================================================== The following components are provided under a MIT 2.0 license. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. 
jul-to-slf4j 1.7.25: https://mvnrepository.com/artifact/org.slf4j/jul-to-slf4j/1.7.25, MIT mssql-jdbc 6.1.0.jre8: https://mvnrepository.com/artifact/com.microsoft.sqlserver/mssql-jdbc/6.1.0.jre8, MIT slf4j-api 1.7.5: https://mvnrepository.com/artifact/org.slf4j/slf4j-api/1.7.5, MIT animal-sniffer-annotations 1.14 https://mvnrepository.com/artifact/org.codehaus.mojo/animal-sniffer-annotations/1.14, MIT checker-compat-qual 2.0.0 https://mvnrepository.com/artifact/org.checkerframework/checker-compat-qual/2.0.0, MIT + GPLv2 ======================================================================== MPL 1.1 licenses ======================================================================== The following components are provided under a MPL 1.1 license. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. jamon-runtime 2.3.1: https://mvnrepository.com/artifact/org.jamon/jamon-runtime/2.3.1, MPL-1.1 javassist 3.26.0-GA: https://github.com/jboss-javassist/javassist, MPL-1.1 ======================================================================== Public Domain licenses ======================================================================== xz 1.0: https://mvnrepository.com/artifact/org.tukaani/xz/1.0, Public Domain aopalliance 1.0: https://mvnrepository.com/artifact/aopalliance/aopalliance/1.0, Public Domain ======================================== WTFPL License ======================================== reflections 0.9.12: https://github.com/ronmamo/reflections WTFPL ======================================================================== UI related licenses ======================================================================== The following components are used in UI.See project link for details. The text of each license is also included at licenses/ui-licenses/LICENSE-[project].txt. ======================================== MIT licenses ======================================== @form-create/element-ui 1.0.18: https://github.com/xaboy/form-create MIT axios 0.16.2: https://github.com/axios/axios MIT bootstrap 3.3.7: https://github.com/twbs/bootstrap MIT canvg 1.5.1: https://github.com/canvg/canvg MIT clipboard 2.0.1: https://github.com/zenorocha/clipboard.js MIT codemirror 5.43.0: https://github.com/codemirror/CodeMirror MIT dayjs 1.7.8: https://github.com/iamkun/dayjs MIT element-ui 2.13.2: https://github.com/ElemeFE/element MIT html2canvas 0.5.0-beta4: https://github.com/niklasvh/html2canvas MIT jquery 3.3.1: https://github.com/jquery/jquery MIT jquery-ui 1.12.1: https://github.com/jquery/jquery-ui MIT js-cookie 2.2.1: https://github.com/js-cookie/js-cookie MIT jsplumb 2.8.6: https://github.com/jsplumb/jsplumb MIT and GPLv2 lodash 4.17.11: https://github.com/lodash/lodash MIT vue-treeselect 0.4.0: https://github.com/riophae/vue-treeselect MIT vue 2.5.17: https://github.com/vuejs/vue MIT vue-router 2.7.0: https://github.com/vuejs/vue-router MIT vuex 3.0.0: https://github.com/vuejs/vuex MIT vuex-router-sync 4.1.2: https://github.com/vuejs/vuex-router-sync MIT dagre 0.8.5: https://github.com/dagrejs/dagre MIT ======================================== Apache 2.0 licenses ======================================== echarts 4.1.0: https://github.com/apache/incubator-echarts Apache-2.0 remixicon 2.5.0 https://github.com/Remix-Design/remixicon Apache-2.0 ======================================== BSD licenses ======================================== d3 3.5.17: https://github.com/d3/d3 BSD-3-Clause
--- record 3 of 3 ---
status: closed
repo_name: apache/dolphinscheduler
repo_url: https://github.com/apache/dolphinscheduler
issue_id: 5309
title: [Bug][Common] memoryUsage is `-33%`
body:
  **To Reproduce**
  `memoryUsage` is `-33%`
  **Expected behavior**
  Bug fixed
  **Screenshots**
  ![image](https://user-images.githubusercontent.com/4902714/115050583-779bf480-9f0e-11eb-8eed-4abcc8df3fc4.png)
  **Which version of Dolphin Scheduler:**
  - [1.3.x]
  - [dev]
issue_url: https://github.com/apache/dolphinscheduler/issues/5309
pull_url: https://github.com/apache/dolphinscheduler/pull/5312
before_fix_sha: 5ef1f731b7f3b48915859b7a9595ec42df614c48
after_fix_sha: e92e29ef9a126d1d5660011545056d4dd806d105
report_datetime: 2021-04-16T15:51:04Z
language: java
commit_datetime: 2021-04-19T02:24:54Z
updated_file: pom.xml
file_content:
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>${revision}</version> <packaging>pom</packaging> <name>${project.artifactId}</name> <url>http://dolphinscheduler.apache.org</url> <description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. </description> <licenses> <license> <name>Apache License 2.0</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> <distribution>repo</distribution> </license> </licenses> <scm> <connection>scm:git:https://github.com/apache/dolphinscheduler.git</connection> <developerConnection>scm:git:https://github.com/apache/dolphinscheduler.git</developerConnection> <url>https://github.com/apache/dolphinscheduler</url> <tag>HEAD</tag> </scm> <mailingLists> <mailingList> <name>DolphinScheduler Developer List</name> <post>dev@dolphinscheduler.apache.org</post> <subscribe>dev-subscribe@dolphinscheduler.apache.org</subscribe> <unsubscribe>dev-unsubscribe@dolphinscheduler.apache.org</unsubscribe> </mailingList> </mailingLists> <parent> <groupId>org.apache</groupId> <artifactId>apache</artifactId> <version>21</version> </parent> <properties> <revision>1.3.6-SNAPSHOT</revision> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <curator.version>4.3.0</curator.version> <spring.version>5.1.18.RELEASE</spring.version> <spring.boot.version>2.1.17.RELEASE</spring.boot.version> <java.version>1.8</java.version> <logback.version>1.2.3</logback.version> <hadoop.version>2.7.3</hadoop.version> <quartz.version>2.3.0</quartz.version> <jackson.version>2.10.5</jackson.version> <mybatis-plus.version>3.2.0</mybatis-plus.version> <mybatis.spring.version>2.0.1</mybatis.spring.version> <cron.utils.version>5.0.5</cron.utils.version> <druid.version>1.1.22</druid.version> <h2.version>1.4.200</h2.version> <commons.codec.version>1.11</commons.codec.version> <commons.logging.version>1.1.1</commons.logging.version> <httpclient.version>4.4.1</httpclient.version> <httpcore.version>4.4.1</httpcore.version> <junit.version>4.12</junit.version> <mysql.connector.version>5.1.34</mysql.connector.version> <slf4j.api.version>1.7.5</slf4j.api.version> <slf4j.log4j12.version>1.7.5</slf4j.log4j12.version> 
<commons.collections.version>3.2.2</commons.collections.version> <commons.httpclient>3.0.1</commons.httpclient> <commons.beanutils.version>1.9.4</commons.beanutils.version> <commons.configuration.version>1.10</commons.configuration.version> <commons.email.version>1.5</commons.email.version> <poi.version>3.17</poi.version> <javax.servlet.api.version>3.1.0</javax.servlet.api.version> <commons.collections4.version>4.1</commons.collections4.version> <guava.version>24.1-jre</guava.version> <postgresql.version>42.1.4</postgresql.version> <hive.jdbc.version>2.1.0</hive.jdbc.version> <commons.io.version>2.4</commons.io.version> <oshi.core.version>3.5.0</oshi.core.version> <clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version> <mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version> <presto.jdbc.version>0.238.1</presto.jdbc.version> <spotbugs.version>3.1.12</spotbugs.version> <checkstyle.version>3.0.0</checkstyle.version> <zookeeper.version>3.4.14</zookeeper.version> <frontend-maven-plugin.version>1.6</frontend-maven-plugin.version> <maven-compiler-plugin.version>3.3</maven-compiler-plugin.version> <maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version> <maven-release-plugin.version>2.5.3</maven-release-plugin.version> <maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version> <maven-source-plugin.version>2.4</maven-source-plugin.version> <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version> <maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version> <rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version> <jacoco.version>0.8.4</jacoco.version> <jcip.version>1.0</jcip.version> <maven.deploy.skip>false</maven.deploy.skip> <cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version> <mockito.version>2.21.0</mockito.version> <powermock.version>2.0.2</powermock.version> <servlet-api.version>2.5</servlet-api.version> <swagger.version>1.9.3</swagger.version> <springfox.version>2.9.2</springfox.version> <swagger-models.version>1.5.24</swagger-models.version> <guava-retry.version>2.0.0</guava-retry.version> <dep.airlift.version>0.184</dep.airlift.version> <dep.packaging.version>${dep.airlift.version}</dep.packaging.version> <protostuff.version>1.7.2</protostuff.version> <reflections.version>0.9.12</reflections.version> <byte-buddy.version>1.9.16</byte-buddy.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <!-- quartz--> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz-jobs</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> <version>${cron.utils.version}</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>${druid.version}</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-parent</artifactId> <version>${spring.boot.version}</version> <type>pom</type> <scope>import</scope> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> 
<version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-tx</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-jdbc</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <version>${spring.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-server</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert-plugin</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-dao</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-api</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-remote</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-service</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-spi</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-framework</artifactId> <version>${curator.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-recipes</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </exclusion> <exclusion> <artifactId>netty</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-annotations</artifactId> </exclusion> </exclusions> <version>${zookeeper.version}</version> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec.version}</version> </dependency> <dependency> <groupId>commons-logging</groupId> <artifactId>commons-logging</artifactId> <version>${commons.logging.version}</version> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>${httpclient.version}</version> </dependency> <dependency> 
<groupId>org.apache.httpcomponents</groupId> <artifactId>httpcore</artifactId> <version>${httpcore.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-core</artifactId> <version>${jackson.version}</version> </dependency> <!--protostuff--> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-core --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-core</artifactId> <version>${protostuff.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-runtime --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-runtime</artifactId> <version>${protostuff.version}</version> </dependency> <dependency> <groupId>net.bytebuddy</groupId> <artifactId>byte-buddy</artifactId> <version>${byte-buddy.version}</version> </dependency> <dependency> <groupId>org.reflections</groupId> <artifactId>reflections</artifactId> <version>${reflections.version}</version> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockito.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-module-junit4</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-api-mockito2</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> <exclusions> <exclusion> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>${mysql.connector.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> <version>${h2.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${slf4j.api.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>${slf4j.log4j12.version}</version> </dependency> <dependency> <groupId>commons-collections</groupId> <artifactId>commons-collections</artifactId> <version>${commons.collections.version}</version> </dependency> <dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> <version>${commons.httpclient}</version> </dependency> <dependency> <groupId>commons-beanutils</groupId> <artifactId>commons-beanutils</artifactId> <version>${commons.beanutils.version}</version> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> <version>${commons.configuration.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> 
<version>${logback.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-email</artifactId> <version>${commons.email.version}</version> </dependency> <!--excel poi--> <dependency> <groupId>org.apache.poi</groupId> <artifactId>poi</artifactId> <version>${poi.version}</version> </dependency> <!-- hadoop --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> <exclusions> <exclusion> <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <groupId>com.sun.jersey</groupId> <artifactId>jersey-json</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-yarn-common</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-aws</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-collections4</artifactId> <version>${commons.collections4.version}</version> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> <version>${postgresql.version}</version> </dependency> <dependency> <groupId>org.apache.hive</groupId> <artifactId>hive-jdbc</artifactId> <version>${hive.jdbc.version}</version> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>${commons.io.version}</version> </dependency> <dependency> <groupId>com.github.oshi</groupId> <artifactId>oshi-core</artifactId> <version>${oshi.core.version}</version> </dependency> <dependency> <groupId>ru.yandex.clickhouse</groupId> <artifactId>clickhouse-jdbc</artifactId> <version>${clickhouse.jdbc.version}</version> </dependency> <dependency> <groupId>com.microsoft.sqlserver</groupId> <artifactId>mssql-jdbc</artifactId> <version>${mssql.jdbc.version}</version> </dependency> <dependency> <groupId>com.facebook.presto</groupId> <artifactId>presto-jdbc</artifactId> <version>${presto.jdbc.version}</version> </dependency> <dependency> <groupId>net.jcip</groupId> <artifactId>jcip-annotations</artifactId> <version>${jcip.version}</version> <optional>true</optional> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>servlet-api</artifactId> <version>${servlet-api.version}</version> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>javax.servlet-api</artifactId> <version>${javax.servlet.api.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger2</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger-ui</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.swagger</groupId> <artifactId>swagger-models</artifactId> <version>${swagger-models.version}</version> </dependency> <dependency> <groupId>com.github.xiaoymin</groupId> <artifactId>swagger-bootstrap-ui</artifactId> 
<version>${swagger.version}</version> </dependency> <dependency> <groupId>com.github.rholder</groupId> <artifactId>guava-retrying</artifactId> <version>${guava-retry.version}</version> </dependency> <dependency> <groupId>org.sonatype.aether</groupId> <artifactId>aether-api</artifactId> <version>1.13.1</version> </dependency> <dependency> <groupId>io.airlift.resolver</groupId> <artifactId>resolver</artifactId> <version>1.5</version> </dependency> <dependency> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> <version>6.2.1</version> </dependency> <dependency> <groupId>javax.activation</groupId> <artifactId>activation</artifactId> <version>1.1</version> </dependency> <dependency> <groupId>com.sun.mail</groupId> <artifactId>javax.mail</artifactId> <version>1.6.2</version> </dependency> </dependencies> </dependencyManagement> <build> <finalName>apache-dolphinscheduler-${project.version}</finalName> <pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <version>1.0.0</version> <extensions>true</extensions> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <version>1.0.4</version> <extensions>true</extensions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>rpm-maven-plugin</artifactId> <version>${rpm-maven-plugion.version}</version> <inherited>false</inherited> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>${java.version}</source> <target>${java.version}</target> <testSource>${java.version}</testSource> <testTarget>${java.version}</testTarget> </configuration> <version>${maven-compiler-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <tagNameFormat>@{project.version}</tagNameFormat> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-assembly-plugin</artifactId> <version>${maven-assembly-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <configuration> <source>8</source> <failOnError>false</failOnError> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>${maven-source-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> <version>${maven-dependency-plugin.version}</version> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <extensions>true</extensions> <!--<configuration>--> <!--<allowedProvidedDependencies>--> <!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>--> <!--</allowedProvidedDependencies>--> <!--</configuration>--> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <extensions>true</extensions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> 
<executions> <execution> <id>attach-sources</id> <phase>verify</phase> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <executions> <execution> <id>attach-javadocs</id> <goals> <goal>jar</goal> </goals> </execution> </executions> <configuration> <aggregate>true</aggregate> <charset>${project.build.sourceEncoding}</charset> <encoding>${project.build.sourceEncoding}</encoding> <docencoding>${project.build.sourceEncoding}</docencoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <autoVersionSubmodules>true</autoVersionSubmodules> <tagNameFormat>@{project.version}</tagNameFormat> <tagBase>${project.version}</tagBase> <!--<goals>-f pom.xml deploy</goals>--> </configuration> <dependencies> <dependency> <groupId>org.apache.maven.scm</groupId> <artifactId>maven-scm-provider-jgit</artifactId> <version>1.9.5</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>${maven-compiler-plugin.version}</version> <configuration> <source>${java.version}</source> <target>${java.version}</target> <encoding>${project.build.sourceEncoding}</encoding> <skip>false</skip><!--not skip compile test classes--> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>${maven-surefire-plugin.version}</version> <configuration> <includes> <include>**/api/configuration/TrafficConfigurationTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TenantControllerTest.java</include> <include>**/api/dto/resources/filter/ResourceFilterTest.java</include> <include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include> <include>**/api/enums/testGetEnum.java</include> <include>**/api/enums/StatusTest.java</include> <include>**/api/exceptions/ApiExceptionHandlerTest.java</include> <include>**/api/exceptions/ServiceExceptionTest.java</include> <include>**/api/interceptor/LocaleChangeInterceptorTest.java</include> <include>**/api/interceptor/LoginHandlerInterceptorTest.java</include> <include>**/api/interceptor/RateLimitInterceptorTest.java</include> <include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include> <include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include> <include>**/api/security/SecurityConfigLDAPTest.java</include> <include>**/api/security/SecurityConfigPasswordTest.java</include> <include>**/api/service/AccessTokenServiceTest.java</include> <include>**/api/service/AlertGroupServiceTest.java</include> <include>**/api/service/BaseDAGServiceTest.java</include> <include>**/api/service/BaseServiceTest.java</include> <include>**/api/service/DataAnalysisServiceTest.java</include> <include>**/api/service/AlertPluginInstanceServiceTest.java</include> <include>**/api/service/DataSourceServiceTest.java</include> <include>**/api/service/ExecutorService2Test.java</include> <include>**/api/service/ExecutorServiceTest.java</include> <include>**/api/service/LoggerServiceTest.java</include> <include>**/api/service/MonitorServiceTest.java</include> 
<include>**/api/service/ProcessDefinitionServiceTest.java</include> <include>**/api/service/ProcessDefinitionVersionServiceTest.java</include> <include>**/api/service/ProcessInstanceServiceTest.java</include> <include>**/api/service/ProjectServiceTest.java</include> <include>**/api/service/QueueServiceTest.java</include> <include>**/api/service/ResourcesServiceTest.java</include> <include>**/api/service/SchedulerServiceTest.java</include> <include>**/api/service/SessionServiceTest.java</include> <include>**/api/service/TaskInstanceServiceTest.java</include> <include>**/api/service/TenantServiceTest.java</include> <include>**/api/service/UdfFuncServiceTest.java</include> <include>**/api/service/UiPluginServiceTest.java</include> <include>**/api/service/UserAlertGroupServiceTest.java</include> <include>**/api/service/UsersServiceTest.java</include> <include>**/api/service/WorkerGroupServiceTest.java</include> <include>**/api/service/WorkFlowLineageServiceTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TaskInstanceControllerTest.java</include> <include>**/api/controller/WorkFlowLineageControllerTest.java</include> <include>**/api/utils/exportprocess/DataSourceParamTest.java</include> <include>**/api/utils/exportprocess/DependentParamTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/FileUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/ResultTest.java</include> <include>**/common/graph/DAGTest.java</include> <include>**/common/os/OshiTest.java</include> <include>**/common/os/OSUtilsTest.java</include> <include>**/common/shell/ShellExecutorTest.java</include> <include>**/common/task/DataxParametersTest.java</include> <include>**/common/task/EntityTestUtils.java</include> <include>**/common/task/FlinkParametersTest.java</include> <include>**/common/task/HttpParametersTest.java</include> <include>**/common/task/SqlParametersTest.java</include> <include>**/common/task/SqoopParameterEntityTest.java</include> <include>**/common/threadutils/ThreadPoolExecutorsTest.java</include> <include>**/common/threadutils/ThreadUtilsTest.java</include> <include>**/common/utils/CollectionUtilsTest.java</include> <include>**/common/utils/CommonUtilsTest.java</include> <include>**/common/utils/DateUtilsTest.java</include> <include>**/common/utils/DependentUtilsTest.java</include> <include>**/common/utils/EncryptionUtilsTest.java</include> <include>**/common/utils/FileUtilsTest.java</include> <include>**/common/utils/JSONUtilsTest.java</include> <include>**/common/utils/LoggerUtilsTest.java</include> <include>**/common/utils/NetUtilsTest.java</include> <include>**/common/utils/OSUtilsTest.java</include> <include>**/common/utils/ParameterUtilsTest.java</include> <include>**/common/utils/TimePlaceholderUtilsTest.java</include> <include>**/common/utils/PreconditionsTest.java</include> <include>**/common/utils/PropertyUtilsTest.java</include> <include>**/common/utils/SchemaUtilsTest.java</include> <include>**/common/utils/ScriptRunnerTest.java</include> <include>**/common/utils/SensitiveLogUtilsTest.java</include> <include>**/common/utils/StringTest.java</include> <include>**/common/utils/StringUtilsTest.java</include> <include>**/common/utils/TaskParametersUtilsTest.java</include> <include>**/common/utils/VarPoolUtilsTest.java</include> <include>**/common/utils/HadoopUtilsTest.java</include> 
<include>**/common/utils/HttpUtilsTest.java</include> <include>**/common/utils/KerberosHttpClientTest.java</include> <include>**/common/utils/HiveConfUtilsTest.java</include> <include>**/common/ConstantsTest.java</include> <include>**/common/utils/HadoopUtils.java</include> <include>**/common/utils/RetryerUtilsTest.java</include> <include>**/common/plugin/DolphinSchedulerPluginLoaderTest.java</include> <include>**/common/enums/ExecutionStatusTest</include> <include>**/dao/mapper/AccessTokenMapperTest.java</include> <include>**/dao/mapper/AlertGroupMapperTest.java</include> <include>**/dao/mapper/CommandMapperTest.java</include> <include>**/dao/mapper/ConnectionFactoryTest.java</include> <include>**/dao/mapper/DataSourceMapperTest.java</include> <include>**/dao/datasource/MySQLDataSourceTest.java</include> <include>**/dao/entity/TaskInstanceTest.java</include> <include>**/dao/entity/UdfFuncTest.java</include> <include>**/remote/command/alert/AlertSendRequestCommandTest.java</include> <include>**/remote/command/alert/AlertSendResponseCommandTest.java</include> <include>**/remote/command/future/ResponseFutureTest.java</include> <include>**/remote/command/log/RemoveTaskLogRequestCommandTest.java</include> <include>**/remote/command/log/RemoveTaskLogResponseCommandTest.java</include> <include>**/remote/utils/HostTest.java</include> <include>**/remote/utils/NettyUtilTest.java</include> <include>**/remote/NettyRemotingClientTest.java</include> <include>**/rpc/RpcTest.java</include> <include>**/server/log/LoggerServerTest.java</include> <include>**/server/entity/SQLTaskExecutionContextTest.java</include> <include>**/server/log/MasterLogFilterTest.java</include> <include>**/server/log/SensitiveDataConverterTest.java</include> <include>**/server/log/LoggerRequestProcessorTest.java</include> <!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>--> <include>**/server/log/TaskLogFilterTest.java</include> <include>**/server/log/WorkerLogFilterTest.java</include> <include>**/server/master/config/MasterConfigTest.java</include> <include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include> <include>**/server/master/runner/MasterTaskExecThreadTest.java</include> <!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>--> <include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include> <include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/HostWorkerTest.java</include> <include>**/server/master/register/MasterRegistryTest.java</include> <include>**/server/master/registry/ServerNodeManagerTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include> <include>**/server/master/AlertManagerTest.java</include> <include>**/server/master/MasterCommandTest.java</include> <include>**/server/master/DependentTaskTest.java</include> <include>**/server/master/ConditionsTaskTest.java</include> <include>**/server/master/MasterExecThreadTest.java</include> <include>**/server/master/ParamsTest.java</include> <include>**/server/master/SubProcessTaskTest.java</include> <include>**/server/master/processor/TaskAckProcessorTest.java</include> <include>**/server/master/processor/TaskKillResponseProcessorTest.java</include> <include>**/server/master/processor/queue/TaskResponseServiceTest.java</include> 
<include>**/server/master/zk/ZKMasterClientTest.java</include> <include>**/server/register/ZookeeperRegistryCenterTest.java</include> <include>**/server/utils/DataxUtilsTest.java</include> <include>**/server/utils/ExecutionContextTestUtils.java</include> <include>**/server/utils/FlinkArgsUtilsTest.java</include> <include>**/server/utils/LogUtilsTest.java</include> <include>**/server/utils/MapReduceArgsUtilsTest.java</include> <include>**/server/utils/ParamUtilsTest.java</include> <include>**/server/utils/ProcessUtilsTest.java</include> <include>**/server/utils/SparkArgsUtilsTest.java</include> <include>**/server/worker/processor/TaskCallbackServiceTest.java</include> <include>**/server/worker/processor/TaskExecuteProcessorTest.java</include> <include>**/server/worker/registry/WorkerRegistryTest.java</include> <include>**/server/worker/shell/ShellCommandExecutorTest.java</include> <include>**/server/worker/sql/SqlExecutorTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/EnvFileTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/datax/DataxTaskTest.java</include> <!--<include>**/server/worker/task/http/HttpTaskTest.java</include>--> <include>**/server/worker/task/sqoop/SqoopTaskTest.java</include> <include>**/server/worker/task/shell/ShellTaskTest.java</include> <include>**/server/worker/task/TaskManagerTest.java</include> <include>**/server/worker/task/AbstractCommandExecutorTest.java</include> <include>**/server/worker/task/ShellTaskReturnTest.java</include> <include>**/server/worker/EnvFileTest.java</include> <include>**/server/worker/runner/TaskExecuteThreadTest.java</include> <include>**/server/worker/runner/WorkerManagerThreadTest.java</include> <include>**/service/quartz/cron/CronUtilsTest.java</include> <include>**/service/process/ProcessServiceTest.java</include> <include>**/service/zk/DefaultEnsembleProviderTest.java</include> <include>**/service/zk/ZKServerTest.java</include> <include>**/service/zk/CuratorZookeeperClientTest.java</include> <include>**/service/zk/RegisterOperatorTest.java</include> <include>**/service/queue/TaskUpdateQueueTest.java</include> <include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include> <include>**/service/log/LogClientServiceTest.java</include> <include>**/service/alert/AlertClientServiceTest.java</include> <include>**/dao/mapper/DataSourceUserMapperTest.java</include> <!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>--> <include>**/dao/mapper/ProcessDefinitionMapperTest.java</include> <include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapperTest.java</include> <include>**/dao/mapper/ProjectMapperTest.java</include> <include>**/dao/mapper/ProjectUserMapperTest.java</include> <include>**/dao/mapper/QueueMapperTest.java</include> <include>**/dao/mapper/ResourceUserMapperTest.java</include> <include>**/dao/mapper/ScheduleMapperTest.java</include> <include>**/dao/mapper/SessionMapperTest.java</include> <include>**/dao/mapper/TaskInstanceMapperTest.java</include> <include>**/dao/mapper/TenantMapperTest.java</include> <include>**/dao/mapper/UdfFuncMapperTest.java</include> <include>**/dao/mapper/UDFUserMapperTest.java</include> <include>**/dao/mapper/UserMapperTest.java</include> 
<include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include> <include>**/dao/mapper/PluginDefineTest.java</include> <include>**/dao/utils/DagHelperTest.java</include> <include>**/dao/AlertDaoTest.java</include> <include>**/dao/datasource/OracleDataSourceTest.java</include> <include>**/dao/datasource/HiveDataSourceTest.java</include> <include>**/dao/datasource/BaseDataSourceTest.java</include> <include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include> <include>**/dao/upgrade/WokrerGrouopDaoTest.java</include> <include>**/dao/upgrade/UpgradeDaoTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelTest.java</include> <include>**/plugin/alert/email/ExcelUtilsTest.java</include> <include>**/plugin/alert/email/MailUtilsTest.java</include> <include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include> <include>**/plugin/alert/wechat/WeChatSenderTest.java</include> <include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ProcessUtilsTest.java</include> <include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ScriptSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelTest.java</include> <include>**/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java</include> <include>**/plugin/alert/feishu/FeiShuSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertPluginTest.java</include> <include>**/plugin/alert/http/HttpSenderTest.java</include> <include>**/spi/params/PluginParamsTransferTest.java</include> <include>**/alert/plugin/EmailAlertPluginTest.java</include> <include>**/alert/plugin/AlertPluginManagerTest.java</include> <include>**/alert/plugin/DolphinPluginLoaderTest.java</include> <include>**/alert/utils/DingTalkUtilsTest.java</include> <include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include> <include>**/alert/utils/FuncUtilsTest.java</include> <include>**/alert/processor/AlertRequestProcessorTest.java</include> <include>**/alert/runner/AlertSenderTest.java</include> <include>**/alert/AlertServerTest.java</include> </includes> <!-- <skip>true</skip> --> </configuration> </plugin> <!-- jenkins plugin jacoco report--> <plugin> <groupId>org.jacoco</groupId> <artifactId>jacoco-maven-plugin</artifactId> <version>${jacoco.version}</version> <configuration> <destFile>target/jacoco.exec</destFile> <dataFile>target/jacoco.exec</dataFile> </configuration> <executions> <execution> <id>jacoco-initialize</id> <goals> <goal>prepare-agent</goal> </goals> </execution> <execution> <id>jacoco-site</id> <phase>test</phase> <goals> <goal>report</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-maven-plugin</artifactId> <version>${spotbugs.version}</version> <configuration> <xmlOutput>true</xmlOutput> <threshold>medium</threshold> <effort>default</effort> <excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile> <failOnError>true</failOnError> </configuration> <dependencies> <dependency> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs</artifactId> <version>4.0.0-beta4</version> </dependency> </dependencies> </plugin> <plugin> 
<groupId>org.apache.maven.plugins</groupId> <artifactId>maven-checkstyle-plugin</artifactId> <version>${checkstyle.version}</version> <dependencies> <dependency> <groupId>com.puppycrawl.tools</groupId> <artifactId>checkstyle</artifactId> <version>8.18</version> </dependency> </dependencies> <configuration> <consoleOutput>true</consoleOutput> <encoding>UTF-8</encoding> <configLocation>style/checkstyle.xml</configLocation> <suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation> <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression> <failOnViolation>true</failOnViolation> <violationSeverity>warning</violationSeverity> <includeTestSourceDirectory>true</includeTestSourceDirectory> <sourceDirectories> <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory> </sourceDirectories> <excludes>**\/generated-sources\/</excludes> <skip>true</skip> </configuration> <executions> <execution> <phase>compile</phase> <goals> <goal>check</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${cobertura-maven-plugin.version}</version> <configuration> <check> </check> <aggregate>true</aggregate> <outputDirectory>./target/cobertura</outputDirectory> <encoding>${project.build.sourceEncoding}</encoding> <quiet>true</quiet> <format>xml</format> <instrumentation> <ignoreTrivial>true</ignoreTrivial> </instrumentation> </configuration> </plugin> </plugins> </build> <modules> <module>dolphinscheduler-alert-plugin</module> <module>dolphinscheduler-ui</module> <module>dolphinscheduler-server</module> <module>dolphinscheduler-common</module> <module>dolphinscheduler-api</module> <module>dolphinscheduler-dao</module> <module>dolphinscheduler-alert</module> <module>dolphinscheduler-dist</module> <module>dolphinscheduler-remote</module> <module>dolphinscheduler-service</module> <module>dolphinscheduler-spi</module> <module>dolphinscheduler-microbench</module> </modules> </project>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,309
[Bug][Common] memoryUsage is `-33%`
**To Reproduce** `memoryUsage` is `-33%` **Expected behavior** Bug fixed **Screenshots** ![image](https://user-images.githubusercontent.com/4902714/115050583-779bf480-9f0e-11eb-8eed-4abcc8df3fc4.png) **Which version of Dolphin Scheduler:** -[1.3.x] -[dev]
https://github.com/apache/dolphinscheduler/issues/5309
https://github.com/apache/dolphinscheduler/pull/5312
5ef1f731b7f3b48915859b7a9595ec42df614c48
e92e29ef9a126d1d5660011545056d4dd806d105
"2021-04-16T15:51:04Z"
java
"2021-04-19T02:24:54Z"
tools/dependencies/known-dependencies.txt
HikariCP-3.2.0.jar activation-1.1.jar aether-api-1.13.1.jar aether-connector-asynchttpclient-1.13.1.jar aether-connector-file-1.13.1.jar aether-impl-1.13.1.jar aether-spi-1.13.1.jar aether-util-1.13.1.jar animal-sniffer-annotations-1.14.jar aopalliance-1.0.jar apache-el-8.5.54.jar apacheds-i18n-2.0.0-M15.jar apacheds-kerberos-codec-2.0.0-M15.jar api-asn1-api-1.0.0-M20.jar api-util-1.0.0-M20.jar asm-3.1.jar asm-6.2.1.jar aspectjweaver-1.9.6.jar async-http-client-1.6.5.jar audience-annotations-0.5.0.jar avro-1.7.4.jar aws-java-sdk-1.7.4.jar bonecp-0.8.0.RELEASE.jar byte-buddy-1.9.16.jar checker-compat-qual-2.0.0.jar classmate-1.4.0.jar clickhouse-jdbc-0.1.52.jar commons-email-1.5.jar commons-cli-1.2.jar commons-codec-1.11.jar commons-collections-3.2.2.jar commons-collections4-4.1.jar commons-compress-1.4.1.jar commons-compiler-3.0.16.jar commons-configuration-1.10.jar commons-daemon-1.0.13.jar commons-beanutils-1.9.4.jar commons-dbcp-1.4.jar commons-httpclient-3.0.1.jar commons-io-2.4.jar commons-lang-2.6.jar commons-logging-1.1.1.jar commons-math3-3.1.1.jar commons-net-3.1.jar commons-pool-1.6.jar cron-utils-5.0.5.jar curator-client-4.3.0.jar curator-framework-4.3.0.jar curator-recipes-4.3.0.jar datanucleus-api-jdo-4.2.1.jar datanucleus-core-4.1.6.jar datanucleus-rdbms-4.1.7.jar derby-10.14.2.0.jar error_prone_annotations-2.1.3.jar druid-1.1.22.jar gson-2.8.6.jar guava-24.1-jre.jar guava-retrying-2.0.0.jar guice-3.0.jar guice-servlet-3.0.jar h2-1.4.200.jar hadoop-annotations-2.7.3.jar hadoop-auth-2.7.3.jar hadoop-aws-2.7.3.jar hadoop-client-2.7.3.jar hadoop-common-2.7.3.jar hadoop-hdfs-2.7.3.jar hadoop-mapreduce-client-app-2.7.3.jar hadoop-mapreduce-client-common-2.7.3.jar hadoop-mapreduce-client-core-2.7.3.jar hadoop-mapreduce-client-jobclient-2.7.3.jar hadoop-mapreduce-client-shuffle-2.7.3.jar hadoop-yarn-api-2.7.3.jar hadoop-yarn-client-2.7.3.jar hadoop-yarn-common-2.7.3.jar hadoop-yarn-server-common-2.7.3.jar hamcrest-core-1.3.jar hibernate-validator-6.0.20.Final.jar hive-common-2.1.0.jar hive-jdbc-2.1.0.jar hive-metastore-2.1.0.jar hive-orc-2.1.0.jar hive-serde-2.1.0.jar hive-service-2.1.0.jar hive-service-rpc-2.1.0.jar hive-storage-api-2.1.0.jar htrace-core-3.1.0-incubating.jar httpclient-4.4.1.jar httpcore-4.4.1.jar httpmime-4.5.12.jar j2objc-annotations-1.1.jar jackson-annotations-2.10.5.jar jackson-core-2.10.5.jar jackson-core-asl-1.9.13.jar jackson-databind-2.10.5.jar jackson-datatype-jdk8-2.9.10.jar jackson-datatype-jsr310-2.9.10.jar jackson-jaxrs-1.9.13.jar jackson-mapper-asl-1.9.13.jar jackson-module-parameter-names-2.9.10.jar jackson-xc-1.9.13.jar jamon-runtime-2.3.1.jar janino-3.0.16.jar java-xmlbuilder-0.4.jar javassist-3.26.0-GA.jar javax.activation-api-1.2.0.jar javax.annotation-api-1.3.2.jar javax.inject-1.jar javax.jdo-3.2.0-m3.jar javax.mail-1.6.2.jar javax.servlet-api-3.1.0.jar javolution-5.5.1.jar jaxb-api-2.3.1.jar jaxb-impl-2.2.3-1.jar jboss-logging-3.3.3.Final.jar jdo-api-3.0.1.jar jersey-client-1.9.jar jersey-core-1.9.jar jersey-guice-1.9.jar jersey-json-1.9.jar jersey-server-1.9.jar jets3t-0.9.0.jar jettison-1.1.jar jetty-6.1.26.jar jetty-continuation-9.4.31.v20200723.jar jetty-http-9.4.31.v20200723.jar jetty-io-9.4.31.v20200723.jar jetty-security-9.4.31.v20200723.jar jetty-server-9.4.31.v20200723.jar jetty-servlet-9.4.31.v20200723.jar jetty-servlets-9.4.31.v20200723.jar jetty-util-6.1.26.jar jetty-util-9.4.31.v20200723.jar jetty-webapp-9.4.31.v20200723.jar jetty-xml-9.4.31.v20200723.jar jline-0.9.94.jar jna-4.5.2.jar jna-platform-4.5.2.jar joda-time-2.10.6.jar 
jpam-1.1.jar jsch-0.1.42.jar jsp-api-2.1.jar jsqlparser-2.1.jar jsr305-1.3.9.jar jsr305-3.0.0.jar jta-1.1.jar jul-to-slf4j-1.7.30.jar junit-4.12.jar leveldbjni-all-1.8.jar libfb303-0.9.3.jar libthrift-0.9.3.jar log4j-1.2-api-2.11.2.jar log4j-1.2.17.jar log4j-api-2.11.2.jar log4j-core-2.11.2.jar logback-classic-1.2.3.jar logback-core-1.2.3.jar lz4-1.3.0.jar mapstruct-1.2.0.Final.jar maven-aether-provider-3.0.4.jar maven-artifact-3.0.4.jar maven-compat-3.0.4.jar maven-core-3.0.4.jar maven-embedder-3.0.4.jar maven-model-3.0.4.jar maven-model-builder-3.0.4.jar maven-plugin-api-3.0.4.jar maven-repository-metadata-3.0.4.jar maven-settings-3.0.4.jar maven-settings-builder-3.0.4.jar mssql-jdbc-6.1.0.jre8.jar mybatis-3.5.2.jar mybatis-plus-3.2.0.jar mybatis-plus-annotation-3.2.0.jar mybatis-plus-boot-starter-3.2.0.jar mybatis-plus-core-3.2.0.jar mybatis-plus-extension-3.2.0.jar mybatis-spring-2.0.2.jar netty-3.6.2.Final.jar netty-all-4.1.52.Final.jar opencsv-2.3.jar oshi-core-3.5.0.jar paranamer-2.3.jar parquet-hadoop-bundle-1.8.1.jar plexus-cipher-1.7.jar plexus-classworlds-2.4.jar plexus-component-annotations-1.5.5.jar plexus-container-default-1.5.5.jar plexus-interpolation-1.14.jar plexus-sec-dispatcher-1.3.jar plexus-utils-2.0.6.jar poi-3.17.jar postgresql-42.1.4.jar presto-jdbc-0.238.1.jar protobuf-java-2.5.0.jar protostuff-core-1.7.2.jar protostuff-runtime-1.7.2.jar protostuff-api-1.7.2.jar protostuff-collectionschema-1.7.2.jar quartz-2.3.0.jar quartz-jobs-2.3.0.jar reflections-0.9.12.jar resolver-1.5.jar slf4j-api-1.7.5.jar snakeyaml-1.23.jar snappy-0.2.jar snappy-java-1.0.4.1.jar spring-aop-5.1.18.RELEASE.jar spring-beans-5.1.18.RELEASE.jar spring-boot-2.1.17.RELEASE.jar spring-boot-autoconfigure-2.1.17.RELEASE.jar spring-boot-starter-2.1.17.RELEASE.jar spring-boot-starter-aop-2.1.17.RELEASE.jar spring-boot-starter-jdbc-2.1.17.RELEASE.jar spring-boot-starter-jetty-2.1.17.RELEASE.jar spring-boot-starter-json-2.1.17.RELEASE.jar spring-boot-starter-logging-2.1.17.RELEASE.jar spring-boot-starter-web-2.1.17.RELEASE.jar spring-context-5.1.18.RELEASE.jar spring-core-5.1.18.RELEASE.jar spring-expression-5.1.18.RELEASE.jar spring-jcl-5.1.18.RELEASE.jar spring-jdbc-5.1.18.RELEASE.jar spring-plugin-core-1.2.0.RELEASE.jar spring-plugin-metadata-1.2.0.RELEASE.jar spring-tx-5.1.18.RELEASE.jar spring-web-5.1.18.RELEASE.jar spring-webmvc-5.1.18.RELEASE.jar springfox-core-2.9.2.jar springfox-schema-2.9.2.jar springfox-spi-2.9.2.jar springfox-spring-web-2.9.2.jar springfox-swagger-common-2.9.2.jar springfox-swagger-ui-2.9.2.jar springfox-swagger2-2.9.2.jar swagger-annotations-1.5.20.jar swagger-bootstrap-ui-1.9.3.jar swagger-models-1.5.24.jar tephra-api-0.6.0.jar threetenbp-1.3.6.jar transaction-api-1.1.jar validation-api-2.0.1.Final.jar wagon-provider-api-2.2.jar xbean-reflect-3.4.jar xercesImpl-2.9.1.jar xml-apis-1.4.01.jar xmlenc-0.52.jar xz-1.0.jar zookeeper-3.4.14.jar
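
With the formula OSUtils uses for memory usage, `(total - available) / total`, a negative reading such as the `-33%` reported in issue 5309 above can only appear when the platform reports more available memory than total memory. Below is a minimal sketch of a guarded version, assuming oshi's `GlobalMemory` API (the same `oshi-core` listed in the dependencies above); the clamp is illustrative only and not necessarily the project's actual patch:

```java
import java.math.RoundingMode;
import java.text.DecimalFormat;

import oshi.SystemInfo;
import oshi.hardware.GlobalMemory;

public class MemoryUsageSketch {

    /**
     * Memory usage as a fraction in [0, 1], rounded to two decimals.
     * Clamping guards against an inconsistent snapshot where the reported
     * available memory exceeds the reported total, which is the only way
     * (total - available) / total can go negative.
     */
    public static double memoryUsage() {
        GlobalMemory memory = new SystemInfo().getHardware().getMemory();
        double usage = (memory.getTotal() - memory.getAvailable()) * 1.0 / memory.getTotal();
        usage = Math.max(0.0, Math.min(1.0, usage));
        DecimalFormat df = new DecimalFormat("0.00");
        df.setRoundingMode(RoundingMode.HALF_UP);
        return Double.parseDouble(df.format(usage));
    }

    public static void main(String[] args) {
        System.out.println("memory usage: " + memoryUsage());
    }
}
```

Clamping keeps a transiently inconsistent reading from surfacing as a nonsense percentage in the monitoring view.
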
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,310
[Improvement][Server] The log "load is too high or availablePhysicalMemorySize(G) is too low" is not clear
**Describe the question** The log `load is too high or availablePhysicalMemorySize(G) is too low` is not clear **What are the current deficiencies and the benefits of improvement** - The updated log `current cpu load average {} is too high or available memory {}G is too low, under max.cpuload.avg={} and reserved.memory={}G` will be more clear **Which version of DolphinScheduler:** -[1.3.x] -[dev] **Describe alternatives you've considered** A clear and concise description of any alternative improvement solutions you've considered.
https://github.com/apache/dolphinscheduler/issues/5310
https://github.com/apache/dolphinscheduler/pull/5311
e92e29ef9a126d1d5660011545056d4dd806d105
83519bdfc3a5cdd40f2d538d64ab6f8221bef246
"2021-04-16T16:01:16Z"
java
"2021-04-19T02:29:32Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.shell.ShellExecutor; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.lang.management.ManagementFactory; import java.lang.management.OperatingSystemMXBean; import java.lang.management.RuntimeMXBean; import java.math.RoundingMode; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.StringTokenizer; import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import oshi.SystemInfo; import oshi.hardware.CentralProcessor; import oshi.hardware.GlobalMemory; import oshi.hardware.HardwareAbstractionLayer; /** * os utils */ public class OSUtils { private static final Logger logger = LoggerFactory.getLogger(OSUtils.class); public static final ThreadLocal<Logger> taskLoggerThreadLocal = new ThreadLocal<>(); private static final SystemInfo SI = new SystemInfo(); public static final String TWO_DECIMAL = "0.00"; /** * return -1 when the function can not get hardware env info * e.g {@link OSUtils#loadAverage()} {@link OSUtils#cpuUsage()} */ public static final double NEGATIVE_ONE = -1; private static HardwareAbstractionLayer hal = SI.getHardware(); private OSUtils() { throw new UnsupportedOperationException("Construct OSUtils"); } /** * Initialization regularization, solve the problem of pre-compilation performance, * avoid the thread safety problem of multi-thread operation */ private static final Pattern PATTERN = Pattern.compile("\\s+"); /** * get memory usage * Keep 2 decimal * * @return percent % */ public static double memoryUsage() { GlobalMemory memory = hal.getMemory(); double memoryUsage = (memory.getTotal() - memory.getAvailable()) * 1.0 / memory.getTotal(); DecimalFormat df = new DecimalFormat(TWO_DECIMAL); df.setRoundingMode(RoundingMode.HALF_UP); return Double.parseDouble(df.format(memoryUsage)); } /** * get available physical memory size * <p> * Keep 2 decimal * * @return available Physical Memory Size, unit: G */ public static double availablePhysicalMemorySize() { GlobalMemory memory = hal.getMemory(); double availablePhysicalMemorySize = memory.getAvailable() / 1024.0 / 1024 / 1024; DecimalFormat df = new DecimalFormat(TWO_DECIMAL); df.setRoundingMode(RoundingMode.HALF_UP); return Double.parseDouble(df.format(availablePhysicalMemorySize)); } /** * get total physical memory size * <p> * Keep 2 decimal * * @return available Physical Memory Size, unit: G */ public static double totalPhysicalMemorySize() { GlobalMemory memory = hal.getMemory(); double totalPhysicalMemorySize = 
memory.getTotal() / 1024.0 / 1024 / 1024; DecimalFormat df = new DecimalFormat(TWO_DECIMAL); df.setRoundingMode(RoundingMode.HALF_UP); return Double.parseDouble(df.format(totalPhysicalMemorySize)); } /** * load average * * @return load average */ public static double loadAverage() { double loadAverage; try { OperatingSystemMXBean osBean = ManagementFactory.getPlatformMXBean(OperatingSystemMXBean.class); loadAverage = osBean.getSystemLoadAverage(); } catch (Exception e) { logger.error("get operation system load average exception, try another method ", e); loadAverage = hal.getProcessor().getSystemLoadAverage(); if (Double.isNaN(loadAverage)) { return NEGATIVE_ONE; } } DecimalFormat df = new DecimalFormat(TWO_DECIMAL); df.setRoundingMode(RoundingMode.HALF_UP); return Double.parseDouble(df.format(loadAverage)); } /** * get cpu usage * * @return cpu usage */ public static double cpuUsage() { CentralProcessor processor = hal.getProcessor(); double cpuUsage = processor.getSystemCpuLoad(); if (Double.isNaN(cpuUsage)) { return NEGATIVE_ONE; } DecimalFormat df = new DecimalFormat(TWO_DECIMAL); df.setRoundingMode(RoundingMode.HALF_UP); return Double.parseDouble(df.format(cpuUsage)); } public static List<String> getUserList() { try { if (isMacOS()) { return getUserListFromMac(); } else if (isWindows()) { return getUserListFromWindows(); } else { return getUserListFromLinux(); } } catch (Exception e) { logger.error(e.getMessage(), e); } return Collections.emptyList(); } /** * get user list from linux * * @return user list */ private static List<String> getUserListFromLinux() throws IOException { List<String> userList = new ArrayList<>(); try (BufferedReader bufferedReader = new BufferedReader( new InputStreamReader(new FileInputStream("/etc/passwd")))) { String line; while ((line = bufferedReader.readLine()) != null) { if (line.contains(":")) { String[] userInfo = line.split(":"); userList.add(userInfo[0]); } } } return userList; } /** * get user list from mac * * @return user list */ private static List<String> getUserListFromMac() throws IOException { String result = exeCmd("dscl . list /users"); if (StringUtils.isNotEmpty(result)) { return Arrays.asList(result.split("\n")); } return Collections.emptyList(); } /** * get user list from windows * * @return user list */ private static List<String> getUserListFromWindows() throws IOException { String result = exeCmd("net user"); String[] lines = result.split("\n"); int startPos = 0; int endPos = lines.length - 2; for (int i = 0; i < lines.length; i++) { if (lines[i].isEmpty()) { continue; } int count = 0; if (lines[i].charAt(0) == '-') { for (int j = 0; j < lines[i].length(); j++) { if (lines[i].charAt(j) == '-') { count++; } } } if (count == lines[i].length()) { startPos = i + 1; break; } } List<String> users = new ArrayList<>(); while (startPos <= endPos) { users.addAll(Arrays.asList(PATTERN.split(lines[startPos]))); startPos++; } return users; } /** * create user * * @param userName user name */ public static void createUserIfAbsent(String userName) { // if the user does not exist, create it taskLoggerThreadLocal.set(taskLoggerThreadLocal.get()); if (!getUserList().contains(userName)) { boolean isSuccess = createUser(userName); String infoLog = String.format("create user %s %s", userName, isSuccess ? 
"success" : "fail"); LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog); LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog); } taskLoggerThreadLocal.remove(); } /** * create user * * @param userName user name * @return true if creation was successful, otherwise false */ public static boolean createUser(String userName) { try { String userGroup = getGroup(); if (StringUtils.isEmpty(userGroup)) { String errorLog = String.format("%s group does not exist for this operating system.", userGroup); LoggerUtils.logError(Optional.ofNullable(logger), errorLog); LoggerUtils.logError(Optional.ofNullable(taskLoggerThreadLocal.get()), errorLog); return false; } if (isMacOS()) { createMacUser(userName, userGroup); } else if (isWindows()) { createWindowsUser(userName, userGroup); } else { createLinuxUser(userName, userGroup); } return true; } catch (Exception e) { LoggerUtils.logError(Optional.ofNullable(logger), e); LoggerUtils.logError(Optional.ofNullable(taskLoggerThreadLocal.get()), e); } return false; } /** * create linux user * * @param userName user name * @param userGroup user group * @throws IOException in case of an I/O error */ private static void createLinuxUser(String userName, String userGroup) throws IOException { String infoLog1 = String.format("create linux os user : %s", userName); LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog1); LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog1); String cmd = String.format("sudo useradd -g %s %s", userGroup, userName); String infoLog2 = String.format("execute cmd : %s", cmd); LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog2); LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog2); exeCmd(cmd); } /** * create mac user (Supports Mac OSX 10.10+) * * @param userName user name * @param userGroup user group * @throws IOException in case of an I/O error */ private static void createMacUser(String userName, String userGroup) throws IOException { Optional<Logger> optionalLogger = Optional.ofNullable(logger); Optional<Logger> optionalTaskLogger = Optional.ofNullable(taskLoggerThreadLocal.get()); String infoLog1 = String.format("create mac os user : %s", userName); LoggerUtils.logInfo(optionalLogger, infoLog1); LoggerUtils.logInfo(optionalTaskLogger, infoLog1); String createUserCmd = String.format("sudo sysadminctl -addUser %s -password %s", userName, userName); String infoLog2 = String.format("create user command : %s", createUserCmd); LoggerUtils.logInfo(optionalLogger, infoLog2); LoggerUtils.logInfo(optionalTaskLogger, infoLog2); exeCmd(createUserCmd); String appendGroupCmd = String.format("sudo dseditgroup -o edit -a %s -t user %s", userName, userGroup); String infoLog3 = String.format("append user to group : %s", appendGroupCmd); LoggerUtils.logInfo(optionalLogger, infoLog3); LoggerUtils.logInfo(optionalTaskLogger, infoLog3); exeCmd(appendGroupCmd); } /** * create windows user * * @param userName user name * @param userGroup user group * @throws IOException in case of an I/O error */ private static void createWindowsUser(String userName, String userGroup) throws IOException { String infoLog1 = String.format("create windows os user : %s", userName); LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog1); LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog1); String userCreateCmd = String.format("net user \"%s\" /add", userName); String infoLog2 = String.format("execute create user command : %s", userCreateCmd); 
LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog2); LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog2); exeCmd(userCreateCmd); String appendGroupCmd = String.format("net localgroup \"%s\" \"%s\" /add", userGroup, userName); String infoLog3 = String.format("execute append user to group : %s", appendGroupCmd); LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog3); LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog3); exeCmd(appendGroupCmd); } /** * get system group information * * @return system group info * @throws IOException errors */ public static String getGroup() throws IOException { if (isWindows()) { String currentProcUserName = System.getProperty("user.name"); String result = exeCmd(String.format("net user \"%s\"", currentProcUserName)); String line = result.split("\n")[22]; String group = PATTERN.split(line)[1]; if (group.charAt(0) == '*') { return group.substring(1); } else { return group; } } else { String result = exeCmd("groups"); if (StringUtils.isNotEmpty(result)) { String[] groupInfo = result.split(" "); return groupInfo[0]; } } return null; } /** * get sudo command * * @param tenantCode tenantCode * @param command command * @return result of sudo execute command */ public static String getSudoCmd(String tenantCode, String command) { if (!CommonUtils.isSudoEnable() || StringUtils.isEmpty(tenantCode)) { return command; } return String.format("sudo -u %s %s", tenantCode, command); } /** * Execute the corresponding command of Linux or Windows * * @param command command * @return result of execute command * @throws IOException errors */ public static String exeCmd(String command) throws IOException { StringTokenizer st = new StringTokenizer(command); String[] cmdArray = new String[st.countTokens()]; for (int i = 0; st.hasMoreTokens(); i++) { cmdArray[i] = st.nextToken(); } return exeShell(cmdArray); } /** * Execute the shell * * @param command command * @return result of execute the shell * @throws IOException errors */ public static String exeShell(String[] command) throws IOException { return ShellExecutor.execCommand(command); } /** * get process id * * @return process id */ public static int getProcessID() { RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean(); return Integer.parseInt(runtimeMXBean.getName().split("@")[0]); } /** * whether is macOS * * @return true if mac */ public static boolean isMacOS() { return getOSName().startsWith("Mac"); } /** * whether is windows * * @return true if windows */ public static boolean isWindows() { return getOSName().startsWith("Windows"); } /** * get current OS name * * @return current OS name */ public static String getOSName() { return System.getProperty("os.name"); } /** * check memory and cpu usage * * @param systemCpuLoad systemCpuLoad * @param systemReservedMemory systemReservedMemory * @return check memory and cpu usage */ public static Boolean checkResource(double systemCpuLoad, double systemReservedMemory) { // system load average double loadAverage = loadAverage(); // system available physical memory double availablePhysicalMemorySize = availablePhysicalMemorySize(); if (loadAverage > systemCpuLoad || availablePhysicalMemorySize < systemReservedMemory) { logger.warn("load is too high or availablePhysicalMemorySize(G) is too low, it's availablePhysicalMemorySize(G):{},loadAvg:{}", availablePhysicalMemorySize, loadAverage); return false; } else { return true; } } }
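
The `checkResource` method at the end of the file above emits the vague warning this issue complains about. Below is a minimal sketch of the clearer message, using the exact template proposed in the issue body; the class, the standalone method shape, and the parameter names are mine, while the SLF4J parameterized-logging style matches what OSUtils already uses:

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ResourceCheckLogSketch {

    private static final Logger logger = LoggerFactory.getLogger(ResourceCheckLogSketch.class);

    /**
     * Returns false and logs a warning when either threshold is violated.
     * The warning names both the measured values and the configured limits,
     * so an operator can see which threshold was crossed and by how much.
     */
    public static boolean checkResource(double loadAverage, double availableMemoryG,
                                        double maxCpuLoadAvg, double reservedMemoryG) {
        if (loadAverage > maxCpuLoadAvg || availableMemoryG < reservedMemoryG) {
            logger.warn("current cpu load average {} is too high or available memory {}G is too low, "
                    + "under max.cpuload.avg={} and reserved.memory={}G",
                    loadAverage, availableMemoryG, maxCpuLoadAvg, reservedMemoryG);
            return false;
        }
        return true;
    }

    public static void main(String[] args) {
        // load 8.5 against a 2.0 limit and 0.3G free against a 0.5G reserve: the warning fires
        checkResource(8.5, 0.3, 2.0, 0.5);
    }
}
```

Reporting the measured values next to the `max.cpuload.avg` and `reserved.memory` limits turns the log line from a bare symptom into something an operator can act on directly.
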
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,310
[Improvement][Server] The log "load is too high or availablePhysicalMemorySize(G) is too low" is not clear
**Describe the question** The log `load is too high or availablePhysicalMemorySize(G) is too low` is not clear **What are the current deficiencies and the benefits of improvement** - The updated log `current cpu load average {} is too high or available memory {}G is too low, under max.cpuload.avg={} and reserved.memory={}G` will be more clear **Which version of DolphinScheduler:** -[1.3.x] -[dev] **Describe alternatives you've considered** A clear and concise description of any alternative improvement solutions you've considered.
https://github.com/apache/dolphinscheduler/issues/5310
https://github.com/apache/dolphinscheduler/pull/5311
e92e29ef9a126d1d5660011545056d4dd806d105
83519bdfc3a5cdd40f2d538d64ab6f8221bef246
"2021-04-16T16:01:16Z"
java
"2021-04-19T02:29:32Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.master.consumer; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.SqoopJobType; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.enums.UdfType; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.datax.DataxParameters; import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters; import org.apache.dolphinscheduler.common.task.sql.SqlParameters; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.EnumUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder; import org.apache.dolphinscheduler.server.entity.DataxTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.ProcedureTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.SQLTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.dispatch.ExecutorDispatcher; import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.queue.TaskPriority; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; import 
java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.PostConstruct; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; /** * TaskPriorityQueue consumer */ @Component public class TaskPriorityQueueConsumer extends Thread { /** * logger of TaskPriorityQueueConsumer */ private static final Logger logger = LoggerFactory.getLogger(TaskPriorityQueueConsumer.class); /** * taskPriorityQueue */ @Autowired private TaskPriorityQueue<TaskPriority> taskPriorityQueue; /** * processService */ @Autowired private ProcessService processService; /** * executor dispatcher */ @Autowired private ExecutorDispatcher dispatcher; /** * master config */ @Autowired private MasterConfig masterConfig; @PostConstruct public void init() { super.setName("TaskUpdateQueueConsumerThread"); super.start(); } @Override public void run() { List<TaskPriority> failedDispatchTasks = new ArrayList<>(); while (Stopper.isRunning()) { try { int fetchTaskNum = masterConfig.getMasterDispatchTaskNumber(); failedDispatchTasks.clear(); for (int i = 0; i < fetchTaskNum; i++) { TaskPriority taskPriority = taskPriorityQueue.poll(Constants.SLEEP_TIME_MILLIS, TimeUnit.MILLISECONDS); if (Objects.isNull(taskPriority)) { continue; } boolean dispatchResult = dispatch(taskPriority); if (!dispatchResult) { failedDispatchTasks.add(taskPriority); } } if (!failedDispatchTasks.isEmpty()) { for (TaskPriority dispatchFailedTask : failedDispatchTasks) { taskPriorityQueue.put(dispatchFailedTask); } // If there are tasks in a cycle that cannot find the worker group, // sleep for 1 second if (taskPriorityQueue.size() <= failedDispatchTasks.size()) { TimeUnit.MILLISECONDS.sleep(Constants.SLEEP_TIME_MILLIS); } } } catch (Exception e) { logger.error("dispatch task error", e); } } } /** * dispatch task * * @param taskPriority taskPriority * @return result */ protected boolean dispatch(TaskPriority taskPriority) { boolean result = false; try { int taskInstanceId = taskPriority.getTaskId(); TaskExecutionContext context = getTaskExecutionContext(taskInstanceId); ExecutionContext executionContext = new ExecutionContext(context.toCommand(), ExecutorType.WORKER, context.getWorkerGroup()); if (taskInstanceIsFinalState(taskInstanceId)) { // when the task is finished, ignore it, there is no need to dispatch it anymore return true; } else { result = dispatcher.dispatch(executionContext); } } catch (ExecuteException e) { logger.error("dispatch error", e); } return result; } /** * taskInstance is in a final state * success, failure, kill, stop, pause and thread-waiting are final states * * @param taskInstanceId taskInstanceId * @return whether the taskInstance is in a final state */ public Boolean taskInstanceIsFinalState(int taskInstanceId) { TaskInstance taskInstance = processService.findTaskInstanceById(taskInstanceId); return taskInstance.getState().typeIsFinished(); } /** * get TaskExecutionContext * * @param taskInstanceId taskInstanceId * @return TaskExecutionContext */ protected TaskExecutionContext getTaskExecutionContext(int taskInstanceId) { TaskInstance taskInstance = processService.getTaskInstanceDetailByTaskId(taskInstanceId); // task type TaskType taskType = TaskType.valueOf(taskInstance.getTaskType()); // task node TaskNode taskNode =
JSONUtils.parseObject(taskInstance.getTaskJson(), TaskNode.class); Integer userId = taskInstance.getProcessDefine() == null ? 0 : taskInstance.getProcessDefine().getUserId(); Tenant tenant = processService.getTenantForProcess(taskInstance.getProcessInstance().getTenantId(), userId); // verify tenant is null if (verifyTenantIsNull(tenant, taskInstance)) { processService.changeTaskState(taskInstance, ExecutionStatus.FAILURE, taskInstance.getStartTime(), taskInstance.getHost(), null, null, taskInstance.getId()); return null; } // set queue for process instance, user-specified queue takes precedence over tenant queue String userQueue = processService.queryUserQueueByProcessInstanceId(taskInstance.getProcessInstanceId()); taskInstance.getProcessInstance().setQueue(StringUtils.isEmpty(userQueue) ? tenant.getQueue() : userQueue); taskInstance.getProcessInstance().setTenantCode(tenant.getTenantCode()); taskInstance.setResources(getResourceFullNames(taskNode)); SQLTaskExecutionContext sqlTaskExecutionContext = new SQLTaskExecutionContext(); DataxTaskExecutionContext dataxTaskExecutionContext = new DataxTaskExecutionContext(); ProcedureTaskExecutionContext procedureTaskExecutionContext = new ProcedureTaskExecutionContext(); SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext(); // SQL task if (taskType == TaskType.SQL) { setSQLTaskRelation(sqlTaskExecutionContext, taskNode); } // DATAX task if (taskType == TaskType.DATAX) { setDataxTaskRelation(dataxTaskExecutionContext, taskNode); } // procedure task if (taskType == TaskType.PROCEDURE) { setProcedureTaskRelation(procedureTaskExecutionContext, taskNode); } if (taskType == TaskType.SQOOP) { setSqoopTaskRelation(sqoopTaskExecutionContext, taskNode); } return TaskExecutionContextBuilder.get() .buildTaskInstanceRelatedInfo(taskInstance) .buildProcessInstanceRelatedInfo(taskInstance.getProcessInstance()) .buildProcessDefinitionRelatedInfo(taskInstance.getProcessDefine()) .buildSQLTaskRelatedInfo(sqlTaskExecutionContext) .buildDataxTaskRelatedInfo(dataxTaskExecutionContext) .buildProcedureTaskRelatedInfo(procedureTaskExecutionContext) .buildSqoopTaskRelatedInfo(sqoopTaskExecutionContext) .create(); } /** * set procedure task relation * * @param procedureTaskExecutionContext procedureTaskExecutionContext * @param taskNode taskNode */ private void setProcedureTaskRelation(ProcedureTaskExecutionContext procedureTaskExecutionContext, TaskNode taskNode) { ProcedureParameters procedureParameters = JSONUtils.parseObject(taskNode.getParams(), ProcedureParameters.class); int datasourceId = procedureParameters.getDatasource(); DataSource datasource = processService.findDataSourceById(datasourceId); procedureTaskExecutionContext.setConnectionParams(datasource.getConnectionParams()); } /** * set datax task relation * * @param dataxTaskExecutionContext dataxTaskExecutionContext * @param taskNode taskNode */ protected void setDataxTaskRelation(DataxTaskExecutionContext dataxTaskExecutionContext, TaskNode taskNode) { DataxParameters dataxParameters = JSONUtils.parseObject(taskNode.getParams(), DataxParameters.class); DataSource dbSource = processService.findDataSourceById(dataxParameters.getDataSource()); DataSource dbTarget = processService.findDataSourceById(dataxParameters.getDataTarget()); if (dbSource != null) { dataxTaskExecutionContext.setDataSourceId(dataxParameters.getDataSource()); dataxTaskExecutionContext.setSourcetype(dbSource.getType().getCode()); 
dataxTaskExecutionContext.setSourceConnectionParams(dbSource.getConnectionParams()); } if (dbTarget != null) { dataxTaskExecutionContext.setDataTargetId(dataxParameters.getDataTarget()); dataxTaskExecutionContext.setTargetType(dbTarget.getType().getCode()); dataxTaskExecutionContext.setTargetConnectionParams(dbTarget.getConnectionParams()); } } /** * set sqoop task relation * * @param sqoopTaskExecutionContext sqoopTaskExecutionContext * @param taskNode taskNode */ private void setSqoopTaskRelation(SqoopTaskExecutionContext sqoopTaskExecutionContext, TaskNode taskNode) { SqoopParameters sqoopParameters = JSONUtils.parseObject(taskNode.getParams(), SqoopParameters.class); // when the sqoop job type is TEMPLATE, set the task relation if (sqoopParameters.getJobType().equals(SqoopJobType.TEMPLATE.getDescp())) { SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class); TargetMysqlParameter targetMysqlParameter = JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class); DataSource dataSource = processService.findDataSourceById(sourceMysqlParameter.getSrcDatasource()); DataSource dataTarget = processService.findDataSourceById(targetMysqlParameter.getTargetDatasource()); if (dataSource != null) { sqoopTaskExecutionContext.setDataSourceId(dataSource.getId()); sqoopTaskExecutionContext.setSourcetype(dataSource.getType().getCode()); sqoopTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams()); } if (dataTarget != null) { sqoopTaskExecutionContext.setDataTargetId(dataTarget.getId()); sqoopTaskExecutionContext.setTargetType(dataTarget.getType().getCode()); sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams()); } } } /** * set SQL task relation * * @param sqlTaskExecutionContext sqlTaskExecutionContext * @param taskNode taskNode */ private void setSQLTaskRelation(SQLTaskExecutionContext sqlTaskExecutionContext, TaskNode taskNode) { SqlParameters sqlParameters = JSONUtils.parseObject(taskNode.getParams(), SqlParameters.class); int datasourceId = sqlParameters.getDatasource(); DataSource datasource = processService.findDataSourceById(datasourceId); sqlTaskExecutionContext.setConnectionParams(datasource.getConnectionParams()); // whether the sql task uses udf functions boolean udfTypeFlag = EnumUtils.isValidEnum(UdfType.class, sqlParameters.getType()) && StringUtils.isNotEmpty(sqlParameters.getUdfs()); if (udfTypeFlag) { String[] udfFunIds = sqlParameters.getUdfs().split(","); int[] udfFunIdsArray = new int[udfFunIds.length]; for (int i = 0; i < udfFunIds.length; i++) { udfFunIdsArray[i] = Integer.parseInt(udfFunIds[i]); } List<UdfFunc> udfFuncList = processService.queryUdfFunListByIds(udfFunIdsArray); Map<UdfFunc, String> udfFuncMap = new HashMap<>(); for (UdfFunc udfFunc : udfFuncList) { String tenantCode = processService.queryTenantCodeByResName(udfFunc.getResourceName(), ResourceType.UDF); udfFuncMap.put(udfFunc, tenantCode); } sqlTaskExecutionContext.setUdfFuncTenantCodeMap(udfFuncMap); } } /** * whether tenant is null * * @param tenant tenant * @param taskInstance taskInstance * @return result */ protected boolean verifyTenantIsNull(Tenant tenant, TaskInstance taskInstance) { if (tenant == null) { logger.error("tenant does not exist, process instance id : {}, task instance id : {}", taskInstance.getProcessInstance().getId(), taskInstance.getId()); return true; } return false; } /** * get resource map, key is full name and value is tenantCode */ protected Map<String, String>
getResourceFullNames(TaskNode taskNode) { Map<String, String> resourcesMap = new HashMap<>(); AbstractParameters baseParam = TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams()); if (baseParam != null) { List<ResourceInfo> projectResourceFiles = baseParam.getResourceFilesList(); if (CollectionUtils.isNotEmpty(projectResourceFiles)) { // select the old-version resources whose resource id equals 0 Set<ResourceInfo> oldVersionResources = projectResourceFiles.stream().filter(t -> t.getId() == 0).collect(Collectors.toSet()); if (CollectionUtils.isNotEmpty(oldVersionResources)) { oldVersionResources.forEach( (t) -> resourcesMap.put(t.getRes(), processService.queryTenantCodeByResName(t.getRes(), ResourceType.FILE)) ); } // collect the resource ids in order to fetch the resource names in batch Stream<Integer> resourceIdStream = projectResourceFiles.stream().map(resourceInfo -> resourceInfo.getId()); Set<Integer> resourceIdsSet = resourceIdStream.collect(Collectors.toSet()); if (CollectionUtils.isNotEmpty(resourceIdsSet)) { Integer[] resourceIds = resourceIdsSet.toArray(new Integer[resourceIdsSet.size()]); List<Resource> resources = processService.listResourceByIds(resourceIds); resources.forEach( (t) -> resourcesMap.put(t.getFullName(), processService.queryTenantCodeByResName(t.getFullName(), ResourceType.FILE)) ); } } } return resourcesMap; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,310
[Improvement][Server] The log "load is too high or availablePhysicalMemorySize(G) is too low" is not clear
**Describe the question** The log `load is too high or availablePhysicalMemorySize(G) is too low` is not clear. **What are the current deficiencies and the benefits of improvement** - The updated log `current cpu load average {} is too high or available memory {}G is too low, under max.cpuload.avg={} and reserved.memory={}G` will be clearer. **Which version of DolphinScheduler:** -[1.3.x] -[dev]
https://github.com/apache/dolphinscheduler/issues/5310
https://github.com/apache/dolphinscheduler/pull/5311
e92e29ef9a126d1d5660011545056d4dd806d105
83519bdfc3a5cdd40f2d538d64ab6f8221bef246
"2021-04-16T16:01:16Z"
java
"2021-04-19T02:29:32Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.master.dispatch; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; import org.apache.dolphinscheduler.server.master.dispatch.executor.ExecutorManager; import org.apache.dolphinscheduler.server.master.dispatch.executor.NettyExecutorManager; import org.apache.dolphinscheduler.server.master.dispatch.host.HostManager; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.util.concurrent.ConcurrentHashMap; /** * executor dispatcher */ @Service public class ExecutorDispatcher implements InitializingBean { /** * netty executor manager */ @Autowired private NettyExecutorManager nettyExecutorManager; /** * round robin host manager */ @Autowired private HostManager hostManager; /** * executor manager */ private final ConcurrentHashMap<ExecutorType, ExecutorManager<Boolean>> executorManagers; /** * constructor */ public ExecutorDispatcher(){ this.executorManagers = new ConcurrentHashMap<>(); } /** * task dispatch * * @param context context * @return result * @throws ExecuteException if error throws ExecuteException */ public Boolean dispatch(final ExecutionContext context) throws ExecuteException { /** * get executor manager */ ExecutorManager<Boolean> executorManager = this.executorManagers.get(context.getExecutorType()); if(executorManager == null){ throw new ExecuteException("no ExecutorManager for type : " + context.getExecutorType()); } /** * host select */ Host host = hostManager.select(context); if (StringUtils.isEmpty(host.getAddress())) { throw new ExecuteException(String.format("fail to execute : %s due to no suitable worker, " + "current task needs to be executed by the %s worker group", context.getCommand(), context.getWorkerGroup())); } context.setHost(host); executorManager.beforeExecute(context); try { /** * task execute */ return executorManager.execute(context); } finally { executorManager.afterExecute(context); } } /** * register init * @throws Exception if error throws Exception */ @Override public void afterPropertiesSet() throws Exception { register(ExecutorType.WORKER, nettyExecutorManager); register(ExecutorType.CLIENT, nettyExecutorManager); } /** * register * @param type executor type * @param executorManager executorManager */ public void register(ExecutorType type, ExecutorManager executorManager){ executorManagers.put(type, executorManager); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,310
[Improvement][Server] The log "load is too high or availablePhysicalMemorySize(G) is too low" is not clear
**Describe the question** The log `load is too high or availablePhysicalMemorySize(G) is too low` is not clear. **What are the current deficiencies and the benefits of improvement** - The updated log `current cpu load average {} is too high or available memory {}G is too low, under max.cpuload.avg={} and reserved.memory={}G` will be clearer. **Which version of DolphinScheduler:** -[1.3.x] -[dev]
https://github.com/apache/dolphinscheduler/issues/5310
https://github.com/apache/dolphinscheduler/pull/5311
e92e29ef9a126d1d5660011545056d4dd806d105
83519bdfc3a5cdd40f2d538d64ab6f8221bef246
"2021-04-16T16:01:16Z"
java
"2021-04-19T02:29:32Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.master.dispatch.host; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.ResInfo; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; import org.apache.dolphinscheduler.server.master.dispatch.host.assign.HostWeight; import org.apache.dolphinscheduler.server.master.dispatch.host.assign.HostWorker; import org.apache.dolphinscheduler.server.master.dispatch.host.assign.LowerWeightRoundRobin; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * lower weight host manager */ public class LowerWeightHostManager extends CommonHostManager { private final Logger logger = LoggerFactory.getLogger(LowerWeightHostManager.class); /** * selector */ private LowerWeightRoundRobin selector; /** * worker host weights */ private ConcurrentHashMap<String, Set<HostWeight>> workerHostWeightsMap; /** * worker group host lock */ private Lock lock; /** * executor service */ private ScheduledExecutorService executorService; @PostConstruct public void init() { this.selector = new LowerWeightRoundRobin(); this.workerHostWeightsMap = new ConcurrentHashMap<>(); this.lock = new ReentrantLock(); this.executorService = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("LowerWeightHostManagerExecutor")); this.executorService.scheduleWithFixedDelay(new RefreshResourceTask(),0, 5, TimeUnit.SECONDS); } @PreDestroy public void close() { this.executorService.shutdownNow(); } /** * select host * @param context context * @return host */ @Override public Host select(ExecutionContext context) { Set<HostWeight> workerHostWeights = getWorkerHostWeights(context.getWorkerGroup()); if (CollectionUtils.isNotEmpty(workerHostWeights)) { return selector.select(workerHostWeights).getHost(); } return new Host(); } @Override public HostWorker select(Collection<HostWorker> nodes) { throw new UnsupportedOperationException("not support"); } private void syncWorkerHostWeight(Map<String, Set<HostWeight>> workerHostWeights) { lock.lock(); 
try { workerHostWeightsMap.clear(); workerHostWeightsMap.putAll(workerHostWeights); } finally { lock.unlock(); } } private Set<HostWeight> getWorkerHostWeights(String workerGroup) { lock.lock(); try { return workerHostWeightsMap.get(workerGroup); } finally { lock.unlock(); } } class RefreshResourceTask implements Runnable { @Override public void run() { try { Map<String, Set<HostWeight>> workerHostWeights = new HashMap<>(); Map<String, Set<String>> workerGroupNodes = serverNodeManager.getWorkerGroupNodes(); for (Map.Entry<String, Set<String>> entry : workerGroupNodes.entrySet()) { String workerGroup = entry.getKey(); Set<String> nodes = entry.getValue(); Set<HostWeight> hostWeights = new HashSet<>(nodes.size()); for (String node : nodes) { String heartbeat = serverNodeManager.getWorkerNodeInfo(node); HostWeight hostWeight = getHostWeight(node, workerGroup, heartbeat); if (hostWeight != null) { hostWeights.add(hostWeight); } } if (!hostWeights.isEmpty()) { workerHostWeights.put(workerGroup, hostWeights); } } syncWorkerHostWeight(workerHostWeights); } catch (Throwable ex) { logger.error("RefreshResourceTask error", ex); } } public HostWeight getHostWeight(String addr, String workerGroup, String heartbeat) { if (ResInfo.isValidHeartbeatForZKInfo(heartbeat)) { String[] parts = heartbeat.split(Constants.COMMA); int status = Integer.parseInt(parts[8]); if (status == Constants.ABNORMAL_NODE_STATUS) { logger.warn("current cpu load average {} is too high or available memory {}G is too low, under max.cpuload.avg={} and reserved.memory={}G", Double.parseDouble(parts[2]), Double.parseDouble(parts[3]), Double.parseDouble(parts[4]), Double.parseDouble(parts[5])); return null; } double cpu = Double.parseDouble(parts[0]); double memory = Double.parseDouble(parts[1]); double loadAverage = Double.parseDouble(parts[2]); long startTime = DateUtils.stringToDate(parts[6]).getTime(); int weight = getWorkerHostWeightFromHeartbeat(heartbeat); return new HostWeight(HostWorker.of(addr, weight, workerGroup), cpu, memory, loadAverage, startTime); } return null; } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,310
[Improvement][Server] The log "load is too high or availablePhysicalMemorySize(G) is too low" is not clear
**Describe the question** The log `load is too high or availablePhysicalMemorySize(G) is too low` is not clear. **What are the current deficiencies and the benefits of improvement** - The updated log `current cpu load average {} is too high or available memory {}G is too low, under max.cpuload.avg={} and reserved.memory={}G` will be clearer. **Which version of DolphinScheduler:** -[1.3.x] -[dev]
https://github.com/apache/dolphinscheduler/issues/5310
https://github.com/apache/dolphinscheduler/pull/5311
e92e29ef9a126d1d5660011545056d4dd806d105
83519bdfc3a5cdd40f2d538d64ab6f8221bef246
"2021-04-16T16:01:16Z"
java
"2021-04-19T02:29:32Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/HeartBeatTask.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.registry; import static org.apache.dolphinscheduler.remote.utils.Constants.COMMA; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.IStoppable; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import java.util.Date; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Heart beat task */ public class HeartBeatTask implements Runnable { private final Logger logger = LoggerFactory.getLogger(HeartBeatTask.class); private String startTime; private double maxCpuloadAvg; private double reservedMemory; private int hostWeight; // worker host weight private Set<String> heartBeatPaths; private String serverType; private ZookeeperRegistryCenter zookeeperRegistryCenter; // server stop or not protected IStoppable stoppable = null; public HeartBeatTask(String startTime, double maxCpuloadAvg, double reservedMemory, Set<String> heartBeatPaths, String serverType, ZookeeperRegistryCenter zookeeperRegistryCenter) { this.startTime = startTime; this.maxCpuloadAvg = maxCpuloadAvg; this.reservedMemory = reservedMemory; this.heartBeatPaths = heartBeatPaths; this.serverType = serverType; this.zookeeperRegistryCenter = zookeeperRegistryCenter; } public HeartBeatTask(String startTime, double maxCpuloadAvg, double reservedMemory, int hostWeight, Set<String> heartBeatPaths, String serverType, ZookeeperRegistryCenter zookeeperRegistryCenter) { this.startTime = startTime; this.maxCpuloadAvg = maxCpuloadAvg; this.reservedMemory = reservedMemory; this.hostWeight = hostWeight; this.heartBeatPaths = heartBeatPaths; this.serverType = serverType; this.zookeeperRegistryCenter = zookeeperRegistryCenter; } @Override public void run() { try { // check dead or not in zookeeper for (String heartBeatPath : heartBeatPaths) { if (zookeeperRegistryCenter.checkIsDeadServer(heartBeatPath, serverType)) { zookeeperRegistryCenter.getStoppable().stop("this server has been judged dead, release resources and stop myself"); return; } } double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); double loadAverage = OSUtils.loadAverage(); int status = Constants.NORMAL_NODE_STATUS; if (availablePhysicalMemorySize < reservedMemory || loadAverage > maxCpuloadAvg) { logger.warn("current cpu load average {} is too high or available memory {}G is too low, under max.cpuload.avg={} and reserved.memory={}G", loadAverage, availablePhysicalMemorySize, maxCpuloadAvg, reservedMemory); status = Constants.ABNORMAL_NODE_STATUS; } StringBuilder builder = new StringBuilder(100); builder.append(OSUtils.cpuUsage()).append(COMMA); builder.append(OSUtils.memoryUsage()).append(COMMA); builder.append(OSUtils.loadAverage()).append(COMMA);
builder.append(OSUtils.availablePhysicalMemorySize()).append(Constants.COMMA); builder.append(maxCpuloadAvg).append(Constants.COMMA); builder.append(reservedMemory).append(Constants.COMMA); builder.append(startTime).append(Constants.COMMA); builder.append(DateUtils.dateToString(new Date())).append(Constants.COMMA); builder.append(status).append(COMMA); // save process id builder.append(OSUtils.getProcessID()); // worker host weight if (Constants.WORKER_TYPE.equals(serverType)) { builder.append(Constants.COMMA).append(hostWeight); } for (String heartBeatPath : heartBeatPaths) { zookeeperRegistryCenter.getRegisterOperator().update(heartBeatPath, builder.toString()); } } catch (Throwable ex) { logger.error("error writing heartbeat info", ex); } } /** * for stop server * * @param serverStoppable server stoppable interface */ public void setStoppable(IStoppable serverStoppable) { this.stoppable = serverStoppable; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,147
[Improvement][Master] this expression which always evaluates "true"
**Describe the question** An expression always evaluates to "true". **What are the current deficiencies and the benefits of improvement** An expression in the removeTaskLogFile method of org.apache.dolphinscheduler.service.process always evaluates to "true"; removing it simplifies the method. **Which version of DolphinScheduler:** -[1.3.5] **Describe alternatives you've considered** public void removeTaskLogFile(Integer processInstanceId) { LogClientService logClient = null; try { logClient = new LogClientService(); List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId); if (CollectionUtils.isEmpty(taskInstanceList)) { return; } for (TaskInstance taskInstance : taskInstanceList) { String taskLogPath = taskInstance.getLogPath(); if (StringUtils.isEmpty(taskInstance.getHost())) { continue; } int port = Constants.RPC_PORT; String ip = ""; try { ip = Host.of(taskInstance.getHost()).getIp(); } catch (Exception e) { // compatible with old versions ip = taskInstance.getHost(); } // remove task log from the logger server logClient.removeTaskLog(ip, port, taskLogPath); } } finally { logClient.close(); } }
https://github.com/apache/dolphinscheduler/issues/5147
https://github.com/apache/dolphinscheduler/pull/5156
fe144e46a3f23f5b484a3ce74552cc6faad9009c
513eb769196971956f15665640aefd782b9f69f3
"2021-03-25T07:29:28Z"
java
"2021-04-19T09:48:09Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.master.runner; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.queue.TaskPriority; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueueImpl; import java.util.Date; import java.util.concurrent.Callable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * master task exec base class */ public class MasterBaseTaskExecThread implements Callable<Boolean> { /** * logger of MasterBaseTaskExecThread */ protected Logger logger = LoggerFactory.getLogger(getClass()); /** * process service */ protected ProcessService processService; /** * alert database access */ protected AlertDao alertDao; /** * process instance */ protected ProcessInstance processInstance; /** * task instance */ protected TaskInstance taskInstance; /** * whether need cancel */ protected boolean cancel; /** * master config */ protected MasterConfig masterConfig; /** * taskUpdateQueue */ private TaskPriorityQueue taskUpdateQueue; /** * whether need check task time out. 
*/ protected boolean checkTimeoutFlag = false; /** * task timeout parameters */ protected TaskTimeoutParameter taskTimeoutParameter; /** * constructor of MasterBaseTaskExecThread * * @param taskInstance task instance */ public MasterBaseTaskExecThread(TaskInstance taskInstance) { this.processService = SpringApplicationContext.getBean(ProcessService.class); this.alertDao = SpringApplicationContext.getBean(AlertDao.class); this.cancel = false; this.taskInstance = taskInstance; this.masterConfig = SpringApplicationContext.getBean(MasterConfig.class); this.taskUpdateQueue = SpringApplicationContext.getBean(TaskPriorityQueueImpl.class); initTaskParams(); } /** * init task ordinary parameters */ private void initTaskParams() { initTimeoutParams(); } /** * init task timeout parameters */ private void initTimeoutParams() { String taskJson = taskInstance.getTaskJson(); TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class); taskTimeoutParameter = taskNode.getTaskTimeoutParameter(); if (taskTimeoutParameter.getEnable()) { checkTimeoutFlag = true; } } /** * get task instance * * @return TaskInstance */ public TaskInstance getTaskInstance() { return this.taskInstance; } /** * kill master base task exec thread */ public void kill() { this.cancel = true; } /** * submit master base task exec thread * * @return TaskInstance */ protected TaskInstance submit() { Integer commitRetryTimes = masterConfig.getMasterTaskCommitRetryTimes(); Integer commitRetryInterval = masterConfig.getMasterTaskCommitInterval(); int retryTimes = 1; boolean submitDB = false; boolean submitTask = false; TaskInstance task = null; while (retryTimes <= commitRetryTimes) { try { if (!submitDB) { // submit task to db task = processService.submitTask(taskInstance); if (task != null && task.getId() != 0) { submitDB = true; } } if (submitDB && !submitTask) { // dispatch task submitTask = dispatchTask(task); } if (submitDB && submitTask) { return task; } if (!submitDB) { logger.error("task commit to db failed, taskId {} has already retried {} times, please check the database", taskInstance.getId(), retryTimes); } else if (!submitTask) { logger.error("task commit failed, taskId {} has already retried {} times, please check", taskInstance.getId(), retryTimes); } Thread.sleep(commitRetryInterval); } catch (Exception e) { logger.error("task commit to mysql and dispatch task failed", e); } retryTimes += 1; } return task; } /** * dispatch task * * @param taskInstance taskInstance * @return whether submit task success */ public Boolean dispatchTask(TaskInstance taskInstance) { try { if (taskInstance.isConditionsTask() || taskInstance.isDependTask() || taskInstance.isSubProcess()) { return true; } if (taskInstance.getState().typeIsFinished()) { logger.info(String.format("submit task, but task [%s] state [%s] is already finished. ", taskInstance.getName(), taskInstance.getState().toString())); return true; } // task cannot be submitted because its execution state is RUNNING or DELAY.
if (taskInstance.getState() == ExecutionStatus.RUNNING_EXECUTION || taskInstance.getState() == ExecutionStatus.DELAY_EXECUTION) { logger.info("submit task, but the status of the task {} is already running or delayed.", taskInstance.getName()); return true; } logger.info("task ready to submit: {}", taskInstance); /** * taskPriority */ TaskPriority taskPriority = buildTaskPriority(processInstance.getProcessInstancePriority().getCode(), processInstance.getId(), taskInstance.getProcessInstancePriority().getCode(), taskInstance.getId(), org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP); taskUpdateQueue.put(taskPriority); logger.info(String.format("master submit success, task : %s", taskInstance.getName())); return true; } catch (Exception e) { logger.error("submit task Exception: ", e); logger.error("task error : {}", JSONUtils.toJsonString(taskInstance)); return false; } } /** * buildTaskPriority * * @param processInstancePriority processInstancePriority * @param processInstanceId processInstanceId * @param taskInstancePriority taskInstancePriority * @param taskInstanceId taskInstanceId * @param workerGroup workerGroup * @return TaskPriority */ private TaskPriority buildTaskPriority(int processInstancePriority, int processInstanceId, int taskInstancePriority, int taskInstanceId, String workerGroup) { return new TaskPriority(processInstancePriority, processInstanceId, taskInstancePriority, taskInstanceId, workerGroup); } /** * submit wait complete * * @return true */ protected Boolean submitWaitComplete() { return true; } /** * call * * @return boolean * @throws Exception exception */ @Override public Boolean call() throws Exception { this.processInstance = processService.findProcessInstanceById(taskInstance.getProcessInstanceId()); return submitWaitComplete(); } /** * alert on timeout */ protected boolean alertTimeout() { if (TaskTimeoutStrategy.FAILED == this.taskTimeoutParameter.getStrategy()) { return true; } logger.warn("process id:{} process name:{} task id: {},name:{} execution timed out", processInstance.getId(), processInstance.getName(), taskInstance.getId(), taskInstance.getName()); // send warn mail ProcessDefinition processDefine = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(), processInstance.getId(), processInstance.getName(), taskInstance.getId(), taskInstance.getName()); return true; } /** * handle timeout for the timeout strategies FAILED and WARNFAILED */ protected void handleTimeoutFailed() { if (TaskTimeoutStrategy.WARN == this.taskTimeoutParameter.getStrategy()) { return; } logger.info("process id:{} name:{} task id:{} name:{} cancel because of timeout.", processInstance.getId(), processInstance.getName(), taskInstance.getId(), taskInstance.getName()); this.cancel = true; } /** * check task remain time valid */ protected boolean checkTaskTimeout() { if (!checkTimeoutFlag || taskInstance.getStartTime() == null) { return false; } long remainTime = getRemainTime(taskTimeoutParameter.getInterval() * 60L); return remainTime <= 0; } /** * get remain time * * @param timeoutSeconds timeoutSeconds * @return remain time */ protected long getRemainTime(long timeoutSeconds) { Date startTime = taskInstance.getStartTime(); long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000; return timeoutSeconds - usedTime; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,147
[Improvement][Master] this expression which always evaluates "true"
**Describe the question** An expression always evaluates to "true". **What are the current deficiencies and the benefits of improvement** An expression in the removeTaskLogFile method of org.apache.dolphinscheduler.service.process always evaluates to "true"; removing it simplifies the method. **Which version of DolphinScheduler:** -[1.3.5] **Describe alternatives you've considered** public void removeTaskLogFile(Integer processInstanceId) { LogClientService logClient = null; try { logClient = new LogClientService(); List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId); if (CollectionUtils.isEmpty(taskInstanceList)) { return; } for (TaskInstance taskInstance : taskInstanceList) { String taskLogPath = taskInstance.getLogPath(); if (StringUtils.isEmpty(taskInstance.getHost())) { continue; } int port = Constants.RPC_PORT; String ip = ""; try { ip = Host.of(taskInstance.getHost()).getIp(); } catch (Exception e) { // compatible with old versions ip = taskInstance.getHost(); } // remove task log from the logger server logClient.removeTaskLog(ip, port, taskLogPath); } } finally { logClient.close(); } }
https://github.com/apache/dolphinscheduler/issues/5147
https://github.com/apache/dolphinscheduler/pull/5156
fe144e46a3f23f5b484a3ce74552cc6faad9009c
513eb769196971956f15665640aefd782b9f69f3
"2021-03-25T07:29:28Z"
java
"2021-04-19T09:48:09Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.service.log.LogClientService; import java.io.File; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * mainly used to get the start command line of a process. */ public class ProcessUtils { /** * logger */ private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class); /** * Regular expressions are pre-compiled here, to avoid the cost of repeated compilation * and thread safety problems under concurrent use */ private static final Pattern MACPATTERN = Pattern.compile("-[+|-]-\\s(\\d+)"); /** * Expression for PID recognition in pstree output (used by the non-macOS branch) */ private static final Pattern WINDOWSATTERN = Pattern.compile("\\w+\\((\\d+)\\)"); private static final String LOCAL_PROCESS_EXEC = "jdk.lang.Process.allowAmbiguousCommands"; /** * build command line characters.
* * @param commandList command list * @return command */ public static String buildCommandStr(List<String> commandList) { String cmdstr; String[] cmd = commandList.toArray(new String[0]); SecurityManager security = System.getSecurityManager(); boolean allowAmbiguousCommands = isAllowAmbiguousCommands(security); if (allowAmbiguousCommands) { String executablePath = new File(cmd[0]).getPath(); if (needsEscaping(VERIFICATION_LEGACY, executablePath)) { executablePath = quoteString(executablePath); } cmdstr = createCommandLine( VERIFICATION_LEGACY, executablePath, cmd); } else { String executablePath; try { executablePath = getExecutablePath(cmd[0]); } catch (IllegalArgumentException e) { StringBuilder join = new StringBuilder(); for (String s : cmd) { join.append(s).append(' '); } cmd = getTokensFromCommand(join.toString()); executablePath = getExecutablePath(cmd[0]); // Check new executable name once more if (security != null) { security.checkExec(executablePath); } } cmdstr = createCommandLine( isShellFile(executablePath) ? VERIFICATION_CMD_BAT : VERIFICATION_WIN32, quoteString(executablePath), cmd); } return cmdstr; } /** * check is allow ambiguous commands * * @param security security manager * @return allow ambiguous command flag */ private static boolean isAllowAmbiguousCommands(SecurityManager security) { boolean allowAmbiguousCommands = false; if (security == null) { allowAmbiguousCommands = true; String value = System.getProperty(LOCAL_PROCESS_EXEC); if (value != null) { allowAmbiguousCommands = !Constants.STRING_FALSE.equalsIgnoreCase(value); } } return allowAmbiguousCommands; } /** * get executable path. * * @param path path * @return executable path */ private static String getExecutablePath(String path) { boolean pathIsQuoted = isQuoted(true, path, "Executable name has embedded quote, split the arguments"); File fileToRun = new File(pathIsQuoted ? path.substring(1, path.length() - 1) : path); return fileToRun.getPath(); } /** * whether is shell file. * * @param executablePath executable path * @return true if endsWith .CMD or .BAT */ private static boolean isShellFile(String executablePath) { String upPath = executablePath.toUpperCase(); return (upPath.endsWith(".CMD") || upPath.endsWith(".BAT")); } /** * quote string. * * @param arg argument * @return format arg */ private static String quoteString(String arg) { return '"' + arg + '"'; } /** * get tokens from command. * * @param command command * @return token string array */ private static String[] getTokensFromCommand(String command) { ArrayList<String> matchList = new ArrayList<>(8); Matcher regexMatcher = LazyPattern.PATTERN.matcher(command); while (regexMatcher.find()) { matchList.add(regexMatcher.group()); } return matchList.toArray(new String[0]); } /** * Lazy Pattern. */ private static class LazyPattern { /** * Escape-support version: * "(\")((?:\\\\\\1|.)+?)\\1|([^\\s\"]+)"; */ private static final Pattern PATTERN = Pattern.compile("[^\\s\"]+|\"[^\"]*\""); } /** * verification cmd bat. */ private static final int VERIFICATION_CMD_BAT = 0; /** * verification win32. */ private static final int VERIFICATION_WIN32 = 1; /** * verification legacy. */ private static final int VERIFICATION_LEGACY = 2; /** * escape verification. */ private static final char[][] ESCAPE_VERIFICATION = {{' ', '\t', '<', '>', '&', '|', '^'}, {' ', '\t', '<', '>'}, {' ', '\t'}}; /** * create command line. 
* * @param verificationType verification type * @param executablePath executable path * @param cmd cmd * @return command line */ private static String createCommandLine(int verificationType, final String executablePath, final String[] cmd) { StringBuilder cmdbuf = new StringBuilder(80); cmdbuf.append(executablePath); for (int i = 1; i < cmd.length; ++i) { cmdbuf.append(' '); String s = cmd[i]; if (needsEscaping(verificationType, s)) { cmdbuf.append('"').append(s); if ((verificationType != VERIFICATION_CMD_BAT) && s.endsWith("\\")) { cmdbuf.append('\\'); } cmdbuf.append('"'); } else { cmdbuf.append(s); } } return cmdbuf.toString(); } /** * whether is quoted. * * @param noQuotesInside no quotes inside * @param arg arg * @param errorMessage error message * @return boolean */ private static boolean isQuoted(boolean noQuotesInside, String arg, String errorMessage) { int lastPos = arg.length() - 1; if (lastPos >= 1 && arg.charAt(0) == '"' && arg.charAt(lastPos) == '"') { // The argument has already been quoted. if (noQuotesInside && arg.indexOf('"', 1) != lastPos) { // There is ["] inside. throw new IllegalArgumentException(errorMessage); } return true; } if (noQuotesInside && arg.indexOf('"') >= 0) { // There is ["] inside. throw new IllegalArgumentException(errorMessage); } return false; } /** * whether needs escaping. * * @param verificationType verification type * @param arg arg * @return boolean */ private static boolean needsEscaping(int verificationType, String arg) { boolean argIsQuoted = isQuoted((verificationType == VERIFICATION_CMD_BAT), arg, "Argument has embedded quote, use the explicit CMD.EXE call."); if (!argIsQuoted) { char[] testEscape = ESCAPE_VERIFICATION[verificationType]; for (char c : testEscape) { if (arg.indexOf(c) >= 0) { return true; } } } return false; } /** * kill yarn application. 
* * @param appIds app id list * @param logger logger * @param tenantCode tenant code * @param executePath execute path */ public static void cancelApplication(List<String> appIds, Logger logger, String tenantCode, String executePath) { if (CollectionUtils.isNotEmpty(appIds)) { for (String appId : appIds) { try { ExecutionStatus applicationStatus = HadoopUtils.getInstance().getApplicationStatus(appId); if (!applicationStatus.typeIsFinished()) { String commandFile = String .format("%s/%s.kill", executePath, appId); String cmd = getKerberosInitCommand() + "yarn application -kill " + appId; execYarnKillCommand(logger, tenantCode, appId, commandFile, cmd); } } catch (Exception e) { logger.error(String.format("Get yarn application app id [%s] status failed: [%s]", appId, e.getMessage())); } } } } /** * get kerberos init command */ public static String getKerberosInitCommand() { logger.info("get kerberos init command"); StringBuilder kerberosCommandBuilder = new StringBuilder(); boolean hadoopKerberosState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE,false); if (hadoopKerberosState) { kerberosCommandBuilder.append("export KRB5_CONFIG=") .append(PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)) .append("\n\n") .append(String.format("kinit -k -t %s %s || true",PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH),PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME))) .append("\n\n"); logger.info("kerberos init command: {}", kerberosCommandBuilder); } return kerberosCommandBuilder.toString(); } /** * build kill command for yarn application * * @param logger logger * @param tenantCode tenant code * @param appId app id * @param commandFile command file * @param cmd cmd */ private static void execYarnKillCommand(Logger logger, String tenantCode, String appId, String commandFile, String cmd) { try { StringBuilder sb = new StringBuilder(); sb.append("#!/bin/sh\n"); sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); sb.append("cd $BASEDIR\n"); if (CommonUtils.getSystemEnvPath() != null) { sb.append("source ").append(CommonUtils.getSystemEnvPath()).append("\n"); } sb.append("\n\n"); sb.append(cmd); File f = new File(commandFile); if (!f.exists()) { FileUtils.writeStringToFile(new File(commandFile), sb.toString(), StandardCharsets.UTF_8); } String runCmd = String.format("%s %s", Constants.SH, commandFile); runCmd = OSUtils.getSudoCmd(tenantCode, runCmd); logger.info("kill cmd:{}", runCmd); OSUtils.exeCmd(runCmd); } catch (Exception e) { logger.error(String.format("Kill yarn application app id [%s] failed: [%s]", appId, e.getMessage())); } } /** * kill tasks according to different task types. * * @param taskExecutionContext taskExecutionContext */ public static void kill(TaskExecutionContext taskExecutionContext) { try { int processId = taskExecutionContext.getProcessId(); if (processId == 0) { logger.error("process kill failed, process id :{}, task id:{}", processId, taskExecutionContext.getTaskInstanceId()); return; } String pidsStr = getPidsStr(processId); if (StringUtils.isNotEmpty(pidsStr)) { String cmd = String.format("kill -9 %s", pidsStr); cmd = OSUtils.getSudoCmd(taskExecutionContext.getTenantCode(), cmd); logger.info("process id:{}, cmd:{}", processId, cmd); OSUtils.exeCmd(cmd); } } catch (Exception e) { logger.error("kill task failed", e); } // find log and kill yarn job killYarnJob(taskExecutionContext); } /** * get pids str. 
* * @param processId process id * @return pids pid String * @throws Exception exception */ public static String getPidsStr(int processId) throws Exception { List<String> pidList = new ArrayList<>(); Matcher mat = null; // pstree pid get sub pids if (OSUtils.isMacOS()) { String pids = OSUtils.exeCmd(String.format("%s -sp %d", Constants.PSTREE, processId)); if (null != pids) { mat = MACPATTERN.matcher(pids); } } else { String pids = OSUtils.exeCmd(String.format("%s -p %d", Constants.PSTREE, processId)); mat = WINDOWSATTERN.matcher(pids); } if (null != mat) { while (mat.find()) { pidList.add(mat.group(1)); } } if (CommonUtils.isSudoEnable() && !pidList.isEmpty()) { pidList = pidList.subList(1, pidList.size()); } return String.join(" ", pidList).trim(); } /** * find logs and kill yarn tasks. * @param taskExecutionContext taskExecutionContext * @return yarn application ids */ public static List<String> killYarnJob(TaskExecutionContext taskExecutionContext) { try { Thread.sleep(Constants.SLEEP_TIME_MILLIS); LogClientService logClient = null; String log; try { logClient = new LogClientService(); log = logClient.viewLog(Host.of(taskExecutionContext.getHost()).getIp(), Constants.RPC_PORT, taskExecutionContext.getLogPath()); } finally { if (logClient != null) { logClient.close(); } } if (StringUtils.isNotEmpty(log)) { List<String> appIds = LoggerUtils.getAppIds(log, logger); String workerDir = taskExecutionContext.getExecutePath(); if (StringUtils.isEmpty(workerDir)) { logger.error("task instance work dir is empty"); throw new RuntimeException("task instance work dir is empty"); } if (CollectionUtils.isNotEmpty(appIds)) { cancelApplication(appIds, logger, taskExecutionContext.getTenantCode(), taskExecutionContext.getExecutePath()); return appIds; } } } catch (Exception e) { logger.error("kill yarn job failure", e); } return Collections.emptyList(); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,147
[Improvement][Master] this expression which always evaluates "true"
**Describe the question** An expression always evaluates to "true". **What are the current deficiencies and the benefits of improvement** An expression in the removeTaskLogFile method of org.apache.dolphinscheduler.service.process always evaluates to "true"; removing it simplifies the method. **Which version of DolphinScheduler:** -[1.3.5] **Describe alternatives you've considered** public void removeTaskLogFile(Integer processInstanceId) { LogClientService logClient = null; try { logClient = new LogClientService(); List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId); if (CollectionUtils.isEmpty(taskInstanceList)) { return; } for (TaskInstance taskInstance : taskInstanceList) { String taskLogPath = taskInstance.getLogPath(); if (StringUtils.isEmpty(taskInstance.getHost())) { continue; } int port = Constants.RPC_PORT; String ip = ""; try { ip = Host.of(taskInstance.getHost()).getIp(); } catch (Exception e) { // compatible with old versions ip = taskInstance.getHost(); } // remove task log from the logger server logClient.removeTaskLog(ip, port, taskLogPath); } } finally { logClient.close(); } }
https://github.com/apache/dolphinscheduler/issues/5147
https://github.com/apache/dolphinscheduler/pull/5156
fe144e46a3f23f5b484a3ce74552cc6faad9009c
513eb769196971956f15665640aefd782b9f69f3
"2021-03-25T07:29:28Z"
java
"2021-04-19T09:48:09Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.processor; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.Preconditions; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskKillRequestCommand; import org.apache.dolphinscheduler.remote.command.TaskKillResponseCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.Pair; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import org.apache.dolphinscheduler.server.worker.runner.WorkerManagerThread; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.log.LogClientService; import java.util.Collections; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.netty.channel.Channel; /** * task kill processor */ public class TaskKillProcessor implements NettyRequestProcessor { private final Logger logger = LoggerFactory.getLogger(TaskKillProcessor.class); /** * worker config */ private final WorkerConfig workerConfig; /** * task callback service */ private final TaskCallbackService taskCallbackService; /** * taskExecutionContextCacheManager */ private TaskExecutionContextCacheManager taskExecutionContextCacheManager; /** * task execute manager */ private final WorkerManagerThread workerManager; public TaskKillProcessor() { this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class); this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class); this.taskExecutionContextCacheManager = SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class); this.workerManager = SpringApplicationContext.getBean(WorkerManagerThread.class); } /** * task kill process * * @param channel channel * @param command command */ @Override public void process(Channel channel, Command command) {
Preconditions.checkArgument(CommandType.TASK_KILL_REQUEST == command.getType(), String.format("invalid command type : %s", command.getType())); TaskKillRequestCommand killCommand = JSONUtils.parseObject(command.getBody(), TaskKillRequestCommand.class); logger.info("received kill command : {}", killCommand); Pair<Boolean, List<String>> result = doKill(killCommand); taskCallbackService.addRemoteChannel(killCommand.getTaskInstanceId(), new NettyRemoteChannel(channel, command.getOpaque())); TaskKillResponseCommand taskKillResponseCommand = buildKillTaskResponseCommand(killCommand, result); taskCallbackService.sendResult(taskKillResponseCommand.getTaskInstanceId(), taskKillResponseCommand.convert2Command()); taskExecutionContextCacheManager.removeByTaskInstanceId(taskKillResponseCommand.getTaskInstanceId()); } /** * do kill * * @param killCommand * @return kill result */ private Pair<Boolean, List<String>> doKill(TaskKillRequestCommand killCommand) { boolean processFlag = true; List<String> appIds = Collections.emptyList(); int taskInstanceId = killCommand.getTaskInstanceId(); TaskExecutionContext taskExecutionContext = taskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId); try { Integer processId = taskExecutionContext.getProcessId(); if (processId.equals(0)) { workerManager.killTaskBeforeExecuteByInstanceId(taskInstanceId); taskExecutionContextCacheManager.removeByTaskInstanceId(taskInstanceId); logger.info("the task has not been executed and has been cancelled, task id:{}", taskInstanceId); return Pair.of(true, appIds); } String pidsStr = ProcessUtils.getPidsStr(taskExecutionContext.getProcessId()); if (StringUtils.isNotEmpty(pidsStr)) { String cmd = String.format("kill -9 %s", pidsStr); cmd = OSUtils.getSudoCmd(taskExecutionContext.getTenantCode(), cmd); logger.info("process id:{}, cmd:{}", taskExecutionContext.getProcessId(), cmd); OSUtils.exeCmd(cmd); } } catch (Exception e) { processFlag = false; logger.error("kill task error", e); } // find log and kill yarn job Pair<Boolean, List<String>> yarnResult = killYarnJob(Host.of(taskExecutionContext.getHost()).getIp(), taskExecutionContext.getLogPath(), taskExecutionContext.getExecutePath(), taskExecutionContext.getTenantCode()); return Pair.of(processFlag && yarnResult.getLeft(), yarnResult.getRight()); } /** * build TaskKillResponseCommand * * @param killCommand kill command * @param result exe result * @return build TaskKillResponseCommand */ private TaskKillResponseCommand buildKillTaskResponseCommand(TaskKillRequestCommand killCommand, Pair<Boolean, List<String>> result) { TaskKillResponseCommand taskKillResponseCommand = new TaskKillResponseCommand(); taskKillResponseCommand.setStatus(result.getLeft() ? 
ExecutionStatus.SUCCESS.getCode() : ExecutionStatus.FAILURE.getCode()); taskKillResponseCommand.setAppIds(result.getRight()); TaskExecutionContext taskExecutionContext = taskExecutionContextCacheManager.getByTaskInstanceId(killCommand.getTaskInstanceId()); if (taskExecutionContext != null) { taskKillResponseCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); taskKillResponseCommand.setHost(taskExecutionContext.getHost()); taskKillResponseCommand.setProcessId(taskExecutionContext.getProcessId()); } return taskKillResponseCommand; } /** * kill yarn job * * @param host host * @param logPath logPath * @param executePath executePath * @param tenantCode tenantCode * @return Pair<Boolean, List<String>> yarn kill result */ private Pair<Boolean, List<String>> killYarnJob(String host, String logPath, String executePath, String tenantCode) { LogClientService logClient = null; try { logClient = new LogClientService(); logger.info("view log host : {},logPath : {}", host, logPath); String log = logClient.viewLog(host, Constants.RPC_PORT, logPath); List<String> appIds = Collections.emptyList(); if (StringUtils.isNotEmpty(log)) { appIds = LoggerUtils.getAppIds(log, logger); if (StringUtils.isEmpty(executePath)) { logger.error("task instance execute path is empty"); throw new RuntimeException("task instance execute path is empty"); } if (appIds.size() > 0) { ProcessUtils.cancelApplication(appIds, logger, tenantCode, executePath); } } return Pair.of(true, appIds); } catch (Exception e) { logger.error("kill yarn job error", e); } finally { if (logClient != null) { logClient.close(); } } return Pair.of(false, Collections.emptyList()); } }
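A minimal sketch of how the kill path in `doKill` above assembles the process-tree kill command. The pid list and tenant code are illustrative, and `getSudoCmd` here is a stand-in that mirrors the `sudo -u <tenant> <cmd>` prefix produced by `OSUtils.getSudoCmd`; this is not DolphinScheduler code itself:

public class KillCommandSketch {

    // Stand-in mirroring OSUtils.getSudoCmd: run the command as the tenant user.
    static String getSudoCmd(String tenantCode, String cmd) {
        return "sudo -u " + tenantCode + " " + cmd;
    }

    public static void main(String[] args) {
        // In doKill, ProcessUtils.getPidsStr returns the task's whole process tree.
        String pidsStr = "1234 1235 1236"; // illustrative pids
        String tenantCode = "test123";     // illustrative tenant

        String cmd = String.format("kill -9 %s", pidsStr);
        cmd = getSudoCmd(tenantCode, cmd);
        System.out.println(cmd); // prints: sudo -u test123 kill -9 1234 1235 1236
    }
}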
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,147
[Improvement][Master] this expression which always evaluates "true"
*For better global communication, please give priority to using English description, thx!*
*Please review https://dolphinscheduler.apache.org/en-us/community/development/issue.html when describing an issue.*

**Describe the question**
There is an expression in the `removeTaskLogFile` method that always evaluates to "true".

**What are the current deficiencies and the benefits of improvement**
The `removeTaskLogFile` method of `org.apache.dolphinscheduler.service.process.ProcessService` contains an expression that always evaluates to "true"; removing it would simplify the method.

**Which version of DolphinScheduler:**
-[1.3.5]

**Describe alternatives you've considered**

public void removeTaskLogFile(Integer processInstanceId) {
    LogClientService logClient = null;
    try {
        logClient = new LogClientService();
        List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId);
        if (CollectionUtils.isEmpty(taskInstanceList)) {
            return;
        }
        for (TaskInstance taskInstance : taskInstanceList) {
            String taskLogPath = taskInstance.getLogPath();
            if (StringUtils.isEmpty(taskInstance.getHost())) {
                continue;
            }
            int port = Constants.RPC_PORT;
            String ip = "";
            try {
                ip = Host.of(taskInstance.getHost()).getIp();
            } catch (Exception e) {
                // compatible old version
                ip = taskInstance.getHost();
            }
            // remove task log from loggerserver
            logClient.removeTaskLog(ip, port, taskLogPath);
        }
    } finally {
        logClient.close();
    }
}
https://github.com/apache/dolphinscheduler/issues/5147
https://github.com/apache/dolphinscheduler/pull/5156
fe144e46a3f23f5b484a3ce74552cc6faad9009c
513eb769196971956f15665640aefd782b9f69f3
"2021-03-25T07:29:28Z"
java
"2021-04-19T09:48:09Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.service.log;

import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.remote.NettyRemotingClient;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.log.GetLogBytesRequestCommand;
import org.apache.dolphinscheduler.remote.command.log.GetLogBytesResponseCommand;
import org.apache.dolphinscheduler.remote.command.log.RemoveTaskLogRequestCommand;
import org.apache.dolphinscheduler.remote.command.log.RemoveTaskLogResponseCommand;
import org.apache.dolphinscheduler.remote.command.log.RollViewLogRequestCommand;
import org.apache.dolphinscheduler.remote.command.log.RollViewLogResponseCommand;
import org.apache.dolphinscheduler.remote.command.log.ViewLogRequestCommand;
import org.apache.dolphinscheduler.remote.command.log.ViewLogResponseCommand;
import org.apache.dolphinscheduler.remote.config.NettyClientConfig;
import org.apache.dolphinscheduler.remote.utils.Host;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * log client
 */
public class LogClientService {

    private static final Logger logger = LoggerFactory.getLogger(LogClientService.class);

    private final NettyClientConfig clientConfig;

    private final NettyRemotingClient client;

    private volatile boolean isRunning;

    /**
     * request time out
     */
    private static final long LOG_REQUEST_TIMEOUT = 10 * 1000L;

    /**
     * construct client
     */
    public LogClientService() {
        this.clientConfig = new NettyClientConfig();
        this.clientConfig.setWorkerThreads(4);
        this.client = new NettyRemotingClient(clientConfig);
        this.isRunning = true;
    }

    /**
     * close
     */
    public void close() {
        this.client.close();
        this.isRunning = false;
        logger.info("logger client closed");
    }

    /**
     * roll view log
     *
     * @param host host
     * @param port port
     * @param path path
     * @param skipLineNum skip line number
     * @param limit limit
     * @return log content
     */
    public String rollViewLog(String host, int port, String path, int skipLineNum, int limit) {
        logger.info("roll view log, host : {}, port : {}, path {}, skipLineNum {} ,limit {}", host, port, path, skipLineNum, limit);
        RollViewLogRequestCommand request = new RollViewLogRequestCommand(path, skipLineNum, limit);
        String result = "";
        final Host address = new Host(host, port);
        try {
            Command command = request.convert2Command();
            Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT);
            if (response != null) {
                RollViewLogResponseCommand rollReviewLog = JSONUtils.parseObject(
                        response.getBody(), RollViewLogResponseCommand.class);
                return rollReviewLog.getMsg();
            }
        } catch (Exception e) {
            logger.error("roll view log error", e);
        } finally {
            this.client.closeChannel(address);
        }
        return result;
    }

    /**
     * view log
     *
     * @param host host
     * @param port port
     * @param path path
     * @return log content
     */
    public String viewLog(String host, int port, String path) {
        logger.info("view log path {}", path);
        ViewLogRequestCommand request = new ViewLogRequestCommand(path);
        String result = "";
        final Host address = new Host(host, port);
        try {
            if (NetUtils.getHost().equals(host)) {
                result = LoggerUtils.readWholeFileContent(request.getPath());
            } else {
                Command command = request.convert2Command();
                Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT);
                if (response != null) {
                    ViewLogResponseCommand viewLog = JSONUtils.parseObject(
                            response.getBody(), ViewLogResponseCommand.class);
                    result = viewLog.getMsg();
                }
            }
        } catch (Exception e) {
            logger.error("view log error", e);
        } finally {
            this.client.closeChannel(address);
        }
        return result;
    }

    /**
     * get log size
     *
     * @param host host
     * @param port port
     * @param path log path
     * @return log content bytes
     */
    public byte[] getLogBytes(String host, int port, String path) {
        logger.info("log path {}", path);
        GetLogBytesRequestCommand request = new GetLogBytesRequestCommand(path);
        byte[] result = null;
        final Host address = new Host(host, port);
        try {
            Command command = request.convert2Command();
            Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT);
            if (response != null) {
                GetLogBytesResponseCommand getLog = JSONUtils.parseObject(
                        response.getBody(), GetLogBytesResponseCommand.class);
                return getLog.getData();
            }
        } catch (Exception e) {
            logger.error("get log size error", e);
        } finally {
            this.client.closeChannel(address);
        }
        return result;
    }

    /**
     * remove task log
     *
     * @param host host
     * @param port port
     * @param path path
     * @return remove task status
     */
    public Boolean removeTaskLog(String host, int port, String path) {
        logger.info("log path {}", path);
        RemoveTaskLogRequestCommand request = new RemoveTaskLogRequestCommand(path);
        Boolean result = false;
        final Host address = new Host(host, port);
        try {
            Command command = request.convert2Command();
            Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT);
            if (response != null) {
                RemoveTaskLogResponseCommand taskLogResponse = JSONUtils.parseObject(
                        response.getBody(), RemoveTaskLogResponseCommand.class);
                return taskLogResponse.getStatus();
            }
        } catch (Exception e) {
            logger.error("remove task log error", e);
        } finally {
            this.client.closeChannel(address);
        }
        return result;
    }

    public boolean isRunning() {
        return isRunning;
    }
}
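A minimal usage sketch tying the client above to the issue body in this record: when the client is constructed before the try block, a `logClient != null` guard in finally always evaluates to "true"; constructing it inside the try block, as below, makes the guard meaningful because the constructor may throw before the assignment completes. The host, port, and path values are illustrative only, and this is a sketch rather than DolphinScheduler code:

public class LogClientGuardSketch {
    public static void main(String[] args) {
        LogClientService logClient = null;
        try {
            // Construction inside try: if this throws, logClient stays null.
            logClient = new LogClientService();
            // Illustrative arguments; DolphinScheduler normally passes
            // Constants.RPC_PORT and the task instance's log path.
            String log = logClient.viewLog("192.168.0.10", 50051, "/tmp/ds/task.log");
            System.out.println(log.length());
        } finally {
            if (logClient != null) { // meaningful here, not "always true"
                logClient.close();
            }
        }
    }
}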
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,147
[Improvement][Master] this expression which always evaluates "true"
*For better global communication, please give priority to using English description, thx!*
*Please review https://dolphinscheduler.apache.org/en-us/community/development/issue.html when describing an issue.*

**Describe the question**
There is an expression in the `removeTaskLogFile` method that always evaluates to "true".

**What are the current deficiencies and the benefits of improvement**
The `removeTaskLogFile` method of `org.apache.dolphinscheduler.service.process.ProcessService` contains an expression that always evaluates to "true"; removing it would simplify the method.

**Which version of DolphinScheduler:**
-[1.3.5]

**Describe alternatives you've considered**

public void removeTaskLogFile(Integer processInstanceId) {
    LogClientService logClient = null;
    try {
        logClient = new LogClientService();
        List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId);
        if (CollectionUtils.isEmpty(taskInstanceList)) {
            return;
        }
        for (TaskInstance taskInstance : taskInstanceList) {
            String taskLogPath = taskInstance.getLogPath();
            if (StringUtils.isEmpty(taskInstance.getHost())) {
                continue;
            }
            int port = Constants.RPC_PORT;
            String ip = "";
            try {
                ip = Host.of(taskInstance.getHost()).getIp();
            } catch (Exception e) {
                // compatible old version
                ip = taskInstance.getHost();
            }
            // remove task log from loggerserver
            logClient.removeTaskLog(ip, port, taskLogPath);
        }
    } finally {
        logClient.close();
    }
}
https://github.com/apache/dolphinscheduler/issues/5147
https://github.com/apache/dolphinscheduler/pull/5156
fe144e46a3f23f5b484a3ce74552cc6faad9009c
513eb769196971956f15665640aefd782b9f69f3
"2021-03-25T07:29:28Z"
java
"2021-04-19T09:48:09Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.service.process; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_EMPTY_SUB_PROCESS; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_FATHER_PARAMS; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID; import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; import static org.apache.dolphinscheduler.common.Constants.YYYY_MM_DD_HH_MM_SS; import static java.util.stream.Collectors.toSet; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.CycleEnum; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.model.DateInterval; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.CycleDependency; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ErrorCommand; import org.apache.dolphinscheduler.dao.entity.ProcessData; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import 
org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.CommandMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.service.log.LogClientService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Date; import java.util.EnumMap; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import org.quartz.CronExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; import com.cronutils.model.Cron; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; /** * process relative dao that some mappers in this. 
*/ @Component public class ProcessService { private final Logger logger = LoggerFactory.getLogger(getClass()); private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), ExecutionStatus.RUNNING_EXECUTION.ordinal(), ExecutionStatus.DELAY_EXECUTION.ordinal(), ExecutionStatus.READY_PAUSE.ordinal(), ExecutionStatus.READY_STOP.ordinal()}; @Autowired private UserMapper userMapper; @Autowired private ProcessDefinitionMapper processDefineMapper; @Autowired private ProcessInstanceMapper processInstanceMapper; @Autowired private DataSourceMapper dataSourceMapper; @Autowired private ProcessInstanceMapMapper processInstanceMapMapper; @Autowired private TaskInstanceMapper taskInstanceMapper; @Autowired private CommandMapper commandMapper; @Autowired private ScheduleMapper scheduleMapper; @Autowired private UdfFuncMapper udfFuncMapper; @Autowired private ResourceMapper resourceMapper; @Autowired private ResourceUserMapper resourceUserMapper; @Autowired private ErrorCommandMapper errorCommandMapper; @Autowired private TenantMapper tenantMapper; @Autowired private ProjectMapper projectMapper; /** * handle Command (construct ProcessInstance from Command) , wrapped in transaction * * @param logger logger * @param host host * @param validThreadNum validThreadNum * @param command found command * @return process instance */ @Transactional(rollbackFor = Exception.class) public ProcessInstance handleCommand(Logger logger, String host, int validThreadNum, Command command) { ProcessInstance processInstance = constructProcessInstance(command, host); // cannot construct process instance, return null if (processInstance == null) { logger.error("scan command, command parameter is error: {}", command); moveToErrorCommand(command, "process instance is null"); return null; } if (!checkThreadNum(command, validThreadNum)) { logger.info("there is not enough thread for this command: {}", command); return setWaitingThreadProcess(command, processInstance); } processInstance.setCommandType(command.getCommandType()); processInstance.addHistoryCmd(command.getCommandType()); saveProcessInstance(processInstance); this.setSubProcessParam(processInstance); delCommandById(command.getId()); return processInstance; } /** * save error command, and delete original command * * @param command command * @param message message */ @Transactional(rollbackFor = Exception.class) public void moveToErrorCommand(Command command, String message) { ErrorCommand errorCommand = new ErrorCommand(command, message); this.errorCommandMapper.insert(errorCommand); delCommandById(command.getId()); } /** * set process waiting thread * * @param command command * @param processInstance processInstance * @return process instance */ private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) { processInstance.setState(ExecutionStatus.WAITTING_THREAD); if (command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD) { processInstance.addHistoryCmd(command.getCommandType()); } saveProcessInstance(processInstance); this.setSubProcessParam(processInstance); createRecoveryWaitingThreadCommand(command, processInstance); return null; } /** * check thread num * * @param command command * @param validThreadNum validThreadNum * @return if thread is enough */ private boolean checkThreadNum(Command command, int validThreadNum) { int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId()); return validThreadNum >= commandThreadCount; } /** * insert one command * * @param command 
command * @return create result */ public int createCommand(Command command) { int result = 0; if (command != null) { result = commandMapper.insert(command); } return result; } /** * find one command from queue list * * @return command */ public Command findOneCommand() { return commandMapper.getOneToRun(); } /** * check the input command exists in queue list * * @param command command * @return create command result */ public boolean verifyIsNeedCreateCommand(Command command) { boolean isNeedCreate = true; EnumMap<CommandType, Integer> cmdTypeMap = new EnumMap<>(CommandType.class); cmdTypeMap.put(CommandType.REPEAT_RUNNING, 1); cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, 1); cmdTypeMap.put(CommandType.START_FAILURE_TASK_PROCESS, 1); CommandType commandType = command.getCommandType(); if (cmdTypeMap.containsKey(commandType)) { ObjectNode cmdParamObj = JSONUtils.parseObject(command.getCommandParam()); int processInstanceId = cmdParamObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt(); List<Command> commands = commandMapper.selectList(null); // for all commands for (Command tmpCommand : commands) { if (cmdTypeMap.containsKey(tmpCommand.getCommandType())) { ObjectNode tempObj = JSONUtils.parseObject(tmpCommand.getCommandParam()); if (tempObj != null && processInstanceId == tempObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt()) { isNeedCreate = false; break; } } } } return isNeedCreate; } /** * find process instance detail by id * * @param processId processId * @return process instance */ public ProcessInstance findProcessInstanceDetailById(int processId) { return processInstanceMapper.queryDetailById(processId); } /** * get task node list by definitionId */ public List<TaskNode> getTaskNodeListByDefinitionId(Integer defineId) { ProcessDefinition processDefinition = processDefineMapper.selectById(defineId); if (processDefinition == null) { logger.info("process define not exists"); return null; } String processDefinitionJson = processDefinition.getProcessDefinitionJson(); ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); //process data check if (null == processData) { logger.error("process data is null"); return new ArrayList<>(); } return processData.getTasks(); } /** * find process instance by id * * @param processId processId * @return process instance */ public ProcessInstance findProcessInstanceById(int processId) { return processInstanceMapper.selectById(processId); } /** * find process define by id. 
* * @param processDefinitionId processDefinitionId * @return process definition */ public ProcessDefinition findProcessDefineById(int processDefinitionId) { return processDefineMapper.selectById(processDefinitionId); } /** * delete work process instance by id * * @param processInstanceId processInstanceId * @return delete process instance result */ public int deleteWorkProcessInstanceById(int processInstanceId) { return processInstanceMapper.deleteById(processInstanceId); } /** * delete all sub process by parent instance id * * @param processInstanceId processInstanceId * @return delete all sub process instance result */ public int deleteAllSubWorkProcessByParentId(int processInstanceId) { List<Integer> subProcessIdList = processInstanceMapMapper.querySubIdListByParentId(processInstanceId); for (Integer subId : subProcessIdList) { deleteAllSubWorkProcessByParentId(subId); deleteWorkProcessMapByParentId(subId); removeTaskLogFile(subId); deleteWorkProcessInstanceById(subId); } return 1; } /** * remove task log file * * @param processInstanceId processInstanceId */ public void removeTaskLogFile(Integer processInstanceId) { LogClientService logClient = null; try { logClient = new LogClientService(); List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId); if (CollectionUtils.isEmpty(taskInstanceList)) { return; } for (TaskInstance taskInstance : taskInstanceList) { String taskLogPath = taskInstance.getLogPath(); if (StringUtils.isEmpty(taskInstance.getHost())) { continue; } int port = Constants.RPC_PORT; String ip = ""; try { ip = Host.of(taskInstance.getHost()).getIp(); } catch (Exception e) { // compatible old version ip = taskInstance.getHost(); } // remove task log from loggerserver logClient.removeTaskLog(ip, port, taskLogPath); } } finally { if (logClient != null) { logClient.close(); } } } /** * calculate sub process number in the process define. * * @param processDefinitionId processDefinitionId * @return process thread num count */ private Integer workProcessThreadNumCount(Integer processDefinitionId) { List<Integer> ids = new ArrayList<>(); recurseFindSubProcessId(processDefinitionId, ids); return ids.size() + 1; } /** * recursive query sub process definition id by parent id. * * @param parentId parentId * @param ids ids */ public void recurseFindSubProcessId(int parentId, List<Integer> ids) { ProcessDefinition processDefinition = processDefineMapper.selectById(parentId); String processDefinitionJson = processDefinition.getProcessDefinitionJson(); ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); List<TaskNode> taskNodeList = processData.getTasks(); if (taskNodeList != null && !taskNodeList.isEmpty()) { for (TaskNode taskNode : taskNodeList) { String parameter = taskNode.getParams(); ObjectNode parameterJson = JSONUtils.parseObject(parameter); if (parameterJson.get(CMD_PARAM_SUB_PROCESS_DEFINE_ID) != null) { SubProcessParameters subProcessParam = JSONUtils.parseObject(parameter, SubProcessParameters.class); ids.add(subProcessParam.getProcessDefinitionId()); recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(), ids); } } } } /** * create recovery waiting thread command when thread pool is not enough for the process instance. * sub work process instance need not to create recovery command. * create recovery waiting thread command and delete origin command at the same time. 
* if the recovery command is exists, only update the field update_time * * @param originCommand originCommand * @param processInstance processInstance */ public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) { // sub process doesnot need to create wait command if (processInstance.getIsSubProcess() == Flag.YES) { if (originCommand != null) { commandMapper.deleteById(originCommand.getId()); } return; } Map<String, String> cmdParam = new HashMap<>(); cmdParam.put(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD, String.valueOf(processInstance.getId())); // process instance quit by "waiting thread" state if (originCommand == null) { Command command = new Command( CommandType.RECOVER_WAITTING_THREAD, processInstance.getTaskDependType(), processInstance.getFailureStrategy(), processInstance.getExecutorId(), processInstance.getProcessDefinitionId(), JSONUtils.toJsonString(cmdParam), processInstance.getWarningType(), processInstance.getWarningGroupId(), processInstance.getScheduleTime(), processInstance.getWorkerGroup(), processInstance.getProcessInstancePriority() ); saveCommand(command); return; } // update the command time if current command if recover from waiting if (originCommand.getCommandType() == CommandType.RECOVER_WAITTING_THREAD) { originCommand.setUpdateTime(new Date()); saveCommand(originCommand); } else { // delete old command and create new waiting thread command commandMapper.deleteById(originCommand.getId()); originCommand.setId(0); originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD); originCommand.setUpdateTime(new Date()); originCommand.setCommandParam(JSONUtils.toJsonString(cmdParam)); originCommand.setProcessInstancePriority(processInstance.getProcessInstancePriority()); saveCommand(originCommand); } } /** * get schedule time from command * * @param command command * @param cmdParam cmdParam map * @return date */ private Date getScheduleTime(Command command, Map<String, String> cmdParam) { Date scheduleTime = command.getScheduleTime(); if (scheduleTime == null && cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)) { scheduleTime = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); } return scheduleTime; } /** * generate a new work process instance from command. * * @param processDefinition processDefinition * @param command command * @param cmdParam cmdParam map * @return process instance */ private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition, Command command, Map<String, String> cmdParam) { ProcessInstance processInstance = new ProcessInstance(processDefinition); processInstance.setState(ExecutionStatus.RUNNING_EXECUTION); processInstance.setRecovery(Flag.NO); processInstance.setStartTime(new Date()); processInstance.setRunTimes(1); processInstance.setMaxTryTimes(0); processInstance.setProcessDefinitionId(command.getProcessDefinitionId()); processInstance.setCommandParam(command.getCommandParam()); processInstance.setCommandType(command.getCommandType()); processInstance.setIsSubProcess(Flag.NO); processInstance.setTaskDependType(command.getTaskDependType()); processInstance.setFailureStrategy(command.getFailureStrategy()); processInstance.setExecutorId(command.getExecutorId()); WarningType warningType = command.getWarningType() == null ? WarningType.NONE : command.getWarningType(); processInstance.setWarningType(warningType); Integer warningGroupId = command.getWarningGroupId() == null ? 
0 : command.getWarningGroupId(); processInstance.setWarningGroupId(warningGroupId); // schedule time Date scheduleTime = getScheduleTime(command, cmdParam); if (scheduleTime != null) { processInstance.setScheduleTime(scheduleTime); } processInstance.setCommandStartTime(command.getStartTime()); processInstance.setLocations(processDefinition.getLocations()); processInstance.setConnects(processDefinition.getConnects()); // reset global params while there are start parameters setGlobalParamIfCommanded(processDefinition,cmdParam); // curing global params processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( processDefinition.getGlobalParamMap(), processDefinition.getGlobalParamList(), getCommandTypeIfComplement(processInstance, command), processInstance.getScheduleTime())); //copy process define json to process instance processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson()); // set process instance priority processInstance.setProcessInstancePriority(command.getProcessInstancePriority()); String workerGroup = StringUtils.isBlank(command.getWorkerGroup()) ? Constants.DEFAULT_WORKER_GROUP : command.getWorkerGroup(); processInstance.setWorkerGroup(workerGroup); processInstance.setTimeout(processDefinition.getTimeout()); processInstance.setTenantId(processDefinition.getTenantId()); return processInstance; } private void setGlobalParamIfCommanded(ProcessDefinition processDefinition, Map<String, String> cmdParam) { // get start params from command param Map<String, String> startParamMap = new HashMap<>(); if (cmdParam != null && cmdParam.containsKey(Constants.CMD_PARAM_START_PARAMS)) { String startParamJson = cmdParam.get(Constants.CMD_PARAM_START_PARAMS); startParamMap = JSONUtils.toMap(startParamJson); } Map<String, String> fatherParamMap = new HashMap<>(); if (cmdParam != null && cmdParam.containsKey(Constants.CMD_PARAM_FATHER_PARAMS)) { String fatherParamJson = cmdParam.get(Constants.CMD_PARAM_FATHER_PARAMS); fatherParamMap = JSONUtils.toMap(fatherParamJson); } startParamMap.putAll(fatherParamMap); // set start param into global params if (startParamMap.size() > 0 && processDefinition.getGlobalParamMap() != null) { for (Map.Entry<String, String> param : processDefinition.getGlobalParamMap().entrySet()) { String val = startParamMap.get(param.getKey()); if (val != null) { param.setValue(val); } } } } /** * get process tenant * there is tenant id in definition, use the tenant of the definition. * if there is not tenant id in the definiton or the tenant not exist * use definition creator's tenant. 
* * @param tenantId tenantId * @param userId userId * @return tenant */ public Tenant getTenantForProcess(int tenantId, int userId) { Tenant tenant = null; if (tenantId >= 0) { tenant = tenantMapper.queryById(tenantId); } if (userId == 0) { return null; } if (tenant == null) { User user = userMapper.selectById(userId); tenant = tenantMapper.queryById(user.getTenantId()); } return tenant; } /** * check command parameters is valid * * @param command command * @param cmdParam cmdParam map * @return whether command param is valid */ private Boolean checkCmdParam(Command command, Map<String, String> cmdParam) { if (command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType() == TaskDependType.TASK_PRE) { if (cmdParam == null || !cmdParam.containsKey(Constants.CMD_PARAM_START_NODE_NAMES) || cmdParam.get(Constants.CMD_PARAM_START_NODE_NAMES).isEmpty()) { logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType()); return false; } } return true; } /** * construct process instance according to one command. * * @param command command * @param host host * @return process instance */ private ProcessInstance constructProcessInstance(Command command, String host) { ProcessInstance processInstance = null; CommandType commandType = command.getCommandType(); Map<String, String> cmdParam = JSONUtils.toMap(command.getCommandParam()); ProcessDefinition processDefinition = null; if (command.getProcessDefinitionId() != 0) { processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId()); if (processDefinition == null) { logger.error("cannot find the work process define! define id : {}", command.getProcessDefinitionId()); return null; } } if (cmdParam != null) { Integer processInstanceId = 0; // recover from failure or pause tasks if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING)) { String processId = cmdParam.get(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING); processInstanceId = Integer.parseInt(processId); if (processInstanceId == 0) { logger.error("command parameter is error, [ ProcessInstanceId ] is 0"); return null; } } else if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) { // sub process map String pId = cmdParam.get(Constants.CMD_PARAM_SUB_PROCESS); processInstanceId = Integer.parseInt(pId); } else if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD)) { // waiting thread command String pId = cmdParam.get(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD); processInstanceId = Integer.parseInt(pId); } if (processInstanceId == 0) { processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); } else { processInstance = this.findProcessInstanceDetailById(processInstanceId); CommandType commandTypeIfComplement = getCommandTypeIfComplement(processInstance, command); // reset global params while repeat running is needed by cmdParam if (commandTypeIfComplement == CommandType.REPEAT_RUNNING) { setGlobalParamIfCommanded(processDefinition, cmdParam); } } processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId()); processInstance.setProcessDefinition(processDefinition); //reset command parameter if (processInstance.getCommandParam() != null) { Map<String, String> processCmdParam = JSONUtils.toMap(processInstance.getCommandParam()); for (Map.Entry<String, String> entry : processCmdParam.entrySet()) { if (!cmdParam.containsKey(entry.getKey())) { cmdParam.put(entry.getKey(), entry.getValue()); } } } // reset command parameter if sub 
process if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) { processInstance.setCommandParam(command.getCommandParam()); } } else { // generate one new process instance processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); } if (Boolean.FALSE.equals(checkCmdParam(command, cmdParam))) { logger.error("command parameter check failed!"); return null; } if (command.getScheduleTime() != null) { processInstance.setScheduleTime(command.getScheduleTime()); } processInstance.setHost(host); ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXECUTION; int runTime = processInstance.getRunTimes(); switch (commandType) { case START_PROCESS: break; case START_FAILURE_TASK_PROCESS: // find failed tasks and init these tasks List<Integer> failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE); List<Integer> toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE); List<Integer> killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL); cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); failedList.addAll(killedList); failedList.addAll(toleranceList); for (Integer taskId : failedList) { initTaskInstance(this.findTaskInstanceById(taskId)); } cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, String.join(Constants.COMMA, convertIntListToString(failedList))); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); processInstance.setRunTimes(runTime + 1); break; case START_CURRENT_TASK_PROCESS: break; case RECOVER_WAITTING_THREAD: break; case RECOVER_SUSPENDED_PROCESS: // find pause tasks and init task's state cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); List<Integer> suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE); List<Integer> stopNodeList = findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL); suspendedNodeList.addAll(stopNodeList); for (Integer taskId : suspendedNodeList) { // initialize the pause state initTaskInstance(this.findTaskInstanceById(taskId)); } cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList))); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); processInstance.setRunTimes(runTime + 1); break; case RECOVER_TOLERANCE_FAULT_PROCESS: // recover tolerance fault process processInstance.setRecovery(Flag.YES); runStatus = processInstance.getState(); break; case COMPLEMENT_DATA: // delete all the valid tasks when complement data List<TaskInstance> taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId()); for (TaskInstance taskInstance : taskInstanceList) { taskInstance.setFlag(Flag.NO); this.updateTaskInstance(taskInstance); } initComplementDataParam(processDefinition, processInstance, cmdParam); break; case REPEAT_RUNNING: // delete the recover task names from command parameter if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING)) { cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); } // delete all the valid tasks when repeat running List<TaskInstance> validTaskList = findValidTaskListByProcessId(processInstance.getId()); for (TaskInstance taskInstance : validTaskList) { taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); } processInstance.setStartTime(new Date()); processInstance.setEndTime(null); 
processInstance.setRunTimes(runTime + 1); initComplementDataParam(processDefinition, processInstance, cmdParam); break; case SCHEDULER: break; default: break; } processInstance.setState(runStatus); return processInstance; } /** * return complement data if the process start with complement data * * @param processInstance processInstance * @param command command * @return command type */ private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command) { if (CommandType.COMPLEMENT_DATA == processInstance.getCmdTypeIfComplement()) { return CommandType.COMPLEMENT_DATA; } else { return command.getCommandType(); } } /** * initialize complement data parameters * * @param processDefinition processDefinition * @param processInstance processInstance * @param cmdParam cmdParam */ private void initComplementDataParam(ProcessDefinition processDefinition, ProcessInstance processInstance, Map<String, String> cmdParam) { if (!processInstance.isComplementData()) { return; } Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE), YYYY_MM_DD_HH_MM_SS); if (Flag.NO == processInstance.getIsSubProcess()) { processInstance.setScheduleTime(startComplementTime); } processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( processDefinition.getGlobalParamMap(), processDefinition.getGlobalParamList(), CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); } /** * set sub work process parameters. * handle sub work process instance, update relation table and command parameters * set sub work process flag, extends parent work process command parameters * * @param subProcessInstance subProcessInstance * @return process instance */ public ProcessInstance setSubProcessParam(ProcessInstance subProcessInstance) { String cmdParam = subProcessInstance.getCommandParam(); if (StringUtils.isEmpty(cmdParam)) { return subProcessInstance; } Map<String, String> paramMap = JSONUtils.toMap(cmdParam); // write sub process id into cmd param. if (paramMap.containsKey(CMD_PARAM_SUB_PROCESS) && CMD_PARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMD_PARAM_SUB_PROCESS))) { paramMap.remove(CMD_PARAM_SUB_PROCESS); paramMap.put(CMD_PARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId())); subProcessInstance.setCommandParam(JSONUtils.toJsonString(paramMap)); subProcessInstance.setIsSubProcess(Flag.YES); this.saveProcessInstance(subProcessInstance); } // copy parent instance user def params to sub process.. String parentInstanceId = paramMap.get(CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID); if (StringUtils.isNotEmpty(parentInstanceId)) { ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId)); if (parentInstance != null) { subProcessInstance.setGlobalParams( joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams())); this.saveProcessInstance(subProcessInstance); } else { logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam); } } ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class); if (processInstanceMap == null || processInstanceMap.getParentProcessInstanceId() == 0) { return subProcessInstance; } // update sub process id to process map table processInstanceMap.setProcessInstanceId(subProcessInstance.getId()); this.updateWorkProcessInstanceMap(processInstanceMap); return subProcessInstance; } /** * join parent global params into sub process. * only the keys doesn't in sub process global would be joined. 
* * @param parentGlobalParams parentGlobalParams * @param subGlobalParams subGlobalParams * @return global params join */ private String joinGlobalParams(String parentGlobalParams, String subGlobalParams) { List<Property> parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class); List<Property> subPropertyList = JSONUtils.toList(subGlobalParams, Property.class); Map<String, String> subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); for (Property parent : parentPropertyList) { if (!subMap.containsKey(parent.getProp())) { subPropertyList.add(parent); } } return JSONUtils.toJsonString(subPropertyList); } /** * initialize task instance * * @param taskInstance taskInstance */ private void initTaskInstance(TaskInstance taskInstance) { if (!taskInstance.isSubProcess() && (taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure())) { taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); return; } taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); updateTaskInstance(taskInstance); } /** * submit task to db * submit sub process to command * * @param taskInstance taskInstance * @return task instance */ @Transactional(rollbackFor = Exception.class) public TaskInstance submitTask(TaskInstance taskInstance) { ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); logger.info("start submit task : {}, instance id:{}, state: {}", taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState()); //submit to db TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance); if (task == null) { logger.error("end submit task to db error, task name:{}, process id:{} state: {} ", taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState()); return task; } if (!task.getState().typeIsFinished()) { createSubWorkProcess(processInstance, task); } logger.info("end submit task to db successfully:{} state:{} complete, instance id:{} state: {} ", taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState()); return task; } /** * set work process instance map * consider o * repeat running does not generate new sub process instance * set map {parent instance id, task instance id, 0(child instance id)} * * @param parentInstance parentInstance * @param parentTask parentTask * @return process instance map */ private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask) { ProcessInstanceMap processMap = findWorkProcessMapByParent(parentInstance.getId(), parentTask.getId()); if (processMap != null) { return processMap; } if (parentInstance.getCommandType() == CommandType.REPEAT_RUNNING) { // update current task id to map processMap = findPreviousTaskProcessMap(parentInstance, parentTask); if (processMap != null) { processMap.setParentTaskInstanceId(parentTask.getId()); updateWorkProcessInstanceMap(processMap); return processMap; } } // new task processMap = new ProcessInstanceMap(); processMap.setParentProcessInstanceId(parentInstance.getId()); processMap.setParentTaskInstanceId(parentTask.getId()); createWorkProcessInstanceMap(processMap); return processMap; } /** * find previous task work process map. 
* * @param parentProcessInstance parentProcessInstance * @param parentTask parentTask * @return process instance map */ private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance, TaskInstance parentTask) { Integer preTaskId = 0; List<TaskInstance> preTaskList = this.findPreviousTaskListByWorkProcessId(parentProcessInstance.getId()); for (TaskInstance task : preTaskList) { if (task.getName().equals(parentTask.getName())) { preTaskId = task.getId(); ProcessInstanceMap map = findWorkProcessMapByParent(parentProcessInstance.getId(), preTaskId); if (map != null) { return map; } } } logger.info("sub process instance is not found,parent task:{},parent instance:{}", parentTask.getId(), parentProcessInstance.getId()); return null; } /** * create sub work process command * * @param parentProcessInstance parentProcessInstance * @param task task */ public void createSubWorkProcess(ProcessInstance parentProcessInstance, TaskInstance task) { if (!task.isSubProcess()) { return; } //check create sub work flow firstly ProcessInstanceMap instanceMap = findWorkProcessMapByParent(parentProcessInstance.getId(), task.getId()); if (null != instanceMap && CommandType.RECOVER_TOLERANCE_FAULT_PROCESS == parentProcessInstance.getCommandType()) { // recover failover tolerance would not create a new command when the sub command already have been created return; } instanceMap = setProcessInstanceMap(parentProcessInstance, task); ProcessInstance childInstance = null; if (instanceMap.getProcessInstanceId() != 0) { childInstance = findProcessInstanceById(instanceMap.getProcessInstanceId()); } Command subProcessCommand = createSubProcessCommand(parentProcessInstance, childInstance, instanceMap, task); updateSubProcessDefinitionByParent(parentProcessInstance, subProcessCommand.getProcessDefinitionId()); initSubInstanceState(childInstance); createCommand(subProcessCommand); logger.info("sub process command created: {} ", subProcessCommand); } /** * complement data needs transform parent parameter to child. 
*/ private String getSubWorkFlowParam(ProcessInstanceMap instanceMap, ProcessInstance parentProcessInstance,Map<String,String> fatherParams) { // set sub work process command String processMapStr = JSONUtils.toJsonString(instanceMap); Map<String, String> cmdParam = JSONUtils.toMap(processMapStr); if (parentProcessInstance.isComplementData()) { Map<String, String> parentParam = JSONUtils.toMap(parentProcessInstance.getCommandParam()); String endTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE); String startTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, endTime); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, startTime); processMapStr = JSONUtils.toJsonString(cmdParam); } if (fatherParams.size() != 0) { cmdParam.put(CMD_PARAM_FATHER_PARAMS, JSONUtils.toJsonString(fatherParams)); processMapStr = JSONUtils.toJsonString(cmdParam); } return processMapStr; } public Map<String, String> getGlobalParamMap(String globalParams) { List<Property> propList; Map<String, String> globalParamMap = new HashMap<>(); if (StringUtils.isNotEmpty(globalParams)) { propList = JSONUtils.toList(globalParams, Property.class); globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); } return globalParamMap; } /** * create sub work process command */ public Command createSubProcessCommand(ProcessInstance parentProcessInstance, ProcessInstance childInstance, ProcessInstanceMap instanceMap, TaskInstance task) { CommandType commandType = getSubCommandType(parentProcessInstance, childInstance); TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class); Map<String, Object> subProcessParam = JSONUtils.toMap(taskNode.getParams(), String.class, Object.class); Integer childDefineId = Integer.parseInt(String.valueOf(subProcessParam.get(Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID))); Object localParams = subProcessParam.get(Constants.LOCAL_PARAMS); List<Property> allParam = JSONUtils.toList(JSONUtils.toJsonString(localParams), Property.class); Map<String, String> globalMap = this.getGlobalParamMap(parentProcessInstance.getGlobalParams()); Map<String,String> fatherParams = new HashMap<>(); if (CollectionUtils.isNotEmpty(allParam)) { for (Property info : allParam) { fatherParams.put(info.getProp(), globalMap.get(info.getProp())); } } String processParam = getSubWorkFlowParam(instanceMap, parentProcessInstance,fatherParams); return new Command( commandType, TaskDependType.TASK_POST, parentProcessInstance.getFailureStrategy(), parentProcessInstance.getExecutorId(), childDefineId, processParam, parentProcessInstance.getWarningType(), parentProcessInstance.getWarningGroupId(), parentProcessInstance.getScheduleTime(), task.getWorkerGroup(), parentProcessInstance.getProcessInstancePriority() ); } /** * initialize sub work flow state * child instance state would be initialized when 'recovery from pause/stop/failure' */ private void initSubInstanceState(ProcessInstance childInstance) { if (childInstance != null) { childInstance.setState(ExecutionStatus.RUNNING_EXECUTION); updateProcessInstance(childInstance); } } /** * get sub work flow command type * child instance exist: child command = fatherCommand * child instance not exists: child command = fatherCommand[0] */ private CommandType getSubCommandType(ProcessInstance parentProcessInstance, ProcessInstance childInstance) { CommandType commandType = parentProcessInstance.getCommandType(); if (childInstance == null) { String fatherHistoryCommand = 
parentProcessInstance.getHistoryCmd(); commandType = CommandType.valueOf(fatherHistoryCommand.split(Constants.COMMA)[0]); } return commandType; } /** * update sub process definition * * @param parentProcessInstance parentProcessInstance * @param childDefinitionId childDefinitionId */ private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) { ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId()); ProcessDefinition childDefinition = this.findProcessDefineById(childDefinitionId); if (childDefinition != null && fatherDefinition != null) { childDefinition.setWarningGroupId(fatherDefinition.getWarningGroupId()); processDefineMapper.updateById(childDefinition); } } /** * submit task to mysql * * @param taskInstance taskInstance * @param processInstance processInstance * @return task instance */ public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance) { ExecutionStatus processInstanceState = processInstance.getState(); if (taskInstance.getState().typeIsFailure()) { if (taskInstance.isSubProcess()) { taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1); } else { if (processInstanceState != ExecutionStatus.READY_STOP && processInstanceState != ExecutionStatus.READY_PAUSE) { // failure task set invalid taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); // crate new task instance if (taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE) { taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1); } taskInstance.setSubmitTime(null); taskInstance.setStartTime(null); taskInstance.setEndTime(null); taskInstance.setFlag(Flag.YES); taskInstance.setHost(null); taskInstance.setId(0); } } } taskInstance.setExecutorId(processInstance.getExecutorId()); taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority()); taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState)); if (taskInstance.getSubmitTime() == null) { taskInstance.setSubmitTime(new Date()); } if (taskInstance.getFirstSubmitTime() == null) { taskInstance.setFirstSubmitTime(taskInstance.getSubmitTime()); } boolean saveResult = saveTaskInstance(taskInstance); if (!saveResult) { return null; } return taskInstance; } /** * get submit task instance state by the work process state * cannot modify the task state when running/kill/submit success, or this * task instance is already exists in task queue . 
* return pause if work process state is ready pause * return stop if work process state is ready stop * if none of the above apply, return submit success * * @param taskInstance taskInstance * @param processInstanceState processInstanceState * @return process instance state */ public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState) { ExecutionStatus state = taskInstance.getState(); // running, delayed or killed // the task already exists in task queue // return state if ( state == ExecutionStatus.RUNNING_EXECUTION || state == ExecutionStatus.DELAY_EXECUTION || state == ExecutionStatus.KILL ) { return state; } // return pause/stop if process instance state is ready pause / stop // or return submit success if (processInstanceState == ExecutionStatus.READY_PAUSE) { state = ExecutionStatus.PAUSE; } else if (processInstanceState == ExecutionStatus.READY_STOP || !checkProcessStrategy(taskInstance)) { state = ExecutionStatus.KILL; } else { state = ExecutionStatus.SUBMITTED_SUCCESS; } return state; } /** * check process instance strategy * * @param taskInstance taskInstance * @return check strategy result */ private boolean checkProcessStrategy(TaskInstance taskInstance) { ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId()); FailureStrategy failureStrategy = processInstance.getFailureStrategy(); if (failureStrategy == FailureStrategy.CONTINUE) { return true; } List<TaskInstance> taskInstances = this.findValidTaskListByProcessId(taskInstance.getProcessInstanceId()); for (TaskInstance task : taskInstances) { if (task.getState() == ExecutionStatus.FAILURE) { return false; } } return true; } /** * create a new process instance * * @param processInstance processInstance */ public void createProcessInstance(ProcessInstance processInstance) { if (processInstance != null) { processInstanceMapper.insert(processInstance); } } /** * insert or update work process instance to database * * @param processInstance processInstance */ public void saveProcessInstance(ProcessInstance processInstance) { if (processInstance == null) { logger.error("save error, process instance is null!"); return; } if (processInstance.getId() != 0) { processInstanceMapper.updateById(processInstance); } else { createProcessInstance(processInstance); } } /** * insert or update command * * @param command command * @return save command result */ public int saveCommand(Command command) { if (command.getId() != 0) { return commandMapper.updateById(command); } else { return commandMapper.insert(command); } } /** * insert or update task instance * * @param taskInstance taskInstance * @return save task instance result */ public boolean saveTaskInstance(TaskInstance taskInstance) { if (taskInstance.getId() != 0) { return updateTaskInstance(taskInstance); } else { return createTaskInstance(taskInstance); } } /** * insert task instance * * @param taskInstance taskInstance * @return create task instance result */ public boolean createTaskInstance(TaskInstance taskInstance) { int count = taskInstanceMapper.insert(taskInstance); return count > 0; } /** * update task instance * * @param taskInstance taskInstance * @return update task instance result */ public boolean updateTaskInstance(TaskInstance taskInstance) { int count = taskInstanceMapper.updateById(taskInstance); return count > 0; } /** * delete a command by id * * @param id id */ public void delCommandById(int id) { commandMapper.deleteById(id); } /** * find task instance by id * * @param 
taskId task id * @return task instance */ public TaskInstance findTaskInstanceById(Integer taskId) { return taskInstanceMapper.selectById(taskId); } /** * package task instance, associate processInstance and processDefine * * @param taskInstId taskInstId * @return task instance */ public TaskInstance getTaskInstanceDetailByTaskId(int taskInstId) { // get task instance TaskInstance taskInstance = findTaskInstanceById(taskInstId); if (taskInstance == null) { return taskInstance; } // get process instance ProcessInstance processInstance = findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); // get process define ProcessDefinition processDefine = findProcessDefineById(taskInstance.getProcessDefinitionId()); taskInstance.setProcessInstance(processInstance); taskInstance.setProcessDefine(processDefine); return taskInstance; } /** * get id list by task state * * @param instanceId instanceId * @param state state * @return task instance states */ public List<Integer> findTaskIdByInstanceState(int instanceId, ExecutionStatus state) { return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal()); } /** * find valid task list by process instance id * * @param processInstanceId processInstanceId * @return task instance list */ public List<TaskInstance> findValidTaskListByProcessId(Integer processInstanceId) { return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES); } /** * find previous task list by work process id * * @param processInstanceId processInstanceId * @return task instance list */ public List<TaskInstance> findPreviousTaskListByWorkProcessId(Integer processInstanceId) { return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.NO); } /** * update work process instance map * * @param processInstanceMap processInstanceMap * @return update process instance result */ public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) { return processInstanceMapMapper.updateById(processInstanceMap); } /** * create work process instance map * * @param processInstanceMap processInstanceMap * @return create process instance result */ public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) { int count = 0; if (processInstanceMap != null) { return processInstanceMapMapper.insert(processInstanceMap); } return count; } /** * find work process map by parent process id and parent task id. 
* * @param parentWorkProcessId parentWorkProcessId * @param parentTaskId parentTaskId * @return process instance map */ public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId) { return processInstanceMapMapper.queryByParentId(parentWorkProcessId, parentTaskId); } /** * delete work process map by parent process id * * @param parentWorkProcessId parentWorkProcessId * @return delete process map result */ public int deleteWorkProcessMapByParentId(int parentWorkProcessId) { return processInstanceMapMapper.deleteByParentProcessId(parentWorkProcessId); } /** * find sub process instance * * @param parentProcessId parentProcessId * @param parentTaskId parentTaskId * @return process instance */ public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId) { ProcessInstance processInstance = null; ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryByParentId(parentProcessId, parentTaskId); if (processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0) { return processInstance; } processInstance = findProcessInstanceById(processInstanceMap.getProcessInstanceId()); return processInstance; } /** * find parent process instance * * @param subProcessId subProcessId * @return process instance */ public ProcessInstance findParentProcessInstance(Integer subProcessId) { ProcessInstance processInstance = null; ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(subProcessId); if (processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0) { return processInstance; } processInstance = findProcessInstanceById(processInstanceMap.getParentProcessInstanceId()); return processInstance; } /** * change task state * * @param state state * @param startTime startTime * @param host host * @param executePath executePath * @param logPath logPath * @param taskInstId taskInstId */ public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date startTime, String host, String executePath, String logPath, int taskInstId) { taskInstance.setState(state); taskInstance.setStartTime(startTime); taskInstance.setHost(host); taskInstance.setExecutePath(executePath); taskInstance.setLogPath(logPath); saveTaskInstance(taskInstance); } /** * update process instance * * @param processInstance processInstance * @return update process instance result */ public int updateProcessInstance(ProcessInstance processInstance) { return processInstanceMapper.updateById(processInstance); } /** * update the process instance * * @param processInstanceId processInstanceId * @param processJson processJson * @param globalParams globalParams * @param scheduleTime scheduleTime * @param flag flag * @param locations locations * @param connects connects * @return update process instance result */ public int updateProcessInstance(Integer processInstanceId, String processJson, String globalParams, Date scheduleTime, Flag flag, String locations, String connects) { ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); if (processInstance != null) { processInstance.setProcessInstanceJson(processJson); processInstance.setGlobalParams(globalParams); processInstance.setScheduleTime(scheduleTime); processInstance.setLocations(locations); processInstance.setConnects(connects); return processInstanceMapper.updateById(processInstance); } return 0; } /** * change task state * * @param state state * @param endTime endTime * @param taskInstId taskInstId * 
@param varPool varPool */ public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date endTime, int processId, String appIds, int taskInstId, String varPool, String result) { taskInstance.setPid(processId); taskInstance.setAppLink(appIds); taskInstance.setState(state); taskInstance.setEndTime(endTime); taskInstance.setVarPool(varPool); changeOutParam(result, taskInstance); saveTaskInstance(taskInstance); } public void changeOutParam(String result, TaskInstance taskInstance) { if (StringUtils.isEmpty(result)) { return; } List<Map<String, String>> workerResultParam = getListMapByString(result); if (CollectionUtils.isEmpty(workerResultParam)) { return; } //if the result more than one line,just get the first . Map<String, String> row = workerResultParam.get(0); if (row == null || row.size() == 0) { return; } TaskNode taskNode = JSONUtils.parseObject(taskInstance.getTaskJson(), TaskNode.class); Map<String, Object> taskParams = JSONUtils.toMap(taskNode.getParams(), String.class, Object.class); Object localParams = taskParams.get(LOCAL_PARAMS); if (localParams == null) { return; } ProcessInstance processInstance = this.processInstanceMapper.queryDetailById(taskInstance.getProcessInstanceId()); List<Property> params4Property = JSONUtils.toList(processInstance.getGlobalParams(), Property.class); Map<String, Property> allParamMap = params4Property.stream().collect(Collectors.toMap(Property::getProp, Property -> Property)); List<Property> allParam = JSONUtils.toList(JSONUtils.toJsonString(localParams), Property.class); for (Property info : allParam) { if (info.getDirect() == Direct.OUT) { String paramName = info.getProp(); Property property = allParamMap.get(paramName); if (property == null) { continue; } String value = String.valueOf(row.get(paramName)); if (StringUtils.isNotEmpty(value)) { property.setValue(value); info.setValue(value); } } } taskParams.put(LOCAL_PARAMS, allParam); taskNode.setParams(JSONUtils.toJsonString(taskParams)); // task instance node json taskInstance.setTaskJson(JSONUtils.toJsonString(taskNode)); String params4ProcessString = JSONUtils.toJsonString(params4Property); int updateCount = this.processInstanceMapper.updateGlobalParamsById(params4ProcessString, processInstance.getId()); logger.info("updateCount:{}, params4Process:{}, processInstanceId:{}", updateCount, params4ProcessString, processInstance.getId()); } public List<Map<String, String>> getListMapByString(String json) { List<Map<String, String>> allParams = new ArrayList<>(); ArrayNode paramsByJson = JSONUtils.parseArray(json); Iterator<JsonNode> listIterator = paramsByJson.iterator(); while (listIterator.hasNext()) { Map<String, String> param = JSONUtils.toMap(listIterator.next().toString(), String.class, String.class); allParams.add(param); } return allParams; } /** * convert integer list to string list * * @param intList intList * @return string list */ public List<String> convertIntListToString(List<Integer> intList) { if (intList == null) { return new ArrayList<>(); } List<String> result = new ArrayList<>(intList.size()); for (Integer intVar : intList) { result.add(String.valueOf(intVar)); } return result; } /** * query schedule by id * * @param id id * @return schedule */ public Schedule querySchedule(int id) { return scheduleMapper.selectById(id); } /** * query Schedule by processDefinitionId * * @param processDefinitionId processDefinitionId * @see Schedule */ public List<Schedule> queryReleaseSchedulerListByProcessDefinitionId(int processDefinitionId) { return 
scheduleMapper.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId); } /** * query need failover process instance * * @param host host * @return process instance list */ public List<ProcessInstance> queryNeedFailoverProcessInstances(String host) { return processInstanceMapper.queryByHostAndStatus(host, stateArray); } /** * process need failover process instance * * @param processInstance processInstance */ @Transactional(rollbackFor = RuntimeException.class) public void processNeedFailoverProcessInstances(ProcessInstance processInstance) { //1 update processInstance host is null processInstance.setHost(Constants.NULL); processInstanceMapper.updateById(processInstance); //2 insert into recover command Command cmd = new Command(); cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId()); cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId())); cmd.setExecutorId(processInstance.getExecutorId()); cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS); createCommand(cmd); } /** * query all need failover task instances by host * * @param host host * @return task instance list */ public List<TaskInstance> queryNeedFailoverTaskInstances(String host) { return taskInstanceMapper.queryByHostAndStatus(host, stateArray); } /** * find data source by id * * @param id id * @return datasource */ public DataSource findDataSourceById(int id) { return dataSourceMapper.selectById(id); } /** * update process instance state by id * * @param processInstanceId processInstanceId * @param executionStatus executionStatus * @return update process result */ public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { ProcessInstance instance = processInstanceMapper.selectById(processInstanceId); instance.setState(executionStatus); return processInstanceMapper.updateById(instance); } /** * find process instance by the task id * * @param taskId taskId * @return process instance */ public ProcessInstance findProcessInstanceByTaskId(int taskId) { TaskInstance taskInstance = taskInstanceMapper.selectById(taskId); if (taskInstance != null) { return processInstanceMapper.selectById(taskInstance.getProcessInstanceId()); } return null; } /** * find udf function list by id list string * * @param ids ids * @return udf function list */ public List<UdfFunc> queryUdfFunListByIds(int[] ids) { return udfFuncMapper.queryUdfByIdStr(ids, null); } /** * find tenant code by resource name * * @param resName resource name * @param resourceType resource type * @return tenant code */ public String queryTenantCodeByResName(String resName, ResourceType resourceType) { // in order to query tenant code successful although the version is older String fullName = resName.startsWith("/") ? resName : String.format("/%s", resName); List<Resource> resourceList = resourceMapper.queryResource(fullName, resourceType.ordinal()); if (CollectionUtils.isEmpty(resourceList)) { return StringUtils.EMPTY; } int userId = resourceList.get(0).getUserId(); User user = userMapper.selectById(userId); if (Objects.isNull(user)) { return StringUtils.EMPTY; } Tenant tenant = tenantMapper.selectById(user.getTenantId()); if (Objects.isNull(tenant)) { return StringUtils.EMPTY; } return tenant.getTenantCode(); } /** * find schedule list by process define id. 
* * @param ids ids * @return schedule list */ public List<Schedule> selectAllByProcessDefineId(int[] ids) { return scheduleMapper.selectAllByProcessDefineArray( ids); } /** * get dependency cycle by work process define id and scheduler fire time * * @param masterId masterId * @param processDefinitionId processDefinitionId * @param scheduledFireTime the time the task schedule is expected to trigger * @return CycleDependency * @throws Exception if error throws Exception */ public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception { List<CycleDependency> list = getCycleDependencies(masterId, new int[]{processDefinitionId}, scheduledFireTime); return !list.isEmpty() ? list.get(0) : null; } /** * get dependency cycle list by work process define id list and scheduler fire time * * @param masterId masterId * @param ids ids * @param scheduledFireTime the time the task schedule is expected to trigger * @return CycleDependency list * @throws Exception if error throws Exception */ public List<CycleDependency> getCycleDependencies(int masterId, int[] ids, Date scheduledFireTime) throws Exception { List<CycleDependency> cycleDependencyList = new ArrayList<>(); if (null == ids || ids.length == 0) { logger.warn("ids[] is empty!is invalid!"); return cycleDependencyList; } if (scheduledFireTime == null) { logger.warn("scheduledFireTime is null!is invalid!"); return cycleDependencyList; } String strCrontab = ""; CronExpression depCronExpression; Cron depCron; List<Date> list; List<Schedule> schedules = this.selectAllByProcessDefineId(ids); // for all scheduling information for (Schedule depSchedule : schedules) { strCrontab = depSchedule.getCrontab(); depCronExpression = CronUtils.parse2CronExpression(strCrontab); depCron = CronUtils.parse2Cron(strCrontab); CycleEnum cycleEnum = CronUtils.getMiniCycle(depCron); if (cycleEnum == null) { logger.error("{} is not valid", strCrontab); continue; } Calendar calendar = Calendar.getInstance(); switch (cycleEnum) { case HOUR: calendar.add(Calendar.HOUR, -25); break; case DAY: case WEEK: calendar.add(Calendar.DATE, -32); break; case MONTH: calendar.add(Calendar.MONTH, -13); break; default: String cycleName = cycleEnum.name(); logger.warn("Dependent process definition's cycleEnum is {},not support!!", cycleName); continue; } Date start = calendar.getTime(); if (depSchedule.getProcessDefinitionId() == masterId) { list = CronUtils.getSelfFireDateList(start, scheduledFireTime, depCronExpression); } else { list = CronUtils.getFireDateList(start, scheduledFireTime, depCronExpression); } if (!list.isEmpty()) { start = list.get(list.size() - 1); CycleDependency dependency = new CycleDependency(depSchedule.getProcessDefinitionId(), start, CronUtils.getExpirationTime(start, cycleEnum), cycleEnum); cycleDependencyList.add(dependency); } } return cycleDependencyList; } /** * find last scheduler process instance in the date interval * * @param definitionId definitionId * @param dateInterval dateInterval * @return process instance */ public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) { return processInstanceMapper.queryLastSchedulerProcess(definitionId, dateInterval.getStartTime(), dateInterval.getEndTime()); } /** * find last manual process instance interval * * @param definitionId process definition id * @param dateInterval dateInterval * @return process instance */ public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) { 
return processInstanceMapper.queryLastManualProcess(definitionId, dateInterval.getStartTime(), dateInterval.getEndTime()); } /** * find last running process instance * * @param definitionId process definition id * @param startTime start time * @param endTime end time * @return process instance */ public ProcessInstance findLastRunningProcess(int definitionId, Date startTime, Date endTime) { return processInstanceMapper.queryLastRunningProcess(definitionId, startTime, endTime, stateArray); } /** * query user queue by process instance id * * @param processInstanceId processInstanceId * @return queue */ public String queryUserQueueByProcessInstanceId(int processInstanceId) { String queue = ""; ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId); if (processInstance == null) { return queue; } User executor = userMapper.selectById(processInstance.getExecutorId()); if (executor != null) { queue = executor.getQueue(); } return queue; } /** * query project name and user name by processInstanceId. * * @param processInstanceId processInstanceId * @return projectName and userName */ public ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId) { return projectMapper.queryProjectWithUserByProcessInstanceId(processInstanceId); } /** * get task worker group * * @param taskInstance taskInstance * @return workerGroupId */ public String getTaskWorkerGroup(TaskInstance taskInstance) { String workerGroup = taskInstance.getWorkerGroup(); if (StringUtils.isNotBlank(workerGroup)) { return workerGroup; } int processInstanceId = taskInstance.getProcessInstanceId(); ProcessInstance processInstance = findProcessInstanceById(processInstanceId); if (processInstance != null) { return processInstance.getWorkerGroup(); } logger.info("task : {} will use default worker group", taskInstance.getId()); return Constants.DEFAULT_WORKER_GROUP; } /** * get have perm project list * * @param userId userId * @return project list */ public List<Project> getProjectListHavePerm(int userId) { List<Project> createProjects = projectMapper.queryProjectCreatedByUser(userId); List<Project> authedProjects = projectMapper.queryAuthedProjectListByUserId(userId); if (createProjects == null) { createProjects = new ArrayList<>(); } if (authedProjects != null) { createProjects.addAll(authedProjects); } return createProjects; } /** * get have perm project ids * * @param userId userId * @return project ids */ public List<Integer> getProjectIdListHavePerm(int userId) { List<Integer> projectIdList = new ArrayList<>(); for (Project project : getProjectListHavePerm(userId)) { projectIdList.add(project.getId()); } return projectIdList; } /** * list unauthorized udf function * * @param userId user id * @param needChecks data source id array * @return unauthorized udf function list */ public <T> List<T> listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType) { List<T> resultList = new ArrayList<>(); if (Objects.nonNull(needChecks) && needChecks.length > 0) { Set<T> originResSet = new HashSet<>(Arrays.asList(needChecks)); switch (authorizationType) { case RESOURCE_FILE_ID: case UDF_FILE: List<Resource> ownUdfResources = resourceMapper.listAuthorizedResourceById(userId, needChecks); addAuthorizedResources(ownUdfResources, userId); Set<Integer> authorizedResourceFiles = ownUdfResources.stream().map(Resource::getId).collect(toSet()); originResSet.removeAll(authorizedResourceFiles); break; case RESOURCE_FILE_NAME: List<Resource> ownResources = 
resourceMapper.listAuthorizedResource(userId, needChecks); addAuthorizedResources(ownResources, userId); Set<String> authorizedResources = ownResources.stream().map(Resource::getFullName).collect(toSet()); originResSet.removeAll(authorizedResources); break; case DATASOURCE: Set<Integer> authorizedDatasources = dataSourceMapper.listAuthorizedDataSource(userId, needChecks).stream().map(DataSource::getId).collect(toSet()); originResSet.removeAll(authorizedDatasources); break; case UDF: Set<Integer> authorizedUdfs = udfFuncMapper.listAuthorizedUdfFunc(userId, needChecks).stream().map(UdfFunc::getId).collect(toSet()); originResSet.removeAll(authorizedUdfs); break; default: break; } resultList.addAll(originResSet); } return resultList; } /** * get user by user id * * @param userId user id * @return User */ public User getUserById(int userId) { return userMapper.selectById(userId); } /** * get resource by resource id * * @param resourceId resource id * @return Resource */ public Resource getResourceById(int resourceId) { return resourceMapper.selectById(resourceId); } /** * list resources by ids * * @param resIds resIds * @return resource list */ public List<Resource> listResourceByIds(Integer[] resIds) { return resourceMapper.listResourceByIds(resIds); } /** * format task app id in task instance */ public String formatTaskAppId(TaskInstance taskInstance) { ProcessDefinition definition = this.findProcessDefineById(taskInstance.getProcessDefinitionId()); ProcessInstance processInstanceById = this.findProcessInstanceById(taskInstance.getProcessInstanceId()); if (definition == null || processInstanceById == null) { return ""; } return String.format("%s_%s_%s", definition.getId(), processInstanceById.getId(), taskInstance.getId()); } /** * solve the branch rename bug * * @param processData * @param oldJson * @return String */ public String changeJson(ProcessData processData, String oldJson) { ProcessData oldProcessData = JSONUtils.parseObject(oldJson, ProcessData.class); HashMap<String, String> oldNameTaskId = new HashMap<>(); List<TaskNode> oldTasks = oldProcessData.getTasks(); for (int i = 0; i < oldTasks.size(); i++) { TaskNode taskNode = oldTasks.get(i); String oldName = taskNode.getName(); String oldId = taskNode.getId(); oldNameTaskId.put(oldName, oldId); } // take the processdefinitionjson saved this time, and then save the taskid and name HashMap<String, String> newNameTaskId = new HashMap<>(); List<TaskNode> newTasks = processData.getTasks(); for (int i = 0; i < newTasks.size(); i++) { TaskNode taskNode = newTasks.get(i); String newId = taskNode.getId(); String newName = taskNode.getName(); newNameTaskId.put(newId, newName); } // replace the previous conditionresult with a new one List<TaskNode> tasks = processData.getTasks(); for (int i = 0; i < tasks.size(); i++) { TaskNode taskNode = newTasks.get(i); String type = taskNode.getType(); if (TaskType.CONDITIONS.getDescp().equalsIgnoreCase(type)) { ConditionsParameters conditionsParameters = JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class); String oldSuccessNodeName = conditionsParameters.getSuccessNode().get(0); String oldFailedNodeName = conditionsParameters.getFailedNode().get(0); String newSuccessNodeName = newNameTaskId.get(oldNameTaskId.get(oldSuccessNodeName)); String newFailedNodeName = newNameTaskId.get(oldNameTaskId.get(oldFailedNodeName)); if (newSuccessNodeName != null) { ArrayList<String> successNode = new ArrayList<>(); successNode.add(newSuccessNodeName); 
conditionsParameters.setSuccessNode(successNode); } if (newFailedNodeName != null) { ArrayList<String> failedNode = new ArrayList<>(); failedNode.add(newFailedNodeName); conditionsParameters.setFailedNode(failedNode); } String conditionResultStr = conditionsParameters.getConditionResult(); taskNode.setConditionResult(conditionResultStr); tasks.set(i, taskNode); } } return JSONUtils.toJsonString(processData); } /** * add authorized resources * @param ownResources own resources * @param userId userId */ private void addAuthorizedResources(List<Resource> ownResources, int userId) { List<Integer> relationResourceIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, 7); List<Resource> relationResources = CollectionUtils.isNotEmpty(relationResourceIds) ? resourceMapper.queryResourceListById(relationResourceIds) : new ArrayList<>(); ownResources.addAll(relationResources); } }
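The submit-state rules implemented by getSubmitTaskState above are easier to verify in isolation. Below is a minimal, self-contained sketch of the same decision table; every type here is a local stand-in (hypothetical names, not DolphinScheduler's ExecutionStatus or FailureStrategy API).

```java
// Standalone sketch of the submit-state rules described in getSubmitTaskState.
// All types here are local stand-ins, not DolphinScheduler classes.
public class SubmitStateSketch {
    enum Status { RUNNING_EXECUTION, DELAY_EXECUTION, KILL, PAUSE,
                  READY_PAUSE, READY_STOP, SUBMITTED_SUCCESS, OTHER }

    // taskState: current task state; processState: owning process instance state;
    // failureAllowsContinue: stand-in for the failure-strategy / failed-sibling check.
    static Status resolve(Status taskState, Status processState, boolean failureAllowsContinue) {
        // running, delayed or killed tasks keep their state (already in the task queue)
        if (taskState == Status.RUNNING_EXECUTION
                || taskState == Status.DELAY_EXECUTION
                || taskState == Status.KILL) {
            return taskState;
        }
        if (processState == Status.READY_PAUSE) {
            return Status.PAUSE;            // process is pausing: park the task
        }
        if (processState == Status.READY_STOP || !failureAllowsContinue) {
            return Status.KILL;             // process stopping, or strategy says abort
        }
        return Status.SUBMITTED_SUCCESS;    // otherwise the task may be submitted
    }

    public static void main(String[] args) {
        System.out.println(resolve(Status.OTHER, Status.READY_PAUSE, true));  // PAUSE
        System.out.println(resolve(Status.KILL,  Status.READY_PAUSE, true));  // KILL (kept)
        System.out.println(resolve(Status.OTHER, Status.OTHER,       false)); // KILL
        System.out.println(resolve(Status.OTHER, Status.OTHER,       true));  // SUBMITTED_SUCCESS
    }
}
```

Keeping the precedence in a side-effect-free method like this makes it easy to unit test, which is roughly what getSubmitTaskState achieves by delegating the failure-strategy check to checkProcessStrategy.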
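Similarly, changeOutParam above propagates Direct.OUT local parameters from the first row of the worker result into the process instance's global parameters. A stripped-down sketch of that flow (stand-in types, not the project's Property/Direct classes; it assumes only that the first result row wins and that only already-declared globals are overwritten):

```java
// Sketch of OUT-parameter propagation as in changeOutParam (requires Java 16+ for records).
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class OutParamSketch {
    record Prop(String name, boolean out) {}   // stand-in for Property + Direct.OUT

    static Map<String, String> propagate(List<Map<String, String>> workerResult,
                                         List<Prop> localParams,
                                         Map<String, String> globalParams) {
        Map<String, String> updated = new HashMap<>(globalParams);
        if (workerResult.isEmpty()) {
            return updated;
        }
        Map<String, String> firstRow = workerResult.get(0); // only the first row counts
        for (Prop p : localParams) {
            // overwrite only OUT params that the result provides and the globals declare
            if (p.out() && firstRow.containsKey(p.name()) && updated.containsKey(p.name())) {
                updated.put(p.name(), firstRow.get(p.name()));
            }
        }
        return updated;
    }

    public static void main(String[] args) {
        var result = List.of(Map.of("count", "42"), Map.of("count", "99"));
        var locals = List.of(new Prop("count", true));
        var globals = new HashMap<String, String>();
        globals.put("count", "0");
        System.out.println(propagate(result, locals, globals)); // {count=42}
    }
}
```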
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,292
There is a vulnerability in postgresql 42.1.4, upgrade recommended
https://github.com/apache/dolphinscheduler/blob/ac15f45b0b51b115ebd02aa3dd739d53c99523b5/pom.xml#L92 CVE-2020-13692, CVE-2018-10936. Recommended upgrade version: 42.2.13
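For anyone verifying the bump, a minimal sketch using only the standard java.sql.Driver interface (the org.postgresql package-name filter is an assumption about how pgjdbc registers itself, and this API exposes only major.minor, so 42.2.13 reports as 42.2):

```java
// Prints the PostgreSQL JDBC driver version actually loaded at runtime.
import java.sql.Driver;
import java.sql.DriverManager;
import java.util.Enumeration;

public class PgDriverVersionCheck {
    public static void main(String[] args) {
        // JDBC 4 drivers self-register via ServiceLoader when on the classpath.
        Enumeration<Driver> drivers = DriverManager.getDrivers();
        while (drivers.hasMoreElements()) {
            Driver d = drivers.nextElement();
            if (d.getClass().getName().startsWith("org.postgresql")) {
                // java.sql.Driver exposes only major.minor (42.2 for 42.2.13)
                System.out.printf("%s %d.%d%n", d.getClass().getName(),
                        d.getMajorVersion(), d.getMinorVersion());
            }
        }
    }
}
```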
https://github.com/apache/dolphinscheduler/issues/5292
https://github.com/apache/dolphinscheduler/pull/5318
837c9ba2cc48377654031721c9da2ab7410e5390
f82da57277c825d4588a6a0654b7730d333a5b18
"2021-04-16T03:13:46Z"
java
"2021-04-20T07:04:29Z"
dolphinscheduler-dist/release-docs/LICENSE
Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ======================================================================= Apache DolphinScheduler Subcomponents: The Apache DolphinScheduler project contains subcomponents with separate copyright notices and license terms. 
Your use of the source code for the these subcomponents is subject to the terms and conditions of the following licenses. ======================================================================== Apache 2.0 licenses ======================================================================== The following components are provided under the Apache License. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. apacheds-i18n 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-i18n/2.0.0-M15, Apache 2.0 apacheds-kerberos-codec 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-kerberos-codec/2.0.0-M15, Apache 2.0 apache-el 8.5.35.1: https://mvnrepository.com/artifact/org.mortbay.jasper/apache-el/8.5.35.1, Apache 2.0 api-asn1-api 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-asn1-api/1.0.0-M20, Apache 2.0 api-util 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-util/1.0.0-M20, Apache 2.0 async-http-client 1.6.5: https://mvnrepository.com/artifact/com.ning/async-http-client, Apache 2.0 audience-annotations 0.5.0: https://mvnrepository.com/artifact/org.apache.yetus/audience-annotations/0.5.0, Apache 2.0 avro 1.7.4: https://github.com/apache/avro, Apache 2.0 aws-sdk-java 1.7.4: https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk/1.7.4, Apache 2.0 bonecp 0.8.0.RELEASE: https://github.com/wwadge/bonecp, Apache 2.0 byte-buddy 1.9.16: https://mvnrepository.com/artifact/net.bytebuddy/byte-buddy/1.9.16, Apache 2.0 classmate 1.4.0: https://mvnrepository.com/artifact/com.fasterxml/classmate/1.4.0, Apache 2.0 clickhouse-jdbc 0.1.52: https://mvnrepository.com/artifact/ru.yandex.clickhouse/clickhouse-jdbc/0.1.52, Apache 2.0 commons-beanutils 1.9.4 https://mvnrepository.com/artifact/commons-beanutils/commons-beanutils/1.9.4, Apache 2.0 commons-cli 1.2: https://mvnrepository.com/artifact/commons-cli/commons-cli/1.2, Apache 2.0 commons-codec 1.11: https://mvnrepository.com/artifact/commons-codec/commons-codec/1.11, Apache 2.0 commons-collections 3.2.2: https://mvnrepository.com/artifact/commons-collections/commons-collections/3.2.2, Apache 2.0 commons-collections4 4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-collections4/4.1, Apache 2.0 commons-compress 1.4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-compress/1.4.1, Apache 2.0 commons-configuration 1.10: https://mvnrepository.com/artifact/commons-configuration/commons-configuration/1.10, Apache 2.0 commons-daemon 1.0.13 https://mvnrepository.com/artifact/commons-daemon/commons-daemon/1.0.13, Apache 2.0 commons-dbcp 1.4: https://github.com/apache/commons-dbcp, Apache 2.0 commons-email 1.5: https://github.com/apache/commons-email, Apache 2.0 commons-httpclient 3.0.1: https://mvnrepository.com/artifact/commons-httpclient/commons-httpclient/3.0.1, Apache 2.0 commons-io 2.4: https://github.com/apache/commons-io, Apache 2.0 commons-lang 2.6: https://github.com/apache/commons-lang, Apache 2.0 commons-logging 1.1.1: https://github.com/apache/commons-logging, Apache 2.0 commons-math3 3.1.1: https://github.com/apache/commons-math, Apache 2.0 commons-net 3.1: https://github.com/apache/commons-net, Apache 2.0 commons-pool 1.6: https://github.com/apache/commons-pool, Apache 2.0 cron-utils 5.0.5: https://mvnrepository.com/artifact/com.cronutils/cron-utils/5.0.5, Apache 2.0 curator-client 4.3.0: 
https://mvnrepository.com/artifact/org.apache.curator/curator-client/4.3.0, Apache 2.0 curator-framework 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-framework/4.3.0, Apache 2.0 curator-recipes 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-recipes/4.3.0, Apache 2.0 datanucleus-api-jdo 4.2.1: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-api-jdo/4.2.1, Apache 2.0 datanucleus-core 4.1.6: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-core/4.1.6, Apache 2.0 datanucleus-rdbms 4.1.7: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-rdbms/4.1.7, Apache 2.0 derby 10.14.2.0: https://github.com/apache/derby, Apache 2.0 druid 1.1.14: https://mvnrepository.com/artifact/com.alibaba/druid/1.1.14, Apache 2.0 error_prone_annotations 2.1.3 https://mvnrepository.com/artifact/com.google.errorprone/error_prone_annotations/2.1.3, Apache 2.0 gson 2.8.5: https://github.com/google/gson, Apache 2.0 guava 24.1-jre: https://mvnrepository.com/artifact/com.google.guava/guava/24.1-jre, Apache 2.0 guice 3.0: https://mvnrepository.com/artifact/com.google.inject/guice/3.0, Apache 2.0 guice-servlet 3.0: https://mvnrepository.com/artifact/com.google.inject.extensions/guice-servlet/3.0, Apache 2.0 hadoop-annotations 2.7.3:https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-annotations/2.7.3, Apache 2.0 hadoop-auth 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-auth/2.7.3, Apache 2.0 hadoop-aws 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/2.7.3, Apache 2.0 hadoop-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client/2.7.3, Apache 2.0 hadoop-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common/2.7.3, Apache 2.0 hadoop-hdfs 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs/2.7.3, Apache 2.0 hadoop-mapreduce-client-app 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-app/2.7.3, Apache 2.0 hadoop-mapreduce-client-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-common/2.7.3, Apache 2.0 hadoop-mapreduce-client-core 2.7.3: https://mvnrepository.com/artifact/io.hops/hadoop-mapreduce-client-core/2.7.3, Apache 2.0 hadoop-mapreduce-client-jobclient 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-jobclient/2.7.3, Apache 2.0 hadoop-mapreduce-client-shuffle 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-shuffle/2.7.3, Apache 2.0 hadoop-yarn-api 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-api/2.7.3, Apache 2.0 hadoop-yarn-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-client/2.7.3, Apache 2.0 hadoop-yarn-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-common/2.7.3, Apache 2.0 hadoop-yarn-server-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-server-common/2.7.3, Apache 2.0 hibernate-validator 6.0.14.Final: https://github.com/hibernate/hibernate-validator, Apache 2.0 HikariCP 3.2.0: https://mvnrepository.com/artifact/com.zaxxer/HikariCP/3.2.0, Apache 2.0 hive-common 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-common/2.1.0, Apache 2.0 hive-jdbc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc/2.1.0, Apache 2.0 hive-metastore 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-metastore/2.1.0, Apache 2.0 
hive-orc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-orc/2.1.0, Apache 2.0 hive-serde 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-serde/2.1.0, Apache 2.0 hive-service 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-service/2.1.0, Apache 2.0 hive-service-rpc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-service-rpc/2.1.0, Apache 2.0 hive-storage-api 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-storage-api/2.1.0, Apache 2.0 htrace-core 3.1.0-incubating: https://mvnrepository.com/artifact/org.apache.htrace/htrace-core/3.1.0-incubating, Apache 2.0 httpclient 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient/4.4.1, Apache 2.0 httpcore 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore/4.4.1, Apache 2.0 httpmime 4.5.7: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpmime/4.5.7, Apache 2.0 jackson-annotations 2.10.5: https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations/2.10.5, Apache 2.0 jackson-core 2.10.5: https://github.com/FasterXML/jackson-core, Apache 2.0 jackson-core-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-core-asl/1.9.13, Apache 2.0 jackson-databind 2.10.5: https://github.com/FasterXML/jackson-databind, Apache 2.0 jackson-datatype-jdk8 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jdk8/2.9.10, Apache 2.0 jackson-datatype-jsr310 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310/2.9.10, Apache 2.0 jackson-jaxrs 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-jaxrs/1.9.13, Apache 2.0 and LGPL 2.1 jackson-mapper-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-mapper-asl/1.9.13, Apache 2.0 jackson-module-parameter-names 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-parameter-names/2.9.10, Apache 2.0 jackson-xc 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-xc/1.9.13, Apache 2.0 and LGPL 2.1 javax.inject 1: https://mvnrepository.com/artifact/javax.inject/javax.inject/1, Apache 2.0 javax.jdo-3.2.0-m3: https://mvnrepository.com/artifact/org.datanucleus/javax.jdo/3.2.0-m3, Apache 2.0 java-xmlbuilder 0.4 : https://mvnrepository.com/artifact/com.jamesmurty.utils/java-xmlbuilder/0.4, Apache 2.0 jboss-logging 3.3.2.Final: https://mvnrepository.com/artifact/org.jboss.logging/jboss-logging/3.3.2.Final, Apache 2.0 jdo-api 3.0.1: https://mvnrepository.com/artifact/javax.jdo/jdo-api/3.0.1, Apache 2.0 jets3t 0.9.0: https://mvnrepository.com/artifact/net.java.dev.jets3t/jets3t/0.9.0, Apache 2.0 jettison 1.1: https://github.com/jettison-json/jettison, Apache 2.0 jetty 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty/6.1.26, Apache 2.0 and EPL 1.0 jetty-continuation 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-continuation/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-http 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-http/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-io 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-io/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-security 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-security/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-server 9.4.14.v20181114: 
https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-server/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-servlet 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlet/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-servlets 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlets/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-util 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty-util/6.1.26, Apache 2.0 and EPL 1.0 jetty-util 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-util/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-webapp 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-webapp/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jetty-xml 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-xml/9.4.14.v20181114, Apache 2.0 and EPL 1.0 jna 4.5.2: https://mvnrepository.com/artifact/net.java.dev.jna/jna/4.5.2, Apache 2.0 and LGPL 2.1 jna-platform 4.5.2: https://mvnrepository.com/artifact/net.java.dev.jna/jna-platform/4.5.2, Apache 2.0 and LGPL 2.1 joda-time 2.10.1: https://github.com/JodaOrg/joda-time, Apache 2.0 jpam 1.1: https://mvnrepository.com/artifact/net.sf.jpam/jpam/1.1, Apache 2.0 jsqlparser 2.1: https://github.com/JSQLParser/JSqlParser, Apache 2.0 or LGPL 2.1 jsr305 3.0.0: https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305, Apache 2.0 jsr305 1.3.9: https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305, Apache 2.0 j2objc-annotations 1.1 https://mvnrepository.com/artifact/com.google.j2objc/j2objc-annotations/1.1, Apache 2.0 libfb303 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libfb303/0.9.3, Apache 2.0 libthrift 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libthrift/0.9.3, Apache 2.0 log4j-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api/2.11.2, Apache 2.0 log4j-core-2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core/2.11.2, Apache 2.0 log4j 1.2.17: https://mvnrepository.com/artifact/log4j/log4j/1.2.17, Apache 2.0 log4j-1.2-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-1.2-api/2.11.2, Apache 2.0 lz4 1.3.0: https://mvnrepository.com/artifact/net.jpountz.lz4/lz4/1.3.0, Apache 2.0 mapstruct 1.2.0.Final: https://github.com/mapstruct/mapstruct, Apache 2.0 maven-aether-provider 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-aether-provider/3.0.4, Apache 2.0 maven-artifact 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-artifact/3.0.4, Apache 2.0 maven-compat 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-compat/3.0.4, Apache 2.0 maven-core 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-core/3.0.4, Apache 2.0 maven-embedder 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-embedder/3.0.4, Apache 2.0 maven-model 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-model/3.0.4, Apache 2.0 maven-model-builder 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-model-builder/3.0.4, Apache 2.0 maven-plugin-api 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-plugin-api/3.0.4, Apache 2.0 maven-repository-metadata 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-repository-metadata/3.0.4, Apache 2.0 maven-settings 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-settings/3.0.4, Apache 2.0 maven-settings-builder 3.0.4: 
https://mvnrepository.com/artifact/org.apache.maven/maven-settings-builder/3.0.4, Apache 2.0 mybatis 3.5.2 https://mvnrepository.com/artifact/org.mybatis/mybatis/3.5.2, Apache 2.0 mybatis-plus 3.2.0: https://github.com/baomidou/mybatis-plus, Apache 2.0 mybatis-plus-annotation 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-annotation/3.2.0, Apache 2.0 mybatis-plus-boot-starter 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-boot-starter/3.2.0, Apache 2.0 mybatis-plus-core 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-core/3.2.0, Apache 2.0 mybatis-plus-extension 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-extension/3.2.0, Apache 2.0 mybatis-spring 2.0.2: https://mvnrepository.com/artifact/org.mybatis/mybatis-spring/2.0.2, Apache 2.0 netty 3.6.2.Final: https://github.com/netty/netty, Apache 2.0 netty-all 4.1.33.Final: https://github.com/netty/netty/blob/netty-4.1.33.Final/LICENSE.txt, Apache 2.0 opencsv 2.3: https://mvnrepository.com/artifact/net.sf.opencsv/opencsv/2.3, Apache 2.0 parquet-hadoop-bundle 1.8.1: https://mvnrepository.com/artifact/org.apache.parquet/parquet-hadoop-bundle/1.8.1, Apache 2.0 poi 3.17: https://mvnrepository.com/artifact/org.apache.poi/poi/3.17, Apache 2.0 plexus-cipher 1.7.0: https://mvnrepository.com/artifact/org.sonatype.plexus/plexus-cipher/1.7.0, Apache 2.0 plexus-classworlds 2.4: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-classworlds/2.4, Apache 2.0 plexus-component-annotations 1.5.5: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-component-annotations/1.5.5, Apache 2.0 plexus-container-default 1.5.5: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-container-default/1.5.5, Apache 2.0 plexus-interpolation 1.14: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-interpolation/1.14, Apache 2.0 plexus-sec-dispatcher 1.3: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-sec-dispatcher/1.3, Apache 2.0 plexus-utils 2.0.6: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-utils/2.0.6, Apache 2.0 quartz 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz/2.3.0, Apache 2.0 quartz-jobs 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz-jobs/2.3.0, Apache 2.0 resolver 1.5: https://mvnrepository.com/artifact/io.airlift.resolver/resolver/1.5 Apache 2.0 snakeyaml 1.23: https://mvnrepository.com/artifact/org.yaml/snakeyaml/1.23, Apache 2.0 snappy 0.2: https://mvnrepository.com/artifact/org.iq80.snappy/snappy/0.2, Apache 2.0 snappy-java 1.0.4.1: https://github.com/xerial/snappy-java, Apache 2.0 spring-aop 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-aop/5.1.18.RELEASE, Apache 2.0 spring-beans 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-beans/5.1.18.RELEASE, Apache 2.0 spring-boot 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot/2.1.17.RELEASE, Apache 2.0 spring-boot-autoconfigure 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-autoconfigure/2.1.17.RELEASE, Apache 2.0 spring-boot-starter 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-aop 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-aop/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-jdbc 2.1.17.RELEASE: 
https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-jetty 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jetty/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-json 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-json/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-logging 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-logging/2.1.17.RELEASE, Apache 2.0 spring-boot-starter-web 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-web/2.1.17.RELEASE, Apache 2.0 spring-context 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-context/5.1.18.RELEASE, Apache 2.0 spring-core 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-core/5.1.18.RELEASE, Apache 2.0 spring-expression 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-expression/5.1.18.RELEASE, Apache 2.0 springfox-core 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-core, Apache 2.0 springfox-schema 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-schema, Apache 2.0 springfox-spi 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spi, Apache 2.0 springfox-spring-web 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spring-web/2.9.2, Apache 2.0 springfox-swagger2 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger2/2.9.2, Apache 2.0 springfox-swagger-common 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-common/2.9.2, Apache 2.0 springfox-swagger-ui 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-ui/2.9.2, Apache 2.0 spring-jcl 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jcl/5.1.18.RELEASE, Apache 2.0 spring-jdbc 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jdbc/5.1.18.RELEASE, Apache 2.0 spring-plugin-core 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-core/1.2.0.RELEASE, Apache 2.0 spring-plugin-metadata 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-metadata/1.2.0.RELEASE, Apache 2.0 spring-tx 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-tx/5.1.18.RELEASE, Apache 2.0 spring-web 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-web/5.1.18.RELEASE, Apache 2.0 spring-webmvc 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-webmvc/5.1.18.RELEASE, Apache 2.0 swagger-annotations 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-annotations/1.5.20, Apache 2.0 swagger-bootstrap-ui 1.9.3: https://mvnrepository.com/artifact/com.github.xiaoymin/swagger-bootstrap-ui/1.9.3, Apache 2.0 swagger-models 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-models/1.5.20, Apache 2.0 tephra-api 0.6.0: https://mvnrepository.com/artifact/co.cask.tephra/tephra-api/0.6.0, Apache 2.0 validation-api 2.0.1.Final: https://mvnrepository.com/artifact/javax.validation/validation-api/2.0.1.Final, Apache 2.0 wagon-provider-api 2.2: https://mvnrepository.com/artifact/org.apache.maven.wagon/wagon-provider-api/2.2, Apache 2.0 xbean-reflect 3.4: https://mvnrepository.com/artifact/org.apache.xbean/xbean-reflect/3.4, Apache 2.0 xercesImpl 
2.9.1: https://mvnrepository.com/artifact/xerces/xercesImpl/2.9.1, Apache 2.0 xml-apis 1.4.01: https://mvnrepository.com/artifact/xml-apis/xml-apis/1.4.01, Apache 2.0 and W3C zookeeper 3.4.14: https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper/3.4.14, Apache 2.0 presto-jdbc 0.238.1 https://mvnrepository.com/artifact/com.facebook.presto/presto-jdbc/0.238.1 protostuff-core 1.7.2: https://github.com/protostuff/protostuff/protostuff-core Apache-2.0 protostuff-runtime 1.7.2: https://github.com/protostuff/protostuff/protostuff-core Apache-2.0 protostuff-api 1.7.2: https://github.com/protostuff/protostuff/protostuff-api Apache-2.0 protostuff-collectionschema 1.7.2: https://github.com/protostuff/protostuff/protostuff-collectionschema Apache-2.0 ======================================================================== BSD licenses ======================================================================== The following components are provided under a BSD license. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. asm 3.1: https://github.com/jdf/javalin/tree/master/lib/asm-3.1, BSD javolution 5.5.1: https://mvnrepository.com/artifact/javolution/javolution/5.5.1, BSD jline 0.9.94: https://github.com/jline/jline3, BSD jsch 0.1.42: https://mvnrepository.com/artifact/com.jcraft/jsch/0.1.42, BSD leveldbjni-all 1.8: https://github.com/fusesource/leveldbjni, BSD-3-Clause postgresql 42.1.4: https://mvnrepository.com/artifact/org.postgresql/postgresql/42.1.4, BSD 2-clause protobuf-java 2.5.0: https://mvnrepository.com/artifact/com.google.protobuf/protobuf-java/2.5.0, BSD 2-clause paranamer 2.3: https://mvnrepository.com/artifact/com.thoughtworks.paranamer/paranamer/2.3, BSD threetenbp 1.3.6: https://mvnrepository.com/artifact/org.threeten/threetenbp/1.3.6, BSD 3-clause xmlenc 0.52: https://mvnrepository.com/artifact/xmlenc/xmlenc/0.52, BSD hamcrest-core 1.3: https://mvnrepository.com/artifact/org.hamcrest/hamcrest-core/1.3, BSD 2-Clause ======================================================================== CDDL licenses ======================================================================== The following components are provided under the CDDL License. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. 
activation 1.1: https://mvnrepository.com/artifact/javax.activation/activation/1.1 CDDL 1.0 javax.activation-api 1.2.0: https://mvnrepository.com/artifact/javax.activation/javax.activation-api/1.2.0, CDDL and LGPL 2.0 javax.annotation-api 1.3.2: https://mvnrepository.com/artifact/javax.annotation/javax.annotation-api/1.3.2, CDDL + GPLv2 javax.mail 1.6.2: https://mvnrepository.com/artifact/com.sun.mail/javax.mail/1.6.2, CDDL/GPLv2 javax.servlet-api 3.1.0: https://mvnrepository.com/artifact/javax.servlet/javax.servlet-api/3.1.0, CDDL + GPLv2 jaxb-api 2.3.1: https://mvnrepository.com/artifact/javax.xml.bind/jaxb-api/2.3.1, CDDL 1.1 jaxb-impl 2.2.3-1: https://mvnrepository.com/artifact/com.sun.xml.bind/jaxb-impl/2.2.3-1, CDDL and GPL 1.1 jersey-client 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-client/1.9, CDDL 1.1 and GPL 1.1 jersey-core 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-core/1.9, CDDL 1.1 and GPL 1.1 jersey-guice 1.9: https://mvnrepository.com/artifact/com.sun.jersey.contribs/jersey-guice/1.9, CDDL 1.1 and GPL 1.1 jersey-json 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-json/1.9, CDDL 1.1 and GPL 1.1 jersey-server 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-server/1.9, CDDL 1.1 and GPL 1.1 jta 1.1: https://mvnrepository.com/artifact/javax.transaction/jta/1.1, CDDL 1.0 transaction-api 1.1: https://mvnrepository.com/artifact/javax.transaction/transaction-api/1.1, CDDL 1.0 ======================================================================== EPL licenses ======================================================================== The following components are provided under the EPL License. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. aether-api 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-api/1.13.1, EPL 1.0 aether-connector-asynchttpclient 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-connector-asynchttpclient/1.13.1, EPL 1.0 aether-connector-file 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-connector-file/1.13.1, EPL 1.0 aether-impl 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-impl/1.13.1, EPL 1.0 aether-spi 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-spi/1.13.1, EPL 1.0 aether-util 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-util/1.13.1, EPL 1.0 aspectjweaver 1.9.2:https://mvnrepository.com/artifact/org.aspectj/aspectjweaver/1.9.2, EPL 1.0 logback-classic 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-classic/1.2.3, EPL 1.0 and LGPL 2.1 logback-core 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-core/1.2.3, EPL 1.0 and LGPL 2.1 oshi-core 3.9.1: https://mvnrepository.com/artifact/com.github.oshi/oshi-core/3.9.1, EPL 1.0 junit 4.12: https://mvnrepository.com/artifact/junit/junit/4.12, EPL 1.0 h2-1.4.200 https://github.com/h2database/h2database/blob/master/LICENSE.txt, MPL 2.0 or EPL 1.0 ======================================================================== MIT licenses ======================================================================== The following components are provided under a MIT 2.0 license. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. 
jul-to-slf4j 1.7.25: https://mvnrepository.com/artifact/org.slf4j/jul-to-slf4j/1.7.25, MIT mssql-jdbc 6.1.0.jre8: https://mvnrepository.com/artifact/com.microsoft.sqlserver/mssql-jdbc/6.1.0.jre8, MIT slf4j-api 1.7.5: https://mvnrepository.com/artifact/org.slf4j/slf4j-api/1.7.5, MIT animal-sniffer-annotations 1.14 https://mvnrepository.com/artifact/org.codehaus.mojo/animal-sniffer-annotations/1.14, MIT checker-compat-qual 2.0.0 https://mvnrepository.com/artifact/org.checkerframework/checker-compat-qual/2.0.0, MIT + GPLv2 ======================================================================== MPL 1.1 licenses ======================================================================== The following components are provided under a MPL 1.1 license. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. jamon-runtime 2.3.1: https://mvnrepository.com/artifact/org.jamon/jamon-runtime/2.3.1, MPL-1.1 javassist 3.26.0-GA: https://github.com/jboss-javassist/javassist, MPL-1.1 ======================================================================== Public Domain licenses ======================================================================== xz 1.0: https://mvnrepository.com/artifact/org.tukaani/xz/1.0, Public Domain aopalliance 1.0: https://mvnrepository.com/artifact/aopalliance/aopalliance/1.0, Public Domain ======================================== WTFPL License ======================================== reflections 0.9.12: https://github.com/ronmamo/reflections WTFPL ======================================================================== UI related licenses ======================================================================== The following components are used in UI.See project link for details. The text of each license is also included at licenses/ui-licenses/LICENSE-[project].txt. ======================================== MIT licenses ======================================== @form-create/element-ui 1.0.18: https://github.com/xaboy/form-create MIT axios 0.16.2: https://github.com/axios/axios MIT bootstrap 3.3.7: https://github.com/twbs/bootstrap MIT canvg 1.5.1: https://github.com/canvg/canvg MIT clipboard 2.0.1: https://github.com/zenorocha/clipboard.js MIT codemirror 5.43.0: https://github.com/codemirror/CodeMirror MIT dayjs 1.7.8: https://github.com/iamkun/dayjs MIT element-ui 2.13.2: https://github.com/ElemeFE/element MIT html2canvas 0.5.0-beta4: https://github.com/niklasvh/html2canvas MIT jquery 3.3.1: https://github.com/jquery/jquery MIT jquery-ui 1.12.1: https://github.com/jquery/jquery-ui MIT js-cookie 2.2.1: https://github.com/js-cookie/js-cookie MIT jsplumb 2.8.6: https://github.com/jsplumb/jsplumb MIT and GPLv2 lodash 4.17.11: https://github.com/lodash/lodash MIT vue-treeselect 0.4.0: https://github.com/riophae/vue-treeselect MIT vue 2.5.17: https://github.com/vuejs/vue MIT vue-router 2.7.0: https://github.com/vuejs/vue-router MIT vuex 3.0.0: https://github.com/vuejs/vuex MIT vuex-router-sync 4.1.2: https://github.com/vuejs/vuex-router-sync MIT dagre 0.8.5: https://github.com/dagrejs/dagre MIT ======================================== Apache 2.0 licenses ======================================== echarts 4.1.0: https://github.com/apache/incubator-echarts Apache-2.0 remixicon 2.5.0 https://github.com/Remix-Design/remixicon Apache-2.0 ======================================== BSD licenses ======================================== d3 3.5.17: https://github.com/d3/d3 BSD-3-Clause
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,292
There is a vulnerability in postgresql 42.1.4, upgrade recommended
https://github.com/apache/dolphinscheduler/blob/ac15f45b0b51b115ebd02aa3dd739d53c99523b5/pom.xml#L92 (CVE-2020-13692, CVE-2018-10936). Recommended upgrade version: 42.2.13
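A minimal sketch of the recommended change, assuming the driver version is managed through the `postgresql.version` property declared in the pom.xml content of this record (the exact patch in PR #5318 may differ):

```xml
<!-- In the <properties> section of pom.xml: bump the managed PostgreSQL
     JDBC driver from the vulnerable 42.1.4 to the recommended 42.2.13. -->
<postgresql.version>42.2.13</postgresql.version>
```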
https://github.com/apache/dolphinscheduler/issues/5292
https://github.com/apache/dolphinscheduler/pull/5318
837c9ba2cc48377654031721c9da2ab7410e5390
f82da57277c825d4588a6a0654b7730d333a5b18
"2021-04-16T03:13:46Z"
java
"2021-04-20T07:04:29Z"
pom.xml
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>${revision}</version> <packaging>pom</packaging> <name>${project.artifactId}</name> <url>http://dolphinscheduler.apache.org</url> <description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. </description> <licenses> <license> <name>Apache License 2.0</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> <distribution>repo</distribution> </license> </licenses> <scm> <connection>scm:git:https://github.com/apache/dolphinscheduler.git</connection> <developerConnection>scm:git:https://github.com/apache/dolphinscheduler.git</developerConnection> <url>https://github.com/apache/dolphinscheduler</url> <tag>HEAD</tag> </scm> <mailingLists> <mailingList> <name>DolphinScheduler Developer List</name> <post>dev@dolphinscheduler.apache.org</post> <subscribe>dev-subscribe@dolphinscheduler.apache.org</subscribe> <unsubscribe>dev-unsubscribe@dolphinscheduler.apache.org</unsubscribe> </mailingList> </mailingLists> <parent> <groupId>org.apache</groupId> <artifactId>apache</artifactId> <version>21</version> </parent> <properties> <revision>1.3.6-SNAPSHOT</revision> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <curator.version>4.3.0</curator.version> <spring.version>5.1.18.RELEASE</spring.version> <spring.boot.version>2.1.17.RELEASE</spring.boot.version> <java.version>1.8</java.version> <logback.version>1.2.3</logback.version> <hadoop.version>2.7.3</hadoop.version> <quartz.version>2.3.0</quartz.version> <jackson.version>2.10.5</jackson.version> <mybatis-plus.version>3.2.0</mybatis-plus.version> <mybatis.spring.version>2.0.1</mybatis.spring.version> <cron.utils.version>5.0.5</cron.utils.version> <druid.version>1.1.22</druid.version> <h2.version>1.4.200</h2.version> <commons.codec.version>1.11</commons.codec.version> <commons.logging.version>1.1.1</commons.logging.version> <httpclient.version>4.4.1</httpclient.version> <httpcore.version>4.4.1</httpcore.version> <junit.version>4.12</junit.version> <mysql.connector.version>5.1.34</mysql.connector.version> <slf4j.api.version>1.7.5</slf4j.api.version> <slf4j.log4j12.version>1.7.5</slf4j.log4j12.version> 
<commons.collections.version>3.2.2</commons.collections.version> <commons.httpclient>3.0.1</commons.httpclient> <commons.beanutils.version>1.9.4</commons.beanutils.version> <commons.configuration.version>1.10</commons.configuration.version> <commons.email.version>1.5</commons.email.version> <poi.version>3.17</poi.version> <javax.servlet.api.version>3.1.0</javax.servlet.api.version> <commons.collections4.version>4.1</commons.collections4.version> <guava.version>24.1-jre</guava.version> <postgresql.version>42.1.4</postgresql.version> <hive.jdbc.version>2.1.0</hive.jdbc.version> <commons.io.version>2.4</commons.io.version> <oshi.core.version>3.9.1</oshi.core.version> <clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version> <mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version> <presto.jdbc.version>0.238.1</presto.jdbc.version> <spotbugs.version>3.1.12</spotbugs.version> <checkstyle.version>3.0.0</checkstyle.version> <zookeeper.version>3.4.14</zookeeper.version> <frontend-maven-plugin.version>1.6</frontend-maven-plugin.version> <maven-compiler-plugin.version>3.3</maven-compiler-plugin.version> <maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version> <maven-release-plugin.version>2.5.3</maven-release-plugin.version> <maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version> <maven-source-plugin.version>2.4</maven-source-plugin.version> <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version> <maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version> <rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version> <jacoco.version>0.8.4</jacoco.version> <jcip.version>1.0</jcip.version> <maven.deploy.skip>false</maven.deploy.skip> <cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version> <mockito.version>2.21.0</mockito.version> <powermock.version>2.0.2</powermock.version> <servlet-api.version>2.5</servlet-api.version> <swagger.version>1.9.3</swagger.version> <springfox.version>2.9.2</springfox.version> <swagger-models.version>1.5.24</swagger-models.version> <guava-retry.version>2.0.0</guava-retry.version> <dep.airlift.version>0.184</dep.airlift.version> <dep.packaging.version>${dep.airlift.version}</dep.packaging.version> <protostuff.version>1.7.2</protostuff.version> <reflections.version>0.9.12</reflections.version> <byte-buddy.version>1.9.16</byte-buddy.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <!-- quartz--> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz-jobs</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> <version>${cron.utils.version}</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>${druid.version}</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-parent</artifactId> <version>${spring.boot.version}</version> <type>pom</type> <scope>import</scope> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> 
<version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-tx</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-jdbc</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <version>${spring.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-server</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert-plugin</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-dao</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-api</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-remote</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-service</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-spi</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-framework</artifactId> <version>${curator.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-recipes</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </exclusion> <exclusion> <artifactId>netty</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-annotations</artifactId> </exclusion> </exclusions> <version>${zookeeper.version}</version> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec.version}</version> </dependency> <dependency> <groupId>commons-logging</groupId> <artifactId>commons-logging</artifactId> <version>${commons.logging.version}</version> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>${httpclient.version}</version> </dependency> <dependency> 
<groupId>org.apache.httpcomponents</groupId> <artifactId>httpcore</artifactId> <version>${httpcore.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-core</artifactId> <version>${jackson.version}</version> </dependency> <!--protostuff--> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-core --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-core</artifactId> <version>${protostuff.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-runtime --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-runtime</artifactId> <version>${protostuff.version}</version> </dependency> <dependency> <groupId>net.bytebuddy</groupId> <artifactId>byte-buddy</artifactId> <version>${byte-buddy.version}</version> </dependency> <dependency> <groupId>org.reflections</groupId> <artifactId>reflections</artifactId> <version>${reflections.version}</version> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockito.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-module-junit4</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-api-mockito2</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> <exclusions> <exclusion> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>${mysql.connector.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> <version>${h2.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${slf4j.api.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>${slf4j.log4j12.version}</version> </dependency> <dependency> <groupId>commons-collections</groupId> <artifactId>commons-collections</artifactId> <version>${commons.collections.version}</version> </dependency> <dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> <version>${commons.httpclient}</version> </dependency> <dependency> <groupId>commons-beanutils</groupId> <artifactId>commons-beanutils</artifactId> <version>${commons.beanutils.version}</version> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> <version>${commons.configuration.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> 
<version>${logback.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-email</artifactId> <version>${commons.email.version}</version> </dependency> <!--excel poi--> <dependency> <groupId>org.apache.poi</groupId> <artifactId>poi</artifactId> <version>${poi.version}</version> </dependency> <!-- hadoop --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> <exclusions> <exclusion> <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <artifactId>com.sun.jersey</artifactId> <groupId>jersey-json</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-yarn-common</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-aws</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-collections4</artifactId> <version>${commons.collections4.version}</version> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> <version>${postgresql.version}</version> </dependency> <dependency> <groupId>org.apache.hive</groupId> <artifactId>hive-jdbc</artifactId> <version>${hive.jdbc.version}</version> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>${commons.io.version}</version> </dependency> <dependency> <groupId>com.github.oshi</groupId> <artifactId>oshi-core</artifactId> <version>${oshi.core.version}</version> </dependency> <dependency> <groupId>ru.yandex.clickhouse</groupId> <artifactId>clickhouse-jdbc</artifactId> <version>${clickhouse.jdbc.version}</version> </dependency> <dependency> <groupId>com.microsoft.sqlserver</groupId> <artifactId>mssql-jdbc</artifactId> <version>${mssql.jdbc.version}</version> </dependency> <dependency> <groupId>com.facebook.presto</groupId> <artifactId>presto-jdbc</artifactId> <version>${presto.jdbc.version}</version> </dependency> <dependency> <groupId>net.jcip</groupId> <artifactId>jcip-annotations</artifactId> <version>${jcip.version}</version> <optional>true</optional> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>servlet-api</artifactId> <version>${servlet-api.version}</version> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>javax.servlet-api</artifactId> <version>${javax.servlet.api.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger2</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger-ui</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.swagger</groupId> <artifactId>swagger-models</artifactId> <version>${swagger-models.version}</version> </dependency> <dependency> <groupId>com.github.xiaoymin</groupId> <artifactId>swagger-bootstrap-ui</artifactId> 
<version>${swagger.version}</version> </dependency> <dependency> <groupId>com.github.rholder</groupId> <artifactId>guava-retrying</artifactId> <version>${guava-retry.version}</version> </dependency> <dependency> <groupId>org.sonatype.aether</groupId> <artifactId>aether-api</artifactId> <version>1.13.1</version> </dependency> <dependency> <groupId>io.airlift.resolver</groupId> <artifactId>resolver</artifactId> <version>1.5</version> </dependency> <dependency> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> <version>6.2.1</version> </dependency> <dependency> <groupId>javax.activation</groupId> <artifactId>activation</artifactId> <version>1.1</version> </dependency> <dependency> <groupId>com.sun.mail</groupId> <artifactId>javax.mail</artifactId> <version>1.6.2</version> </dependency> </dependencies> </dependencyManagement> <build> <finalName>apache-dolphinscheduler-${project.version}</finalName> <pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <version>1.0.0</version> <extensions>true</extensions> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <version>1.0.4</version> <extensions>true</extensions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>rpm-maven-plugin</artifactId> <version>${rpm-maven-plugion.version}</version> <inherited>false</inherited> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>${java.version}</source> <target>${java.version}</target> <testSource>${java.version}</testSource> <testTarget>${java.version}</testTarget> </configuration> <version>${maven-compiler-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <tagNameFormat>@{project.version}</tagNameFormat> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-assembly-plugin</artifactId> <version>${maven-assembly-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <configuration> <source>8</source> <failOnError>false</failOnError> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>${maven-source-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> <version>${maven-dependency-plugin.version}</version> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <extensions>true</extensions> <!--<configuration>--> <!--<allowedProvidedDependencies>--> <!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>--> <!--</allowedProvidedDependencies>--> <!--</configuration>--> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <extensions>true</extensions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> 
<executions> <execution> <id>attach-sources</id> <phase>verify</phase> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <executions> <execution> <id>attach-javadocs</id> <goals> <goal>jar</goal> </goals> </execution> </executions> <configuration> <aggregate>true</aggregate> <charset>${project.build.sourceEncoding}</charset> <encoding>${project.build.sourceEncoding}</encoding> <docencoding>${project.build.sourceEncoding}</docencoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <autoVersionSubmodules>true</autoVersionSubmodules> <tagNameFormat>@{project.version}</tagNameFormat> <tagBase>${project.version}</tagBase> <!--<goals>-f pom.xml deploy</goals>--> </configuration> <dependencies> <dependency> <groupId>org.apache.maven.scm</groupId> <artifactId>maven-scm-provider-jgit</artifactId> <version>1.9.5</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>${maven-compiler-plugin.version}</version> <configuration> <source>${java.version}</source> <target>${java.version}</target> <encoding>${project.build.sourceEncoding}</encoding> <skip>false</skip><!--not skip compile test classes--> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>${maven-surefire-plugin.version}</version> <configuration> <includes> <include>**/api/controller/ProjectControllerTest.java</include> <include>**/api/controller/QueueControllerTest.java</include> <include>**/api/configuration/TrafficConfigurationTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TenantControllerTest.java</include> <include>**/api/dto/resources/filter/ResourceFilterTest.java</include> <include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include> <includeDataxTaskTest>**/api/enums/testGetEnum.java</includeDataxTaskTest> <include>**/api/enums/StatusTest.java</include> <include>**/api/exceptions/ApiExceptionHandlerTest.java</include> <include>**/api/exceptions/ServiceExceptionTest.java</include> <include>**/api/interceptor/LocaleChangeInterceptorTest.java</include> <include>**/api/interceptor/LoginHandlerInterceptorTest.java</include> <include>**/api/interceptor/RateLimitInterceptorTest.java</include> <include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include> <include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include> <include>**/api/security/SecurityConfigLDAPTest.java</include> <include>**/api/security/SecurityConfigPasswordTest.java</include> <include>**/api/service/AccessTokenServiceTest.java</include> <include>**/api/service/AlertGroupServiceTest.java</include> <include>**/api/service/BaseDAGServiceTest.java</include> <include>**/api/service/BaseServiceTest.java</include> <include>**/api/service/DataAnalysisServiceTest.java</include> <include>**/api/service/AlertPluginInstanceServiceTest.java</include> <include>**/api/service/DataSourceServiceTest.java</include> <include>**/api/service/ExecutorService2Test.java</include> <include>**/api/service/ExecutorServiceTest.java</include> 
<include>**/api/service/LoggerServiceTest.java</include> <include>**/api/service/MonitorServiceTest.java</include> <include>**/api/service/ProcessDefinitionServiceTest.java</include> <include>**/api/service/ProcessDefinitionVersionServiceTest.java</include> <include>**/api/service/ProcessInstanceServiceTest.java</include> <include>**/api/service/ProjectServiceTest.java</include> <include>**/api/service/QueueServiceTest.java</include> <include>**/api/service/ResourcesServiceTest.java</include> <include>**/api/service/SchedulerServiceTest.java</include> <include>**/api/service/SessionServiceTest.java</include> <include>**/api/service/TaskInstanceServiceTest.java</include> <include>**/api/service/TenantServiceTest.java</include> <include>**/api/service/UdfFuncServiceTest.java</include> <include>**/api/service/UiPluginServiceTest.java</include> <include>**/api/service/UserAlertGroupServiceTest.java</include> <include>**/api/service/UsersServiceTest.java</include> <include>**/api/service/WorkerGroupServiceTest.java</include> <include>**/api/service/WorkFlowLineageServiceTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TaskInstanceControllerTest.java</include> <include>**/api/controller/WorkFlowLineageControllerTest.java</include> <include>**/api/utils/exportprocess/DataSourceParamTest.java</include> <include>**/api/utils/exportprocess/DependentParamTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/FileUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/ResultTest.java</include> <include>**/common/graph/DAGTest.java</include> <include>**/common/os/OshiTest.java</include> <include>**/common/os/OSUtilsTest.java</include> <include>**/common/shell/ShellExecutorTest.java</include> <include>**/common/task/DataxParametersTest.java</include> <include>**/common/task/EntityTestUtils.java</include> <include>**/common/task/FlinkParametersTest.java</include> <include>**/common/task/HttpParametersTest.java</include> <include>**/common/task/SqlParametersTest.java</include> <include>**/common/task/SqoopParameterEntityTest.java</include> <include>**/common/threadutils/ThreadPoolExecutorsTest.java</include> <include>**/common/threadutils/ThreadUtilsTest.java</include> <include>**/common/utils/CollectionUtilsTest.java</include> <include>**/common/utils/CommonUtilsTest.java</include> <include>**/common/utils/DateUtilsTest.java</include> <include>**/common/utils/DependentUtilsTest.java</include> <include>**/common/utils/EncryptionUtilsTest.java</include> <include>**/common/utils/FileUtilsTest.java</include> <include>**/common/utils/JSONUtilsTest.java</include> <include>**/common/utils/LoggerUtilsTest.java</include> <include>**/common/utils/NetUtilsTest.java</include> <include>**/common/utils/OSUtilsTest.java</include> <include>**/common/utils/ParameterUtilsTest.java</include> <include>**/common/utils/TimePlaceholderUtilsTest.java</include> <include>**/common/utils/PreconditionsTest.java</include> <include>**/common/utils/PropertyUtilsTest.java</include> <include>**/common/utils/SchemaUtilsTest.java</include> <include>**/common/utils/ScriptRunnerTest.java</include> <include>**/common/utils/SensitiveLogUtilsTest.java</include> <include>**/common/utils/StringTest.java</include> <include>**/common/utils/StringUtilsTest.java</include> <include>**/common/utils/TaskParametersUtilsTest.java</include> 
<include>**/common/utils/VarPoolUtilsTest.java</include> <include>**/common/utils/HadoopUtilsTest.java</include> <include>**/common/utils/HttpUtilsTest.java</include> <include>**/common/utils/KerberosHttpClientTest.java</include> <include>**/common/utils/HiveConfUtilsTest.java</include> <include>**/common/ConstantsTest.java</include> <include>**/common/utils/HadoopUtils.java</include> <include>**/common/utils/RetryerUtilsTest.java</include> <include>**/common/plugin/DolphinSchedulerPluginLoaderTest.java</include> <include>**/common/enums/ExecutionStatusTest</include> <include>**/dao/mapper/AccessTokenMapperTest.java</include> <include>**/dao/mapper/AlertGroupMapperTest.java</include> <include>**/dao/mapper/CommandMapperTest.java</include> <include>**/dao/mapper/ConnectionFactoryTest.java</include> <include>**/dao/mapper/DataSourceMapperTest.java</include> <include>**/dao/datasource/MySQLDataSourceTest.java</include> <include>**/dao/entity/TaskInstanceTest.java</include> <include>**/dao/entity/UdfFuncTest.java</include> <include>**/remote/command/alert/AlertSendRequestCommandTest.java</include> <include>**/remote/command/alert/AlertSendResponseCommandTest.java</include> <include>**/remote/command/future/ResponseFutureTest.java</include> <include>**/remote/command/log/RemoveTaskLogRequestCommandTest.java</include> <include>**/remote/command/log/RemoveTaskLogResponseCommandTest.java</include> <include>**/remote/utils/HostTest.java</include> <include>**/remote/utils/NettyUtilTest.java</include> <include>**/remote/NettyRemotingClientTest.java</include> <include>**/rpc/RpcTest.java</include> <include>**/server/log/LoggerServerTest.java</include> <include>**/server/entity/SQLTaskExecutionContextTest.java</include> <include>**/server/log/MasterLogFilterTest.java</include> <include>**/server/log/SensitiveDataConverterTest.java</include> <include>**/server/log/LoggerRequestProcessorTest.java</include> <!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>--> <include>**/server/log/TaskLogFilterTest.java</include> <include>**/server/log/WorkerLogFilterTest.java</include> <include>**/server/master/config/MasterConfigTest.java</include> <include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include> <include>**/server/master/runner/MasterTaskExecThreadTest.java</include> <!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>--> <include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include> <include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/HostWorkerTest.java</include> <include>**/server/master/register/MasterRegistryTest.java</include> <include>**/server/master/registry/ServerNodeManagerTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include> <include>**/server/master/AlertManagerTest.java</include> <include>**/server/master/MasterCommandTest.java</include> <include>**/server/master/DependentTaskTest.java</include> <include>**/server/master/ConditionsTaskTest.java</include> <include>**/server/master/MasterExecThreadTest.java</include> <include>**/server/master/ParamsTest.java</include> <include>**/server/master/SubProcessTaskTest.java</include> <include>**/server/master/processor/TaskAckProcessorTest.java</include> <include>**/server/master/processor/TaskKillResponseProcessorTest.java</include> 
<include>**/server/master/processor/queue/TaskResponseServiceTest.java</include> <include>**/server/master/zk/ZKMasterClientTest.java</include> <include>**/server/register/ZookeeperRegistryCenterTest.java</include> <include>**/server/utils/DataxUtilsTest.java</include> <include>**/server/utils/ExecutionContextTestUtils.java</include> <include>**/server/utils/FlinkArgsUtilsTest.java</include> <include>**/server/utils/LogUtilsTest.java</include> <include>**/server/utils/MapReduceArgsUtilsTest.java</include> <include>**/server/utils/ParamUtilsTest.java</include> <include>**/server/utils/ProcessUtilsTest.java</include> <include>**/server/utils/SparkArgsUtilsTest.java</include> <include>**/server/worker/processor/TaskCallbackServiceTest.java</include> <include>**/server/worker/processor/TaskExecuteProcessorTest.java</include> <include>**/server/worker/registry/WorkerRegistryTest.java</include> <include>**/server/worker/shell/ShellCommandExecutorTest.java</include> <include>**/server/worker/sql/SqlExecutorTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/EnvFileTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/datax/DataxTaskTest.java</include> <!--<include>**/server/worker/task/http/HttpTaskTest.java</include>--> <include>**/server/worker/task/sqoop/SqoopTaskTest.java</include> <include>**/server/worker/task/shell/ShellTaskTest.java</include> <include>**/server/worker/task/TaskManagerTest.java</include> <include>**/server/worker/task/AbstractCommandExecutorTest.java</include> <include>**/server/worker/task/ShellTaskReturnTest.java</include> <include>**/server/worker/EnvFileTest.java</include> <include>**/server/worker/runner/TaskExecuteThreadTest.java</include> <include>**/server/worker/runner/WorkerManagerThreadTest.java</include> <include>**/service/quartz/cron/CronUtilsTest.java</include> <include>**/service/process/ProcessServiceTest.java</include> <include>**/service/zk/DefaultEnsembleProviderTest.java</include> <include>**/service/zk/ZKServerTest.java</include> <include>**/service/zk/CuratorZookeeperClientTest.java</include> <include>**/service/zk/RegisterOperatorTest.java</include> <include>**/service/queue/TaskUpdateQueueTest.java</include> <include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include> <include>**/service/log/LogClientServiceTest.java</include> <include>**/service/alert/AlertClientServiceTest.java</include> <include>**/dao/mapper/DataSourceUserMapperTest.java</include> <!--<iTaskUpdateQueueConsumerThreadnclude>**/dao/mapper/ErrorCommandMapperTest.java</iTaskUpdateQueueConsumerThreadnclude>--> <include>**/dao/mapper/ProcessDefinitionMapperTest.java</include> <include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapperTest.java</include> <include>**/dao/mapper/ProjectMapperTest.java</include> <include>**/dao/mapper/ProjectUserMapperTest.java</include> <include>**/dao/mapper/QueueMapperTest.java</include> <include>**/dao/mapper/ResourceUserMapperTest.java</include> <include>**/dao/mapper/ScheduleMapperTest.java</include> <include>**/dao/mapper/SessionMapperTest.java</include> <include>**/dao/mapper/TaskInstanceMapperTest.java</include> <include>**/dao/mapper/TenantMapperTest.java</include> <include>**/dao/mapper/UdfFuncMapperTest.java</include> <include>**/dao/mapper/UDFUserMapperTest.java</include> 
<include>**/dao/mapper/UserMapperTest.java</include> <include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include> <include>**/dao/mapper/PluginDefineTest.java</include> <include>**/dao/utils/DagHelperTest.java</include> <include>**/dao/AlertDaoTest.java</include> <include>**/dao/datasource/OracleDataSourceTest.java</include> <include>**/dao/datasource/HiveDataSourceTest.java</include> <include>**/dao/datasource/BaseDataSourceTest.java</include> <include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include> <include>**/dao/upgrade/WokrerGrouopDaoTest.java</include> <include>**/dao/upgrade/UpgradeDaoTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelTest.java</include> <include>**/plugin/alert/email/ExcelUtilsTest.java</include> <include>**/plugin/alert/email/MailUtilsTest.java</include> <include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include> <include>**/plugin/alert/wechat/WeChatSenderTest.java</include> <include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ProcessUtilsTest.java</include> <include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ScriptSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelTest.java</include> <include>**/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java</include> <include>**/plugin/alert/feishu/FeiShuSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertPluginTest.java</include> <include>**/plugin/alert/http/HttpSenderTest.java</include> <include>**/spi/params/PluginParamsTransferTest.java</include> <include>**/alert/plugin/EmailAlertPluginTest.java</include> <include>**/alert/plugin/AlertPluginManagerTest.java</include> <include>**/alert/plugin/DolphinPluginLoaderTest.java</include> <include>**/alert/utils/DingTalkUtilsTest.java</include> <include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include> <include>**/alert/utils/FuncUtilsTest.java</include> <include>**/alert/processor/AlertRequestProcessorTest.java</include> <include>**/alert/runner/AlertSenderTest.java</include> <include>**/alert/AlertServerTest.java</include> </includes> <!-- <skip>true</skip> --> </configuration> </plugin> <!-- jenkins plugin jacoco report--> <plugin> <groupId>org.jacoco</groupId> <artifactId>jacoco-maven-plugin</artifactId> <version>${jacoco.version}</version> <configuration> <destFile>target/jacoco.exec</destFile> <dataFile>target/jacoco.exec</dataFile> </configuration> <executions> <execution> <id>jacoco-initialize</id> <goals> <goal>prepare-agent</goal> </goals> </execution> <execution> <id>jacoco-site</id> <phase>test</phase> <goals> <goal>report</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-maven-plugin</artifactId> <version>${spotbugs.version}</version> <configuration> <xmlOutput>true</xmlOutput> <threshold>medium</threshold> <effort>default</effort> <excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile> <failOnError>true</failOnError> </configuration> <dependencies> <dependency> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs</artifactId> <version>4.0.0-beta4</version> </dependency> 
</dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-checkstyle-plugin</artifactId> <version>${checkstyle.version}</version> <dependencies> <dependency> <groupId>com.puppycrawl.tools</groupId> <artifactId>checkstyle</artifactId> <version>8.18</version> </dependency> </dependencies> <configuration> <consoleOutput>true</consoleOutput> <encoding>UTF-8</encoding> <configLocation>style/checkstyle.xml</configLocation> <suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation> <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression> <failOnViolation>true</failOnViolation> <violationSeverity>warning</violationSeverity> <includeTestSourceDirectory>true</includeTestSourceDirectory> <sourceDirectories> <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory> </sourceDirectories> <excludes>**\/generated-sources\/</excludes> <skip>true</skip> </configuration> <executions> <execution> <phase>compile</phase> <goals> <goal>check</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${cobertura-maven-plugin.version}</version> <configuration> <check> </check> <aggregate>true</aggregate> <outputDirectory>./target/cobertura</outputDirectory> <encoding>${project.build.sourceEncoding}</encoding> <quiet>true</quiet> <format>xml</format> <instrumentation> <ignoreTrivial>true</ignoreTrivial> </instrumentation> </configuration> </plugin> </plugins> </build> <modules> <module>dolphinscheduler-alert-plugin</module> <module>dolphinscheduler-ui</module> <module>dolphinscheduler-server</module> <module>dolphinscheduler-common</module> <module>dolphinscheduler-api</module> <module>dolphinscheduler-dao</module> <module>dolphinscheduler-alert</module> <module>dolphinscheduler-dist</module> <module>dolphinscheduler-remote</module> <module>dolphinscheduler-service</module> <module>dolphinscheduler-spi</module> <module>dolphinscheduler-microbench</module> </modules> </project>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,292
There is a vulnerability in postgresql 42.1.4, upgrade recommended
https://github.com/apache/dolphinscheduler/blob/ac15f45b0b51b115ebd02aa3dd739d53c99523b5/pom.xml#L92 (CVE-2020-13692, CVE-2018-10936). Recommended upgrade version: 42.2.13
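Since this record tracks tools/dependencies/known-dependencies.txt, the pinned jar name in that list would presumably need the same bump; a hedged sketch of the expected change (the actual edit is in PR #5318):

```diff
- postgresql-42.1.4.jar
+ postgresql-42.2.13.jar
```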
https://github.com/apache/dolphinscheduler/issues/5292
https://github.com/apache/dolphinscheduler/pull/5318
837c9ba2cc48377654031721c9da2ab7410e5390
f82da57277c825d4588a6a0654b7730d333a5b18
"2021-04-16T03:13:46Z"
java
"2021-04-20T07:04:29Z"
tools/dependencies/known-dependencies.txt
HikariCP-3.2.0.jar activation-1.1.jar aether-api-1.13.1.jar aether-connector-asynchttpclient-1.13.1.jar aether-connector-file-1.13.1.jar aether-impl-1.13.1.jar aether-spi-1.13.1.jar aether-util-1.13.1.jar animal-sniffer-annotations-1.14.jar aopalliance-1.0.jar apache-el-8.5.54.jar apacheds-i18n-2.0.0-M15.jar apacheds-kerberos-codec-2.0.0-M15.jar api-asn1-api-1.0.0-M20.jar api-util-1.0.0-M20.jar asm-3.1.jar asm-6.2.1.jar aspectjweaver-1.9.6.jar async-http-client-1.6.5.jar audience-annotations-0.5.0.jar avro-1.7.4.jar aws-java-sdk-1.7.4.jar bonecp-0.8.0.RELEASE.jar byte-buddy-1.9.16.jar checker-compat-qual-2.0.0.jar classmate-1.4.0.jar clickhouse-jdbc-0.1.52.jar commons-email-1.5.jar commons-cli-1.2.jar commons-codec-1.11.jar commons-collections-3.2.2.jar commons-collections4-4.1.jar commons-compress-1.4.1.jar commons-compiler-3.0.16.jar commons-configuration-1.10.jar commons-daemon-1.0.13.jar commons-beanutils-1.9.4.jar commons-dbcp-1.4.jar commons-httpclient-3.0.1.jar commons-io-2.4.jar commons-lang-2.6.jar commons-logging-1.1.1.jar commons-math3-3.1.1.jar commons-net-3.1.jar commons-pool-1.6.jar cron-utils-5.0.5.jar curator-client-4.3.0.jar curator-framework-4.3.0.jar curator-recipes-4.3.0.jar datanucleus-api-jdo-4.2.1.jar datanucleus-core-4.1.6.jar datanucleus-rdbms-4.1.7.jar derby-10.14.2.0.jar error_prone_annotations-2.1.3.jar druid-1.1.22.jar gson-2.8.6.jar guava-24.1-jre.jar guava-retrying-2.0.0.jar guice-3.0.jar guice-servlet-3.0.jar h2-1.4.200.jar hadoop-annotations-2.7.3.jar hadoop-auth-2.7.3.jar hadoop-aws-2.7.3.jar hadoop-client-2.7.3.jar hadoop-common-2.7.3.jar hadoop-hdfs-2.7.3.jar hadoop-mapreduce-client-app-2.7.3.jar hadoop-mapreduce-client-common-2.7.3.jar hadoop-mapreduce-client-core-2.7.3.jar hadoop-mapreduce-client-jobclient-2.7.3.jar hadoop-mapreduce-client-shuffle-2.7.3.jar hadoop-yarn-api-2.7.3.jar hadoop-yarn-client-2.7.3.jar hadoop-yarn-common-2.7.3.jar hadoop-yarn-server-common-2.7.3.jar hamcrest-core-1.3.jar hibernate-validator-6.0.20.Final.jar hive-common-2.1.0.jar hive-jdbc-2.1.0.jar hive-metastore-2.1.0.jar hive-orc-2.1.0.jar hive-serde-2.1.0.jar hive-service-2.1.0.jar hive-service-rpc-2.1.0.jar hive-storage-api-2.1.0.jar htrace-core-3.1.0-incubating.jar httpclient-4.4.1.jar httpcore-4.4.1.jar httpmime-4.5.12.jar j2objc-annotations-1.1.jar jackson-annotations-2.10.5.jar jackson-core-2.10.5.jar jackson-core-asl-1.9.13.jar jackson-databind-2.10.5.jar jackson-datatype-jdk8-2.9.10.jar jackson-datatype-jsr310-2.9.10.jar jackson-jaxrs-1.9.13.jar jackson-mapper-asl-1.9.13.jar jackson-module-parameter-names-2.9.10.jar jackson-xc-1.9.13.jar jamon-runtime-2.3.1.jar janino-3.0.16.jar java-xmlbuilder-0.4.jar javassist-3.26.0-GA.jar javax.activation-api-1.2.0.jar javax.annotation-api-1.3.2.jar javax.inject-1.jar javax.jdo-3.2.0-m3.jar javax.mail-1.6.2.jar javax.servlet-api-3.1.0.jar javolution-5.5.1.jar jaxb-api-2.3.1.jar jaxb-impl-2.2.3-1.jar jboss-logging-3.3.3.Final.jar jdo-api-3.0.1.jar jersey-client-1.9.jar jersey-core-1.9.jar jersey-guice-1.9.jar jersey-json-1.9.jar jersey-server-1.9.jar jets3t-0.9.0.jar jettison-1.1.jar jetty-6.1.26.jar jetty-continuation-9.4.31.v20200723.jar jetty-http-9.4.31.v20200723.jar jetty-io-9.4.31.v20200723.jar jetty-security-9.4.31.v20200723.jar jetty-server-9.4.31.v20200723.jar jetty-servlet-9.4.31.v20200723.jar jetty-servlets-9.4.31.v20200723.jar jetty-util-6.1.26.jar jetty-util-9.4.31.v20200723.jar jetty-webapp-9.4.31.v20200723.jar jetty-xml-9.4.31.v20200723.jar jline-0.9.94.jar jna-4.5.2.jar jna-platform-4.5.2.jar joda-time-2.10.6.jar 
jpam-1.1.jar jsch-0.1.42.jar jsp-api-2.1.jar jsqlparser-2.1.jar jsr305-1.3.9.jar jsr305-3.0.0.jar jta-1.1.jar jul-to-slf4j-1.7.30.jar junit-4.12.jar leveldbjni-all-1.8.jar libfb303-0.9.3.jar libthrift-0.9.3.jar log4j-1.2-api-2.11.2.jar log4j-1.2.17.jar log4j-api-2.11.2.jar log4j-core-2.11.2.jar logback-classic-1.2.3.jar logback-core-1.2.3.jar lz4-1.3.0.jar mapstruct-1.2.0.Final.jar maven-aether-provider-3.0.4.jar maven-artifact-3.0.4.jar maven-compat-3.0.4.jar maven-core-3.0.4.jar maven-embedder-3.0.4.jar maven-model-3.0.4.jar maven-model-builder-3.0.4.jar maven-plugin-api-3.0.4.jar maven-repository-metadata-3.0.4.jar maven-settings-3.0.4.jar maven-settings-builder-3.0.4.jar mssql-jdbc-6.1.0.jre8.jar mybatis-3.5.2.jar mybatis-plus-3.2.0.jar mybatis-plus-annotation-3.2.0.jar mybatis-plus-boot-starter-3.2.0.jar mybatis-plus-core-3.2.0.jar mybatis-plus-extension-3.2.0.jar mybatis-spring-2.0.2.jar netty-3.6.2.Final.jar netty-all-4.1.52.Final.jar opencsv-2.3.jar oshi-core-3.9.1.jar paranamer-2.3.jar parquet-hadoop-bundle-1.8.1.jar plexus-cipher-1.7.jar plexus-classworlds-2.4.jar plexus-component-annotations-1.5.5.jar plexus-container-default-1.5.5.jar plexus-interpolation-1.14.jar plexus-sec-dispatcher-1.3.jar plexus-utils-2.0.6.jar poi-3.17.jar postgresql-42.1.4.jar presto-jdbc-0.238.1.jar protobuf-java-2.5.0.jar protostuff-core-1.7.2.jar protostuff-runtime-1.7.2.jar protostuff-api-1.7.2.jar protostuff-collectionschema-1.7.2.jar quartz-2.3.0.jar quartz-jobs-2.3.0.jar reflections-0.9.12.jar resolver-1.5.jar slf4j-api-1.7.5.jar snakeyaml-1.23.jar snappy-0.2.jar snappy-java-1.0.4.1.jar spring-aop-5.1.18.RELEASE.jar spring-beans-5.1.18.RELEASE.jar spring-boot-2.1.17.RELEASE.jar spring-boot-autoconfigure-2.1.17.RELEASE.jar spring-boot-starter-2.1.17.RELEASE.jar spring-boot-starter-aop-2.1.17.RELEASE.jar spring-boot-starter-jdbc-2.1.17.RELEASE.jar spring-boot-starter-jetty-2.1.17.RELEASE.jar spring-boot-starter-json-2.1.17.RELEASE.jar spring-boot-starter-logging-2.1.17.RELEASE.jar spring-boot-starter-web-2.1.17.RELEASE.jar spring-context-5.1.18.RELEASE.jar spring-core-5.1.18.RELEASE.jar spring-expression-5.1.18.RELEASE.jar spring-jcl-5.1.18.RELEASE.jar spring-jdbc-5.1.18.RELEASE.jar spring-plugin-core-1.2.0.RELEASE.jar spring-plugin-metadata-1.2.0.RELEASE.jar spring-tx-5.1.18.RELEASE.jar spring-web-5.1.18.RELEASE.jar spring-webmvc-5.1.18.RELEASE.jar springfox-core-2.9.2.jar springfox-schema-2.9.2.jar springfox-spi-2.9.2.jar springfox-spring-web-2.9.2.jar springfox-swagger-common-2.9.2.jar springfox-swagger-ui-2.9.2.jar springfox-swagger2-2.9.2.jar swagger-annotations-1.5.20.jar swagger-bootstrap-ui-1.9.3.jar swagger-models-1.5.24.jar tephra-api-0.6.0.jar transaction-api-1.1.jar validation-api-2.0.1.Final.jar wagon-provider-api-2.2.jar xbean-reflect-3.4.jar xercesImpl-2.9.1.jar xml-apis-1.4.01.jar xmlenc-0.52.jar xz-1.0.jar zookeeper-3.4.14.jar
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,261
[Bug][server-api] copying a node, the preTasks value is wrong
When we copy a node from a process definition, the copied node's preTasks value still points at the source node's predecessors. Even when we link the copied node to a new previous node, this value does not change. When a process instance is started from a failed node, this error may cause some copied nodes that come after the failed node to never execute.
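A hedged sketch of the fix idea, using the TaskNode model from the updated file below (the helper name and the set of copied fields are illustrative assumptions, not the change merged in the linked PR): when pasting a copied node, reset its dependency fields instead of carrying them over from the source.

```java
import java.util.ArrayList;

import org.apache.dolphinscheduler.common.model.TaskNode;

import com.fasterxml.jackson.core.JsonProcessingException;

public class CopyNodeSketch {

    /**
     * Hypothetical helper: copy the descriptive fields, then start the copy
     * with an empty depList. setDepList also rewrites preTasks to "[]", so
     * the copy no longer inherits the source node's predecessors.
     */
    public static TaskNode copyWithoutPreTasks(TaskNode source, String newId, String newName)
            throws JsonProcessingException {
        TaskNode copy = new TaskNode();
        copy.setId(newId);
        copy.setName(newName);
        copy.setType(source.getType());
        copy.setParams(source.getParams());
        copy.setRunFlag(source.getRunFlag());
        copy.setDepList(new ArrayList<>()); // key step: do NOT copy source.getPreTasks()
        return copy;
    }
}
```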
https://github.com/apache/dolphinscheduler/issues/5261
https://github.com/apache/dolphinscheduler/pull/5264
f82da57277c825d4588a6a0654b7730d333a5b18
5f1fc462c9ba321736f9fe9a1bfc057d1da894d0
"2021-04-12T12:33:58Z"
java
"2021-04-20T07:17:17Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.model; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.*; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.dolphinscheduler.common.utils.StringUtils; import java.io.IOException; import java.util.List; import java.util.Objects; public class TaskNode { /** * task node id */ private String id; /** * task node name */ private String name; /** * task node description */ private String desc; /** * task node type */ private String type; /** * the run flag has two states, NORMAL or FORBIDDEN */ private String runFlag; /** * the front field */ private String loc; /** * maximum number of retries */ private int maxRetryTimes; /** * Unit of retry interval: points */ private int retryInterval; /** * params information */ @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) private String params; /** * inner dependency information */ @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) private String preTasks; /** * users store additional information */ @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) private String extras; /** * node dependency list */ private List<String> depList; /** * outer dependency information */ @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) private String dependence; @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) private String conditionResult; /** * task instance priority */ private Priority taskInstancePriority; /** * worker group */ private String workerGroup; /** * worker group id */ private Integer workerGroupId; /** * task time out */ @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) private String timeout; /** * delay execution time. 
*/ private int delayTime; public String getId() { return id; } public void setId(String id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getDesc() { return desc; } public void setDesc(String desc) { this.desc = desc; } public String getType() { return type; } public void setType(String type) { this.type = type; } public String getParams() { return params; } public void setParams(String params) { this.params = params; } public String getPreTasks() { return preTasks; } public void setPreTasks(String preTasks) throws IOException { this.preTasks = preTasks; this.depList = JSONUtils.toList(preTasks, String.class); } public String getExtras() { return extras; } public void setExtras(String extras) { this.extras = extras; } public List<String> getDepList() { return depList; } public void setDepList(List<String> depList) throws JsonProcessingException { this.depList = depList; this.preTasks = JSONUtils.toJsonString(depList); } public String getLoc() { return loc; } public void setLoc(String loc) { this.loc = loc; } public String getRunFlag(){ return runFlag; } public void setRunFlag(String runFlag) { this.runFlag = runFlag; } public Boolean isForbidden(){ return (StringUtils.isNotEmpty(this.runFlag) && this.runFlag.equals(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN)); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } TaskNode taskNode = (TaskNode) o; return Objects.equals(name, taskNode.name) && Objects.equals(desc, taskNode.desc) && Objects.equals(type, taskNode.type) && Objects.equals(params, taskNode.params) && Objects.equals(preTasks, taskNode.preTasks) && Objects.equals(extras, taskNode.extras) && Objects.equals(runFlag, taskNode.runFlag) && Objects.equals(dependence, taskNode.dependence) && Objects.equals(workerGroup, taskNode.workerGroup) && Objects.equals(conditionResult, taskNode.conditionResult) && CollectionUtils.equalLists(depList, taskNode.depList); } @Override public int hashCode() { return Objects.hash(name, desc, type, params, preTasks, extras, depList, runFlag); } public String getDependence() { return dependence; } public void setDependence(String dependence) { this.dependence = dependence; } public int getMaxRetryTimes() { return maxRetryTimes; } public void setMaxRetryTimes(int maxRetryTimes) { this.maxRetryTimes = maxRetryTimes; } public int getRetryInterval() { return retryInterval; } public void setRetryInterval(int retryInterval) { this.retryInterval = retryInterval; } public Priority getTaskInstancePriority() { return taskInstancePriority; } public void setTaskInstancePriority(Priority taskInstancePriority) { this.taskInstancePriority = taskInstancePriority; } public String getTimeout() { return timeout; } public void setTimeout(String timeout) { this.timeout = timeout; } /** * get task time out parameter * @return task time out parameter */ public TaskTimeoutParameter getTaskTimeoutParameter() { if(StringUtils.isNotEmpty(this.getTimeout())){ String formatStr = String.format("%s,%s", TaskTimeoutStrategy.WARN.name(), TaskTimeoutStrategy.FAILED.name()); String taskTimeout = this.getTimeout().replace(formatStr,TaskTimeoutStrategy.WARNFAILED.name()); return JSONUtils.parseObject(taskTimeout,TaskTimeoutParameter.class); } return new TaskTimeoutParameter(false); } public boolean isConditionsTask(){ return TaskType.CONDITIONS.toString().equalsIgnoreCase(this.getType()); } @Override public String toString() { return 
"TaskNode{" + "id='" + id + '\'' + ", name='" + name + '\'' + ", desc='" + desc + '\'' + ", type='" + type + '\'' + ", runFlag='" + runFlag + '\'' + ", loc='" + loc + '\'' + ", maxRetryTimes=" + maxRetryTimes + ", retryInterval=" + retryInterval + ", params='" + params + '\'' + ", preTasks='" + preTasks + '\'' + ", extras='" + extras + '\'' + ", depList=" + depList + ", dependence='" + dependence + '\'' + ", taskInstancePriority=" + taskInstancePriority + ", timeout='" + timeout + '\'' + ", workerGroup='" + workerGroup + '\'' + ", delayTime=" + delayTime + '}'; } public String getWorkerGroup() { return workerGroup; } public void setWorkerGroup(String workerGroup) { this.workerGroup = workerGroup; } public String getConditionResult() { return conditionResult; } public void setConditionResult(String conditionResult) { this.conditionResult = conditionResult; } public Integer getWorkerGroupId() { return workerGroupId; } public void setWorkerGroupId(Integer workerGroupId) { this.workerGroupId = workerGroupId; } public int getDelayTime() { return delayTime; } public void setDelayTime(int delayTime) { this.delayTime = delayTime; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,261
[Bug][server-api] copying a node, the preTasks value is wrong
When we copy a node from a process definition, the copied node's preTasks value still points at the source node's predecessors. Even when we link the copied node to a new previous node, this value does not change. When a process instance is started from a failed node, this error may cause some copied nodes that come after the failed node to never execute.
https://github.com/apache/dolphinscheduler/issues/5261
https://github.com/apache/dolphinscheduler/pull/5264
f82da57277c825d4588a6a0654b7730d333a5b18
5f1fc462c9ba321736f9fe9a1bfc057d1da894d0
"2021-04-12T12:33:58Z"
java
"2021-04-20T07:17:17Z"
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.dao.utils; import com.fasterxml.jackson.core.JsonProcessingException; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.entity.ProcessData; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; /** * dag helper test */ public class DagHelperTest { /** * test task node can submit * * @throws JsonProcessingException if error throws JsonProcessingException */ @Test public void testTaskNodeCanSubmit() throws JsonProcessingException { //1->2->3->5->7 //4->3->6 DAG<String, TaskNode, TaskNodeRelation> dag = generateDag(); TaskNode taskNode3 = dag.getNode("3"); Map<String, TaskInstance> completeTaskList = new HashMap<>(); Map<String, TaskNode> skipNodeList = new HashMap<>(); completeTaskList.putIfAbsent("1", new TaskInstance()); Boolean canSubmit = false; // 2/4 are forbidden submit 3 TaskNode node2 = dag.getNode("2"); node2.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); TaskNode nodex = dag.getNode("4"); nodex.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); canSubmit = DagHelper.allDependsForbiddenOrEnd(taskNode3, dag, skipNodeList, completeTaskList); Assert.assertEquals(canSubmit, true); // 2forbidden, 3 cannot be submit completeTaskList.putIfAbsent("2", new TaskInstance()); TaskNode nodey = dag.getNode("4"); nodey.setRunFlag(""); canSubmit = DagHelper.allDependsForbiddenOrEnd(taskNode3, dag, skipNodeList, completeTaskList); Assert.assertEquals(canSubmit, false); // 2/3 forbidden submit 5 TaskNode node3 = dag.getNode("3"); node3.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); TaskNode node8 = dag.getNode("8"); node8.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); TaskNode node5 = dag.getNode("5"); canSubmit = DagHelper.allDependsForbiddenOrEnd(node5, dag, skipNodeList, completeTaskList); Assert.assertEquals(canSubmit, true); } /** * test parse post node list */ @Test public void testParsePostNodeList() throws JsonProcessingException { DAG<String, TaskNode, TaskNodeRelation> dag = generateDag(); Map<String, TaskInstance> completeTaskList = new HashMap<>(); Map<String, TaskNode> skipNodeList = new HashMap<>(); Set<String> postNodes = null; //complete : null // 
expect post: 1/4 postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(2, postNodes.size()); Assert.assertTrue(postNodes.contains("1")); Assert.assertTrue(postNodes.contains("4")); //complete : 1 // expect post: 2/4 completeTaskList.put("1", new TaskInstance()); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(2, postNodes.size()); Assert.assertTrue(postNodes.contains("2")); Assert.assertTrue(postNodes.contains("4")); // complete : 1/2 // expect post: 4 completeTaskList.put("2", new TaskInstance()); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(2, postNodes.size()); Assert.assertTrue(postNodes.contains("4")); Assert.assertTrue(postNodes.contains("8")); // complete : 1/2/4 // expect post: 3 completeTaskList.put("4", new TaskInstance()); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(2, postNodes.size()); Assert.assertTrue(postNodes.contains("3")); Assert.assertTrue(postNodes.contains("8")); // complete : 1/2/4/3 // expect post: 8/6 completeTaskList.put("3", new TaskInstance()); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(2, postNodes.size()); Assert.assertTrue(postNodes.contains("8")); Assert.assertTrue(postNodes.contains("6")); // complete : 1/2/4/3/8 // expect post: 6/5 completeTaskList.put("8", new TaskInstance()); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(2, postNodes.size()); Assert.assertTrue(postNodes.contains("5")); Assert.assertTrue(postNodes.contains("6")); // complete : 1/2/4/3/5/6/8 // expect post: 7 completeTaskList.put("6", new TaskInstance()); completeTaskList.put("5", new TaskInstance()); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(1, postNodes.size()); Assert.assertTrue(postNodes.contains("7")); } /** * test forbidden post node * * @throws JsonProcessingException */ @Test public void testForbiddenPostNode() throws JsonProcessingException { DAG<String, TaskNode, TaskNodeRelation> dag = generateDag(); Map<String, TaskInstance> completeTaskList = new HashMap<>(); Map<String, TaskNode> skipNodeList = new HashMap<>(); Set<String> postNodes = null; // dag: 1-2-3-5-7 4-3-6 2-8-5-7 // forbid:2 complete:1 post:4/8 completeTaskList.put("1", new TaskInstance()); TaskNode node2 = dag.getNode("2"); node2.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(2, postNodes.size()); Assert.assertTrue(postNodes.contains("4")); Assert.assertTrue(postNodes.contains("8")); //forbid:2/4 complete:1 post:3/8 TaskNode node4 = dag.getNode("4"); node4.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(2, postNodes.size()); Assert.assertTrue(postNodes.contains("3")); Assert.assertTrue(postNodes.contains("8")); //forbid:2/4/5 complete:1/8 post:3 completeTaskList.put("8", new TaskInstance()); TaskNode node5 = dag.getNode("5"); node5.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(1, postNodes.size()); Assert.assertTrue(postNodes.contains("3")); } /** * test condition post node * * @throws JsonProcessingException */ @Test public void 
testConditionPostNode() throws JsonProcessingException { DAG<String, TaskNode, TaskNodeRelation> dag = generateDag(); Map<String, TaskInstance> completeTaskList = new HashMap<>(); Map<String, TaskNode> skipNodeList = new HashMap<>(); Set<String> postNodes = null; // dag: 1-2-3-5-7 4-3-6 2-8-5-7 // 3-if completeTaskList.put("1", new TaskInstance()); completeTaskList.put("2", new TaskInstance()); completeTaskList.put("4", new TaskInstance()); TaskNode node3 = dag.getNode("3"); node3.setType("CONDITIONS"); node3.setConditionResult("{\n" + " \"successNode\": [5\n" + " ],\n" + " \"failedNode\": [6\n" + " ]\n" + " }"); completeTaskList.remove("3"); TaskInstance taskInstance = new TaskInstance(); taskInstance.setState(ExecutionStatus.SUCCESS); //complete 1/2/3/4 expect:8 completeTaskList.put("3", taskInstance); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(1, postNodes.size()); Assert.assertTrue(postNodes.contains("8")); //2.complete 1/2/3/4/8 expect:5 skip:6 completeTaskList.put("8", new TaskInstance()); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertTrue(postNodes.contains("5")); Assert.assertEquals(1, skipNodeList.size()); Assert.assertTrue(skipNodeList.containsKey("6")); // 3.complete 1/2/3/4/5/8 expect post:7 skip:6 skipNodeList.clear(); TaskInstance taskInstance1 = new TaskInstance(); taskInstance.setState(ExecutionStatus.SUCCESS); completeTaskList.put("5", taskInstance1); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(1, postNodes.size()); Assert.assertTrue(postNodes.contains("7")); Assert.assertEquals(1, skipNodeList.size()); Assert.assertTrue(skipNodeList.containsKey("6")); // dag: 1-2-3-5-7 4-3-6 // 3-if , complete:1/2/3/4 // 1.failure:3 expect post:6 skip:5/7 skipNodeList.clear(); completeTaskList.remove("3"); taskInstance = new TaskInstance(); taskInstance.setState(ExecutionStatus.FAILURE); completeTaskList.put("3", taskInstance); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assert.assertEquals(1, postNodes.size()); Assert.assertTrue(postNodes.contains("6")); Assert.assertEquals(2, skipNodeList.size()); Assert.assertTrue(skipNodeList.containsKey("5")); Assert.assertTrue(skipNodeList.containsKey("7")); } /** * 1->2->3->5->7 * 4->3->6 * 2->8->5->7 * * @return dag * @throws JsonProcessingException if error throws JsonProcessingException */ private DAG<String, TaskNode, TaskNodeRelation> generateDag() throws JsonProcessingException { List<TaskNode> taskNodeList = new ArrayList<>(); TaskNode node1 = new TaskNode(); node1.setId("1"); node1.setName("1"); node1.setType("SHELL"); taskNodeList.add(node1); TaskNode node2 = new TaskNode(); node2.setId("2"); node2.setName("2"); node2.setType("SHELL"); List<String> dep2 = new ArrayList<>(); dep2.add("1"); node2.setDepList(dep2); taskNodeList.add(node2); TaskNode node4 = new TaskNode(); node4.setId("4"); node4.setName("4"); node4.setType("SHELL"); taskNodeList.add(node4); TaskNode node3 = new TaskNode(); node3.setId("3"); node3.setName("3"); node3.setType("SHELL"); List<String> dep3 = new ArrayList<>(); dep3.add("2"); dep3.add("4"); node3.setDepList(dep3); taskNodeList.add(node3); TaskNode node5 = new TaskNode(); node5.setId("5"); node5.setName("5"); node5.setType("SHELL"); List<String> dep5 = new ArrayList<>(); dep5.add("3"); dep5.add("8"); node5.setDepList(dep5); taskNodeList.add(node5); TaskNode node6 = new TaskNode(); node6.setId("6"); node6.setName("6"); 
node6.setType("SHELL"); List<String> dep6 = new ArrayList<>(); dep6.add("3"); node6.setDepList(dep6); taskNodeList.add(node6); TaskNode node7 = new TaskNode(); node7.setId("7"); node7.setName("7"); node7.setType("SHELL"); List<String> dep7 = new ArrayList<>(); dep7.add("5"); node7.setDepList(dep7); taskNodeList.add(node7); TaskNode node8 = new TaskNode(); node8.setId("8"); node8.setName("8"); node8.setType("SHELL"); List<String> dep8 = new ArrayList<>(); dep8.add("2"); node8.setDepList(dep8); taskNodeList.add(node8); List<String> startNodes = new ArrayList<>(); List<String> recoveryNodes = new ArrayList<>(); List<TaskNode> destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList, startNodes, recoveryNodes, TaskDependType.TASK_POST); List<TaskNodeRelation> taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList); ProcessDag processDag = new ProcessDag(); processDag.setEdges(taskNodeRelations); processDag.setNodes(destTaskNodeList); return DagHelper.buildDagGraph(processDag); } @Test public void testBuildDagGraph() { String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," + "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," + "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; ProcessData processData = JSONUtils.parseObject(shellJson, ProcessData.class); assert processData != null; List<TaskNode> taskNodeList = processData.getTasks(); ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); DAG<String, TaskNode, TaskNodeRelation> dag = DagHelper.buildDagGraph(processDag); Assert.assertNotNull(dag); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,328
[Bug][MasterServer] a process containing a depend task is always running
**Describe the bug** A process that contains a depend task stays in the running state even after its tasks have finished. **To Reproduce** Steps to reproduce the behavior, for example: 1. Schedule the process to run regularly, with the failure strategy set to "end". 2. On the first run the executable file does not exist in the expected directory, so the run fails (the tasks ikkkk and fb both execute the same file). 3. Before the retry, create the executable file in that directory so the second attempt succeeds. 4. Observe the state of the process instance: it never leaves the running state.
https://github.com/apache/dolphinscheduler/issues/5328
https://github.com/apache/dolphinscheduler/pull/5344
5f1fc462c9ba321736f9fe9a1bfc057d1da894d0
e12a26289bce0b38bd65ac74a3349ecadd8a9141
"2021-04-20T06:07:08Z"
java
"2021-04-21T14:29:53Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.service.process; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_EMPTY_SUB_PROCESS; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_FATHER_PARAMS; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID; import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; import static org.apache.dolphinscheduler.common.Constants.YYYY_MM_DD_HH_MM_SS; import static java.util.stream.Collectors.toSet; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.CycleEnum; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.model.DateInterval; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.CycleDependency; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ErrorCommand; import org.apache.dolphinscheduler.dao.entity.ProcessData; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import 
org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.CommandMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.service.log.LogClientService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Date; import java.util.EnumMap; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import org.quartz.CronExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; import com.cronutils.model.Cron; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; /** * process relative dao that some mappers in this. 
*/ @Component public class ProcessService { private final Logger logger = LoggerFactory.getLogger(getClass()); private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), ExecutionStatus.RUNNING_EXECUTION.ordinal(), ExecutionStatus.DELAY_EXECUTION.ordinal(), ExecutionStatus.READY_PAUSE.ordinal(), ExecutionStatus.READY_STOP.ordinal()}; @Autowired private UserMapper userMapper; @Autowired private ProcessDefinitionMapper processDefineMapper; @Autowired private ProcessInstanceMapper processInstanceMapper; @Autowired private DataSourceMapper dataSourceMapper; @Autowired private ProcessInstanceMapMapper processInstanceMapMapper; @Autowired private TaskInstanceMapper taskInstanceMapper; @Autowired private CommandMapper commandMapper; @Autowired private ScheduleMapper scheduleMapper; @Autowired private UdfFuncMapper udfFuncMapper; @Autowired private ResourceMapper resourceMapper; @Autowired private ResourceUserMapper resourceUserMapper; @Autowired private ErrorCommandMapper errorCommandMapper; @Autowired private TenantMapper tenantMapper; @Autowired private ProjectMapper projectMapper; /** * handle Command (construct ProcessInstance from Command) , wrapped in transaction * * @param logger logger * @param host host * @param validThreadNum validThreadNum * @param command found command * @return process instance */ @Transactional(rollbackFor = Exception.class) public ProcessInstance handleCommand(Logger logger, String host, int validThreadNum, Command command) { ProcessInstance processInstance = constructProcessInstance(command, host); // cannot construct process instance, return null if (processInstance == null) { logger.error("scan command, command parameter is error: {}", command); moveToErrorCommand(command, "process instance is null"); return null; } if (!checkThreadNum(command, validThreadNum)) { logger.info("there is not enough thread for this command: {}", command); return setWaitingThreadProcess(command, processInstance); } processInstance.setCommandType(command.getCommandType()); processInstance.addHistoryCmd(command.getCommandType()); saveProcessInstance(processInstance); this.setSubProcessParam(processInstance); delCommandById(command.getId()); return processInstance; } /** * save error command, and delete original command * * @param command command * @param message message */ @Transactional(rollbackFor = Exception.class) public void moveToErrorCommand(Command command, String message) { ErrorCommand errorCommand = new ErrorCommand(command, message); this.errorCommandMapper.insert(errorCommand); delCommandById(command.getId()); } /** * set process waiting thread * * @param command command * @param processInstance processInstance * @return process instance */ private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) { processInstance.setState(ExecutionStatus.WAITTING_THREAD); if (command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD) { processInstance.addHistoryCmd(command.getCommandType()); } saveProcessInstance(processInstance); this.setSubProcessParam(processInstance); createRecoveryWaitingThreadCommand(command, processInstance); return null; } /** * check thread num * * @param command command * @param validThreadNum validThreadNum * @return if thread is enough */ private boolean checkThreadNum(Command command, int validThreadNum) { int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId()); return validThreadNum >= commandThreadCount; } /** * insert one command * * @param command 
command * @return create result */ public int createCommand(Command command) { int result = 0; if (command != null) { result = commandMapper.insert(command); } return result; } /** * find one command from queue list * * @return command */ public Command findOneCommand() { return commandMapper.getOneToRun(); } /** * check the input command exists in queue list * * @param command command * @return create command result */ public boolean verifyIsNeedCreateCommand(Command command) { boolean isNeedCreate = true; EnumMap<CommandType, Integer> cmdTypeMap = new EnumMap<>(CommandType.class); cmdTypeMap.put(CommandType.REPEAT_RUNNING, 1); cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, 1); cmdTypeMap.put(CommandType.START_FAILURE_TASK_PROCESS, 1); CommandType commandType = command.getCommandType(); if (cmdTypeMap.containsKey(commandType)) { ObjectNode cmdParamObj = JSONUtils.parseObject(command.getCommandParam()); int processInstanceId = cmdParamObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt(); List<Command> commands = commandMapper.selectList(null); // for all commands for (Command tmpCommand : commands) { if (cmdTypeMap.containsKey(tmpCommand.getCommandType())) { ObjectNode tempObj = JSONUtils.parseObject(tmpCommand.getCommandParam()); if (tempObj != null && processInstanceId == tempObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt()) { isNeedCreate = false; break; } } } } return isNeedCreate; } /** * find process instance detail by id * * @param processId processId * @return process instance */ public ProcessInstance findProcessInstanceDetailById(int processId) { return processInstanceMapper.queryDetailById(processId); } /** * get task node list by definitionId */ public List<TaskNode> getTaskNodeListByDefinitionId(Integer defineId) { ProcessDefinition processDefinition = processDefineMapper.selectById(defineId); if (processDefinition == null) { logger.info("process define not exists"); return null; } String processDefinitionJson = processDefinition.getProcessDefinitionJson(); ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); //process data check if (null == processData) { logger.error("process data is null"); return new ArrayList<>(); } return processData.getTasks(); } /** * find process instance by id * * @param processId processId * @return process instance */ public ProcessInstance findProcessInstanceById(int processId) { return processInstanceMapper.selectById(processId); } /** * find process define by id. 
* * @param processDefinitionId processDefinitionId * @return process definition */ public ProcessDefinition findProcessDefineById(int processDefinitionId) { return processDefineMapper.selectById(processDefinitionId); } /** * delete work process instance by id * * @param processInstanceId processInstanceId * @return delete process instance result */ public int deleteWorkProcessInstanceById(int processInstanceId) { return processInstanceMapper.deleteById(processInstanceId); } /** * delete all sub process by parent instance id * * @param processInstanceId processInstanceId * @return delete all sub process instance result */ public int deleteAllSubWorkProcessByParentId(int processInstanceId) { List<Integer> subProcessIdList = processInstanceMapMapper.querySubIdListByParentId(processInstanceId); for (Integer subId : subProcessIdList) { deleteAllSubWorkProcessByParentId(subId); deleteWorkProcessMapByParentId(subId); removeTaskLogFile(subId); deleteWorkProcessInstanceById(subId); } return 1; } /** * remove task log file * * @param processInstanceId processInstanceId */ public void removeTaskLogFile(Integer processInstanceId) { List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId); if (CollectionUtils.isEmpty(taskInstanceList)) { return; } try (LogClientService logClient = new LogClientService()) { for (TaskInstance taskInstance : taskInstanceList) { String taskLogPath = taskInstance.getLogPath(); if (StringUtils.isEmpty(taskInstance.getHost())) { continue; } int port = Constants.RPC_PORT; String ip = ""; try { ip = Host.of(taskInstance.getHost()).getIp(); } catch (Exception e) { // compatible old version ip = taskInstance.getHost(); } // remove task log from loggerserver logClient.removeTaskLog(ip, port, taskLogPath); } } } /** * calculate sub process number in the process define. * * @param processDefinitionId processDefinitionId * @return process thread num count */ private Integer workProcessThreadNumCount(Integer processDefinitionId) { List<Integer> ids = new ArrayList<>(); recurseFindSubProcessId(processDefinitionId, ids); return ids.size() + 1; } /** * recursive query sub process definition id by parent id. * * @param parentId parentId * @param ids ids */ public void recurseFindSubProcessId(int parentId, List<Integer> ids) { ProcessDefinition processDefinition = processDefineMapper.selectById(parentId); String processDefinitionJson = processDefinition.getProcessDefinitionJson(); ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); List<TaskNode> taskNodeList = processData.getTasks(); if (taskNodeList != null && !taskNodeList.isEmpty()) { for (TaskNode taskNode : taskNodeList) { String parameter = taskNode.getParams(); ObjectNode parameterJson = JSONUtils.parseObject(parameter); if (parameterJson.get(CMD_PARAM_SUB_PROCESS_DEFINE_ID) != null) { SubProcessParameters subProcessParam = JSONUtils.parseObject(parameter, SubProcessParameters.class); ids.add(subProcessParam.getProcessDefinitionId()); recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(), ids); } } } } /** * create recovery waiting thread command when thread pool is not enough for the process instance. * sub work process instance need not to create recovery command. * create recovery waiting thread command and delete origin command at the same time. 
* if the recovery command is exists, only update the field update_time * * @param originCommand originCommand * @param processInstance processInstance */ public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) { // sub process doesnot need to create wait command if (processInstance.getIsSubProcess() == Flag.YES) { if (originCommand != null) { commandMapper.deleteById(originCommand.getId()); } return; } Map<String, String> cmdParam = new HashMap<>(); cmdParam.put(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD, String.valueOf(processInstance.getId())); // process instance quit by "waiting thread" state if (originCommand == null) { Command command = new Command( CommandType.RECOVER_WAITTING_THREAD, processInstance.getTaskDependType(), processInstance.getFailureStrategy(), processInstance.getExecutorId(), processInstance.getProcessDefinitionId(), JSONUtils.toJsonString(cmdParam), processInstance.getWarningType(), processInstance.getWarningGroupId(), processInstance.getScheduleTime(), processInstance.getWorkerGroup(), processInstance.getProcessInstancePriority() ); saveCommand(command); return; } // update the command time if current command if recover from waiting if (originCommand.getCommandType() == CommandType.RECOVER_WAITTING_THREAD) { originCommand.setUpdateTime(new Date()); saveCommand(originCommand); } else { // delete old command and create new waiting thread command commandMapper.deleteById(originCommand.getId()); originCommand.setId(0); originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD); originCommand.setUpdateTime(new Date()); originCommand.setCommandParam(JSONUtils.toJsonString(cmdParam)); originCommand.setProcessInstancePriority(processInstance.getProcessInstancePriority()); saveCommand(originCommand); } } /** * get schedule time from command * * @param command command * @param cmdParam cmdParam map * @return date */ private Date getScheduleTime(Command command, Map<String, String> cmdParam) { Date scheduleTime = command.getScheduleTime(); if (scheduleTime == null && cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)) { scheduleTime = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); } return scheduleTime; } /** * generate a new work process instance from command. * * @param processDefinition processDefinition * @param command command * @param cmdParam cmdParam map * @return process instance */ private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition, Command command, Map<String, String> cmdParam) { ProcessInstance processInstance = new ProcessInstance(processDefinition); processInstance.setState(ExecutionStatus.RUNNING_EXECUTION); processInstance.setRecovery(Flag.NO); processInstance.setStartTime(new Date()); processInstance.setRunTimes(1); processInstance.setMaxTryTimes(0); processInstance.setProcessDefinitionId(command.getProcessDefinitionId()); processInstance.setCommandParam(command.getCommandParam()); processInstance.setCommandType(command.getCommandType()); processInstance.setIsSubProcess(Flag.NO); processInstance.setTaskDependType(command.getTaskDependType()); processInstance.setFailureStrategy(command.getFailureStrategy()); processInstance.setExecutorId(command.getExecutorId()); WarningType warningType = command.getWarningType() == null ? WarningType.NONE : command.getWarningType(); processInstance.setWarningType(warningType); Integer warningGroupId = command.getWarningGroupId() == null ? 
0 : command.getWarningGroupId(); processInstance.setWarningGroupId(warningGroupId); // schedule time Date scheduleTime = getScheduleTime(command, cmdParam); if (scheduleTime != null) { processInstance.setScheduleTime(scheduleTime); } processInstance.setCommandStartTime(command.getStartTime()); processInstance.setLocations(processDefinition.getLocations()); processInstance.setConnects(processDefinition.getConnects()); // reset global params while there are start parameters setGlobalParamIfCommanded(processDefinition, cmdParam); // curing global params processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( processDefinition.getGlobalParamMap(), processDefinition.getGlobalParamList(), getCommandTypeIfComplement(processInstance, command), processInstance.getScheduleTime())); //copy process define json to process instance processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson()); // set process instance priority processInstance.setProcessInstancePriority(command.getProcessInstancePriority()); String workerGroup = StringUtils.isBlank(command.getWorkerGroup()) ? Constants.DEFAULT_WORKER_GROUP : command.getWorkerGroup(); processInstance.setWorkerGroup(workerGroup); processInstance.setTimeout(processDefinition.getTimeout()); processInstance.setTenantId(processDefinition.getTenantId()); return processInstance; } private void setGlobalParamIfCommanded(ProcessDefinition processDefinition, Map<String, String> cmdParam) { // get start params from command param Map<String, String> startParamMap = new HashMap<>(); if (cmdParam != null && cmdParam.containsKey(Constants.CMD_PARAM_START_PARAMS)) { String startParamJson = cmdParam.get(Constants.CMD_PARAM_START_PARAMS); startParamMap = JSONUtils.toMap(startParamJson); } Map<String, String> fatherParamMap = new HashMap<>(); if (cmdParam != null && cmdParam.containsKey(Constants.CMD_PARAM_FATHER_PARAMS)) { String fatherParamJson = cmdParam.get(Constants.CMD_PARAM_FATHER_PARAMS); fatherParamMap = JSONUtils.toMap(fatherParamJson); } startParamMap.putAll(fatherParamMap); // set start param into global params if (startParamMap.size() > 0 && processDefinition.getGlobalParamMap() != null) { for (Map.Entry<String, String> param : processDefinition.getGlobalParamMap().entrySet()) { String val = startParamMap.get(param.getKey()); if (val != null) { param.setValue(val); } } } } /** * get process tenant * there is tenant id in definition, use the tenant of the definition. * if there is not tenant id in the definiton or the tenant not exist * use definition creator's tenant. 
* * @param tenantId tenantId * @param userId userId * @return tenant */ public Tenant getTenantForProcess(int tenantId, int userId) { Tenant tenant = null; if (tenantId >= 0) { tenant = tenantMapper.queryById(tenantId); } if (userId == 0) { return null; } if (tenant == null) { User user = userMapper.selectById(userId); tenant = tenantMapper.queryById(user.getTenantId()); } return tenant; } /** * check command parameters is valid * * @param command command * @param cmdParam cmdParam map * @return whether command param is valid */ private Boolean checkCmdParam(Command command, Map<String, String> cmdParam) { if (command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType() == TaskDependType.TASK_PRE) { if (cmdParam == null || !cmdParam.containsKey(Constants.CMD_PARAM_START_NODE_NAMES) || cmdParam.get(Constants.CMD_PARAM_START_NODE_NAMES).isEmpty()) { logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType()); return false; } } return true; } /** * construct process instance according to one command. * * @param command command * @param host host * @return process instance */ private ProcessInstance constructProcessInstance(Command command, String host) { ProcessInstance processInstance = null; CommandType commandType = command.getCommandType(); Map<String, String> cmdParam = JSONUtils.toMap(command.getCommandParam()); ProcessDefinition processDefinition = null; if (command.getProcessDefinitionId() != 0) { processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId()); if (processDefinition == null) { logger.error("cannot find the work process define! define id : {}", command.getProcessDefinitionId()); return null; } } if (cmdParam != null) { Integer processInstanceId = 0; // recover from failure or pause tasks if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING)) { String processId = cmdParam.get(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING); processInstanceId = Integer.parseInt(processId); if (processInstanceId == 0) { logger.error("command parameter is error, [ ProcessInstanceId ] is 0"); return null; } } else if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) { // sub process map String pId = cmdParam.get(Constants.CMD_PARAM_SUB_PROCESS); processInstanceId = Integer.parseInt(pId); } else if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD)) { // waiting thread command String pId = cmdParam.get(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD); processInstanceId = Integer.parseInt(pId); } if (processInstanceId == 0) { processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); } else { processInstance = this.findProcessInstanceDetailById(processInstanceId); CommandType commandTypeIfComplement = getCommandTypeIfComplement(processInstance, command); // reset global params while repeat running is needed by cmdParam if (commandTypeIfComplement == CommandType.REPEAT_RUNNING) { setGlobalParamIfCommanded(processDefinition, cmdParam); } } processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId()); processInstance.setProcessDefinition(processDefinition); //reset command parameter if (processInstance.getCommandParam() != null) { Map<String, String> processCmdParam = JSONUtils.toMap(processInstance.getCommandParam()); for (Map.Entry<String, String> entry : processCmdParam.entrySet()) { if (!cmdParam.containsKey(entry.getKey())) { cmdParam.put(entry.getKey(), entry.getValue()); } } } // reset command parameter if sub 
process if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) { processInstance.setCommandParam(command.getCommandParam()); } } else { // generate one new process instance processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); } if (Boolean.FALSE.equals(checkCmdParam(command, cmdParam))) { logger.error("command parameter check failed!"); return null; } if (command.getScheduleTime() != null) { processInstance.setScheduleTime(command.getScheduleTime()); } processInstance.setHost(host); ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXECUTION; int runTime = processInstance.getRunTimes(); switch (commandType) { case START_PROCESS: break; case START_FAILURE_TASK_PROCESS: // find failed tasks and init these tasks List<Integer> failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE); List<Integer> toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE); List<Integer> killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL); cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); failedList.addAll(killedList); failedList.addAll(toleranceList); for (Integer taskId : failedList) { initTaskInstance(this.findTaskInstanceById(taskId)); } cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, String.join(Constants.COMMA, convertIntListToString(failedList))); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); processInstance.setRunTimes(runTime + 1); break; case START_CURRENT_TASK_PROCESS: break; case RECOVER_WAITTING_THREAD: break; case RECOVER_SUSPENDED_PROCESS: // find pause tasks and init task's state cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); List<Integer> suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE); List<Integer> stopNodeList = findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL); suspendedNodeList.addAll(stopNodeList); for (Integer taskId : suspendedNodeList) { // initialize the pause state initTaskInstance(this.findTaskInstanceById(taskId)); } cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList))); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); processInstance.setRunTimes(runTime + 1); break; case RECOVER_TOLERANCE_FAULT_PROCESS: // recover tolerance fault process processInstance.setRecovery(Flag.YES); runStatus = processInstance.getState(); break; case COMPLEMENT_DATA: // delete all the valid tasks when complement data List<TaskInstance> taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId()); for (TaskInstance taskInstance : taskInstanceList) { taskInstance.setFlag(Flag.NO); this.updateTaskInstance(taskInstance); } initComplementDataParam(processDefinition, processInstance, cmdParam); break; case REPEAT_RUNNING: // delete the recover task names from command parameter if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING)) { cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); } // delete all the valid tasks when repeat running List<TaskInstance> validTaskList = findValidTaskListByProcessId(processInstance.getId()); for (TaskInstance taskInstance : validTaskList) { taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); } processInstance.setStartTime(new Date()); processInstance.setEndTime(null); 
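// repeat running: bump the run counter; initComplementDataParam below is a no-op unless the instance carries complement data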
processInstance.setRunTimes(runTime + 1); initComplementDataParam(processDefinition, processInstance, cmdParam); break; case SCHEDULER: break; default: break; } processInstance.setState(runStatus); return processInstance; } /** * return complement data if the process start with complement data * * @param processInstance processInstance * @param command command * @return command type */ private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command) { if (CommandType.COMPLEMENT_DATA == processInstance.getCmdTypeIfComplement()) { return CommandType.COMPLEMENT_DATA; } else { return command.getCommandType(); } } /** * initialize complement data parameters * * @param processDefinition processDefinition * @param processInstance processInstance * @param cmdParam cmdParam */ private void initComplementDataParam(ProcessDefinition processDefinition, ProcessInstance processInstance, Map<String, String> cmdParam) { if (!processInstance.isComplementData()) { return; } Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE), YYYY_MM_DD_HH_MM_SS); if (Flag.NO == processInstance.getIsSubProcess()) { processInstance.setScheduleTime(startComplementTime); } processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( processDefinition.getGlobalParamMap(), processDefinition.getGlobalParamList(), CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); } /** * set sub work process parameters. * handle sub work process instance, update relation table and command parameters * set sub work process flag, extends parent work process command parameters * * @param subProcessInstance subProcessInstance * @return process instance */ public ProcessInstance setSubProcessParam(ProcessInstance subProcessInstance) { String cmdParam = subProcessInstance.getCommandParam(); if (StringUtils.isEmpty(cmdParam)) { return subProcessInstance; } Map<String, String> paramMap = JSONUtils.toMap(cmdParam); // write sub process id into cmd param. if (paramMap.containsKey(CMD_PARAM_SUB_PROCESS) && CMD_PARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMD_PARAM_SUB_PROCESS))) { paramMap.remove(CMD_PARAM_SUB_PROCESS); paramMap.put(CMD_PARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId())); subProcessInstance.setCommandParam(JSONUtils.toJsonString(paramMap)); subProcessInstance.setIsSubProcess(Flag.YES); this.saveProcessInstance(subProcessInstance); } // copy parent instance user def params to sub process.. String parentInstanceId = paramMap.get(CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID); if (StringUtils.isNotEmpty(parentInstanceId)) { ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId)); if (parentInstance != null) { subProcessInstance.setGlobalParams( joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams())); this.saveProcessInstance(subProcessInstance); } else { logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam); } } ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class); if (processInstanceMap == null || processInstanceMap.getParentProcessInstanceId() == 0) { return subProcessInstance; } // update sub process id to process map table processInstanceMap.setProcessInstanceId(subProcessInstance.getId()); this.updateWorkProcessInstanceMap(processInstanceMap); return subProcessInstance; } /** * join parent global params into sub process. * only the keys doesn't in sub process global would be joined. 
* * @param parentGlobalParams parentGlobalParams * @param subGlobalParams subGlobalParams * @return global params join */ private String joinGlobalParams(String parentGlobalParams, String subGlobalParams) { List<Property> parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class); List<Property> subPropertyList = JSONUtils.toList(subGlobalParams, Property.class); Map<String, String> subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); for (Property parent : parentPropertyList) { if (!subMap.containsKey(parent.getProp())) { subPropertyList.add(parent); } } return JSONUtils.toJsonString(subPropertyList); } /** * initialize task instance * * @param taskInstance taskInstance */ private void initTaskInstance(TaskInstance taskInstance) { if (!taskInstance.isSubProcess() && (taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure())) { taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); return; } taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); updateTaskInstance(taskInstance); } /** * submit task to db * submit sub process to command * * @param taskInstance taskInstance * @return task instance */ @Transactional(rollbackFor = Exception.class) public TaskInstance submitTask(TaskInstance taskInstance) { ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); logger.info("start submit task : {}, instance id:{}, state: {}", taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState()); //submit to db TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance); if (task == null) { logger.error("end submit task to db error, task name:{}, process id:{} state: {} ", taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState()); return task; } if (!task.getState().typeIsFinished()) { createSubWorkProcess(processInstance, task); } logger.info("end submit task to db successfully:{} state:{} complete, instance id:{} state: {} ", taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState()); return task; } /** * set work process instance map * consider o * repeat running does not generate new sub process instance * set map {parent instance id, task instance id, 0(child instance id)} * * @param parentInstance parentInstance * @param parentTask parentTask * @return process instance map */ private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask) { ProcessInstanceMap processMap = findWorkProcessMapByParent(parentInstance.getId(), parentTask.getId()); if (processMap != null) { return processMap; } if (parentInstance.getCommandType() == CommandType.REPEAT_RUNNING) { // update current task id to map processMap = findPreviousTaskProcessMap(parentInstance, parentTask); if (processMap != null) { processMap.setParentTaskInstanceId(parentTask.getId()); updateWorkProcessInstanceMap(processMap); return processMap; } } // new task processMap = new ProcessInstanceMap(); processMap.setParentProcessInstanceId(parentInstance.getId()); processMap.setParentTaskInstanceId(parentTask.getId()); createWorkProcessInstanceMap(processMap); return processMap; } /** * find previous task work process map. 
* * @param parentProcessInstance parentProcessInstance * @param parentTask parentTask * @return process instance map */ private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance, TaskInstance parentTask) { Integer preTaskId = 0; List<TaskInstance> preTaskList = this.findPreviousTaskListByWorkProcessId(parentProcessInstance.getId()); for (TaskInstance task : preTaskList) { if (task.getName().equals(parentTask.getName())) { preTaskId = task.getId(); ProcessInstanceMap map = findWorkProcessMapByParent(parentProcessInstance.getId(), preTaskId); if (map != null) { return map; } } } logger.info("sub process instance is not found, parent task:{}, parent instance:{}", parentTask.getId(), parentProcessInstance.getId()); return null; } /** * create sub work process command * * @param parentProcessInstance parentProcessInstance * @param task task */ public void createSubWorkProcess(ProcessInstance parentProcessInstance, TaskInstance task) { if (!task.isSubProcess()) { return; } // first check whether the sub workflow has already been created ProcessInstanceMap instanceMap = findWorkProcessMapByParent(parentProcessInstance.getId(), task.getId()); if (null != instanceMap && CommandType.RECOVER_TOLERANCE_FAULT_PROCESS == parentProcessInstance.getCommandType()) { // fault-tolerance recovery does not create a new command when the sub command has already been created return; } instanceMap = setProcessInstanceMap(parentProcessInstance, task); ProcessInstance childInstance = null; if (instanceMap.getProcessInstanceId() != 0) { childInstance = findProcessInstanceById(instanceMap.getProcessInstanceId()); } Command subProcessCommand = createSubProcessCommand(parentProcessInstance, childInstance, instanceMap, task); updateSubProcessDefinitionByParent(parentProcessInstance, subProcessCommand.getProcessDefinitionId()); initSubInstanceState(childInstance); createCommand(subProcessCommand); logger.info("sub process command created: {} ", subProcessCommand); } /** * complement data needs to transform parent parameters to the child.
*/ private String getSubWorkFlowParam(ProcessInstanceMap instanceMap, ProcessInstance parentProcessInstance, Map<String, String> fatherParams) { // set sub work process command String processMapStr = JSONUtils.toJsonString(instanceMap); Map<String, String> cmdParam = JSONUtils.toMap(processMapStr); if (parentProcessInstance.isComplementData()) { Map<String, String> parentParam = JSONUtils.toMap(parentProcessInstance.getCommandParam()); String endTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE); String startTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, endTime); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, startTime); processMapStr = JSONUtils.toJsonString(cmdParam); } if (fatherParams.size() != 0) { cmdParam.put(CMD_PARAM_FATHER_PARAMS, JSONUtils.toJsonString(fatherParams)); processMapStr = JSONUtils.toJsonString(cmdParam); } return processMapStr; } public Map<String, String> getGlobalParamMap(String globalParams) { List<Property> propList; Map<String, String> globalParamMap = new HashMap<>(); if (StringUtils.isNotEmpty(globalParams)) { propList = JSONUtils.toList(globalParams, Property.class); globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); } return globalParamMap; } /** * create sub work process command */ public Command createSubProcessCommand(ProcessInstance parentProcessInstance, ProcessInstance childInstance, ProcessInstanceMap instanceMap, TaskInstance task) { CommandType commandType = getSubCommandType(parentProcessInstance, childInstance); TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class); Map<String, Object> subProcessParam = JSONUtils.toMap(taskNode.getParams(), String.class, Object.class); Integer childDefineId = Integer.parseInt(String.valueOf(subProcessParam.get(Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID))); Object localParams = subProcessParam.get(Constants.LOCAL_PARAMS); List<Property> allParam = JSONUtils.toList(JSONUtils.toJsonString(localParams), Property.class); Map<String, String> globalMap = this.getGlobalParamMap(parentProcessInstance.getGlobalParams()); Map<String, String> fatherParams = new HashMap<>(); if (CollectionUtils.isNotEmpty(allParam)) { for (Property info : allParam) { fatherParams.put(info.getProp(), globalMap.get(info.getProp())); } } String processParam = getSubWorkFlowParam(instanceMap, parentProcessInstance, fatherParams); return new Command( commandType, TaskDependType.TASK_POST, parentProcessInstance.getFailureStrategy(), parentProcessInstance.getExecutorId(), childDefineId, processParam, parentProcessInstance.getWarningType(), parentProcessInstance.getWarningGroupId(), parentProcessInstance.getScheduleTime(), task.getWorkerGroup(), parentProcessInstance.getProcessInstancePriority() ); } /** * initialize sub work flow state * child instance state would be initialized when 'recovery from pause/stop/failure' */ private void initSubInstanceState(ProcessInstance childInstance) { if (childInstance != null) { childInstance.setState(ExecutionStatus.RUNNING_EXECUTION); updateProcessInstance(childInstance); } } /** * get sub work flow command type * child instance exist: child command = fatherCommand * child instance not exists: child command = fatherCommand[0] */ private CommandType getSubCommandType(ProcessInstance parentProcessInstance, ProcessInstance childInstance) { CommandType commandType = parentProcessInstance.getCommandType(); if (childInstance == null) { String fatherHistoryCommand = 
parentProcessInstance.getHistoryCmd(); commandType = CommandType.valueOf(fatherHistoryCommand.split(Constants.COMMA)[0]); } return commandType; } /** * update sub process definition * * @param parentProcessInstance parentProcessInstance * @param childDefinitionId childDefinitionId */ private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) { ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId()); ProcessDefinition childDefinition = this.findProcessDefineById(childDefinitionId); if (childDefinition != null && fatherDefinition != null) { childDefinition.setWarningGroupId(fatherDefinition.getWarningGroupId()); processDefineMapper.updateById(childDefinition); } } /** * submit task instance to db * * @param taskInstance taskInstance * @param processInstance processInstance * @return task instance */ public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance) { ExecutionStatus processInstanceState = processInstance.getState(); if (taskInstance.getState().typeIsFailure()) { if (taskInstance.isSubProcess()) { taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1); } else { if (processInstanceState != ExecutionStatus.READY_STOP && processInstanceState != ExecutionStatus.READY_PAUSE) { // failure task set invalid taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); // create new task instance if (taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE) { taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1); } taskInstance.setSubmitTime(null); taskInstance.setStartTime(null); taskInstance.setEndTime(null); taskInstance.setFlag(Flag.YES); taskInstance.setHost(null); taskInstance.setId(0); } } } taskInstance.setExecutorId(processInstance.getExecutorId()); taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority()); taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState)); if (taskInstance.getSubmitTime() == null) { taskInstance.setSubmitTime(new Date()); } if (taskInstance.getFirstSubmitTime() == null) { taskInstance.setFirstSubmitTime(taskInstance.getSubmitTime()); } boolean saveResult = saveTaskInstance(taskInstance); if (!saveResult) { return null; } return taskInstance; } /** * get submit task instance state by the work process state * cannot modify the task state when running/kill/submit success, or when this * task instance already exists in the task queue.
* return pause if the work process state is ready pause * return stop if the work process state is ready stop * if none of the above is satisfied, return submit success * * @param taskInstance taskInstance * @param processInstanceState processInstanceState * @return process instance state */ public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState) { ExecutionStatus state = taskInstance.getState(); // running, delayed or killed // the task already exists in task queue // return state if ( state == ExecutionStatus.RUNNING_EXECUTION || state == ExecutionStatus.DELAY_EXECUTION || state == ExecutionStatus.KILL ) { return state; } // return pause/stop if the process instance state is ready pause/stop, // otherwise return submit success if (processInstanceState == ExecutionStatus.READY_PAUSE) { state = ExecutionStatus.PAUSE; } else if (processInstanceState == ExecutionStatus.READY_STOP || !checkProcessStrategy(taskInstance)) { state = ExecutionStatus.KILL; } else { state = ExecutionStatus.SUBMITTED_SUCCESS; } return state; } /** * check process instance strategy * * @param taskInstance taskInstance * @return check strategy result */ private boolean checkProcessStrategy(TaskInstance taskInstance) { ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId()); FailureStrategy failureStrategy = processInstance.getFailureStrategy(); if (failureStrategy == FailureStrategy.CONTINUE) { return true; } List<TaskInstance> taskInstances = this.findValidTaskListByProcessId(taskInstance.getProcessInstanceId()); for (TaskInstance task : taskInstances) { if (task.getState() == ExecutionStatus.FAILURE) { return false; } } return true; } /** * create a new process instance * * @param processInstance processInstance */ public void createProcessInstance(ProcessInstance processInstance) { if (processInstance != null) { processInstanceMapper.insert(processInstance); } } /** * insert or update work process instance to database * * @param processInstance processInstance */ public void saveProcessInstance(ProcessInstance processInstance) { if (processInstance == null) { logger.error("save error, process instance is null!"); return; } if (processInstance.getId() != 0) { processInstanceMapper.updateById(processInstance); } else { createProcessInstance(processInstance); } } /** * insert or update command * * @param command command * @return save command result */ public int saveCommand(Command command) { if (command.getId() != 0) { return commandMapper.updateById(command); } else { return commandMapper.insert(command); } } /** * insert or update task instance * * @param taskInstance taskInstance * @return save task instance result */ public boolean saveTaskInstance(TaskInstance taskInstance) { if (taskInstance.getId() != 0) { return updateTaskInstance(taskInstance); } else { return createTaskInstance(taskInstance); } } /** * insert task instance * * @param taskInstance taskInstance * @return create task instance result */ public boolean createTaskInstance(TaskInstance taskInstance) { int count = taskInstanceMapper.insert(taskInstance); return count > 0; } /** * update task instance * * @param taskInstance taskInstance * @return update task instance result */ public boolean updateTaskInstance(TaskInstance taskInstance) { int count = taskInstanceMapper.updateById(taskInstance); return count > 0; } /** * delete a command by id * * @param id id */ public void delCommandById(int id) { commandMapper.deleteById(id); } /** * find task instance by id * * @param
taskId task id * @return task instance */ public TaskInstance findTaskInstanceById(Integer taskId) { return taskInstanceMapper.selectById(taskId); } /** * package task instance, associating processInstance and processDefine * * @param taskInstId taskInstId * @return task instance */ public TaskInstance getTaskInstanceDetailByTaskId(int taskInstId) { // get task instance TaskInstance taskInstance = findTaskInstanceById(taskInstId); if (taskInstance == null) { return taskInstance; } // get process instance ProcessInstance processInstance = findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); // get process define ProcessDefinition processDefine = findProcessDefineById(taskInstance.getProcessDefinitionId()); taskInstance.setProcessInstance(processInstance); taskInstance.setProcessDefine(processDefine); return taskInstance; } /** * get id list by task state * * @param instanceId instanceId * @param state state * @return task instance id list */ public List<Integer> findTaskIdByInstanceState(int instanceId, ExecutionStatus state) { return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal()); } /** * find valid task list by process instance id * * @param processInstanceId processInstanceId * @return task instance list */ public List<TaskInstance> findValidTaskListByProcessId(Integer processInstanceId) { return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES); } /** * find previous task list by work process id * * @param processInstanceId processInstanceId * @return task instance list */ public List<TaskInstance> findPreviousTaskListByWorkProcessId(Integer processInstanceId) { return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.NO); } /** * update work process instance map * * @param processInstanceMap processInstanceMap * @return update process instance result */ public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) { return processInstanceMapMapper.updateById(processInstanceMap); } /** * create work process instance map * * @param processInstanceMap processInstanceMap * @return create process instance result */ public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) { int count = 0; if (processInstanceMap != null) { return processInstanceMapMapper.insert(processInstanceMap); } return count; } /** * find work process map by parent process id and parent task id.
* * @param parentWorkProcessId parentWorkProcessId * @param parentTaskId parentTaskId * @return process instance map */ public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId) { return processInstanceMapMapper.queryByParentId(parentWorkProcessId, parentTaskId); } /** * delete work process map by parent process id * * @param parentWorkProcessId parentWorkProcessId * @return delete process map result */ public int deleteWorkProcessMapByParentId(int parentWorkProcessId) { return processInstanceMapMapper.deleteByParentProcessId(parentWorkProcessId); } /** * find sub process instance * * @param parentProcessId parentProcessId * @param parentTaskId parentTaskId * @return process instance */ public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId) { ProcessInstance processInstance = null; ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryByParentId(parentProcessId, parentTaskId); if (processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0) { return processInstance; } processInstance = findProcessInstanceById(processInstanceMap.getProcessInstanceId()); return processInstance; } /** * find parent process instance * * @param subProcessId subProcessId * @return process instance */ public ProcessInstance findParentProcessInstance(Integer subProcessId) { ProcessInstance processInstance = null; ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(subProcessId); if (processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0) { return processInstance; } processInstance = findProcessInstanceById(processInstanceMap.getParentProcessInstanceId()); return processInstance; } /** * change task state * * @param state state * @param startTime startTime * @param host host * @param executePath executePath * @param logPath logPath * @param taskInstId taskInstId */ public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date startTime, String host, String executePath, String logPath, int taskInstId) { taskInstance.setState(state); taskInstance.setStartTime(startTime); taskInstance.setHost(host); taskInstance.setExecutePath(executePath); taskInstance.setLogPath(logPath); saveTaskInstance(taskInstance); } /** * update process instance * * @param processInstance processInstance * @return update process instance result */ public int updateProcessInstance(ProcessInstance processInstance) { return processInstanceMapper.updateById(processInstance); } /** * update the process instance * * @param processInstanceId processInstanceId * @param processJson processJson * @param globalParams globalParams * @param scheduleTime scheduleTime * @param flag flag * @param locations locations * @param connects connects * @return update process instance result */ public int updateProcessInstance(Integer processInstanceId, String processJson, String globalParams, Date scheduleTime, Flag flag, String locations, String connects) { ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); if (processInstance != null) { processInstance.setProcessInstanceJson(processJson); processInstance.setGlobalParams(globalParams); processInstance.setScheduleTime(scheduleTime); processInstance.setLocations(locations); processInstance.setConnects(connects); return processInstanceMapper.updateById(processInstance); } return 0; } /** * change task state * * @param state state * @param endTime endTime * @param taskInstId taskInstId * 
@param varPool varPool */ public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date endTime, int processId, String appIds, int taskInstId, String varPool, String result) { taskInstance.setPid(processId); taskInstance.setAppLink(appIds); taskInstance.setState(state); taskInstance.setEndTime(endTime); taskInstance.setVarPool(varPool); changeOutParam(result, taskInstance); saveTaskInstance(taskInstance); } public void changeOutParam(String result, TaskInstance taskInstance) { if (StringUtils.isEmpty(result)) { return; } List<Map<String, String>> workerResultParam = getListMapByString(result); if (CollectionUtils.isEmpty(workerResultParam)) { return; } //if the result more than one line,just get the first . Map<String, String> row = workerResultParam.get(0); if (row == null || row.size() == 0) { return; } TaskNode taskNode = JSONUtils.parseObject(taskInstance.getTaskJson(), TaskNode.class); Map<String, Object> taskParams = JSONUtils.toMap(taskNode.getParams(), String.class, Object.class); Object localParams = taskParams.get(LOCAL_PARAMS); if (localParams == null) { return; } ProcessInstance processInstance = this.processInstanceMapper.queryDetailById(taskInstance.getProcessInstanceId()); List<Property> params4Property = JSONUtils.toList(processInstance.getGlobalParams(), Property.class); Map<String, Property> allParamMap = params4Property.stream().collect(Collectors.toMap(Property::getProp, Property -> Property)); List<Property> allParam = JSONUtils.toList(JSONUtils.toJsonString(localParams), Property.class); for (Property info : allParam) { if (info.getDirect() == Direct.OUT) { String paramName = info.getProp(); Property property = allParamMap.get(paramName); if (property == null) { continue; } String value = String.valueOf(row.get(paramName)); if (StringUtils.isNotEmpty(value)) { property.setValue(value); info.setValue(value); } } } taskParams.put(LOCAL_PARAMS, allParam); taskNode.setParams(JSONUtils.toJsonString(taskParams)); // task instance node json taskInstance.setTaskJson(JSONUtils.toJsonString(taskNode)); String params4ProcessString = JSONUtils.toJsonString(params4Property); int updateCount = this.processInstanceMapper.updateGlobalParamsById(params4ProcessString, processInstance.getId()); logger.info("updateCount:{}, params4Process:{}, processInstanceId:{}", updateCount, params4ProcessString, processInstance.getId()); } public List<Map<String, String>> getListMapByString(String json) { List<Map<String, String>> allParams = new ArrayList<>(); ArrayNode paramsByJson = JSONUtils.parseArray(json); Iterator<JsonNode> listIterator = paramsByJson.iterator(); while (listIterator.hasNext()) { Map<String, String> param = JSONUtils.toMap(listIterator.next().toString(), String.class, String.class); allParams.add(param); } return allParams; } /** * convert integer list to string list * * @param intList intList * @return string list */ public List<String> convertIntListToString(List<Integer> intList) { if (intList == null) { return new ArrayList<>(); } List<String> result = new ArrayList<>(intList.size()); for (Integer intVar : intList) { result.add(String.valueOf(intVar)); } return result; } /** * query schedule by id * * @param id id * @return schedule */ public Schedule querySchedule(int id) { return scheduleMapper.selectById(id); } /** * query Schedule by processDefinitionId * * @param processDefinitionId processDefinitionId * @see Schedule */ public List<Schedule> queryReleaseSchedulerListByProcessDefinitionId(int processDefinitionId) { return 
scheduleMapper.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId); } /** * query need failover process instance * * @param host host * @return process instance list */ public List<ProcessInstance> queryNeedFailoverProcessInstances(String host) { return processInstanceMapper.queryByHostAndStatus(host, stateArray); } /** * process need failover process instance * * @param processInstance processInstance */ @Transactional(rollbackFor = RuntimeException.class) public void processNeedFailoverProcessInstances(ProcessInstance processInstance) { //1 update processInstance host is null processInstance.setHost(Constants.NULL); processInstanceMapper.updateById(processInstance); //2 insert into recover command Command cmd = new Command(); cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId()); cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId())); cmd.setExecutorId(processInstance.getExecutorId()); cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS); createCommand(cmd); } /** * query all need failover task instances by host * * @param host host * @return task instance list */ public List<TaskInstance> queryNeedFailoverTaskInstances(String host) { return taskInstanceMapper.queryByHostAndStatus(host, stateArray); } /** * find data source by id * * @param id id * @return datasource */ public DataSource findDataSourceById(int id) { return dataSourceMapper.selectById(id); } /** * update process instance state by id * * @param processInstanceId processInstanceId * @param executionStatus executionStatus * @return update process result */ public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { ProcessInstance instance = processInstanceMapper.selectById(processInstanceId); instance.setState(executionStatus); return processInstanceMapper.updateById(instance); } /** * find process instance by the task id * * @param taskId taskId * @return process instance */ public ProcessInstance findProcessInstanceByTaskId(int taskId) { TaskInstance taskInstance = taskInstanceMapper.selectById(taskId); if (taskInstance != null) { return processInstanceMapper.selectById(taskInstance.getProcessInstanceId()); } return null; } /** * find udf function list by id list string * * @param ids ids * @return udf function list */ public List<UdfFunc> queryUdfFunListByIds(int[] ids) { return udfFuncMapper.queryUdfByIdStr(ids, null); } /** * find tenant code by resource name * * @param resName resource name * @param resourceType resource type * @return tenant code */ public String queryTenantCodeByResName(String resName, ResourceType resourceType) { // in order to query tenant code successful although the version is older String fullName = resName.startsWith("/") ? resName : String.format("/%s", resName); List<Resource> resourceList = resourceMapper.queryResource(fullName, resourceType.ordinal()); if (CollectionUtils.isEmpty(resourceList)) { return StringUtils.EMPTY; } int userId = resourceList.get(0).getUserId(); User user = userMapper.selectById(userId); if (Objects.isNull(user)) { return StringUtils.EMPTY; } Tenant tenant = tenantMapper.selectById(user.getTenantId()); if (Objects.isNull(tenant)) { return StringUtils.EMPTY; } return tenant.getTenantCode(); } /** * find schedule list by process define id. 
* * @param ids ids * @return schedule list */ public List<Schedule> selectAllByProcessDefineId(int[] ids) { return scheduleMapper.selectAllByProcessDefineArray( ids); } /** * get dependency cycle by work process define id and scheduler fire time * * @param masterId masterId * @param processDefinitionId processDefinitionId * @param scheduledFireTime the time the task schedule is expected to trigger * @return CycleDependency * @throws Exception if error throws Exception */ public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception { List<CycleDependency> list = getCycleDependencies(masterId, new int[]{processDefinitionId}, scheduledFireTime); return !list.isEmpty() ? list.get(0) : null; } /** * get dependency cycle list by work process define id list and scheduler fire time * * @param masterId masterId * @param ids ids * @param scheduledFireTime the time the task schedule is expected to trigger * @return CycleDependency list * @throws Exception if error throws Exception */ public List<CycleDependency> getCycleDependencies(int masterId, int[] ids, Date scheduledFireTime) throws Exception { List<CycleDependency> cycleDependencyList = new ArrayList<>(); if (null == ids || ids.length == 0) { logger.warn("ids[] is empty!is invalid!"); return cycleDependencyList; } if (scheduledFireTime == null) { logger.warn("scheduledFireTime is null!is invalid!"); return cycleDependencyList; } String strCrontab = ""; CronExpression depCronExpression; Cron depCron; List<Date> list; List<Schedule> schedules = this.selectAllByProcessDefineId(ids); // for all scheduling information for (Schedule depSchedule : schedules) { strCrontab = depSchedule.getCrontab(); depCronExpression = CronUtils.parse2CronExpression(strCrontab); depCron = CronUtils.parse2Cron(strCrontab); CycleEnum cycleEnum = CronUtils.getMiniCycle(depCron); if (cycleEnum == null) { logger.error("{} is not valid", strCrontab); continue; } Calendar calendar = Calendar.getInstance(); switch (cycleEnum) { case HOUR: calendar.add(Calendar.HOUR, -25); break; case DAY: case WEEK: calendar.add(Calendar.DATE, -32); break; case MONTH: calendar.add(Calendar.MONTH, -13); break; default: String cycleName = cycleEnum.name(); logger.warn("Dependent process definition's cycleEnum is {},not support!!", cycleName); continue; } Date start = calendar.getTime(); if (depSchedule.getProcessDefinitionId() == masterId) { list = CronUtils.getSelfFireDateList(start, scheduledFireTime, depCronExpression); } else { list = CronUtils.getFireDateList(start, scheduledFireTime, depCronExpression); } if (!list.isEmpty()) { start = list.get(list.size() - 1); CycleDependency dependency = new CycleDependency(depSchedule.getProcessDefinitionId(), start, CronUtils.getExpirationTime(start, cycleEnum), cycleEnum); cycleDependencyList.add(dependency); } } return cycleDependencyList; } /** * find last scheduler process instance in the date interval * * @param definitionId definitionId * @param dateInterval dateInterval * @return process instance */ public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) { return processInstanceMapper.queryLastSchedulerProcess(definitionId, dateInterval.getStartTime(), dateInterval.getEndTime()); } /** * find last manual process instance interval * * @param definitionId process definition id * @param dateInterval dateInterval * @return process instance */ public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) { 
return processInstanceMapper.queryLastManualProcess(definitionId, dateInterval.getStartTime(), dateInterval.getEndTime()); } /** * find last running process instance * * @param definitionId process definition id * @param startTime start time * @param endTime end time * @return process instance */ public ProcessInstance findLastRunningProcess(int definitionId, Date startTime, Date endTime) { return processInstanceMapper.queryLastRunningProcess(definitionId, startTime, endTime, stateArray); } /** * query user queue by process instance id * * @param processInstanceId processInstanceId * @return queue */ public String queryUserQueueByProcessInstanceId(int processInstanceId) { String queue = ""; ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId); if (processInstance == null) { return queue; } User executor = userMapper.selectById(processInstance.getExecutorId()); if (executor != null) { queue = executor.getQueue(); } return queue; } /** * query project name and user name by processInstanceId. * * @param processInstanceId processInstanceId * @return projectName and userName */ public ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId) { return projectMapper.queryProjectWithUserByProcessInstanceId(processInstanceId); } /** * get task worker group * * @param taskInstance taskInstance * @return workerGroupId */ public String getTaskWorkerGroup(TaskInstance taskInstance) { String workerGroup = taskInstance.getWorkerGroup(); if (StringUtils.isNotBlank(workerGroup)) { return workerGroup; } int processInstanceId = taskInstance.getProcessInstanceId(); ProcessInstance processInstance = findProcessInstanceById(processInstanceId); if (processInstance != null) { return processInstance.getWorkerGroup(); } logger.info("task : {} will use default worker group", taskInstance.getId()); return Constants.DEFAULT_WORKER_GROUP; } /** * get have perm project list * * @param userId userId * @return project list */ public List<Project> getProjectListHavePerm(int userId) { List<Project> createProjects = projectMapper.queryProjectCreatedByUser(userId); List<Project> authedProjects = projectMapper.queryAuthedProjectListByUserId(userId); if (createProjects == null) { createProjects = new ArrayList<>(); } if (authedProjects != null) { createProjects.addAll(authedProjects); } return createProjects; } /** * get have perm project ids * * @param userId userId * @return project ids */ public List<Integer> getProjectIdListHavePerm(int userId) { List<Integer> projectIdList = new ArrayList<>(); for (Project project : getProjectListHavePerm(userId)) { projectIdList.add(project.getId()); } return projectIdList; } /** * list unauthorized udf function * * @param userId user id * @param needChecks data source id array * @return unauthorized udf function list */ public <T> List<T> listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType) { List<T> resultList = new ArrayList<>(); if (Objects.nonNull(needChecks) && needChecks.length > 0) { Set<T> originResSet = new HashSet<>(Arrays.asList(needChecks)); switch (authorizationType) { case RESOURCE_FILE_ID: case UDF_FILE: List<Resource> ownUdfResources = resourceMapper.listAuthorizedResourceById(userId, needChecks); addAuthorizedResources(ownUdfResources, userId); Set<Integer> authorizedResourceFiles = ownUdfResources.stream().map(Resource::getId).collect(toSet()); originResSet.removeAll(authorizedResourceFiles); break; case RESOURCE_FILE_NAME: List<Resource> ownResources = 
resourceMapper.listAuthorizedResource(userId, needChecks); addAuthorizedResources(ownResources, userId); Set<String> authorizedResources = ownResources.stream().map(Resource::getFullName).collect(toSet()); originResSet.removeAll(authorizedResources); break; case DATASOURCE: Set<Integer> authorizedDatasources = dataSourceMapper.listAuthorizedDataSource(userId, needChecks).stream().map(DataSource::getId).collect(toSet()); originResSet.removeAll(authorizedDatasources); break; case UDF: Set<Integer> authorizedUdfs = udfFuncMapper.listAuthorizedUdfFunc(userId, needChecks).stream().map(UdfFunc::getId).collect(toSet()); originResSet.removeAll(authorizedUdfs); break; default: break; } resultList.addAll(originResSet); } return resultList; } /** * get user by user id * * @param userId user id * @return User */ public User getUserById(int userId) { return userMapper.selectById(userId); } /** * get resource by resource id * * @param resourceId resource id * @return Resource */ public Resource getResourceById(int resourceId) { return resourceMapper.selectById(resourceId); } /** * list resources by ids * * @param resIds resIds * @return resource list */ public List<Resource> listResourceByIds(Integer[] resIds) { return resourceMapper.listResourceByIds(resIds); } /** * format task app id in task instance */ public String formatTaskAppId(TaskInstance taskInstance) { ProcessDefinition definition = this.findProcessDefineById(taskInstance.getProcessDefinitionId()); ProcessInstance processInstanceById = this.findProcessInstanceById(taskInstance.getProcessInstanceId()); if (definition == null || processInstanceById == null) { return ""; } return String.format("%s_%s_%s", definition.getId(), processInstanceById.getId(), taskInstance.getId()); } /** * solve the branch rename bug * * @return String */ public String changeJson(ProcessData processData, String oldJson) { ProcessData oldProcessData = JSONUtils.parseObject(oldJson, ProcessData.class); HashMap<String, String> oldNameTaskId = new HashMap<>(); List<TaskNode> oldTasks = oldProcessData.getTasks(); for (int i = 0; i < oldTasks.size(); i++) { TaskNode taskNode = oldTasks.get(i); String oldName = taskNode.getName(); String oldId = taskNode.getId(); oldNameTaskId.put(oldName, oldId); } // take the process definition json saved this time, and then save the task id and name HashMap<String, String> newNameTaskId = new HashMap<>(); List<TaskNode> newTasks = processData.getTasks(); for (int i = 0; i < newTasks.size(); i++) { TaskNode taskNode = newTasks.get(i); String newId = taskNode.getId(); String newName = taskNode.getName(); newNameTaskId.put(newId, newName); } // replace the previous conditionResult with a new one List<TaskNode> tasks = processData.getTasks(); for (int i = 0; i < tasks.size(); i++) { TaskNode taskNode = newTasks.get(i); String type = taskNode.getType(); if (TaskType.CONDITIONS.getDescp().equalsIgnoreCase(type)) { ConditionsParameters conditionsParameters = JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class); String oldSuccessNodeName = conditionsParameters.getSuccessNode().get(0); String oldFailedNodeName = conditionsParameters.getFailedNode().get(0); String newSuccessNodeName = newNameTaskId.get(oldNameTaskId.get(oldSuccessNodeName)); String newFailedNodeName = newNameTaskId.get(oldNameTaskId.get(oldFailedNodeName)); if (newSuccessNodeName != null) { ArrayList<String> successNode = new ArrayList<>(); successNode.add(newSuccessNodeName); conditionsParameters.setSuccessNode(successNode); } if (newFailedNodeName !=
null) { ArrayList<String> failedNode = new ArrayList<>(); failedNode.add(newFailedNodeName); conditionsParameters.setFailedNode(failedNode); } String conditionResultStr = conditionsParameters.getConditionResult(); taskNode.setConditionResult(conditionResultStr); tasks.set(i, taskNode); } } return JSONUtils.toJsonString(processData); } /** * add authorized resources * * @param ownResources own resources * @param userId userId */ private void addAuthorizedResources(List<Resource> ownResources, int userId) { List<Integer> relationResourceIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, 7); List<Resource> relationResources = CollectionUtils.isNotEmpty(relationResourceIds) ? resourceMapper.queryResourceListById(relationResourceIds) : new ArrayList<>(); ownResources.addAll(relationResources); } }
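To make the submit-state decision in getSubmitTaskState above easier to follow, here is a minimal, self-contained restatement of just that mapping. This is an illustrative sketch only: the SubmitStateSketch class and its simplified Status enum are hypothetical stand-ins, not DolphinScheduler's ExecutionStatus, and the taskFailedUnderFailFast flag stands in for !checkProcessStrategy(taskInstance).

// Hypothetical sketch; not project code.
public class SubmitStateSketch {

    // simplified stand-in for ExecutionStatus
    enum Status { RUNNING_EXECUTION, DELAY_EXECUTION, KILL, READY_PAUSE, READY_STOP, PAUSE, SUBMITTED_SUCCESS }

    static Status submitState(Status taskState, Status processState, boolean taskFailedUnderFailFast) {
        // a task that is already running, delayed or killed keeps its state
        if (taskState == Status.RUNNING_EXECUTION
                || taskState == Status.DELAY_EXECUTION
                || taskState == Status.KILL) {
            return taskState;
        }
        // otherwise the process state decides: pause, kill, or submit success
        if (processState == Status.READY_PAUSE) {
            return Status.PAUSE;
        }
        if (processState == Status.READY_STOP || taskFailedUnderFailFast) {
            return Status.KILL;
        }
        return Status.SUBMITTED_SUCCESS;
    }

    public static void main(String[] args) {
        System.out.println(submitState(Status.SUBMITTED_SUCCESS, Status.READY_PAUSE, false)); // PAUSE
        System.out.println(submitState(Status.RUNNING_EXECUTION, Status.READY_STOP, false));  // RUNNING_EXECUTION
    }
}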
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,322
[Bug][worker] Insert into clickhouse table successfully but process status turn to failed and log exception
**For better global communication, Please describe it in English. If you feel the description in English is not clear, then you can append the description in Chinese(just for Mandarin(CN)), thx! ** **Describe the bug** DATASORUCE: clickhouse My process only has one SQL like: `INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt` The process failed but the data has been successfully inserted into the table. **To Reproduce** Steps to reproduce the behavior, for example: 1. Create a new process with only one SQL `INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt` 2. Save and run it. 3. See error below [INFO] 2021-04-19 17:49:57.364 - [taskAppId=TASK-2-9-10]:[115] - create dir success /tmp/dolphinscheduler/exec/process/1/2/9/10 [INFO] 2021-04-19 17:49:57.367 - [taskAppId=TASK-2-9-10]:[88] - sql task params {"postStatements":[],"connParams":"","receiversCc":"","udfs":"","type":"CLICKHOUSE","title":"test","sql":"INSERT INTO test\nSELECT dt, count(1) as c\n FROM dau\n GROUP BY dt\n ORDER BY dt\n","preStatements":[],"sqlType":"0","receivers":"xxx@xxx.com","datasource":1,"showType":"TABLE","localParams":[]} [INFO] 2021-04-19 17:49:57.369 - [taskAppId=TASK-2-9-10]:[104] - Full sql parameters: SqlParameters{type='CLICKHOUSE', datasource=1, sql='INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt ', sqlType=0, udfs='', showType='TABLE', connParams='', title='test', receivers='xxx@xxx.com', receiversCc='', preStatements=[], postStatements=[]} [INFO] 2021-04-19 17:49:57.369 - [taskAppId=TASK-2-9-10]:[105] - sql type : CLICKHOUSE, datasource : 1, sql : INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt , localParams : [],udfs : ,showType : TABLE,connParams : [INFO] 2021-04-19 17:49:57.369 - [taskAppId=TASK-2-9-10]:[176] - SQL title : test [INFO] 2021-04-19 17:49:57.370 - [taskAppId=TASK-2-9-10]:[498] - after replace sql , preparing : INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt [INFO] 2021-04-19 17:49:57.370 - [taskAppId=TASK-2-9-10]:[503] - Sql Params are replaced sql , parameters: [INFO] 2021-04-19 17:49:57.370 - [taskAppId=TASK-2-9-10]:[52] - can't find udf function resource [INFO] 2021-04-19 17:49:57.381 - [taskAppId=TASK-2-9-10]:[414] - prepare statement replace sql : ru.yandex.clickhouse.ClickHousePreparedStatementImpl@65fc3a50 [ERROR] 2021-04-19 17:49:57.388 - [taskAppId=TASK-2-9-10]:[243] - execute sql error java.lang.NullPointerException: null at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.resultProcess(SqlTask.java:258) at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.executeFuncAndSql(SqlTask.java:233) at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.handle(SqlTask.java:139) at org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:133) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) [ERROR] 2021-04-19 17:49:57.388 - [taskAppId=TASK-2-9-10]:[145] - sql task error java.lang.RuntimeException: execute sql error at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.executeFuncAndSql(SqlTask.java:244) at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.handle(SqlTask.java:139) at 
org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:133) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) **Expected behavior** A clear and concise description of what you expected to happen. Data inserted into the table and process status should be successful. **Screenshots** If applicable, add screenshots to help explain your problem. ![1618827014390](https://user-images.githubusercontent.com/4523603/115220161-d04fd500-a13a-11eb-9a5e-471897e4180a.jpg) ![1618827054023](https://user-images.githubusercontent.com/4523603/115220193-d776e300-a13a-11eb-95ce-61b719789c2e.jpg) ![1618827080868](https://user-images.githubusercontent.com/4523603/115220240-e2ca0e80-a13a-11eb-9b79-547627751944.jpg) **Which version of Dolphin Scheduler:** -1.3.5 **Additional context** Add any other context about the problem here. No matter deployed by docker or k8s, both met this problem. **Requirement or improvement** - Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5322
https://github.com/apache/dolphinscheduler/pull/5324
e57ee658e842ab8c5cd111f35cd6c71ed7065ecd
abdd2337b144df149a2031c3b26862ae41b4e936
"2021-04-19T10:13:44Z"
java
"2021-04-22T04:02:32Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task.sql; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.sql.SqlBinds; import org.apache.dolphinscheduler.common.task.sql.SqlParameters; import org.apache.dolphinscheduler.common.task.sql.SqlType; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.remote.command.alert.AlertSendResponseCommand; import org.apache.dolphinscheduler.server.entity.SQLTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.utils.UDFUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.service.alert.AlertClientService; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import org.slf4j.Logger; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; /** * sql task */ public class SqlTask extends AbstractTask { /** * sql parameters */ private SqlParameters sqlParameters; /** * alert dao */ private AlertDao alertDao; /** * base datasource */ private BaseDataSource baseDataSource; /** * taskExecutionContext */ private TaskExecutionContext taskExecutionContext; /** * default query sql limit */ private static final int LIMIT = 10000; private AlertClientService alertClientService; public SqlTask(TaskExecutionContext taskExecutionContext, Logger logger, AlertClientService alertClientService) { 
super(taskExecutionContext, logger); this.taskExecutionContext = taskExecutionContext; logger.info("sql task params {}", taskExecutionContext.getTaskParams()); this.sqlParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), SqlParameters.class); if (!sqlParameters.checkParameters()) { throw new RuntimeException("sql task params is not valid"); } this.alertClientService = alertClientService; this.alertDao = SpringApplicationContext.getBean(AlertDao.class); } @Override public void handle() throws Exception { // set the name of the current thread String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId()); Thread.currentThread().setName(threadLoggerInfoName); logger.info("Full sql parameters: {}", sqlParameters); logger.info("sql type : {}, datasource : {}, sql : {} , localParams : {},udfs : {},showType : {},connParams : {}", sqlParameters.getType(), sqlParameters.getDatasource(), sqlParameters.getSql(), sqlParameters.getLocalParams(), sqlParameters.getUdfs(), sqlParameters.getShowType(), sqlParameters.getConnParams()); try { SQLTaskExecutionContext sqlTaskExecutionContext = taskExecutionContext.getSqlTaskExecutionContext(); // get datasource baseDataSource = DataSourceFactory.getDatasource(DbType.valueOf(sqlParameters.getType()), sqlTaskExecutionContext.getConnectionParams()); // ready to execute SQL and parameter entity Map SqlBinds mainSqlBinds = getSqlAndSqlParamsMap(sqlParameters.getSql()); List<SqlBinds> preStatementSqlBinds = Optional.ofNullable(sqlParameters.getPreStatements()) .orElse(new ArrayList<>()) .stream() .map(this::getSqlAndSqlParamsMap) .collect(Collectors.toList()); List<SqlBinds> postStatementSqlBinds = Optional.ofNullable(sqlParameters.getPostStatements()) .orElse(new ArrayList<>()) .stream() .map(this::getSqlAndSqlParamsMap) .collect(Collectors.toList()); List<String> createFuncs = UDFUtils.createFuncs(sqlTaskExecutionContext.getUdfFuncTenantCodeMap(), logger); // execute sql task executeFuncAndSql(mainSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs); setExitStatusCode(Constants.EXIT_CODE_SUCCESS); } catch (Exception e) { setExitStatusCode(Constants.EXIT_CODE_FAILURE); logger.error("sql task error: {}", e.toString()); throw e; } } /** * ready to execute SQL and parameter entity Map * * @return SqlBinds */ private SqlBinds getSqlAndSqlParamsMap(String sql) { Map<Integer, Property> sqlParamsMap = new HashMap<>(); StringBuilder sqlBuilder = new StringBuilder(); // find process instance by task id Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), taskExecutionContext.getDefinedParams(), sqlParameters.getLocalParametersMap(), CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), taskExecutionContext.getScheduleTime()); // spell SQL according to the final user-defined variable if (paramsMap == null) { sqlBuilder.append(sql); return new SqlBinds(sqlBuilder.toString(), sqlParamsMap); } if (StringUtils.isNotEmpty(sqlParameters.getTitle())) { String title = ParameterUtils.convertParameterPlaceholders(sqlParameters.getTitle(), ParamUtils.convert(paramsMap)); logger.info("SQL title : {}", title); sqlParameters.setTitle(title); } //new //replace variable TIME with $[YYYYmmddd...] 
in sql when history run job and batch complement job sql = ParameterUtils.replaceScheduleTime(sql, taskExecutionContext.getScheduleTime()); // special characters need to be escaped, ${} needs to be escaped String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*"; setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap); //Replace the original value in sql !{...} ,Does not participate in precompilation String rgexo = "['\"]*\\!\\{(.*?)\\}['\"]*"; sql = replaceOriginalValue(sql, rgexo, paramsMap); // replace the ${} of the SQL statement with the Placeholder String formatSql = sql.replaceAll(rgex, "?"); sqlBuilder.append(formatSql); // print repalce sql printReplacedSql(sql, formatSql, rgex, sqlParamsMap); return new SqlBinds(sqlBuilder.toString(), sqlParamsMap); } public String replaceOriginalValue(String content, String rgex, Map<String, Property> sqlParamsMap) { Pattern pattern = Pattern.compile(rgex); while (true) { Matcher m = pattern.matcher(content); if (!m.find()) { break; } String paramName = m.group(1); String paramValue = sqlParamsMap.get(paramName).getValue(); content = m.replaceFirst(paramValue); } return content; } @Override public AbstractParameters getParameters() { return this.sqlParameters; } /** * execute function and sql * * @param mainSqlBinds main sql binds * @param preStatementsBinds pre statements binds * @param postStatementsBinds post statements binds * @param createFuncs create functions */ public void executeFuncAndSql(SqlBinds mainSqlBinds, List<SqlBinds> preStatementsBinds, List<SqlBinds> postStatementsBinds, List<String> createFuncs) throws Exception { Connection connection = null; PreparedStatement stmt = null; ResultSet resultSet = null; try { baseDataSource.setConnParams(sqlParameters.getConnParams()); // create connection connection = baseDataSource.getConnection(); // create temp function if (CollectionUtils.isNotEmpty(createFuncs)) { createTempFunction(connection, createFuncs); } // pre sql preSql(connection, preStatementsBinds); stmt = prepareStatementAndBind(connection, mainSqlBinds); String result = null; // decide whether to executeQuery or executeUpdate based on sqlType if (sqlParameters.getSqlType() == SqlType.QUERY.ordinal()) { // query statements need to be convert to JsonArray and inserted into Alert to send resultSet = stmt.executeQuery(); result = resultProcess(resultSet); } else if (sqlParameters.getSqlType() == SqlType.NON_QUERY.ordinal()) { // non query statement String updateResult = String.valueOf(stmt.executeUpdate()); result = setNonQuerySqlReturn(updateResult, sqlParameters.getLocalParams()); } postSql(connection, postStatementsBinds); this.setResultString(result); } catch (Exception e) { logger.error("execute sql error: {}", e.getMessage()); throw e; } finally { close(resultSet, stmt, connection); } } public String setNonQuerySqlReturn(String updateResult, List<Property> properties) { String result = null; for (Property info :properties) { if (Direct.OUT == info.getDirect()) { List<Map<String,String>> updateRL = new ArrayList<>(); Map<String,String> updateRM = new HashMap<>(); updateRM.put(info.getProp(),updateResult); updateRL.add(updateRM); result = JSONUtils.toJsonString(updateRL); break; } } return result; } /** * result process * * @param resultSet resultSet * @throws Exception Exception */ private String resultProcess(ResultSet resultSet) throws Exception { ArrayNode resultJSONArray = JSONUtils.createArrayNode(); ResultSetMetaData md = resultSet.getMetaData(); int num = md.getColumnCount(); int rowCount = 0; while (rowCount < LIMIT && 
resultSet.next()) { ObjectNode mapOfColValues = JSONUtils.createObjectNode(); for (int i = 1; i <= num; i++) { mapOfColValues.set(md.getColumnLabel(i), JSONUtils.toJsonNode(resultSet.getObject(i))); } resultJSONArray.add(mapOfColValues); rowCount++; } String result = JSONUtils.toJsonString(resultJSONArray); logger.debug("execute sql result : {}", result); int displayRows = sqlParameters.getDisplayRows() > 0 ? sqlParameters.getDisplayRows() : Constants.DEFAULT_DISPLAY_ROWS; displayRows = Math.min(displayRows, resultJSONArray.size()); logger.info("display sql result {} rows as follows:", displayRows); for (int i = 0; i < displayRows; i++) { String row = JSONUtils.toJsonString(resultJSONArray.get(i)); logger.info("row {} : {}", i + 1, row); } if (sqlParameters.getSendEmail() == null || sqlParameters.getSendEmail()) { sendAttachment(sqlParameters.getGroupId(), StringUtils.isNotEmpty(sqlParameters.getTitle()) ? sqlParameters.getTitle() : taskExecutionContext.getTaskName() + " query result sets", JSONUtils.toJsonString(resultJSONArray)); } return result; } /** * pre sql * * @param connection connection * @param preStatementsBinds preStatementsBinds */ private void preSql(Connection connection, List<SqlBinds> preStatementsBinds) throws Exception { for (SqlBinds sqlBind : preStatementsBinds) { try (PreparedStatement pstmt = prepareStatementAndBind(connection, sqlBind)) { int result = pstmt.executeUpdate(); logger.info("pre statement execute result: {}, for sql: {}", result, sqlBind.getSql()); } } } /** * post sql * * @param connection connection * @param postStatementsBinds postStatementsBinds */ private void postSql(Connection connection, List<SqlBinds> postStatementsBinds) throws Exception { for (SqlBinds sqlBind : postStatementsBinds) { try (PreparedStatement pstmt = prepareStatementAndBind(connection, sqlBind)) { int result = pstmt.executeUpdate(); logger.info("post statement execute result: {},for sql: {}", result, sqlBind.getSql()); } } } /** * create temp function * * @param connection connection * @param createFuncs createFuncs */ private void createTempFunction(Connection connection, List<String> createFuncs) throws Exception { try (Statement funcStmt = connection.createStatement()) { for (String createFunc : createFuncs) { logger.info("hive create function sql: {}", createFunc); funcStmt.execute(createFunc); } } } /** * close jdbc resource * * @param resultSet resultSet * @param pstmt pstmt * @param connection connection */ private void close(ResultSet resultSet, PreparedStatement pstmt, Connection connection) { if (resultSet != null) { try { resultSet.close(); } catch (SQLException e) { logger.error("close result set error : {}", e.getMessage(), e); } } if (pstmt != null) { try { pstmt.close(); } catch (SQLException e) { logger.error("close prepared statement error : {}", e.getMessage(), e); } } if (connection != null) { try { connection.close(); } catch (SQLException e) { logger.error("close connection error : {}", e.getMessage(), e); } } } /** * preparedStatement bind * * @param connection connection * @param sqlBinds sqlBinds * @return PreparedStatement * @throws Exception Exception */ private PreparedStatement prepareStatementAndBind(Connection connection, SqlBinds sqlBinds) throws Exception { // is the timeout set boolean timeoutFlag = TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.FAILED || TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.WARNFAILED; PreparedStatement stmt = 
connection.prepareStatement(sqlBinds.getSql()); if (timeoutFlag) { stmt.setQueryTimeout(taskExecutionContext.getTaskTimeout()); } Map<Integer, Property> params = sqlBinds.getParamsMap(); if (params != null) { for (Map.Entry<Integer, Property> entry : params.entrySet()) { Property prop = entry.getValue(); ParameterUtils.setInParameter(entry.getKey(), stmt, prop.getType(), prop.getValue()); } } logger.info("prepare statement replace sql : {} ", stmt); return stmt; } /** * send mail as an attachment * * @param title title * @param content content */ public void sendAttachment(int groupId, String title, String content) { AlertSendResponseCommand alertSendResponseCommand = alertClientService.sendAlert(groupId, title, content); if (!alertSendResponseCommand.getResStatus()) { throw new RuntimeException("send mail failed!"); } } /** * regular expressions match the contents between two specified strings * * @param content content * @param rgex rgex * @param sqlParamsMap sql params map * @param paramsPropsMap params props map */ public void setSqlParamsMap(String content, String rgex, Map<Integer, Property> sqlParamsMap, Map<String, Property> paramsPropsMap) { Pattern pattern = Pattern.compile(rgex); Matcher m = pattern.matcher(content); int index = 1; while (m.find()) { String paramName = m.group(1); Property prop = paramsPropsMap.get(paramName); sqlParamsMap.put(index, prop); index++; } } /** * print replace sql * * @param content content * @param formatSql format sql * @param rgex rgex * @param sqlParamsMap sql params map */ public void printReplacedSql(String content, String formatSql, String rgex, Map<Integer, Property> sqlParamsMap) { //parameter print style logger.info("after replace sql , preparing : {}", formatSql); StringBuilder logPrint = new StringBuilder("replaced sql , parameters:"); for (int i = 1; i <= sqlParamsMap.size(); i++) { logPrint.append(sqlParamsMap.get(i).getValue() + "(" + sqlParamsMap.get(i).getType() + ")"); } logger.info("Sql Params are {}", logPrint); } }
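The stack trace in the issue above points at resultProcess: the task was saved with sqlType 0 (QUERY), and a driver such as ClickHouse can return a null ResultSet when an INSERT is pushed through executeQuery(), so resultSet.getMetaData() throws the NullPointerException. The change actually merged via pull 5324 is not reproduced here; what follows is only a minimal, self-contained sketch of a defensive guard consistent with that trace. The NullSafeResultSketch class and readRows method are hypothetical names used for illustration.

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class NullSafeResultSketch {

    private static final int LIMIT = 10000;

    // Reads at most LIMIT rows into column-label -> value maps. Returns an
    // empty list when the driver hands back a null ResultSet, instead of
    // dereferencing it and throwing a NullPointerException.
    static List<Map<String, Object>> readRows(ResultSet resultSet) throws SQLException {
        List<Map<String, Object>> rows = new ArrayList<>();
        if (resultSet == null) {
            return rows;
        }
        ResultSetMetaData md = resultSet.getMetaData();
        int columns = md.getColumnCount();
        int rowCount = 0;
        while (rowCount < LIMIT && resultSet.next()) {
            Map<String, Object> row = new LinkedHashMap<>();
            for (int i = 1; i <= columns; i++) {
                row.put(md.getColumnLabel(i), resultSet.getObject(i));
            }
            rows.add(row);
            rowCount++;
        }
        return rows;
    }

    public static void main(String[] args) throws SQLException {
        // the null case that made the ClickHouse INSERT fail: now just an empty result
        System.out.println(readRows(null)); // []
    }
}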
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,322
[Bug][Worker] Insert into ClickHouse table succeeds but process status turns to failed and an exception is logged
**Describe the bug** DATASOURCE: ClickHouse. My process has only one SQL statement: `INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt` The process failed, but the data was successfully inserted into the table. **To Reproduce** Steps to reproduce the behavior: 1. Create a new process with only one SQL statement: `INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt` 2. Save and run it. 3. See the error below: [INFO] 2021-04-19 17:49:57.364 - [taskAppId=TASK-2-9-10]:[115] - create dir success /tmp/dolphinscheduler/exec/process/1/2/9/10 [INFO] 2021-04-19 17:49:57.367 - [taskAppId=TASK-2-9-10]:[88] - sql task params {"postStatements":[],"connParams":"","receiversCc":"","udfs":"","type":"CLICKHOUSE","title":"test","sql":"INSERT INTO test\nSELECT dt, count(1) as c\n FROM dau\n GROUP BY dt\n ORDER BY dt\n","preStatements":[],"sqlType":"0","receivers":"xxx@xxx.com","datasource":1,"showType":"TABLE","localParams":[]} [INFO] 2021-04-19 17:49:57.369 - [taskAppId=TASK-2-9-10]:[104] - Full sql parameters: SqlParameters{type='CLICKHOUSE', datasource=1, sql='INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt ', sqlType=0, udfs='', showType='TABLE', connParams='', title='test', receivers='xxx@xxx.com', receiversCc='', preStatements=[], postStatements=[]} [INFO] 2021-04-19 17:49:57.369 - [taskAppId=TASK-2-9-10]:[105] - sql type : CLICKHOUSE, datasource : 1, sql : INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt , localParams : [],udfs : ,showType : TABLE,connParams : [INFO] 2021-04-19 17:49:57.369 - [taskAppId=TASK-2-9-10]:[176] - SQL title : test [INFO] 2021-04-19 17:49:57.370 - [taskAppId=TASK-2-9-10]:[498] - after replace sql , preparing : INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt [INFO] 2021-04-19 17:49:57.370 - [taskAppId=TASK-2-9-10]:[503] - Sql Params are replaced sql , parameters: [INFO] 2021-04-19 17:49:57.370 - [taskAppId=TASK-2-9-10]:[52] - can't find udf function resource [INFO] 2021-04-19 17:49:57.381 - [taskAppId=TASK-2-9-10]:[414] - prepare statement replace sql : ru.yandex.clickhouse.ClickHousePreparedStatementImpl@65fc3a50 [ERROR] 2021-04-19 17:49:57.388 - [taskAppId=TASK-2-9-10]:[243] - execute sql error java.lang.NullPointerException: null at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.resultProcess(SqlTask.java:258) at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.executeFuncAndSql(SqlTask.java:233) at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.handle(SqlTask.java:139) at org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:133) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) [ERROR] 2021-04-19 17:49:57.388 - [taskAppId=TASK-2-9-10]:[145] - sql task error java.lang.RuntimeException: execute sql error at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.executeFuncAndSql(SqlTask.java:244) at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.handle(SqlTask.java:139) at
org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:133) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) **Expected behavior** Data should be inserted into the table and the process status should be successful. **Screenshots** ![1618827014390](https://user-images.githubusercontent.com/4523603/115220161-d04fd500-a13a-11eb-9a5e-471897e4180a.jpg) ![1618827054023](https://user-images.githubusercontent.com/4523603/115220193-d776e300-a13a-11eb-95ce-61b719789c2e.jpg) ![1618827080868](https://user-images.githubusercontent.com/4523603/115220240-e2ca0e80-a13a-11eb-9b79-547627751944.jpg) **Which version of Dolphin Scheduler:** -1.3.5 **Additional context** This problem occurs whether deployed via Docker or K8s.
https://github.com/apache/dolphinscheduler/issues/5322
https://github.com/apache/dolphinscheduler/pull/5324
e57ee658e842ab8c5cd111f35cd6c71ed7065ecd
abdd2337b144df149a2031c3b26862ae41b4e936
"2021-04-19T10:13:44Z"
java
"2021-04-22T04:02:32Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTaskTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task.sql; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.server.entity.SQLTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.apache.dolphinscheduler.service.alert.AlertClientService; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.util.Date; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * sql task test */ @RunWith(PowerMockRunner.class) @PrepareForTest(value = {SqlTask.class, DriverManager.class, SpringApplicationContext.class, ParameterUtils.class}) public class SqlTaskTest { private static final Logger logger = LoggerFactory.getLogger(SqlTaskTest.class); private static final String CONNECTION_PARAMS = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:mysql://127.0.0.1:3306\"," + "\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\"}"; private SqlTask sqlTask; private TaskExecutionContext taskExecutionContext; private AlertClientService alertClientService; @Before public void before() throws Exception { taskExecutionContext = new TaskExecutionContext(); TaskProps props = new TaskProps(); props.setExecutePath("/tmp"); props.setTaskAppId(String.valueOf(System.currentTimeMillis())); props.setTaskInstanceId(1); props.setTenantCode("1"); props.setEnvFile(".dolphinscheduler_env.sh"); props.setTaskStartTime(new Date()); props.setTaskTimeout(0); props.setTaskParams( "{\"localParams\":[{\"prop\":\"ret\", \"direct\":\"OUT\", \"type\":\"VARCHAR\", \"value\":\"\"}]," + "\"type\":\"POSTGRESQL\",\"datasource\":1,\"sql\":\"insert into tb_1 values('1','2')\"," + "\"sqlType\":1}"); taskExecutionContext = PowerMockito.mock(TaskExecutionContext.class); PowerMockito.when(taskExecutionContext.getTaskParams()).thenReturn(props.getTaskParams()); PowerMockito.when(taskExecutionContext.getExecutePath()).thenReturn("/tmp"); PowerMockito.when(taskExecutionContext.getTaskAppId()).thenReturn("1"); PowerMockito.when(taskExecutionContext.getTenantCode()).thenReturn("root"); PowerMockito.when(taskExecutionContext.getStartTime()).thenReturn(new Date()); 
PowerMockito.when(taskExecutionContext.getTaskTimeout()).thenReturn(10000); PowerMockito.when(taskExecutionContext.getLogPath()).thenReturn("/tmp/dx"); SQLTaskExecutionContext sqlTaskExecutionContext = new SQLTaskExecutionContext(); sqlTaskExecutionContext.setConnectionParams(CONNECTION_PARAMS); PowerMockito.when(taskExecutionContext.getSqlTaskExecutionContext()).thenReturn(sqlTaskExecutionContext); PowerMockito.mockStatic(SpringApplicationContext.class); PowerMockito.when(SpringApplicationContext.getBean(Mockito.any())).thenReturn(new AlertDao()); alertClientService = PowerMockito.mock(AlertClientService.class); sqlTask = new SqlTask(taskExecutionContext, logger, alertClientService); sqlTask.init(); } @Test public void testGetParameters() { Assert.assertNotNull(sqlTask.getParameters()); } @Test public void testHandle() throws Exception { Connection connection = PowerMockito.mock(Connection.class); PowerMockito.mockStatic(DriverManager.class); PowerMockito.when(DriverManager.getConnection(Mockito.any(), Mockito.any(), Mockito.any())).thenReturn(connection); PreparedStatement preparedStatement = PowerMockito.mock(PreparedStatement.class); PowerMockito.when(connection.prepareStatement(Mockito.any())).thenReturn(preparedStatement); PowerMockito.mockStatic(ParameterUtils.class); PowerMockito.when(ParameterUtils.replaceScheduleTime(Mockito.any(), Mockito.any())).thenReturn("insert into tb_1 values('1','2')"); sqlTask.handle(); Assert.assertEquals(Constants.EXIT_CODE_SUCCESS,sqlTask.getExitStatusCode()); } }
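The `NullPointerException` at `SqlTask.resultProcess` in the report above fires because some drivers (here the ClickHouse `INSERT`) hand back no `ResultSet` at all, so `getMetaData()` is invoked on `null`. Without claiming this is the exact change merged in #5324, the following runnable sketch (against H2, which the project already uses as a test dependency) shows the null-guard pattern that turns such statements into an empty result instead of an exception:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;

public class NullResultSetGuardSketch {

    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:guard");
             PreparedStatement stmt = conn.prepareStatement("CREATE TABLE t(x INT)")) {
            stmt.execute();                     // DDL/DML yields an update count, not rows
            ResultSet rs = stmt.getResultSet(); // null when there is no result set
            System.out.println(process(rs));    // prints "0 rows" instead of throwing
        }
    }

    // Guarded result processing: a null ResultSet yields zero rows,
    // rather than an NPE on resultSet.getMetaData().
    static String process(ResultSet resultSet) throws Exception {
        int rows = 0;
        if (resultSet != null) {
            ResultSetMetaData md = resultSet.getMetaData();
            while (resultSet.next()) {
                for (int i = 1; i <= md.getColumnCount(); i++) {
                    resultSet.getObject(i); // real code would serialize each column
                }
                rows++;
            }
        }
        return rows + " rows";
    }
}
```

The `SqlTaskTest` above exercises exactly this path: it mocks the `Connection` and `PreparedStatement` and asserts that `handle()` finishes with `EXIT_CODE_SUCCESS` even though no result rows exist.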
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,322
[Bug][Worker] Insert into ClickHouse table succeeds but process status turns to failed and an exception is logged
**Describe the bug** DATASOURCE: ClickHouse. My process has only one SQL statement: `INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt` The process failed, but the data was successfully inserted into the table. **To Reproduce** Steps to reproduce the behavior: 1. Create a new process with only one SQL statement: `INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt` 2. Save and run it. 3. See the error below: [INFO] 2021-04-19 17:49:57.364 - [taskAppId=TASK-2-9-10]:[115] - create dir success /tmp/dolphinscheduler/exec/process/1/2/9/10 [INFO] 2021-04-19 17:49:57.367 - [taskAppId=TASK-2-9-10]:[88] - sql task params {"postStatements":[],"connParams":"","receiversCc":"","udfs":"","type":"CLICKHOUSE","title":"test","sql":"INSERT INTO test\nSELECT dt, count(1) as c\n FROM dau\n GROUP BY dt\n ORDER BY dt\n","preStatements":[],"sqlType":"0","receivers":"xxx@xxx.com","datasource":1,"showType":"TABLE","localParams":[]} [INFO] 2021-04-19 17:49:57.369 - [taskAppId=TASK-2-9-10]:[104] - Full sql parameters: SqlParameters{type='CLICKHOUSE', datasource=1, sql='INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt ', sqlType=0, udfs='', showType='TABLE', connParams='', title='test', receivers='xxx@xxx.com', receiversCc='', preStatements=[], postStatements=[]} [INFO] 2021-04-19 17:49:57.369 - [taskAppId=TASK-2-9-10]:[105] - sql type : CLICKHOUSE, datasource : 1, sql : INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt , localParams : [],udfs : ,showType : TABLE,connParams : [INFO] 2021-04-19 17:49:57.369 - [taskAppId=TASK-2-9-10]:[176] - SQL title : test [INFO] 2021-04-19 17:49:57.370 - [taskAppId=TASK-2-9-10]:[498] - after replace sql , preparing : INSERT INTO test SELECT dt, count(1) as c FROM dau GROUP BY dt ORDER BY dt [INFO] 2021-04-19 17:49:57.370 - [taskAppId=TASK-2-9-10]:[503] - Sql Params are replaced sql , parameters: [INFO] 2021-04-19 17:49:57.370 - [taskAppId=TASK-2-9-10]:[52] - can't find udf function resource [INFO] 2021-04-19 17:49:57.381 - [taskAppId=TASK-2-9-10]:[414] - prepare statement replace sql : ru.yandex.clickhouse.ClickHousePreparedStatementImpl@65fc3a50 [ERROR] 2021-04-19 17:49:57.388 - [taskAppId=TASK-2-9-10]:[243] - execute sql error java.lang.NullPointerException: null at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.resultProcess(SqlTask.java:258) at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.executeFuncAndSql(SqlTask.java:233) at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.handle(SqlTask.java:139) at org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:133) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) [ERROR] 2021-04-19 17:49:57.388 - [taskAppId=TASK-2-9-10]:[145] - sql task error java.lang.RuntimeException: execute sql error at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.executeFuncAndSql(SqlTask.java:244) at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.handle(SqlTask.java:139) at
org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:133) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) **Expected behavior** Data should be inserted into the table and the process status should be successful. **Screenshots** ![1618827014390](https://user-images.githubusercontent.com/4523603/115220161-d04fd500-a13a-11eb-9a5e-471897e4180a.jpg) ![1618827054023](https://user-images.githubusercontent.com/4523603/115220193-d776e300-a13a-11eb-95ce-61b719789c2e.jpg) ![1618827080868](https://user-images.githubusercontent.com/4523603/115220240-e2ca0e80-a13a-11eb-9b79-547627751944.jpg) **Which version of Dolphin Scheduler:** -1.3.5 **Additional context** This problem occurs whether deployed via Docker or K8s.
https://github.com/apache/dolphinscheduler/issues/5322
https://github.com/apache/dolphinscheduler/pull/5324
e57ee658e842ab8c5cd111f35cd6c71ed7065ecd
abdd2337b144df149a2031c3b26862ae41b4e936
"2021-04-19T10:13:44Z"
java
"2021-04-22T04:02:32Z"
pom.xml
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>${revision}</version> <packaging>pom</packaging> <name>${project.artifactId}</name> <url>http://dolphinscheduler.apache.org</url> <description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. </description> <licenses> <license> <name>Apache License 2.0</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> <distribution>repo</distribution> </license> </licenses> <scm> <connection>scm:git:https://github.com/apache/dolphinscheduler.git</connection> <developerConnection>scm:git:https://github.com/apache/dolphinscheduler.git</developerConnection> <url>https://github.com/apache/dolphinscheduler</url> <tag>HEAD</tag> </scm> <mailingLists> <mailingList> <name>DolphinScheduler Developer List</name> <post>dev@dolphinscheduler.apache.org</post> <subscribe>dev-subscribe@dolphinscheduler.apache.org</subscribe> <unsubscribe>dev-unsubscribe@dolphinscheduler.apache.org</unsubscribe> </mailingList> </mailingLists> <parent> <groupId>org.apache</groupId> <artifactId>apache</artifactId> <version>21</version> </parent> <properties> <revision>1.3.6-SNAPSHOT</revision> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <curator.version>4.3.0</curator.version> <spring.version>5.1.18.RELEASE</spring.version> <spring.boot.version>2.1.17.RELEASE</spring.boot.version> <java.version>1.8</java.version> <logback.version>1.2.3</logback.version> <hadoop.version>2.7.3</hadoop.version> <quartz.version>2.3.0</quartz.version> <jackson.version>2.10.5</jackson.version> <mybatis-plus.version>3.2.0</mybatis-plus.version> <mybatis.spring.version>2.0.1</mybatis.spring.version> <cron.utils.version>5.0.5</cron.utils.version> <druid.version>1.1.22</druid.version> <h2.version>1.4.200</h2.version> <commons.codec.version>1.11</commons.codec.version> <commons.logging.version>1.1.1</commons.logging.version> <httpclient.version>4.4.1</httpclient.version> <httpcore.version>4.4.1</httpcore.version> <junit.version>4.12</junit.version> <mysql.connector.version>5.1.34</mysql.connector.version> <slf4j.api.version>1.7.5</slf4j.api.version> <slf4j.log4j12.version>1.7.5</slf4j.log4j12.version> 
<commons.collections.version>3.2.2</commons.collections.version> <commons.httpclient>3.0.1</commons.httpclient> <commons.beanutils.version>1.9.4</commons.beanutils.version> <commons.configuration.version>1.10</commons.configuration.version> <commons.email.version>1.5</commons.email.version> <poi.version>3.17</poi.version> <javax.servlet.api.version>3.1.0</javax.servlet.api.version> <commons.collections4.version>4.1</commons.collections4.version> <guava.version>24.1-jre</guava.version> <postgresql.version>42.2.5</postgresql.version> <hive.jdbc.version>2.1.0</hive.jdbc.version> <commons.io.version>2.4</commons.io.version> <oshi.core.version>3.9.1</oshi.core.version> <clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version> <mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version> <presto.jdbc.version>0.238.1</presto.jdbc.version> <spotbugs.version>3.1.12</spotbugs.version> <checkstyle.version>3.0.0</checkstyle.version> <zookeeper.version>3.4.14</zookeeper.version> <frontend-maven-plugin.version>1.6</frontend-maven-plugin.version> <maven-compiler-plugin.version>3.3</maven-compiler-plugin.version> <maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version> <maven-release-plugin.version>2.5.3</maven-release-plugin.version> <maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version> <maven-source-plugin.version>2.4</maven-source-plugin.version> <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version> <maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version> <rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version> <jacoco.version>0.8.4</jacoco.version> <jcip.version>1.0</jcip.version> <maven.deploy.skip>false</maven.deploy.skip> <cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version> <mockito.version>2.21.0</mockito.version> <powermock.version>2.0.2</powermock.version> <servlet-api.version>2.5</servlet-api.version> <swagger.version>1.9.3</swagger.version> <springfox.version>2.9.2</springfox.version> <swagger-models.version>1.5.24</swagger-models.version> <guava-retry.version>2.0.0</guava-retry.version> <dep.airlift.version>0.184</dep.airlift.version> <dep.packaging.version>${dep.airlift.version}</dep.packaging.version> <protostuff.version>1.7.2</protostuff.version> <reflections.version>0.9.12</reflections.version> <byte-buddy.version>1.9.16</byte-buddy.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <!-- quartz--> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz-jobs</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> <version>${cron.utils.version}</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>${druid.version}</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-parent</artifactId> <version>${spring.boot.version}</version> <type>pom</type> <scope>import</scope> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> 
<version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-tx</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-jdbc</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <version>${spring.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-server</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert-plugin</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-dao</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-api</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-remote</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-service</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-spi</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-framework</artifactId> <version>${curator.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-recipes</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </exclusion> <exclusion> <artifactId>netty</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-annotations</artifactId> </exclusion> </exclusions> <version>${zookeeper.version}</version> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec.version}</version> </dependency> <dependency> <groupId>commons-logging</groupId> <artifactId>commons-logging</artifactId> <version>${commons.logging.version}</version> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>${httpclient.version}</version> </dependency> <dependency> 
<groupId>org.apache.httpcomponents</groupId> <artifactId>httpcore</artifactId> <version>${httpcore.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-core</artifactId> <version>${jackson.version}</version> </dependency> <!--protostuff--> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-core --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-core</artifactId> <version>${protostuff.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-runtime --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-runtime</artifactId> <version>${protostuff.version}</version> </dependency> <dependency> <groupId>net.bytebuddy</groupId> <artifactId>byte-buddy</artifactId> <version>${byte-buddy.version}</version> </dependency> <dependency> <groupId>org.reflections</groupId> <artifactId>reflections</artifactId> <version>${reflections.version}</version> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockito.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-module-junit4</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-api-mockito2</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> <exclusions> <exclusion> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>${mysql.connector.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> <version>${h2.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${slf4j.api.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>${slf4j.log4j12.version}</version> </dependency> <dependency> <groupId>commons-collections</groupId> <artifactId>commons-collections</artifactId> <version>${commons.collections.version}</version> </dependency> <dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> <version>${commons.httpclient}</version> </dependency> <dependency> <groupId>commons-beanutils</groupId> <artifactId>commons-beanutils</artifactId> <version>${commons.beanutils.version}</version> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> <version>${commons.configuration.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> 
<version>${logback.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-email</artifactId> <version>${commons.email.version}</version> </dependency> <!--excel poi--> <dependency> <groupId>org.apache.poi</groupId> <artifactId>poi</artifactId> <version>${poi.version}</version> </dependency> <!-- hadoop --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> <exclusions> <exclusion> <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <artifactId>com.sun.jersey</artifactId> <groupId>jersey-json</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-yarn-common</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-aws</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-collections4</artifactId> <version>${commons.collections4.version}</version> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> <version>${postgresql.version}</version> </dependency> <dependency> <groupId>org.apache.hive</groupId> <artifactId>hive-jdbc</artifactId> <version>${hive.jdbc.version}</version> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>${commons.io.version}</version> </dependency> <dependency> <groupId>com.github.oshi</groupId> <artifactId>oshi-core</artifactId> <version>${oshi.core.version}</version> </dependency> <dependency> <groupId>ru.yandex.clickhouse</groupId> <artifactId>clickhouse-jdbc</artifactId> <version>${clickhouse.jdbc.version}</version> </dependency> <dependency> <groupId>com.microsoft.sqlserver</groupId> <artifactId>mssql-jdbc</artifactId> <version>${mssql.jdbc.version}</version> </dependency> <dependency> <groupId>com.facebook.presto</groupId> <artifactId>presto-jdbc</artifactId> <version>${presto.jdbc.version}</version> </dependency> <dependency> <groupId>net.jcip</groupId> <artifactId>jcip-annotations</artifactId> <version>${jcip.version}</version> <optional>true</optional> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>servlet-api</artifactId> <version>${servlet-api.version}</version> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>javax.servlet-api</artifactId> <version>${javax.servlet.api.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger2</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger-ui</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.swagger</groupId> <artifactId>swagger-models</artifactId> <version>${swagger-models.version}</version> </dependency> <dependency> <groupId>com.github.xiaoymin</groupId> <artifactId>swagger-bootstrap-ui</artifactId> 
<version>${swagger.version}</version> </dependency> <dependency> <groupId>com.github.rholder</groupId> <artifactId>guava-retrying</artifactId> <version>${guava-retry.version}</version> </dependency> <dependency> <groupId>org.sonatype.aether</groupId> <artifactId>aether-api</artifactId> <version>1.13.1</version> </dependency> <dependency> <groupId>io.airlift.resolver</groupId> <artifactId>resolver</artifactId> <version>1.5</version> </dependency> <dependency> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> <version>6.2.1</version> </dependency> <dependency> <groupId>javax.activation</groupId> <artifactId>activation</artifactId> <version>1.1</version> </dependency> <dependency> <groupId>com.sun.mail</groupId> <artifactId>javax.mail</artifactId> <version>1.6.2</version> </dependency> </dependencies> </dependencyManagement> <build> <finalName>apache-dolphinscheduler-${project.version}</finalName> <pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <version>1.0.0</version> <extensions>true</extensions> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <version>1.0.4</version> <extensions>true</extensions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>rpm-maven-plugin</artifactId> <version>${rpm-maven-plugion.version}</version> <inherited>false</inherited> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>${java.version}</source> <target>${java.version}</target> <testSource>${java.version}</testSource> <testTarget>${java.version}</testTarget> </configuration> <version>${maven-compiler-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <tagNameFormat>@{project.version}</tagNameFormat> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-assembly-plugin</artifactId> <version>${maven-assembly-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <configuration> <source>8</source> <failOnError>false</failOnError> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>${maven-source-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> <version>${maven-dependency-plugin.version}</version> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <extensions>true</extensions> <!--<configuration>--> <!--<allowedProvidedDependencies>--> <!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>--> <!--</allowedProvidedDependencies>--> <!--</configuration>--> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <extensions>true</extensions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> 
<executions> <execution> <id>attach-sources</id> <phase>verify</phase> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <executions> <execution> <id>attach-javadocs</id> <goals> <goal>jar</goal> </goals> </execution> </executions> <configuration> <aggregate>true</aggregate> <charset>${project.build.sourceEncoding}</charset> <encoding>${project.build.sourceEncoding}</encoding> <docencoding>${project.build.sourceEncoding}</docencoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <autoVersionSubmodules>true</autoVersionSubmodules> <tagNameFormat>@{project.version}</tagNameFormat> <tagBase>${project.version}</tagBase> <!--<goals>-f pom.xml deploy</goals>--> </configuration> <dependencies> <dependency> <groupId>org.apache.maven.scm</groupId> <artifactId>maven-scm-provider-jgit</artifactId> <version>1.9.5</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>${maven-compiler-plugin.version}</version> <configuration> <source>${java.version}</source> <target>${java.version}</target> <encoding>${project.build.sourceEncoding}</encoding> <skip>false</skip><!--not skip compile test classes--> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>${maven-surefire-plugin.version}</version> <configuration> <includes> <include>**/api/controller/ProjectControllerTest.java</include> <include>**/api/controller/QueueControllerTest.java</include> <include>**/api/configuration/TrafficConfigurationTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TenantControllerTest.java</include> <include>**/api/dto/resources/filter/ResourceFilterTest.java</include> <include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include> <include>**/api/enums/testGetEnum.java</include> <include>**/api/enums/StatusTest.java</include> <include>**/api/exceptions/ApiExceptionHandlerTest.java</include> <include>**/api/exceptions/ServiceExceptionTest.java</include> <include>**/api/interceptor/LocaleChangeInterceptorTest.java</include> <include>**/api/interceptor/LoginHandlerInterceptorTest.java</include> <include>**/api/interceptor/RateLimitInterceptorTest.java</include> <include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include> <include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include> <include>**/api/security/SecurityConfigLDAPTest.java</include> <include>**/api/security/SecurityConfigPasswordTest.java</include> <include>**/api/service/AccessTokenServiceTest.java</include> <include>**/api/service/AlertGroupServiceTest.java</include> <include>**/api/service/BaseDAGServiceTest.java</include> <include>**/api/service/BaseServiceTest.java</include> <include>**/api/service/DataAnalysisServiceTest.java</include> <include>**/api/service/AlertPluginInstanceServiceTest.java</include> <include>**/api/service/DataSourceServiceTest.java</include> <include>**/api/service/ExecutorService2Test.java</include> <include>**/api/service/ExecutorServiceTest.java</include>
<include>**/api/service/LoggerServiceTest.java</include> <include>**/api/service/MonitorServiceTest.java</include> <include>**/api/service/ProcessDefinitionServiceTest.java</include> <include>**/api/service/ProcessDefinitionVersionServiceTest.java</include> <include>**/api/service/ProcessInstanceServiceTest.java</include> <include>**/api/service/ProjectServiceTest.java</include> <include>**/api/service/QueueServiceTest.java</include> <include>**/api/service/ResourcesServiceTest.java</include> <include>**/api/service/SchedulerServiceTest.java</include> <include>**/api/service/SessionServiceTest.java</include> <include>**/api/service/TaskInstanceServiceTest.java</include> <include>**/api/service/TenantServiceTest.java</include> <include>**/api/service/UdfFuncServiceTest.java</include> <include>**/api/service/UiPluginServiceTest.java</include> <include>**/api/service/UserAlertGroupServiceTest.java</include> <include>**/api/service/UsersServiceTest.java</include> <include>**/api/service/WorkerGroupServiceTest.java</include> <include>**/api/service/WorkFlowLineageServiceTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TaskInstanceControllerTest.java</include> <include>**/api/controller/WorkFlowLineageControllerTest.java</include> <include>**/api/utils/exportprocess/DataSourceParamTest.java</include> <include>**/api/utils/exportprocess/DependentParamTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/FileUtilsTest.java</include> <include>**/api/utils/ResultTest.java</include> <include>**/common/graph/DAGTest.java</include> <include>**/common/os/OshiTest.java</include> <include>**/common/os/OSUtilsTest.java</include> <include>**/common/shell/ShellExecutorTest.java</include> <include>**/common/task/DataxParametersTest.java</include> <include>**/common/task/EntityTestUtils.java</include> <include>**/common/task/FlinkParametersTest.java</include> <include>**/common/task/HttpParametersTest.java</include> <include>**/common/task/SqlParametersTest.java</include> <include>**/common/task/SqoopParameterEntityTest.java</include> <include>**/common/threadutils/ThreadPoolExecutorsTest.java</include> <include>**/common/threadutils/ThreadUtilsTest.java</include> <include>**/common/utils/CollectionUtilsTest.java</include> <include>**/common/utils/CommonUtilsTest.java</include> <include>**/common/utils/DateUtilsTest.java</include> <include>**/common/utils/DependentUtilsTest.java</include> <include>**/common/utils/EncryptionUtilsTest.java</include> <include>**/common/utils/FileUtilsTest.java</include> <include>**/common/utils/JSONUtilsTest.java</include> <include>**/common/utils/LoggerUtilsTest.java</include> <include>**/common/utils/NetUtilsTest.java</include> <include>**/common/utils/OSUtilsTest.java</include> <include>**/common/utils/ParameterUtilsTest.java</include> <include>**/common/utils/TimePlaceholderUtilsTest.java</include> <include>**/common/utils/PreconditionsTest.java</include> <include>**/common/utils/PropertyUtilsTest.java</include> <include>**/common/utils/SchemaUtilsTest.java</include> <include>**/common/utils/ScriptRunnerTest.java</include> <include>**/common/utils/SensitiveLogUtilsTest.java</include> <include>**/common/utils/StringTest.java</include> <include>**/common/utils/StringUtilsTest.java</include> <include>**/common/utils/TaskParametersUtilsTest.java</include>
<include>**/common/utils/VarPoolUtilsTest.java</include> <include>**/common/utils/HadoopUtilsTest.java</include> <include>**/common/utils/HttpUtilsTest.java</include> <include>**/common/utils/KerberosHttpClientTest.java</include> <include>**/common/utils/HiveConfUtilsTest.java</include> <include>**/common/ConstantsTest.java</include> <include>**/common/utils/HadoopUtils.java</include> <include>**/common/utils/RetryerUtilsTest.java</include> <include>**/common/plugin/DolphinSchedulerPluginLoaderTest.java</include> <include>**/common/enums/ExecutionStatusTest</include> <include>**/dao/mapper/AccessTokenMapperTest.java</include> <include>**/dao/mapper/AlertGroupMapperTest.java</include> <include>**/dao/mapper/CommandMapperTest.java</include> <include>**/dao/mapper/ConnectionFactoryTest.java</include> <include>**/dao/mapper/DataSourceMapperTest.java</include> <include>**/dao/datasource/MySQLDataSourceTest.java</include> <include>**/dao/entity/TaskInstanceTest.java</include> <include>**/dao/entity/UdfFuncTest.java</include> <include>**/remote/command/alert/AlertSendRequestCommandTest.java</include> <include>**/remote/command/alert/AlertSendResponseCommandTest.java</include> <include>**/remote/command/future/ResponseFutureTest.java</include> <include>**/remote/command/log/RemoveTaskLogRequestCommandTest.java</include> <include>**/remote/command/log/RemoveTaskLogResponseCommandTest.java</include> <include>**/remote/utils/HostTest.java</include> <include>**/remote/utils/NettyUtilTest.java</include> <include>**/remote/NettyRemotingClientTest.java</include> <include>**/rpc/RpcTest.java</include> <include>**/server/log/LoggerServerTest.java</include> <include>**/server/entity/SQLTaskExecutionContextTest.java</include> <include>**/server/log/MasterLogFilterTest.java</include> <include>**/server/log/SensitiveDataConverterTest.java</include> <include>**/server/log/LoggerRequestProcessorTest.java</include> <!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>--> <include>**/server/log/TaskLogFilterTest.java</include> <include>**/server/log/WorkerLogFilterTest.java</include> <include>**/server/master/config/MasterConfigTest.java</include> <include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include> <include>**/server/master/runner/MasterTaskExecThreadTest.java</include> <!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>--> <include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include> <include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/HostWorkerTest.java</include> <include>**/server/master/register/MasterRegistryTest.java</include> <include>**/server/master/registry/ServerNodeManagerTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include> <include>**/server/master/AlertManagerTest.java</include> <include>**/server/master/MasterCommandTest.java</include> <include>**/server/master/DependentTaskTest.java</include> <include>**/server/master/ConditionsTaskTest.java</include> <include>**/server/master/MasterExecThreadTest.java</include> <include>**/server/master/ParamsTest.java</include> <include>**/server/master/SubProcessTaskTest.java</include> <include>**/server/master/processor/TaskAckProcessorTest.java</include> <include>**/server/master/processor/TaskKillResponseProcessorTest.java</include> 
<include>**/server/master/processor/queue/TaskResponseServiceTest.java</include> <include>**/server/master/zk/ZKMasterClientTest.java</include> <include>**/server/register/ZookeeperRegistryCenterTest.java</include> <include>**/server/utils/DataxUtilsTest.java</include> <include>**/server/utils/ExecutionContextTestUtils.java</include> <include>**/server/utils/FlinkArgsUtilsTest.java</include> <include>**/server/utils/LogUtilsTest.java</include> <include>**/server/utils/MapReduceArgsUtilsTest.java</include> <include>**/server/utils/ParamUtilsTest.java</include> <include>**/server/utils/ProcessUtilsTest.java</include> <include>**/server/utils/SparkArgsUtilsTest.java</include> <include>**/server/worker/processor/TaskCallbackServiceTest.java</include> <include>**/server/worker/processor/TaskExecuteProcessorTest.java</include> <include>**/server/worker/registry/WorkerRegistryTest.java</include> <include>**/server/worker/shell/ShellCommandExecutorTest.java</include> <include>**/server/worker/sql/SqlExecutorTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/EnvFileTest.java</include> <include>**/server/worker/task/datax/DataxTaskTest.java</include> <!--<include>**/server/worker/task/http/HttpTaskTest.java</include>--> <include>**/server/worker/task/sqoop/SqoopTaskTest.java</include> <include>**/server/worker/task/shell/ShellTaskTest.java</include> <include>**/server/worker/task/TaskManagerTest.java</include> <include>**/server/worker/task/AbstractCommandExecutorTest.java</include> <include>**/server/worker/task/ShellTaskReturnTest.java</include> <include>**/server/worker/EnvFileTest.java</include> <include>**/server/worker/runner/TaskExecuteThreadTest.java</include> <include>**/server/worker/runner/WorkerManagerThreadTest.java</include> <include>**/service/quartz/cron/CronUtilsTest.java</include> <include>**/service/process/ProcessServiceTest.java</include> <include>**/service/zk/DefaultEnsembleProviderTest.java</include> <include>**/service/zk/ZKServerTest.java</include> <include>**/service/zk/CuratorZookeeperClientTest.java</include> <include>**/service/zk/RegisterOperatorTest.java</include> <include>**/service/queue/TaskUpdateQueueTest.java</include> <include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include> <include>**/service/log/LogClientServiceTest.java</include> <include>**/service/alert/AlertClientServiceTest.java</include> <include>**/dao/mapper/DataSourceUserMapperTest.java</include> <!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>--> <include>**/dao/mapper/ProcessDefinitionMapperTest.java</include> <include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapperTest.java</include> <include>**/dao/mapper/ProjectMapperTest.java</include> <include>**/dao/mapper/ProjectUserMapperTest.java</include> <include>**/dao/mapper/QueueMapperTest.java</include> <include>**/dao/mapper/ResourceUserMapperTest.java</include> <include>**/dao/mapper/ScheduleMapperTest.java</include> <include>**/dao/mapper/SessionMapperTest.java</include> <include>**/dao/mapper/TaskInstanceMapperTest.java</include> <include>**/dao/mapper/TenantMapperTest.java</include> <include>**/dao/mapper/UdfFuncMapperTest.java</include> <include>**/dao/mapper/UDFUserMapperTest.java</include>
<include>**/dao/mapper/UserMapperTest.java</include> <include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include> <include>**/dao/mapper/PluginDefineTest.java</include> <include>**/dao/utils/DagHelperTest.java</include> <include>**/dao/AlertDaoTest.java</include> <include>**/dao/datasource/OracleDataSourceTest.java</include> <include>**/dao/datasource/HiveDataSourceTest.java</include> <include>**/dao/datasource/BaseDataSourceTest.java</include> <include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include> <include>**/dao/upgrade/WokrerGrouopDaoTest.java</include> <include>**/dao/upgrade/UpgradeDaoTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelTest.java</include> <include>**/plugin/alert/email/ExcelUtilsTest.java</include> <include>**/plugin/alert/email/MailUtilsTest.java</include> <include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include> <include>**/plugin/alert/wechat/WeChatSenderTest.java</include> <include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ProcessUtilsTest.java</include> <include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ScriptSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelTest.java</include> <include>**/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java</include> <include>**/plugin/alert/feishu/FeiShuSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertPluginTest.java</include> <include>**/plugin/alert/http/HttpSenderTest.java</include> <include>**/spi/params/PluginParamsTransferTest.java</include> <include>**/alert/plugin/EmailAlertPluginTest.java</include> <include>**/alert/plugin/AlertPluginManagerTest.java</include> <include>**/alert/plugin/DolphinPluginLoaderTest.java</include> <include>**/alert/utils/DingTalkUtilsTest.java</include> <include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include> <include>**/alert/utils/FuncUtilsTest.java</include> <include>**/alert/processor/AlertRequestProcessorTest.java</include> <include>**/alert/runner/AlertSenderTest.java</include> <include>**/alert/AlertServerTest.java</include> </includes> <!-- <skip>true</skip> --> </configuration> </plugin> <!-- jenkins plugin jacoco report--> <plugin> <groupId>org.jacoco</groupId> <artifactId>jacoco-maven-plugin</artifactId> <version>${jacoco.version}</version> <configuration> <destFile>target/jacoco.exec</destFile> <dataFile>target/jacoco.exec</dataFile> </configuration> <executions> <execution> <id>jacoco-initialize</id> <goals> <goal>prepare-agent</goal> </goals> </execution> <execution> <id>jacoco-site</id> <phase>test</phase> <goals> <goal>report</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-maven-plugin</artifactId> <version>${spotbugs.version}</version> <configuration> <xmlOutput>true</xmlOutput> <threshold>medium</threshold> <effort>default</effort> <excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile> <failOnError>true</failOnError> </configuration> <dependencies> <dependency> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs</artifactId> <version>4.0.0-beta4</version> </dependency> 
</dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-checkstyle-plugin</artifactId> <version>${checkstyle.version}</version> <dependencies> <dependency> <groupId>com.puppycrawl.tools</groupId> <artifactId>checkstyle</artifactId> <version>8.18</version> </dependency> </dependencies> <configuration> <consoleOutput>true</consoleOutput> <encoding>UTF-8</encoding> <configLocation>style/checkstyle.xml</configLocation> <suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation> <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression> <failOnViolation>true</failOnViolation> <violationSeverity>warning</violationSeverity> <includeTestSourceDirectory>true</includeTestSourceDirectory> <sourceDirectories> <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory> </sourceDirectories> <excludes>**\/generated-sources\/</excludes> <skip>true</skip> </configuration> <executions> <execution> <phase>compile</phase> <goals> <goal>check</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${cobertura-maven-plugin.version}</version> <configuration> <check> </check> <aggregate>true</aggregate> <outputDirectory>./target/cobertura</outputDirectory> <encoding>${project.build.sourceEncoding}</encoding> <quiet>true</quiet> <format>xml</format> <instrumentation> <ignoreTrivial>true</ignoreTrivial> </instrumentation> </configuration> </plugin> </plugins> </build> <modules> <module>dolphinscheduler-alert-plugin</module> <module>dolphinscheduler-ui</module> <module>dolphinscheduler-server</module> <module>dolphinscheduler-common</module> <module>dolphinscheduler-api</module> <module>dolphinscheduler-dao</module> <module>dolphinscheduler-alert</module> <module>dolphinscheduler-dist</module> <module>dolphinscheduler-remote</module> <module>dolphinscheduler-service</module> <module>dolphinscheduler-spi</module> <module>dolphinscheduler-microbench</module> </modules> </project>
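One detail worth noting in the pom above: the surefire `<includes>` element is an explicit whitelist, so a newly added test class such as the `SqlTaskTest` in this fix only runs if it is registered there. Presumably that is why `pom.xml` appears among this record's updated files; following the existing pattern, the registration would be a line like `<include>**/server/worker/task/sql/SqlTaskTest.java</include>` inside that list (the exact path is inferred from the test's package and is not confirmed by this record).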
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,351
[Bug][K8s] Quartz cron task does not take effect
**To Reproduce** Steps to reproduce the behavior: 1. Deploy DolphinScheduler in K8s 2. Configure a Quartz cron task for a Process Definition 3. See the error **Expected behavior** Bug fixed **Screenshots** ![image](https://user-images.githubusercontent.com/4902714/115666960-d1217a80-a377-11eb-8d64-a7c3e643e949.png) ![image](https://user-images.githubusercontent.com/4902714/115666973-d41c6b00-a377-11eb-88a2-0f9f1310da49.png) ![image](https://user-images.githubusercontent.com/4902714/115666992-d979b580-a377-11eb-84ef-94898df057fc.png) ![image](https://user-images.githubusercontent.com/4902714/115667005-dd0d3c80-a377-11eb-8779-be755adbcb29.png) **Which version of Dolphin Scheduler:** -[1.3.5] -[dev]
https://github.com/apache/dolphinscheduler/issues/5351
https://github.com/apache/dolphinscheduler/pull/5353
abdd2337b144df149a2031c3b26862ae41b4e936
3a7006a79649df4f45397f125c2e9665f9659c0b
"2021-04-22T06:34:43Z"
java
"2021-04-22T11:07:19Z"
sql/dolphinscheduler_mysql.sql
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ SET FOREIGN_KEY_CHECKS=0; -- ---------------------------- -- Table structure for QRTZ_BLOB_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_BLOB_TRIGGERS`; CREATE TABLE `QRTZ_BLOB_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `BLOB_DATA` blob, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `SCHED_NAME` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_BLOB_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_BLOB_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_CALENDARS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_CALENDARS`; CREATE TABLE `QRTZ_CALENDARS` ( `SCHED_NAME` varchar(120) NOT NULL, `CALENDAR_NAME` varchar(200) NOT NULL, `CALENDAR` blob NOT NULL, PRIMARY KEY (`SCHED_NAME`,`CALENDAR_NAME`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_CALENDARS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_CRON_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_CRON_TRIGGERS`; CREATE TABLE `QRTZ_CRON_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `CRON_EXPRESSION` varchar(120) NOT NULL, `TIME_ZONE_ID` varchar(80) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_CRON_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_CRON_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_FIRED_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_FIRED_TRIGGERS`; CREATE TABLE `QRTZ_FIRED_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `ENTRY_ID` varchar(95) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `INSTANCE_NAME` varchar(200) NOT NULL, `FIRED_TIME` bigint(13) NOT NULL, `SCHED_TIME` bigint(13) NOT NULL, `PRIORITY` int(11) NOT NULL, `STATE` varchar(16) NOT NULL, `JOB_NAME` varchar(200) DEFAULT NULL, `JOB_GROUP` varchar(200) DEFAULT NULL, `IS_NONCONCURRENT` varchar(1) DEFAULT NULL, `REQUESTS_RECOVERY` varchar(1) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`ENTRY_ID`), KEY `IDX_QRTZ_FT_TRIG_INST_NAME` (`SCHED_NAME`,`INSTANCE_NAME`), KEY 
`IDX_QRTZ_FT_INST_JOB_REQ_RCVRY` (`SCHED_NAME`,`INSTANCE_NAME`,`REQUESTS_RECOVERY`), KEY `IDX_QRTZ_FT_J_G` (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_FT_JG` (`SCHED_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_FT_T_G` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `IDX_QRTZ_FT_TG` (`SCHED_NAME`,`TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_FIRED_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_JOB_DETAILS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_JOB_DETAILS`; CREATE TABLE `QRTZ_JOB_DETAILS` ( `SCHED_NAME` varchar(120) NOT NULL, `JOB_NAME` varchar(200) NOT NULL, `JOB_GROUP` varchar(200) NOT NULL, `DESCRIPTION` varchar(250) DEFAULT NULL, `JOB_CLASS_NAME` varchar(250) NOT NULL, `IS_DURABLE` varchar(1) NOT NULL, `IS_NONCONCURRENT` varchar(1) NOT NULL, `IS_UPDATE_DATA` varchar(1) NOT NULL, `REQUESTS_RECOVERY` varchar(1) NOT NULL, `JOB_DATA` blob, PRIMARY KEY (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_J_REQ_RECOVERY` (`SCHED_NAME`,`REQUESTS_RECOVERY`), KEY `IDX_QRTZ_J_GRP` (`SCHED_NAME`,`JOB_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_JOB_DETAILS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_LOCKS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_LOCKS`; CREATE TABLE `QRTZ_LOCKS` ( `SCHED_NAME` varchar(120) NOT NULL, `LOCK_NAME` varchar(40) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`LOCK_NAME`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_LOCKS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_PAUSED_TRIGGER_GRPS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_PAUSED_TRIGGER_GRPS`; CREATE TABLE `QRTZ_PAUSED_TRIGGER_GRPS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_PAUSED_TRIGGER_GRPS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_SCHEDULER_STATE -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_SCHEDULER_STATE`; CREATE TABLE `QRTZ_SCHEDULER_STATE` ( `SCHED_NAME` varchar(120) NOT NULL, `INSTANCE_NAME` varchar(200) NOT NULL, `LAST_CHECKIN_TIME` bigint(13) NOT NULL, `CHECKIN_INTERVAL` bigint(13) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`INSTANCE_NAME`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SCHEDULER_STATE -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_SIMPLE_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_SIMPLE_TRIGGERS`; CREATE TABLE `QRTZ_SIMPLE_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `REPEAT_COUNT` bigint(7) NOT NULL, `REPEAT_INTERVAL` bigint(12) NOT NULL, `TIMES_TRIGGERED` bigint(10) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_SIMPLE_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SIMPLE_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_SIMPROP_TRIGGERS -- 
---------------------------- DROP TABLE IF EXISTS `QRTZ_SIMPROP_TRIGGERS`; CREATE TABLE `QRTZ_SIMPROP_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `STR_PROP_1` varchar(512) DEFAULT NULL, `STR_PROP_2` varchar(512) DEFAULT NULL, `STR_PROP_3` varchar(512) DEFAULT NULL, `INT_PROP_1` int(11) DEFAULT NULL, `INT_PROP_2` int(11) DEFAULT NULL, `LONG_PROP_1` bigint(20) DEFAULT NULL, `LONG_PROP_2` bigint(20) DEFAULT NULL, `DEC_PROP_1` decimal(13,4) DEFAULT NULL, `DEC_PROP_2` decimal(13,4) DEFAULT NULL, `BOOL_PROP_1` varchar(1) DEFAULT NULL, `BOOL_PROP_2` varchar(1) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_SIMPROP_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SIMPROP_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_TRIGGERS`; CREATE TABLE `QRTZ_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `JOB_NAME` varchar(200) NOT NULL, `JOB_GROUP` varchar(200) NOT NULL, `DESCRIPTION` varchar(250) DEFAULT NULL, `NEXT_FIRE_TIME` bigint(13) DEFAULT NULL, `PREV_FIRE_TIME` bigint(13) DEFAULT NULL, `PRIORITY` int(11) DEFAULT NULL, `TRIGGER_STATE` varchar(16) NOT NULL, `TRIGGER_TYPE` varchar(8) NOT NULL, `START_TIME` bigint(13) NOT NULL, `END_TIME` bigint(13) DEFAULT NULL, `CALENDAR_NAME` varchar(200) DEFAULT NULL, `MISFIRE_INSTR` smallint(2) DEFAULT NULL, `JOB_DATA` blob, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `IDX_QRTZ_T_J` (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_T_JG` (`SCHED_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_T_C` (`SCHED_NAME`,`CALENDAR_NAME`), KEY `IDX_QRTZ_T_G` (`SCHED_NAME`,`TRIGGER_GROUP`), KEY `IDX_QRTZ_T_STATE` (`SCHED_NAME`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_N_STATE` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_N_G_STATE` (`SCHED_NAME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_NEXT_FIRE_TIME` (`SCHED_NAME`,`NEXT_FIRE_TIME`), KEY `IDX_QRTZ_T_NFT_ST` (`SCHED_NAME`,`TRIGGER_STATE`,`NEXT_FIRE_TIME`), KEY `IDX_QRTZ_T_NFT_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`), KEY `IDX_QRTZ_T_NFT_ST_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_NFT_ST_MISFIRE_GRP` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), CONSTRAINT `QRTZ_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) REFERENCES `QRTZ_JOB_DETAILS` (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_access_token -- ---------------------------- DROP TABLE IF EXISTS `t_ds_access_token`; CREATE TABLE `t_ds_access_token` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) DEFAULT NULL COMMENT 'user id', `token` varchar(64) DEFAULT NULL COMMENT 'token', `expire_time` datetime DEFAULT NULL COMMENT 'end time of token ', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB 
AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_access_token -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_alert -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alert`; CREATE TABLE `t_ds_alert` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `title` varchar(64) DEFAULT NULL COMMENT 'title', `content` text COMMENT 'Message content (can be email, can be SMS. Mail is stored in JSON map, and SMS is string)', `alert_status` tinyint(4) DEFAULT '0' COMMENT '0:wait running,1:success,2:failed', `log` text COMMENT 'log', `alertgroup_id` int(11) DEFAULT NULL COMMENT 'alert group id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_alert -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_alertgroup -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alertgroup`; CREATE TABLE `t_ds_alertgroup`( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `alert_instance_ids` varchar (255) DEFAULT NULL COMMENT 'alert instance ids', `create_user_id` int(11) DEFAULT NULL COMMENT 'create user id', `group_name` varchar(255) DEFAULT NULL COMMENT 'group name', `description` varchar(255) DEFAULT NULL, `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_alertgroup -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_command -- ---------------------------- DROP TABLE IF EXISTS `t_ds_command`; CREATE TABLE `t_ds_command` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `command_type` tinyint(4) DEFAULT NULL COMMENT 'Command type: 0 start workflow, 1 start execution from current node, 2 resume fault-tolerant workflow, 3 resume pause process, 4 start execution from failed node, 5 complement, 6 schedule, 7 rerun, 8 pause, 9 stop, 10 resume waiting thread', `process_definition_id` int(11) DEFAULT NULL COMMENT 'process definition id', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'Node dependency type: 0 current node, 1 forward, 2 backward', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'Failed policy: 0 end, 1 continue', `warning_type` tinyint(4) DEFAULT '0' COMMENT 'Alarm type: 0 is not sent, 1 process is sent successfully, 2 process is sent failed, 3 process is sent successfully and all failures are sent', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group', `schedule_time` datetime DEFAULT NULL COMMENT 'schedule time', `start_time` datetime DEFAULT NULL COMMENT 'start time', `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', `update_time` datetime DEFAULT NULL COMMENT 'update time', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority: 0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) COMMENT 'worker group', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_command -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_datasource -- ---------------------------- DROP TABLE IF EXISTS `t_ds_datasource`; CREATE 
TABLE `t_ds_datasource` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(64) NOT NULL COMMENT 'data source name', `note` varchar(256) DEFAULT NULL COMMENT 'description', `type` tinyint(4) NOT NULL COMMENT 'data source type: 0:mysql,1:postgresql,2:hive,3:spark', `user_id` int(11) NOT NULL COMMENT 'the creator id', `connection_params` text NOT NULL COMMENT 'json connection params', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_datasource -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_error_command -- ---------------------------- DROP TABLE IF EXISTS `t_ds_error_command`; CREATE TABLE `t_ds_error_command` ( `id` int(11) NOT NULL COMMENT 'key', `command_type` tinyint(4) DEFAULT NULL COMMENT 'command type', `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', `process_definition_id` int(11) DEFAULT NULL COMMENT 'process definition id', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'task depend type', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'failure strategy', `warning_type` tinyint(4) DEFAULT '0' COMMENT 'warning type', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group id', `schedule_time` datetime DEFAULT NULL COMMENT 'scheduler time', `start_time` datetime DEFAULT NULL COMMENT 'start time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority, 0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) COMMENT 'worker group', `message` text COMMENT 'message', PRIMARY KEY (`id`) USING BTREE ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC; -- ---------------------------- -- Records of t_ds_error_command -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_process_definition -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_definition`; CREATE TABLE `t_ds_process_definition` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(255) DEFAULT NULL COMMENT 'process definition name', `version` int(11) DEFAULT NULL COMMENT 'process definition version', `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', `project_id` int(11) DEFAULT NULL COMMENT 'project id', `user_id` int(11) DEFAULT NULL COMMENT 'process definition creator id', `process_definition_json` longtext COMMENT 'process definition json content', `description` text, `global_params` text COMMENT 'global parameters', `flag` tinyint(4) DEFAULT NULL COMMENT '0 not available, 1 available', `locations` text COMMENT 'Node location information', `connects` text COMMENT 'Node connection information', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `timeout` int(11) DEFAULT '0' COMMENT 'time out', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', `update_time` datetime DEFAULT NULL COMMENT 'update time', `modify_by` varchar(255) DEFAULT NULL, `resource_ids` varchar(255) DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE KEY `process_definition_unique` (`name`,`project_id`), KEY `process_definition_index` (`project_id`,`id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- 
---------------------------- -- Records of t_ds_process_definition -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_process_definition_version -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_definition_version`; CREATE TABLE `t_ds_process_definition_version` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `process_definition_id` int(11) NOT NULL COMMENT 'process definition id', `version` int(11) DEFAULT NULL COMMENT 'process definition version', `process_definition_json` longtext COMMENT 'process definition json content', `description` text, `global_params` text COMMENT 'global parameters', `locations` text COMMENT 'Node location information', `connects` text COMMENT 'Node connection information', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `timeout` int(11) DEFAULT '0' COMMENT 'time out', `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource ids', PRIMARY KEY (`id`), UNIQUE KEY `process_definition_id_and_version` (`process_definition_id`,`version`) USING BTREE, KEY `process_definition_index` (`id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=84 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_process_definition_version -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_process_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_instance`; CREATE TABLE `t_ds_process_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(255) DEFAULT NULL COMMENT 'process instance name', `process_definition_id` int(11) DEFAULT NULL COMMENT 'process definition id', `state` tinyint(4) DEFAULT NULL COMMENT 'process instance Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete', `recovery` tinyint(4) DEFAULT NULL COMMENT 'process instance failover flag:0:normal,1:failover instance', `start_time` datetime DEFAULT NULL COMMENT 'process instance start time', `end_time` datetime DEFAULT NULL COMMENT 'process instance end time', `run_times` int(11) DEFAULT NULL COMMENT 'process instance run times', `host` varchar(135) DEFAULT NULL COMMENT 'process instance host', `command_type` tinyint(4) DEFAULT NULL COMMENT 'command type', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'task depend type. 0: only current node,1:before the node,2:later nodes', `max_try_times` tinyint(4) DEFAULT '0' COMMENT 'max try times', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'failure strategy. 0:end the process when node failed,1:continue running the other nodes when node failed', `warning_type` tinyint(4) DEFAULT '0' COMMENT 'warning type. 
0:no warning,1:warning if process success,2:warning if process failed,3:warning if success', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group id', `schedule_time` datetime DEFAULT NULL COMMENT 'schedule time', `command_start_time` datetime DEFAULT NULL COMMENT 'command start time', `global_params` text COMMENT 'global parameters', `process_instance_json` longtext COMMENT 'process instance json (a copy of the process definition json)', `flag` tinyint(4) DEFAULT '1' COMMENT 'flag', `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `is_sub_process` int(11) DEFAULT '0' COMMENT 'flag, whether the process is sub process', `executor_id` int(11) NOT NULL COMMENT 'executor id', `locations` text COMMENT 'Node location information', `connects` text COMMENT 'Node connection information', `history_cmd` text COMMENT 'history commands of process instance operation', `dependence_schedule_times` text COMMENT 'depend schedule fire time', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority. 0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) DEFAULT NULL COMMENT 'worker group id', `timeout` int(11) DEFAULT '0' COMMENT 'time out', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', `var_pool` longtext COMMENT 'var_pool', PRIMARY KEY (`id`), KEY `process_instance_index` (`process_definition_id`,`id`) USING BTREE, KEY `start_time_index` (`start_time`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_process_instance -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_project -- ---------------------------- DROP TABLE IF EXISTS `t_ds_project`; CREATE TABLE `t_ds_project` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(100) DEFAULT NULL COMMENT 'project name', `description` varchar(200) DEFAULT NULL, `user_id` int(11) DEFAULT NULL COMMENT 'creator id', `flag` tinyint(4) DEFAULT '1' COMMENT '0 not available, 1 available', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), KEY `user_id_index` (`user_id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_project -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_queue -- ---------------------------- DROP TABLE IF EXISTS `t_ds_queue`; CREATE TABLE `t_ds_queue` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `queue_name` varchar(64) DEFAULT NULL COMMENT 'queue name', `queue` varchar(64) DEFAULT NULL COMMENT 'yarn queue name', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_queue -- ---------------------------- INSERT INTO `t_ds_queue` VALUES ('1', 'default', 'default', null, null); -- ---------------------------- -- Table structure for t_ds_relation_datasource_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_datasource_user`; CREATE TABLE `t_ds_relation_datasource_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'user id', `datasource_id` int(11) DEFAULT NULL COMMENT 'data source id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 
'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_datasource_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_process_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_process_instance`; CREATE TABLE `t_ds_relation_process_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `parent_process_instance_id` int(11) DEFAULT NULL COMMENT 'parent process instance id', `parent_task_instance_id` int(11) DEFAULT NULL COMMENT 'parent task instance id', `process_instance_id` int(11) DEFAULT NULL COMMENT 'child process instance id', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_process_instance -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_project_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_project_user`; CREATE TABLE `t_ds_relation_project_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'user id', `project_id` int(11) DEFAULT NULL COMMENT 'project id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), KEY `user_id_index` (`user_id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_project_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_resources_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_resources_user`; CREATE TABLE `t_ds_relation_resources_user` ( `id` int(11) NOT NULL AUTO_INCREMENT, `user_id` int(11) NOT NULL COMMENT 'user id', `resources_id` int(11) DEFAULT NULL COMMENT 'resource id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_resources_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_udfs_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_udfs_user`; CREATE TABLE `t_ds_relation_udfs_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'user id', `udf_id` int(11) DEFAULT NULL COMMENT 'udf id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_resources -- ---------------------------- DROP TABLE IF EXISTS `t_ds_resources`; CREATE TABLE `t_ds_resources` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `alias` varchar(64) DEFAULT NULL COMMENT 'alias', `file_name` varchar(64) DEFAULT NULL COMMENT 'file name', `description` varchar(256) DEFAULT NULL, `user_id` int(11) DEFAULT NULL COMMENT 'user id', `type` tinyint(4) DEFAULT NULL COMMENT 'resource type,0:FILE,1:UDF', `size` bigint(20) 
DEFAULT NULL COMMENT 'resource size', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `pid` int(11) DEFAULT NULL, `full_name` varchar(64) DEFAULT NULL, `is_directory` tinyint(4) DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE KEY `t_ds_resources_un` (`full_name`,`type`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_resources -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_schedules -- ---------------------------- DROP TABLE IF EXISTS `t_ds_schedules`; CREATE TABLE `t_ds_schedules` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `process_definition_id` int(11) NOT NULL COMMENT 'process definition id', `start_time` datetime NOT NULL COMMENT 'start time', `end_time` datetime NOT NULL COMMENT 'end time', `crontab` varchar(256) NOT NULL COMMENT 'crontab description', `failure_strategy` tinyint(4) NOT NULL COMMENT 'failure strategy. 0:end,1:continue', `user_id` int(11) NOT NULL COMMENT 'user id', `release_state` tinyint(4) NOT NULL COMMENT 'release state. 0:offline,1:online ', `warning_type` tinyint(4) NOT NULL COMMENT 'Alarm type: 0 is not sent, 1 process is sent successfully, 2 process is sent failed, 3 process is sent successfully and all failures are sent', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(256) DEFAULT '' COMMENT 'worker group id', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_schedules -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_session -- ---------------------------- DROP TABLE IF EXISTS `t_ds_session`; CREATE TABLE `t_ds_session` ( `id` varchar(64) NOT NULL COMMENT 'key', `user_id` int(11) DEFAULT NULL COMMENT 'user id', `ip` varchar(45) DEFAULT NULL COMMENT 'ip', `last_login_time` datetime DEFAULT NULL COMMENT 'last login time', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_session -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_task_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_task_instance`; CREATE TABLE `t_ds_task_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(255) DEFAULT NULL COMMENT 'task name', `task_type` varchar(64) DEFAULT NULL COMMENT 'task type', `process_definition_id` int(11) DEFAULT NULL COMMENT 'process definition id', `process_instance_id` int(11) DEFAULT NULL COMMENT 'process instance id', `task_json` longtext COMMENT 'task content json', `state` tinyint(4) DEFAULT NULL COMMENT 'Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete', `submit_time` datetime DEFAULT NULL COMMENT 'task submit time', `start_time` datetime DEFAULT NULL COMMENT 'task start time', `end_time` datetime DEFAULT NULL COMMENT 'task end time', `host` varchar(135) DEFAULT NULL COMMENT 'host of task running on', `execute_path` varchar(200) DEFAULT NULL COMMENT 'task execute path in the host', `log_path` varchar(200) 
DEFAULT NULL COMMENT 'task log path', `alert_flag` tinyint(4) DEFAULT NULL COMMENT 'whether alert', `retry_times` int(4) DEFAULT '0' COMMENT 'task retry times', `pid` int(4) DEFAULT NULL COMMENT 'pid of task', `app_link` text COMMENT 'yarn app id', `flag` tinyint(4) DEFAULT '1' COMMENT '0 not available, 1 available', `retry_interval` int(4) DEFAULT NULL COMMENT 'retry interval when the task fails', `max_retry_times` int(2) DEFAULT NULL COMMENT 'max retry times', `task_instance_priority` int(11) DEFAULT NULL COMMENT 'task instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) DEFAULT NULL COMMENT 'worker group id', `executor_id` int(11) DEFAULT NULL, `first_submit_time` datetime DEFAULT NULL COMMENT 'task first submit time', `delay_time` int(4) DEFAULT '0' COMMENT 'task delay execution time', `var_pool` longtext COMMENT 'var_pool', PRIMARY KEY (`id`), KEY `process_instance_id` (`process_instance_id`) USING BTREE, KEY `task_instance_index` (`process_definition_id`,`process_instance_id`) USING BTREE, CONSTRAINT `foreign_key_instance_id` FOREIGN KEY (`process_instance_id`) REFERENCES `t_ds_process_instance` (`id`) ON DELETE CASCADE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_task_instance -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_tenant -- ---------------------------- DROP TABLE IF EXISTS `t_ds_tenant`; CREATE TABLE `t_ds_tenant` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `tenant_code` varchar(64) DEFAULT NULL COMMENT 'tenant code', `description` varchar(256) DEFAULT NULL, `queue_id` int(11) DEFAULT NULL COMMENT 'queue id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_tenant -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_udfs -- ---------------------------- DROP TABLE IF EXISTS `t_ds_udfs`; CREATE TABLE `t_ds_udfs` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'user id', `func_name` varchar(100) NOT NULL COMMENT 'UDF function name', `class_name` varchar(255) NOT NULL COMMENT 'class of udf', `type` tinyint(4) NOT NULL COMMENT 'Udf function type', `arg_types` varchar(255) DEFAULT NULL COMMENT 'arguments types', `database` varchar(255) DEFAULT NULL COMMENT 'database', `description` varchar(255) DEFAULT NULL, `resource_id` int(11) NOT NULL COMMENT 'resource id', `resource_name` varchar(255) NOT NULL COMMENT 'resource name', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_udfs -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_user`; CREATE TABLE `t_ds_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'user id', `user_name` varchar(64) DEFAULT NULL COMMENT 'user name', `user_password` varchar(64) DEFAULT NULL COMMENT 'user password', `user_type` tinyint(4) DEFAULT NULL COMMENT 'user type, 0:administrator,1:ordinary user', `email` varchar(64) DEFAULT NULL COMMENT 'email', `phone` varchar(11) DEFAULT NULL COMMENT 'phone', `tenant_id` int(11) DEFAULT NULL COMMENT 'tenant id', 
`create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `queue` varchar(64) DEFAULT NULL COMMENT 'queue', `state` int(1) DEFAULT 1 COMMENT 'state 0:disable 1:enable', PRIMARY KEY (`id`), UNIQUE KEY `user_name_unique` (`user_name`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_worker_group -- ---------------------------- DROP TABLE IF EXISTS `t_ds_worker_group`; CREATE TABLE `t_ds_worker_group` ( `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'id', `name` varchar(256) NOT NULL COMMENT 'worker group name', `addr_list` text NULL DEFAULT NULL COMMENT 'worker addr list. split by [,]', `create_time` datetime NULL DEFAULT NULL COMMENT 'create time', `update_time` datetime NULL DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), UNIQUE KEY `name_unique` (`name`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_worker_group -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_version -- ---------------------------- DROP TABLE IF EXISTS `t_ds_version`; CREATE TABLE `t_ds_version` ( `id` int(11) NOT NULL AUTO_INCREMENT, `version` varchar(200) NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `version_UNIQUE` (`version`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COMMENT='version'; -- ---------------------------- -- Records of t_ds_version -- ---------------------------- INSERT INTO `t_ds_version` VALUES ('1', '1.4.0'); -- ---------------------------- -- Records of t_ds_alertgroup -- ---------------------------- INSERT INTO `t_ds_alertgroup`(alert_instance_ids, create_user_id, group_name, description, create_time, update_time) VALUES ("1,2", 1, 'default admin warning group', 'default admin warning group', '2018-11-29 10:20:39', '2018-11-29 10:20:39'); -- ---------------------------- -- Records of t_ds_user -- ---------------------------- INSERT INTO `t_ds_user` VALUES ('1', 'admin', '7ad2410b2f4c074479a8937a28a22b8f', '0', 'xxx@qq.com', '', '0', '2018-03-27 15:48:50', '2018-10-24 17:40:22', null, 1); -- ---------------------------- -- Table structure for t_ds_plugin_define -- ---------------------------- SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); DROP TABLE IF EXISTS `t_ds_plugin_define`; CREATE TABLE `t_ds_plugin_define` ( `id` int NOT NULL AUTO_INCREMENT, `plugin_name` varchar(100) NOT NULL COMMENT 'the name of plugin eg: email', `plugin_type` varchar(100) NOT NULL COMMENT 'plugin type . alert=alert plugin, job=job plugin', `plugin_params` text COMMENT 'plugin params', `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `t_ds_plugin_define_UN` (`plugin_name`,`plugin_type`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_alert_plugin_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alert_plugin_instance`; CREATE TABLE `t_ds_alert_plugin_instance` ( `id` int NOT NULL AUTO_INCREMENT, `plugin_define_id` int NOT NULL, `plugin_instance_params` text COMMENT 'plugin instance params. 
Also contains the parameter values entered by the user in the web UI.', `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `instance_name` varchar(200) DEFAULT NULL COMMENT 'alert instance name', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
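The uppercase `QRTZ_*` tables at the top of this script are Quartz's JDBC job-store tables; the cron schedules described in the issue above are persisted into them rather than kept in memory. For orientation, a minimal, self-contained sketch of a Quartz scheduler backed by a MySQL job store — the connection details, job, and trigger names are invented for illustration, and this is not DolphinScheduler's actual scheduler wiring:

```java
import java.util.Properties;

import org.quartz.CronScheduleBuilder;
import org.quartz.Job;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.Scheduler;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;

public class QuartzJdbcStoreSketch {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("org.quartz.scheduler.instanceName", "DemoScheduler");
        props.setProperty("org.quartz.threadPool.threadCount", "3");
        // JobStoreTX persists jobs and triggers into the QRTZ_* tables created above.
        props.setProperty("org.quartz.jobStore.class", "org.quartz.impl.jdbcjobstore.JobStoreTX");
        props.setProperty("org.quartz.jobStore.driverDelegateClass", "org.quartz.impl.jdbcjobstore.StdJDBCDelegate");
        // Must match the DDL exactly: on Linux, MySQL compares table names
        // case-sensitively by default (lower_case_table_names=0).
        props.setProperty("org.quartz.jobStore.tablePrefix", "QRTZ_");
        props.setProperty("org.quartz.jobStore.dataSource", "demoDS");
        // Hypothetical connection settings:
        props.setProperty("org.quartz.dataSource.demoDS.driver", "com.mysql.cj.jdbc.Driver");
        props.setProperty("org.quartz.dataSource.demoDS.URL", "jdbc:mysql://localhost:3306/dolphinscheduler");
        props.setProperty("org.quartz.dataSource.demoDS.user", "root");
        props.setProperty("org.quartz.dataSource.demoDS.password", "secret");

        Scheduler scheduler = new StdSchedulerFactory(props).getScheduler();

        JobDetail job = JobBuilder.newJob(NoOpJob.class)
                .withIdentity("demo-job", "demo-group")
                .build();
        // Quartz cron syntax: second minute hour day-of-month month day-of-week.
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("demo-trigger", "demo-group")
                .withSchedule(CronScheduleBuilder.cronSchedule("0 0/5 * * * ?"))
                .build();

        // Writes rows into QRTZ_JOB_DETAILS, QRTZ_TRIGGERS and QRTZ_CRON_TRIGGERS.
        scheduler.scheduleJob(job, trigger);
        scheduler.start();
    }

    public static class NoOpJob implements Job {
        @Override
        public void execute(JobExecutionContext context) {
            // Intentionally empty; stands in for a real scheduled action.
        }
    }
}
```

If the scheduler process cannot read or write these tables (wrong table-name case, missing grants, an unreachable database inside the cluster), triggers never fire even though the schedule appears saved — one plausible shape for the symptom reported above, though the linked PR is the authoritative fix.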
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,351
[Bug][K8s] Quartz cron task cannot take effect
**To Reproduce** Steps to reproduce the behavior, for example: 1. Deploy DolphinScheduler in K8s 2. Configure a Quartz cron task for a Process Definition 3. See error **Expected behavior** Bug fixed **Screenshots** If applicable, add screenshots to help explain your problem. ![image](https://user-images.githubusercontent.com/4902714/115666960-d1217a80-a377-11eb-8d64-a7c3e643e949.png) ![image](https://user-images.githubusercontent.com/4902714/115666973-d41c6b00-a377-11eb-88a2-0f9f1310da49.png) ![image](https://user-images.githubusercontent.com/4902714/115666992-d979b580-a377-11eb-84ef-94898df057fc.png) ![image](https://user-images.githubusercontent.com/4902714/115667005-dd0d3c80-a377-11eb-8779-be755adbcb29.png) **Which version of Dolphin Scheduler:** -[1.3.5] -[dev] **Additional context** Add any other context about the problem here. **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5351
https://github.com/apache/dolphinscheduler/pull/5353
abdd2337b144df149a2031c3b26862ae41b4e936
3a7006a79649df4f45397f125c2e9665f9659c0b
"2021-04-22T06:34:43Z"
java
"2021-04-22T11:07:19Z"
sql/dolphinscheduler_postgre.sql
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS; DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS; DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE; DROP TABLE IF EXISTS QRTZ_LOCKS; DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS; DROP TABLE IF EXISTS QRTZ_SIMPROP_TRIGGERS; DROP TABLE IF EXISTS QRTZ_CRON_TRIGGERS; DROP TABLE IF EXISTS QRTZ_BLOB_TRIGGERS; DROP TABLE IF EXISTS QRTZ_TRIGGERS; DROP TABLE IF EXISTS QRTZ_JOB_DETAILS; DROP TABLE IF EXISTS QRTZ_CALENDARS; CREATE TABLE QRTZ_JOB_DETAILS( SCHED_NAME character varying(120) NOT NULL, JOB_NAME character varying(200) NOT NULL, JOB_GROUP character varying(200) NOT NULL, DESCRIPTION character varying(250) NULL, JOB_CLASS_NAME character varying(250) NOT NULL, IS_DURABLE boolean NOT NULL, IS_NONCONCURRENT boolean NOT NULL, IS_UPDATE_DATA boolean NOT NULL, REQUESTS_RECOVERY boolean NOT NULL, JOB_DATA bytea NULL); alter table QRTZ_JOB_DETAILS add primary key(SCHED_NAME,JOB_NAME,JOB_GROUP); CREATE TABLE QRTZ_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, JOB_NAME character varying(200) NOT NULL, JOB_GROUP character varying(200) NOT NULL, DESCRIPTION character varying(250) NULL, NEXT_FIRE_TIME BIGINT NULL, PREV_FIRE_TIME BIGINT NULL, PRIORITY INTEGER NULL, TRIGGER_STATE character varying(16) NOT NULL, TRIGGER_TYPE character varying(8) NOT NULL, START_TIME BIGINT NOT NULL, END_TIME BIGINT NULL, CALENDAR_NAME character varying(200) NULL, MISFIRE_INSTR SMALLINT NULL, JOB_DATA bytea NULL) ; alter table QRTZ_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_SIMPLE_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, REPEAT_COUNT BIGINT NOT NULL, REPEAT_INTERVAL BIGINT NOT NULL, TIMES_TRIGGERED BIGINT NOT NULL) ; alter table QRTZ_SIMPLE_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_CRON_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, CRON_EXPRESSION character varying(120) NOT NULL, TIME_ZONE_ID character varying(80)) ; alter table QRTZ_CRON_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_SIMPROP_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, STR_PROP_1 character varying(512) NULL, STR_PROP_2 character varying(512) NULL, STR_PROP_3 character varying(512) NULL, INT_PROP_1 INT NULL, INT_PROP_2 INT NULL, LONG_PROP_1 BIGINT NULL, LONG_PROP_2 BIGINT NULL, DEC_PROP_1 NUMERIC(13,4) NULL, DEC_PROP_2 NUMERIC(13,4) NULL, 
BOOL_PROP_1 boolean NULL, BOOL_PROP_2 boolean NULL) ; alter table QRTZ_SIMPROP_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_BLOB_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, BLOB_DATA bytea NULL) ; alter table QRTZ_BLOB_TRIGGERS add primary key(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_CALENDARS ( SCHED_NAME character varying(120) NOT NULL, CALENDAR_NAME character varying(200) NOT NULL, CALENDAR bytea NOT NULL) ; alter table QRTZ_CALENDARS add primary key(SCHED_NAME,CALENDAR_NAME); CREATE TABLE QRTZ_PAUSED_TRIGGER_GRPS ( SCHED_NAME character varying(120) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL) ; alter table QRTZ_PAUSED_TRIGGER_GRPS add primary key(SCHED_NAME,TRIGGER_GROUP); CREATE TABLE QRTZ_FIRED_TRIGGERS ( SCHED_NAME character varying(120) NOT NULL, ENTRY_ID character varying(95) NOT NULL, TRIGGER_NAME character varying(200) NOT NULL, TRIGGER_GROUP character varying(200) NOT NULL, INSTANCE_NAME character varying(200) NOT NULL, FIRED_TIME BIGINT NOT NULL, SCHED_TIME BIGINT NOT NULL, PRIORITY INTEGER NOT NULL, STATE character varying(16) NOT NULL, JOB_NAME character varying(200) NULL, JOB_GROUP character varying(200) NULL, IS_NONCONCURRENT boolean NULL, REQUESTS_RECOVERY boolean NULL) ; alter table QRTZ_FIRED_TRIGGERS add primary key(SCHED_NAME,ENTRY_ID); CREATE TABLE QRTZ_SCHEDULER_STATE ( SCHED_NAME character varying(120) NOT NULL, INSTANCE_NAME character varying(200) NOT NULL, LAST_CHECKIN_TIME BIGINT NOT NULL, CHECKIN_INTERVAL BIGINT NOT NULL) ; alter table QRTZ_SCHEDULER_STATE add primary key(SCHED_NAME,INSTANCE_NAME); CREATE TABLE QRTZ_LOCKS ( SCHED_NAME character varying(120) NOT NULL, LOCK_NAME character varying(40) NOT NULL) ; alter table QRTZ_LOCKS add primary key(SCHED_NAME,LOCK_NAME); CREATE INDEX IDX_QRTZ_J_REQ_RECOVERY ON QRTZ_JOB_DETAILS(SCHED_NAME,REQUESTS_RECOVERY); CREATE INDEX IDX_QRTZ_J_GRP ON QRTZ_JOB_DETAILS(SCHED_NAME,JOB_GROUP); CREATE INDEX IDX_QRTZ_T_J ON QRTZ_TRIGGERS(SCHED_NAME,JOB_NAME,JOB_GROUP); CREATE INDEX IDX_QRTZ_T_JG ON QRTZ_TRIGGERS(SCHED_NAME,JOB_GROUP); CREATE INDEX IDX_QRTZ_T_C ON QRTZ_TRIGGERS(SCHED_NAME,CALENDAR_NAME); CREATE INDEX IDX_QRTZ_T_G ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_GROUP); CREATE INDEX IDX_QRTZ_T_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_STATE); CREATE INDEX IDX_QRTZ_T_N_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP,TRIGGER_STATE); CREATE INDEX IDX_QRTZ_T_N_G_STATE ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_GROUP,TRIGGER_STATE); CREATE INDEX IDX_QRTZ_T_NEXT_FIRE_TIME ON QRTZ_TRIGGERS(SCHED_NAME,NEXT_FIRE_TIME); CREATE INDEX IDX_QRTZ_T_NFT_ST ON QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_STATE,NEXT_FIRE_TIME); CREATE INDEX IDX_QRTZ_T_NFT_MISFIRE ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME); CREATE INDEX IDX_QRTZ_T_NFT_ST_MISFIRE ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_STATE); CREATE INDEX IDX_QRTZ_T_NFT_ST_MISFIRE_GRP ON QRTZ_TRIGGERS(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_GROUP,TRIGGER_STATE); CREATE INDEX IDX_QRTZ_FT_TRIG_INST_NAME ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,INSTANCE_NAME); CREATE INDEX IDX_QRTZ_FT_INST_JOB_REQ_RCVRY ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,INSTANCE_NAME,REQUESTS_RECOVERY); CREATE INDEX IDX_QRTZ_FT_J_G ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,JOB_NAME,JOB_GROUP); CREATE INDEX IDX_QRTZ_FT_JG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,JOB_GROUP); CREATE INDEX IDX_QRTZ_FT_T_G ON 
QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP); CREATE INDEX IDX_QRTZ_FT_TG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_GROUP); -- -- Table structure for table t_ds_access_token -- DROP TABLE IF EXISTS t_ds_access_token; CREATE TABLE t_ds_access_token ( id int NOT NULL , user_id int DEFAULT NULL , token varchar(64) DEFAULT NULL , expire_time timestamp DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_alert -- DROP TABLE IF EXISTS t_ds_alert; CREATE TABLE t_ds_alert ( id int NOT NULL , title varchar(64) DEFAULT NULL , content text , alert_status int DEFAULT '0' , log text , alertgroup_id int DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_alertgroup -- DROP TABLE IF EXISTS t_ds_alertgroup; CREATE TABLE t_ds_alertgroup( id int NOT NULL, alert_instance_ids varchar (255) DEFAULT NULL, create_user_id int4 DEFAULT NULL, group_name varchar(255) DEFAULT NULL, description varchar(255) DEFAULT NULL, create_time timestamp DEFAULT NULL, update_time timestamp DEFAULT NULL, PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_command -- DROP TABLE IF EXISTS t_ds_command; CREATE TABLE t_ds_command ( id int NOT NULL , command_type int DEFAULT NULL , process_definition_id int DEFAULT NULL , command_param text , task_depend_type int DEFAULT NULL , failure_strategy int DEFAULT '0' , warning_type int DEFAULT '0' , warning_group_id int DEFAULT NULL , schedule_time timestamp DEFAULT NULL , start_time timestamp DEFAULT NULL , executor_id int DEFAULT NULL , update_time timestamp DEFAULT NULL , process_instance_priority int DEFAULT NULL , worker_group varchar(64), PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_datasource -- DROP TABLE IF EXISTS t_ds_datasource; CREATE TABLE t_ds_datasource ( id int NOT NULL , name varchar(64) NOT NULL , note varchar(256) DEFAULT NULL , type int NOT NULL , user_id int NOT NULL , connection_params text NOT NULL , create_time timestamp NOT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_error_command -- DROP TABLE IF EXISTS t_ds_error_command; CREATE TABLE t_ds_error_command ( id int NOT NULL , command_type int DEFAULT NULL , executor_id int DEFAULT NULL , process_definition_id int DEFAULT NULL , command_param text , task_depend_type int DEFAULT NULL , failure_strategy int DEFAULT '0' , warning_type int DEFAULT '0' , warning_group_id int DEFAULT NULL , schedule_time timestamp DEFAULT NULL , start_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , process_instance_priority int DEFAULT NULL , worker_group varchar(64), message text , PRIMARY KEY (id) ); -- -- Table structure for table t_ds_master_server -- -- -- Table structure for table t_ds_process_definition -- DROP TABLE IF EXISTS t_ds_process_definition; CREATE TABLE t_ds_process_definition ( id int NOT NULL , name varchar(255) DEFAULT NULL , version int DEFAULT NULL , release_state int DEFAULT NULL , project_id int DEFAULT NULL , user_id int DEFAULT NULL , process_definition_json text , description text , global_params text , flag int DEFAULT NULL , locations text , connects text , warning_group_id int4 DEFAULT NULL , create_time timestamp DEFAULT NULL , timeout int DEFAULT '0' , tenant_id int NOT NULL DEFAULT '-1' , update_time timestamp DEFAULT NULL , modify_by varchar(36) DEFAULT '' , resource_ids varchar(64), PRIMARY KEY (id), CONSTRAINT 
process_definition_unique UNIQUE (name, project_id) ) ; create index process_definition_index on t_ds_process_definition (project_id,id); -- -- Table structure for table t_ds_process_definition_version -- DROP TABLE IF EXISTS t_ds_process_definition_version; CREATE TABLE t_ds_process_definition_version ( id int NOT NULL , process_definition_id int NOT NULL , version int DEFAULT NULL , process_definition_json text , description text , global_params text , locations text , connects text , warning_group_id int4 DEFAULT NULL, create_time timestamp DEFAULT NULL , timeout int DEFAULT '0' , resource_ids varchar(64), PRIMARY KEY (id) ) ; create index process_definition_id_and_version on t_ds_process_definition_version (process_definition_id,version); -- -- Table structure for table t_ds_process_instance -- DROP TABLE IF EXISTS t_ds_process_instance; CREATE TABLE t_ds_process_instance ( id int NOT NULL , name varchar(255) DEFAULT NULL , process_definition_id int DEFAULT NULL , state int DEFAULT NULL , recovery int DEFAULT NULL , start_time timestamp DEFAULT NULL , end_time timestamp DEFAULT NULL , run_times int DEFAULT NULL , host varchar(135) DEFAULT NULL , command_type int DEFAULT NULL , command_param text , task_depend_type int DEFAULT NULL , max_try_times int DEFAULT '0' , failure_strategy int DEFAULT '0' , warning_type int DEFAULT '0' , warning_group_id int DEFAULT NULL , schedule_time timestamp DEFAULT NULL , command_start_time timestamp DEFAULT NULL , global_params text , process_instance_json text , flag int DEFAULT '1' , update_time timestamp NULL , is_sub_process int DEFAULT '0' , executor_id int NOT NULL , locations text , connects text , history_cmd text , dependence_schedule_times text , process_instance_priority int DEFAULT NULL , worker_group varchar(64) , timeout int DEFAULT '0' , tenant_id int NOT NULL DEFAULT '-1' , var_pool text , PRIMARY KEY (id) ) ; create index process_instance_index on t_ds_process_instance (process_definition_id,id); create index start_time_index on t_ds_process_instance (start_time); -- -- Table structure for table t_ds_project -- DROP TABLE IF EXISTS t_ds_project; CREATE TABLE t_ds_project ( id int NOT NULL , name varchar(100) DEFAULT NULL , description varchar(200) DEFAULT NULL , user_id int DEFAULT NULL , flag int DEFAULT '1' , create_time timestamp DEFAULT CURRENT_TIMESTAMP , update_time timestamp DEFAULT CURRENT_TIMESTAMP , PRIMARY KEY (id) ) ; create index user_id_index on t_ds_project (user_id); -- -- Table structure for table t_ds_queue -- DROP TABLE IF EXISTS t_ds_queue; CREATE TABLE t_ds_queue ( id int NOT NULL , queue_name varchar(64) DEFAULT NULL , queue varchar(64) DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ); -- -- Table structure for table t_ds_relation_datasource_user -- DROP TABLE IF EXISTS t_ds_relation_datasource_user; CREATE TABLE t_ds_relation_datasource_user ( id int NOT NULL , user_id int NOT NULL , datasource_id int DEFAULT NULL , perm int DEFAULT '1' , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; ; -- -- Table structure for table t_ds_relation_process_instance -- DROP TABLE IF EXISTS t_ds_relation_process_instance; CREATE TABLE t_ds_relation_process_instance ( id int NOT NULL , parent_process_instance_id int DEFAULT NULL , parent_task_instance_id int DEFAULT NULL , process_instance_id int DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_relation_project_user -- DROP TABLE IF EXISTS 
t_ds_relation_project_user; CREATE TABLE t_ds_relation_project_user ( id int NOT NULL , user_id int NOT NULL , project_id int DEFAULT NULL , perm int DEFAULT '1' , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; create index relation_project_user_id_index on t_ds_relation_project_user (user_id); -- -- Table structure for table t_ds_relation_resources_user -- DROP TABLE IF EXISTS t_ds_relation_resources_user; CREATE TABLE t_ds_relation_resources_user ( id int NOT NULL , user_id int NOT NULL , resources_id int DEFAULT NULL , perm int DEFAULT '1' , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_relation_udfs_user -- DROP TABLE IF EXISTS t_ds_relation_udfs_user; CREATE TABLE t_ds_relation_udfs_user ( id int NOT NULL , user_id int NOT NULL , udf_id int DEFAULT NULL , perm int DEFAULT '1' , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; ; -- -- Table structure for table t_ds_resources -- DROP TABLE IF EXISTS t_ds_resources; CREATE TABLE t_ds_resources ( id int NOT NULL , alias varchar(64) DEFAULT NULL , file_name varchar(64) DEFAULT NULL , description varchar(256) DEFAULT NULL , user_id int DEFAULT NULL , type int DEFAULT NULL , size bigint DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , pid int, full_name varchar(64), is_directory int, PRIMARY KEY (id), CONSTRAINT t_ds_resources_un UNIQUE (full_name, type) ) ; -- -- Table structure for table t_ds_schedules -- DROP TABLE IF EXISTS t_ds_schedules; CREATE TABLE t_ds_schedules ( id int NOT NULL , process_definition_id int NOT NULL , start_time timestamp NOT NULL , end_time timestamp NOT NULL , crontab varchar(256) NOT NULL , failure_strategy int NOT NULL , user_id int NOT NULL , release_state int NOT NULL , warning_type int NOT NULL , warning_group_id int DEFAULT NULL , process_instance_priority int DEFAULT NULL , worker_group varchar(64), create_time timestamp NOT NULL , update_time timestamp NOT NULL , PRIMARY KEY (id) ); -- -- Table structure for table t_ds_session -- DROP TABLE IF EXISTS t_ds_session; CREATE TABLE t_ds_session ( id varchar(64) NOT NULL , user_id int DEFAULT NULL , ip varchar(45) DEFAULT NULL , last_login_time timestamp DEFAULT NULL , PRIMARY KEY (id) ); -- -- Table structure for table t_ds_task_instance -- DROP TABLE IF EXISTS t_ds_task_instance; CREATE TABLE t_ds_task_instance ( id int NOT NULL , name varchar(255) DEFAULT NULL , task_type varchar(64) DEFAULT NULL , process_definition_id int DEFAULT NULL , process_instance_id int DEFAULT NULL , task_json text , state int DEFAULT NULL , submit_time timestamp DEFAULT NULL , start_time timestamp DEFAULT NULL , end_time timestamp DEFAULT NULL , host varchar(135) DEFAULT NULL , execute_path varchar(200) DEFAULT NULL , log_path varchar(200) DEFAULT NULL , alert_flag int DEFAULT NULL , retry_times int DEFAULT '0' , pid int DEFAULT NULL , app_link text , flag int DEFAULT '1' , retry_interval int DEFAULT NULL , max_retry_times int DEFAULT NULL , task_instance_priority int DEFAULT NULL , worker_group varchar(64), executor_id int DEFAULT NULL , first_submit_time timestamp DEFAULT NULL , delay_time int DEFAULT '0' , var_pool text , PRIMARY KEY (id), CONSTRAINT foreign_key_instance_id FOREIGN KEY(process_instance_id) REFERENCES t_ds_process_instance(id) ON DELETE CASCADE ) ; -- -- Table structure for table t_ds_tenant -- DROP TABLE IF EXISTS t_ds_tenant; CREATE TABLE 
t_ds_tenant ( id int NOT NULL , tenant_code varchar(64) DEFAULT NULL , description varchar(256) DEFAULT NULL , queue_id int DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_udfs -- DROP TABLE IF EXISTS t_ds_udfs; CREATE TABLE t_ds_udfs ( id int NOT NULL , user_id int NOT NULL , func_name varchar(100) NOT NULL , class_name varchar(255) NOT NULL , type int NOT NULL , arg_types varchar(255) DEFAULT NULL , database varchar(255) DEFAULT NULL , description varchar(255) DEFAULT NULL , resource_id int NOT NULL , resource_name varchar(255) NOT NULL , create_time timestamp NOT NULL , update_time timestamp NOT NULL , PRIMARY KEY (id) ) ; -- -- Table structure for table t_ds_user -- DROP TABLE IF EXISTS t_ds_user; CREATE TABLE t_ds_user ( id int NOT NULL , user_name varchar(64) DEFAULT NULL , user_password varchar(64) DEFAULT NULL , user_type int DEFAULT NULL , email varchar(64) DEFAULT NULL , phone varchar(11) DEFAULT NULL , tenant_id int DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , queue varchar(64) DEFAULT NULL , state int DEFAULT 1 , PRIMARY KEY (id) ); comment on column t_ds_user.state is 'state 0:disable 1:enable'; -- -- Table structure for table t_ds_version -- DROP TABLE IF EXISTS t_ds_version; CREATE TABLE t_ds_version ( id int NOT NULL , version varchar(200) NOT NULL, PRIMARY KEY (id) ) ; create index version_index on t_ds_version(version); -- -- Table structure for table t_ds_worker_group -- DROP TABLE IF EXISTS t_ds_worker_group; CREATE TABLE t_ds_worker_group ( id bigint NOT NULL , name varchar(256) NOT NULL , addr_list text DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) , CONSTRAINT name_unique UNIQUE (name) ) ; -- -- Table structure for table t_ds_worker_server -- DROP TABLE IF EXISTS t_ds_worker_server; CREATE TABLE t_ds_worker_server ( id int NOT NULL , host varchar(45) DEFAULT NULL , port int DEFAULT NULL , zk_directory varchar(64) DEFAULT NULL , res_info varchar(255) DEFAULT NULL , create_time timestamp DEFAULT NULL , last_heartbeat_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; DROP SEQUENCE IF EXISTS t_ds_access_token_id_sequence; CREATE SEQUENCE t_ds_access_token_id_sequence; ALTER TABLE t_ds_access_token ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_access_token_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_alert_id_sequence; CREATE SEQUENCE t_ds_alert_id_sequence; ALTER TABLE t_ds_alert ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_alert_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_alertgroup_id_sequence; CREATE SEQUENCE t_ds_alertgroup_id_sequence; ALTER TABLE t_ds_alertgroup ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_alertgroup_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_command_id_sequence; CREATE SEQUENCE t_ds_command_id_sequence; ALTER TABLE t_ds_command ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_command_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_datasource_id_sequence; CREATE SEQUENCE t_ds_datasource_id_sequence; ALTER TABLE t_ds_datasource ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_datasource_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_process_definition_id_sequence; CREATE SEQUENCE t_ds_process_definition_id_sequence; ALTER TABLE t_ds_process_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_process_definition_version_id_sequence; CREATE SEQUENCE t_ds_process_definition_version_id_sequence; ALTER TABLE 
t_ds_process_definition_version ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_version_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_process_instance_id_sequence; CREATE SEQUENCE t_ds_process_instance_id_sequence; ALTER TABLE t_ds_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_instance_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_project_id_sequence; CREATE SEQUENCE t_ds_project_id_sequence; ALTER TABLE t_ds_project ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_project_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_queue_id_sequence; CREATE SEQUENCE t_ds_queue_id_sequence; ALTER TABLE t_ds_queue ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_queue_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_datasource_user_id_sequence; CREATE SEQUENCE t_ds_relation_datasource_user_id_sequence; ALTER TABLE t_ds_relation_datasource_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_datasource_user_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_process_instance_id_sequence; CREATE SEQUENCE t_ds_relation_process_instance_id_sequence; ALTER TABLE t_ds_relation_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_process_instance_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_project_user_id_sequence; CREATE SEQUENCE t_ds_relation_project_user_id_sequence; ALTER TABLE t_ds_relation_project_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_project_user_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_resources_user_id_sequence; CREATE SEQUENCE t_ds_relation_resources_user_id_sequence; ALTER TABLE t_ds_relation_resources_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_resources_user_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_udfs_user_id_sequence; CREATE SEQUENCE t_ds_relation_udfs_user_id_sequence; ALTER TABLE t_ds_relation_udfs_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_udfs_user_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_resources_id_sequence; CREATE SEQUENCE t_ds_resources_id_sequence; ALTER TABLE t_ds_resources ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_resources_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_schedules_id_sequence; CREATE SEQUENCE t_ds_schedules_id_sequence; ALTER TABLE t_ds_schedules ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_schedules_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_task_instance_id_sequence; CREATE SEQUENCE t_ds_task_instance_id_sequence; ALTER TABLE t_ds_task_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_instance_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_tenant_id_sequence; CREATE SEQUENCE t_ds_tenant_id_sequence; ALTER TABLE t_ds_tenant ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_tenant_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_udfs_id_sequence; CREATE SEQUENCE t_ds_udfs_id_sequence; ALTER TABLE t_ds_udfs ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_udfs_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_user_id_sequence; CREATE SEQUENCE t_ds_user_id_sequence; ALTER TABLE t_ds_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_user_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_version_id_sequence; CREATE SEQUENCE t_ds_version_id_sequence; ALTER TABLE t_ds_version ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_version_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_worker_group_id_sequence; CREATE SEQUENCE t_ds_worker_group_id_sequence; ALTER TABLE t_ds_worker_group ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_worker_group_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_worker_server_id_sequence; CREATE SEQUENCE t_ds_worker_server_id_sequence; ALTER TABLE t_ds_worker_server ALTER COLUMN id SET DEFAULT 
NEXTVAL('t_ds_worker_server_id_sequence'); -- Records of t_ds_user?user : admin , password : dolphinscheduler123 INSERT INTO t_ds_user(user_name, user_password, user_type, email, phone, tenant_id, state, create_time, update_time) VALUES ('admin', '7ad2410b2f4c074479a8937a28a22b8f', '0', 'xxx@qq.com', '', '0', 1, '2018-03-27 15:48:50', '2018-10-24 17:40:22'); -- Records of t_ds_alertgroup, default admin warning group INSERT INTO t_ds_alertgroup(alert_instance_ids, create_user_id, group_name, description, create_time, update_time) VALUES ('1,2', 1, 'default admin warning group', 'default admin warning group', '2018-11-29 10:20:39', '2018-11-29 10:20:39'); -- Records of t_ds_queue,default queue name : default INSERT INTO t_ds_queue(queue_name, queue, create_time, update_time) VALUES ('default', 'default', '2018-11-29 10:22:33', '2018-11-29 10:22:33'); -- Records of t_ds_queue,default queue name : default INSERT INTO t_ds_version(version) VALUES ('1.4.0'); -- -- Table structure for table t_ds_plugin_define -- DROP TABLE IF EXISTS t_ds_plugin_define; CREATE TABLE t_ds_plugin_define ( id serial NOT NULL, plugin_name varchar(100) NOT NULL, plugin_type varchar(100) NOT NULL, plugin_params text NULL, create_time timestamp NULL, update_time timestamp NULL, CONSTRAINT t_ds_plugin_define_pk PRIMARY KEY (id), CONSTRAINT t_ds_plugin_define_un UNIQUE (plugin_name, plugin_type) ); -- -- Table structure for table t_ds_alert_plugin_instance -- DROP TABLE IF EXISTS t_ds_alert_plugin_instance; CREATE TABLE t_ds_alert_plugin_instance ( id serial NOT NULL, plugin_define_id int4 NOT NULL, plugin_instance_params text NULL, create_time timestamp NULL, update_time timestamp NULL, instance_name varchar(200) NULL, CONSTRAINT t_ds_alert_plugin_instance_pk PRIMARY KEY (id) );
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,351
[Bug][K8s] Quartz cron task does not take effect
**To Reproduce**
Steps to reproduce the behavior:
1. Deploy DolphinScheduler in K8s
2. Configure a Quartz cron schedule for a process definition
3. See the error

**Expected behavior**
Bug fixed

**Screenshots**
![image](https://user-images.githubusercontent.com/4902714/115666960-d1217a80-a377-11eb-8d64-a7c3e643e949.png)
![image](https://user-images.githubusercontent.com/4902714/115666973-d41c6b00-a377-11eb-88a2-0f9f1310da49.png)
![image](https://user-images.githubusercontent.com/4902714/115666992-d979b580-a377-11eb-84ef-94898df057fc.png)
![image](https://user-images.githubusercontent.com/4902714/115667005-dd0d3c80-a377-11eb-8779-be755adbcb29.png)

**Which version of DolphinScheduler:**
- [1.3.5]
- [dev]
https://github.com/apache/dolphinscheduler/issues/5351
https://github.com/apache/dolphinscheduler/pull/5353
abdd2337b144df149a2031c3b26862ae41b4e936
3a7006a79649df4f45397f125c2e9665f9659c0b
"2021-04-22T06:34:43Z"
java
"2021-04-22T11:07:19Z"
sql/upgrade/1.3.6_schema/mysql/dolphinscheduler_ddl.sql
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY',''));

-- uc_dolphin_T_t_ds_worker_group_R_ip_list
drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_worker_group_R_ip_list;
delimiter d//
CREATE PROCEDURE uc_dolphin_T_t_ds_worker_group_R_ip_list()
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.COLUMNS
        WHERE TABLE_NAME='t_ds_worker_group'
        AND TABLE_SCHEMA=(SELECT DATABASE())
        AND COLUMN_NAME ='ip_list')
    THEN
        ALTER TABLE t_ds_worker_group CHANGE COLUMN `ip_list` `addr_list` text;
        ALTER TABLE t_ds_worker_group MODIFY COLUMN `name` varchar(256) NOT NULL;
        ALTER TABLE t_ds_worker_group ADD UNIQUE KEY `name_unique` (`name`);
    END IF;
END;
d//
delimiter ;
CALL uc_dolphin_T_t_ds_worker_group_R_ip_list;
DROP PROCEDURE uc_dolphin_T_t_ds_worker_group_R_ip_list;
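The procedure above is an idempotent migration: it checks `information_schema` before renaming `ip_list` to `addr_list`, so re-running it is safe. For illustration only, the same guarded-rename pattern expressed from application code might look like the following JDBC sketch; the class name, connection URL, and credentials are placeholders, not part of DolphinScheduler.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Hypothetical helper mirroring the guarded-rename pattern of the
 * stored procedure above: only alter the table if the legacy column exists.
 */
public class GuardedColumnRename {

    public static void renameIpListIfPresent(Connection conn) throws SQLException {
        String check = "SELECT 1 FROM information_schema.COLUMNS "
                + "WHERE TABLE_NAME = 't_ds_worker_group' "
                + "AND TABLE_SCHEMA = (SELECT DATABASE()) "
                + "AND COLUMN_NAME = 'ip_list'";
        try (PreparedStatement ps = conn.prepareStatement(check);
             ResultSet rs = ps.executeQuery()) {
            if (!rs.next()) {
                return; // already migrated, nothing to do
            }
        }
        try (Statement stmt = conn.createStatement()) {
            stmt.execute("ALTER TABLE t_ds_worker_group CHANGE COLUMN `ip_list` `addr_list` text");
            stmt.execute("ALTER TABLE t_ds_worker_group MODIFY COLUMN `name` varchar(256) NOT NULL");
            stmt.execute("ALTER TABLE t_ds_worker_group ADD UNIQUE KEY `name_unique` (`name`)");
        }
    }

    public static void main(String[] args) throws SQLException {
        // connection details are placeholders
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/dolphinscheduler", "root", "root")) {
            renameIpListIfPresent(conn);
        }
    }
}
```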
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,351
[Bug][K8s] Quartz cron task does not take effect
**To Reproduce**
Steps to reproduce the behavior:
1. Deploy DolphinScheduler in K8s
2. Configure a Quartz cron schedule for a process definition
3. See the error

**Expected behavior**
Bug fixed

**Screenshots**
![image](https://user-images.githubusercontent.com/4902714/115666960-d1217a80-a377-11eb-8d64-a7c3e643e949.png)
![image](https://user-images.githubusercontent.com/4902714/115666973-d41c6b00-a377-11eb-88a2-0f9f1310da49.png)
![image](https://user-images.githubusercontent.com/4902714/115666992-d979b580-a377-11eb-84ef-94898df057fc.png)
![image](https://user-images.githubusercontent.com/4902714/115667005-dd0d3c80-a377-11eb-8779-be755adbcb29.png)

**Which version of DolphinScheduler:**
- [1.3.5]
- [dev]
https://github.com/apache/dolphinscheduler/issues/5351
https://github.com/apache/dolphinscheduler/pull/5353
abdd2337b144df149a2031c3b26862ae41b4e936
3a7006a79649df4f45397f125c2e9665f9659c0b
"2021-04-22T06:34:43Z"
java
"2021-04-22T11:07:19Z"
sql/upgrade/1.3.6_schema/postgresql/dolphinscheduler_ddl.sql
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

-- uc_dolphin_T_t_ds_worker_group_A_ip_list
delimiter d//
CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_worker_group_A_ip_list() RETURNS void AS $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.COLUMNS
        WHERE TABLE_NAME='t_ds_worker_group'
        AND COLUMN_NAME ='ip_list')
    THEN
        ALTER TABLE t_ds_worker_group RENAME ip_list TO addr_list;
        ALTER TABLE t_ds_worker_group ALTER COLUMN addr_list type text;
        ALTER TABLE t_ds_worker_group ALTER COLUMN name type varchar(256), ALTER COLUMN name SET NOT NULL;
        ALTER TABLE t_ds_worker_group ADD CONSTRAINT name_unique UNIQUE (name);
    END IF;
END;
$$ LANGUAGE plpgsql;
d//
delimiter ;
SELECT uc_dolphin_T_t_ds_worker_group_A_ip_list();
DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_worker_group_A_ip_list();

-- Add foreign key constraints for t_ds_task_instance --
delimiter ;
ALTER TABLE t_ds_task_instance ADD CONSTRAINT foreign_key_instance_id FOREIGN KEY(process_instance_id) REFERENCES t_ds_process_instance(id) ON DELETE CASCADE;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,349
[Bug][master] After manually killing a task during failure retry, the workflow status stays running once the parallel task succeeds
1. Task 2 is configured with failure retry. During the second retry, manually kill the process of task 2.
2. After task 3 finishes successfully, a task is added back to the master, so the workflow status stays running indefinitely.

![image](https://user-images.githubusercontent.com/55787491/115653438-bf34dd00-a361-11eb-94c2-240df791059c.png)
![image](https://user-images.githubusercontent.com/55787491/115653120-2aca7a80-a361-11eb-8a66-f688e68d248c.png)

**Which version of DolphinScheduler:**
- [1.3.6-prepare]
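The report boils down to the master re-queuing a manually killed task as if it were still retryable. As a minimal sketch of the kind of guard that avoids this, assuming the `ExecutionStatus` and `TaskInstance` types visible elsewhere in this record, one might write the following; it is illustrative only, not the actual patch in #5362.

```java
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;

public class RetryGuardSketch {

    /**
     * Illustrative check: a failed task should only go back to the
     * standby (retry) queue if it was not stopped or killed manually;
     * otherwise the master would keep the workflow in RUNNING forever.
     */
    public static boolean shouldRetry(TaskInstance task) {
        ExecutionStatus state = task.getState();
        if (state == ExecutionStatus.KILL || state == ExecutionStatus.STOP) {
            // manual intervention ends the retry loop
            return false;
        }
        return state.typeIsFailure() && task.taskCanRetry();
    }
}
```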
https://github.com/apache/dolphinscheduler/issues/5349
https://github.com/apache/dolphinscheduler/pull/5362
3a7006a79649df4f45397f125c2e9665f9659c0b
75d1ad645c994feadf157fb2343141b3d9ec49bf
"2021-04-22T03:56:30Z"
java
"2021-04-23T06:29:26Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DependResult.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.common.enums;

/**
 * depend result
 */
public enum DependResult {

    /**
     * 0 success
     * 1 waiting
     * 2 failed
     */
    SUCCESS, WAITING, FAILED
}
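As a usage illustration, the three values typically drive a three-way decision in the master's standby loop: submit on `SUCCESS`, re-check later on `WAITING`, and mark the task dependency-failed on `FAILED`. A self-contained sketch follows; the enum is redeclared locally so the snippet compiles on its own.

```java
public class DependDispatchSketch {

    // redeclared locally so the sketch compiles standalone;
    // mirrors org.apache.dolphinscheduler.common.enums.DependResult
    enum DependResult { SUCCESS, WAITING, FAILED }

    /** Returns true when the task can be removed from the standby queue. */
    static boolean dispatch(DependResult result, Runnable submit, Runnable markDependFailed) {
        switch (result) {
            case SUCCESS:
                submit.run();            // dependencies complete, run the task
                return true;
            case FAILED:
                markDependFailed.run();  // never submit; propagate the failure
                return true;
            case WAITING:
            default:
                return false;            // re-check on the next scheduler pass
        }
    }

    public static void main(String[] args) {
        dispatch(DependResult.SUCCESS,
                () -> System.out.println("submit task"),
                () -> System.out.println("mark depend failed"));
    }
}
```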
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,349
[Bug][master] After manually killing a task during failure retry, the workflow status stays running once the parallel task succeeds
1. Task 2 is configured with failure retry. During the second retry, manually kill the process of task 2.
2. After task 3 finishes successfully, a task is added back to the master, so the workflow status stays running indefinitely.

![image](https://user-images.githubusercontent.com/55787491/115653438-bf34dd00-a361-11eb-94c2-240df791059c.png)
![image](https://user-images.githubusercontent.com/55787491/115653120-2aca7a80-a361-11eb-8a66-f688e68d248c.png)

**Which version of DolphinScheduler:**
- [1.3.6-prepare]
https://github.com/apache/dolphinscheduler/issues/5349
https://github.com/apache/dolphinscheduler/pull/5362
3a7006a79649df4f45397f125c2e9665f9659c0b
75d1ad645c994feadf157fb2343141b3d9ec49bf
"2021-04-22T03:56:30Z"
java
"2021-04-23T06:29:26Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.master.runner; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVERY_START_NODE_STRING; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; import static org.apache.dolphinscheduler.common.Constants.SEC_2_MINUTES_TIME_UNIT; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DependResult; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.VarPoolUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.utils.DagHelper; import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.utils.AlertManager; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.apache.dolphinscheduler.service.queue.PeerTaskInstancePriorityQueue; import 
java.text.ParseException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; /** * master exec thread,split dag */ public class MasterExecThread implements Runnable { /** * logger of MasterExecThread */ private static final Logger logger = LoggerFactory.getLogger(MasterExecThread.class); /** * runing TaskNode */ private final Map<MasterBaseTaskExecThread, Future<Boolean>> activeTaskNode = new ConcurrentHashMap<>(); /** * task exec service */ private final ExecutorService taskExecService; /** * process instance */ private ProcessInstance processInstance; /** * submit failure nodes */ private boolean taskFailedSubmit = false; /** * recover node id list */ private List<TaskInstance> recoverNodeIdList = new ArrayList<>(); /** * error task list */ private Map<String, TaskInstance> errorTaskList = new ConcurrentHashMap<>(); /** * complete task list */ private Map<String, TaskInstance> completeTaskList = new ConcurrentHashMap<>(); /** * ready to submit task queue */ private PeerTaskInstancePriorityQueue readyToSubmitTaskQueue = new PeerTaskInstancePriorityQueue(); /** * depend failed task map */ private Map<String, TaskInstance> dependFailedTask = new ConcurrentHashMap<>(); /** * forbidden task map */ private Map<String, TaskNode> forbiddenTaskList = new ConcurrentHashMap<>(); /** * skip task map */ private Map<String, TaskNode> skipTaskNodeList = new ConcurrentHashMap<>(); /** * recover tolerance fault task list */ private List<TaskInstance> recoverToleranceFaultTaskList = new ArrayList<>(); /** * alert manager */ private AlertManager alertManager; /** * the object of DAG */ private DAG<String, TaskNode, TaskNodeRelation> dag; /** * process service */ private ProcessService processService; /** * master config */ private MasterConfig masterConfig; /** * */ private NettyRemotingClient nettyRemotingClient; /** * submit post node * * @param parentNodeName parent node name */ private Map<String, Object> propToValue = new ConcurrentHashMap<String, Object>(); /** * constructor of MasterExecThread * * @param processInstance processInstance * @param processService processService * @param nettyRemotingClient nettyRemotingClient */ public MasterExecThread(ProcessInstance processInstance , ProcessService processService , NettyRemotingClient nettyRemotingClient , AlertManager alertManager , MasterConfig masterConfig) { this.processService = processService; this.processInstance = processInstance; this.masterConfig = masterConfig; int masterTaskExecNum = masterConfig.getMasterExecTaskNum(); this.taskExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Task-Exec-Thread", masterTaskExecNum); this.nettyRemotingClient = nettyRemotingClient; this.alertManager = alertManager; } @Override public void run() { // process instance is null if (processInstance == null) { logger.info("process instance is not exists"); return; } // check to see if it's done if (processInstance.getState().typeIsFinished()) { logger.info("process instance is done : {}", processInstance.getId()); return; } try { if (processInstance.isComplementData() && Flag.NO == processInstance.getIsSubProcess()) { // sub process complement data executeComplementProcess(); } else { 
// execute flow executeProcess(); } } catch (Exception e) { logger.error("master exec thread exception", e); logger.error("process execute failed, process id:{}", processInstance.getId()); processInstance.setState(ExecutionStatus.FAILURE); processInstance.setEndTime(new Date()); processService.updateProcessInstance(processInstance); } finally { taskExecService.shutdown(); } } /** * execute process * * @throws Exception exception */ private void executeProcess() throws Exception { prepareProcess(); runProcess(); endProcess(); } /** * execute complement process * * @throws Exception exception */ private void executeComplementProcess() throws Exception { Map<String, String> cmdParam = JSONUtils.toMap(processInstance.getCommandParam()); Date startDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); Date endDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); processService.saveProcessInstance(processInstance); // get schedules int processDefinitionId = processInstance.getProcessDefinitionId(); List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId); List<Date> listDate = Lists.newLinkedList(); if (!CollectionUtils.isEmpty(schedules)) { for (Schedule schedule : schedules) { listDate.addAll(CronUtils.getSelfFireDateList(startDate, endDate, schedule.getCrontab())); } } // get first fire date Iterator<Date> iterator = null; Date scheduleDate = null; if (!CollectionUtils.isEmpty(listDate)) { iterator = listDate.iterator(); scheduleDate = iterator.next(); processInstance.setScheduleTime(scheduleDate); processService.updateProcessInstance(processInstance); } else { scheduleDate = processInstance.getScheduleTime(); if (scheduleDate == null) { scheduleDate = startDate; } } while (Stopper.isRunning()) { logger.info("process {} start to complement {} data", processInstance.getId(), DateUtils.dateToString(scheduleDate)); // prepare dag and other info prepareProcess(); if (dag == null) { logger.error("process {} dag is null, please check out parameters", processInstance.getId()); processInstance.setState(ExecutionStatus.SUCCESS); processService.updateProcessInstance(processInstance); return; } // execute process ,waiting for end runProcess(); endProcess(); // process instance failure ,no more complements if (!processInstance.getState().typeIsSuccess()) { logger.info("process {} state {}, complement not completely!", processInstance.getId(), processInstance.getState()); break; } // current process instance success ,next execute if (null == iterator) { // loop by day scheduleDate = DateUtils.getSomeDay(scheduleDate, 1); if (scheduleDate.after(endDate)) { // all success logger.info("process {} complement completely!", processInstance.getId()); break; } } else { // loop by schedule date if (!iterator.hasNext()) { // all success logger.info("process {} complement completely!", processInstance.getId()); break; } scheduleDate = iterator.next(); } // flow end // execute next process instance complement data processInstance.setScheduleTime(scheduleDate); if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING)) { cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); } processInstance.setState(ExecutionStatus.RUNNING_EXECUTION); processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( processInstance.getProcessDefinition().getGlobalParamMap(), 
processInstance.getProcessDefinition().getGlobalParamList(), CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); processInstance.setId(0); processInstance.setStartTime(new Date()); processInstance.setEndTime(null); processService.saveProcessInstance(processInstance); } } /** * prepare process parameter * * @throws Exception exception */ private void prepareProcess() throws Exception { // gen process dag buildFlowDag(); // init task queue initTaskQueue(); logger.info("prepare process :{} end", processInstance.getId()); } /** * process end handle */ private void endProcess() { processInstance.setEndTime(new Date()); processService.updateProcessInstance(processInstance); if (processInstance.getState().typeIsWaitingThread()) { processService.createRecoveryWaitingThreadCommand(null, processInstance); } List<TaskInstance> taskInstances = processService.findValidTaskListByProcessId(processInstance.getId()); ProjectUser projectUser = processService.queryProjectWithUserByProcessInstanceId(processInstance.getId()); alertManager.sendAlertProcessInstance(processInstance, taskInstances, projectUser); } /** * generate process dag * * @throws Exception exception */ private void buildFlowDag() throws Exception { recoverNodeIdList = getStartTaskInstanceList(processInstance.getCommandParam()); forbiddenTaskList = DagHelper.getForbiddenTaskNodeMaps(processInstance.getProcessInstanceJson()); // generate process to get DAG info List<String> recoveryNameList = getRecoveryNodeNameList(); List<String> startNodeNameList = parseStartNodeName(processInstance.getCommandParam()); ProcessDag processDag = generateFlowDag(processInstance.getProcessInstanceJson(), startNodeNameList, recoveryNameList, processInstance.getTaskDependType()); if (processDag == null) { logger.error("processDag is null"); return; } // generate process dag dag = DagHelper.buildDagGraph(processDag); } /** * init task queue */ private void initTaskQueue() { taskFailedSubmit = false; activeTaskNode.clear(); dependFailedTask.clear(); completeTaskList.clear(); errorTaskList.clear(); List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); for (TaskInstance task : taskInstanceList) { if (task.isTaskComplete()) { completeTaskList.put(task.getName(), task); } if (task.isConditionsTask() || DagHelper.haveConditionsAfterNode(task.getName(), dag)) { continue; } if (task.getState().typeIsFailure() && !task.taskCanRetry()) { errorTaskList.put(task.getName(), task); } } } /** * submit task to execute * * @param taskInstance task instance * @return TaskInstance */ private TaskInstance submitTaskExec(TaskInstance taskInstance) { MasterBaseTaskExecThread abstractExecThread = null; if (taskInstance.isSubProcess()) { abstractExecThread = new SubProcessTaskExecThread(taskInstance); } else if (taskInstance.isDependTask()) { abstractExecThread = new DependentTaskExecThread(taskInstance); } else if (taskInstance.isConditionsTask()) { abstractExecThread = new ConditionsTaskExecThread(taskInstance); } else { abstractExecThread = new MasterTaskExecThread(taskInstance); } Future<Boolean> future = taskExecService.submit(abstractExecThread); activeTaskNode.putIfAbsent(abstractExecThread, future); return abstractExecThread.getTaskInstance(); } /** * find task instance in db. * in case submit more than one same name task in the same time. 
* * @param taskName task name * @return TaskInstance */ private TaskInstance findTaskIfExists(String taskName) { List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(this.processInstance.getId()); for (TaskInstance taskInstance : taskInstanceList) { if (taskInstance.getName().equals(taskName)) { return taskInstance; } } return null; } /** * encapsulation task * * @param processInstance process instance * @param nodeName node name * @return TaskInstance */ private TaskInstance createTaskInstance(ProcessInstance processInstance, String nodeName, TaskNode taskNode) { //update processInstance for update the globalParams this.processInstance = this.processService.findProcessInstanceById(this.processInstance.getId()); TaskInstance taskInstance = findTaskIfExists(nodeName); if (taskInstance == null) { taskInstance = new TaskInstance(); // task name taskInstance.setName(nodeName); // process instance define id taskInstance.setProcessDefinitionId(processInstance.getProcessDefinitionId()); // task instance state taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); // process instance id taskInstance.setProcessInstanceId(processInstance.getId()); // task instance node json taskInstance.setTaskJson(JSONUtils.toJsonString(taskNode)); // task instance type taskInstance.setTaskType(taskNode.getType()); // task instance whether alert taskInstance.setAlertFlag(Flag.NO); // task instance start time taskInstance.setStartTime(null); // task instance flag taskInstance.setFlag(Flag.YES); // task instance retry times taskInstance.setRetryTimes(0); // max task instance retry times taskInstance.setMaxRetryTimes(taskNode.getMaxRetryTimes()); // retry task instance interval taskInstance.setRetryInterval(taskNode.getRetryInterval()); // task instance priority if (taskNode.getTaskInstancePriority() == null) { taskInstance.setTaskInstancePriority(Priority.MEDIUM); } else { taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority()); } String processWorkerGroup = processInstance.getWorkerGroup(); processWorkerGroup = StringUtils.isBlank(processWorkerGroup) ? DEFAULT_WORKER_GROUP : processWorkerGroup; String taskWorkerGroup = StringUtils.isBlank(taskNode.getWorkerGroup()) ? 
processWorkerGroup : taskNode.getWorkerGroup(); if (!processWorkerGroup.equals(DEFAULT_WORKER_GROUP) && taskWorkerGroup.equals(DEFAULT_WORKER_GROUP)) { taskInstance.setWorkerGroup(processWorkerGroup); } else { taskInstance.setWorkerGroup(taskWorkerGroup); } //get process global setProcessGlobal(taskNode, taskInstance); // delay execution time taskInstance.setDelayTime(taskNode.getDelayTime()); } return taskInstance; } private void setProcessGlobal(TaskNode taskNode, TaskInstance taskInstance) { String globalParams = this.processInstance.getGlobalParams(); if (StringUtils.isNotEmpty(globalParams)) { Map<String, String> globalMap = processService.getGlobalParamMap(globalParams); if (globalMap != null && globalMap.size() != 0) { setGlobalMapToTask(taskNode, taskInstance, globalMap); } } } private void setGlobalMapToTask(TaskNode taskNode, TaskInstance taskInstance, Map<String, String> globalMap) { // the param save in localParams Map<String, Object> result = JSONUtils.toMap(taskNode.getParams(), String.class, Object.class); Object localParams = result.get(LOCAL_PARAMS); if (localParams != null) { List<Property> allParam = JSONUtils.toList(JSONUtils.toJsonString(localParams), Property.class); for (Property info : allParam) { if (info.getDirect().equals(Direct.IN)) { String paramName = info.getProp(); String value = globalMap.get(paramName); if (StringUtils.isNotEmpty(value)) { info.setValue(value); } } } result.put(LOCAL_PARAMS, allParam); taskNode.setParams(JSONUtils.toJsonString(result)); // task instance node json taskInstance.setTaskJson(JSONUtils.toJsonString(taskNode)); } } private void submitPostNode(String parentNodeName) { Set<String> submitTaskNodeList = DagHelper.parsePostNodes(parentNodeName, skipTaskNodeList, dag, completeTaskList); List<TaskInstance> taskInstances = new ArrayList<>(); for (String taskNode : submitTaskNodeList) { try { VarPoolUtils.convertVarPoolToMap(propToValue, processInstance.getVarPool()); } catch (ParseException e) { logger.error("parse {} exception", processInstance.getVarPool(), e); throw new RuntimeException(); } TaskNode taskNodeObject = dag.getNode(taskNode); VarPoolUtils.setTaskNodeLocalParams(taskNodeObject, propToValue); taskInstances.add(createTaskInstance(processInstance, taskNode, taskNodeObject)); } // if previous node success , post node submit for (TaskInstance task : taskInstances) { if (readyToSubmitTaskQueue.contains(task)) { continue; } if (completeTaskList.containsKey(task.getName())) { logger.info("task {} has already run success", task.getName()); continue; } if (task.getState().typeIsPause() || task.getState().typeIsCancel()) { logger.info("task {} stopped, the state is {}", task.getName(), task.getState()); } else { addTaskToStandByList(task); } } } /** * determine whether the dependencies of the task node are complete * * @return DependResult */ private DependResult isTaskDepsComplete(String taskName) { Collection<String> startNodes = dag.getBeginNode(); // if vertex,returns true directly if (startNodes.contains(taskName)) { return DependResult.SUCCESS; } TaskNode taskNode = dag.getNode(taskName); List<String> depNameList = taskNode.getDepList(); for (String depsNode : depNameList) { if (!dag.containsNode(depsNode) || forbiddenTaskList.containsKey(depsNode) || skipTaskNodeList.containsKey(depsNode)) { continue; } // dependencies must be fully completed if (!completeTaskList.containsKey(depsNode)) { return DependResult.WAITING; } ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState(); if (depTaskState.typeIsPause() || 
depTaskState.typeIsCancel()) { return DependResult.WAITING; } // ignore task state if current task is condition if (taskNode.isConditionsTask()) { continue; } if (!dependTaskSuccess(depsNode, taskName)) { return DependResult.FAILED; } } logger.info("taskName: {} completeDependTaskList: {}", taskName, Arrays.toString(completeTaskList.keySet().toArray())); return DependResult.SUCCESS; } /** * depend node is completed, but here need check the condition task branch is the next node */ private boolean dependTaskSuccess(String dependNodeName, String nextNodeName) { if (dag.getNode(dependNodeName).isConditionsTask()) { //condition task need check the branch to run List<String> nextTaskList = DagHelper.parseConditionTask(dependNodeName, skipTaskNodeList, dag, completeTaskList); if (!nextTaskList.contains(nextNodeName)) { return false; } } else { ExecutionStatus depTaskState = completeTaskList.get(dependNodeName).getState(); if (depTaskState.typeIsFailure()) { return false; } } return true; } /** * query task instance by complete state * * @param state state * @return task instance list */ private List<TaskInstance> getCompleteTaskByState(ExecutionStatus state) { List<TaskInstance> resultList = new ArrayList<>(); for (Map.Entry<String, TaskInstance> entry : completeTaskList.entrySet()) { if (entry.getValue().getState() == state) { resultList.add(entry.getValue()); } } return resultList; } /** * where there are ongoing tasks * * @param state state * @return ExecutionStatus */ private ExecutionStatus runningState(ExecutionStatus state) { if (state == ExecutionStatus.READY_STOP || state == ExecutionStatus.READY_PAUSE || state == ExecutionStatus.WAITTING_THREAD || state == ExecutionStatus.DELAY_EXECUTION) { // if the running task is not completed, the state remains unchanged return state; } else { return ExecutionStatus.RUNNING_EXECUTION; } } /** * exists failure task,contains submit failure、dependency failure,execute failure(retry after) * * @return Boolean whether has failed task */ private boolean hasFailedTask() { if (this.taskFailedSubmit) { return true; } if (this.errorTaskList.size() > 0) { return true; } return this.dependFailedTask.size() > 0; } /** * process instance failure * * @return Boolean whether process instance failed */ private boolean processFailed() { if (hasFailedTask()) { if (processInstance.getFailureStrategy() == FailureStrategy.END) { return true; } if (processInstance.getFailureStrategy() == FailureStrategy.CONTINUE) { return readyToSubmitTaskQueue.size() == 0 || activeTaskNode.size() == 0; } } return false; } /** * whether task for waiting thread * * @return Boolean whether has waiting thread task */ private boolean hasWaitingThreadTask() { List<TaskInstance> waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD); return CollectionUtils.isNotEmpty(waitingList); } /** * prepare for pause * 1,failed retry task in the preparation queue , returns to failure directly * 2,exists pause task,complement not completed, pending submission of tasks, return to suspension * 3,success * * @return ExecutionStatus */ private ExecutionStatus processReadyPause() { if (hasRetryTaskInStandBy()) { return ExecutionStatus.FAILURE; } List<TaskInstance> pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE); if (CollectionUtils.isNotEmpty(pauseList) || !isComplementEnd() || readyToSubmitTaskQueue.size() > 0) { return ExecutionStatus.PAUSE; } else { return ExecutionStatus.SUCCESS; } } /** * generate the latest process instance status by the tasks state * * @return process instance 
execution status */ private ExecutionStatus getProcessInstanceState() { ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId()); ExecutionStatus state = instance.getState(); if (activeTaskNode.size() > 0 || hasRetryTaskInStandBy()) { // active task and retry task exists return runningState(state); } // process failure if (processFailed()) { return ExecutionStatus.FAILURE; } // waiting thread if (hasWaitingThreadTask()) { return ExecutionStatus.WAITTING_THREAD; } // pause if (state == ExecutionStatus.READY_PAUSE) { return processReadyPause(); } // stop if (state == ExecutionStatus.READY_STOP) { List<TaskInstance> stopList = getCompleteTaskByState(ExecutionStatus.STOP); List<TaskInstance> killList = getCompleteTaskByState(ExecutionStatus.KILL); if (CollectionUtils.isNotEmpty(stopList) || CollectionUtils.isNotEmpty(killList) || !isComplementEnd()) { return ExecutionStatus.STOP; } else { return ExecutionStatus.SUCCESS; } } // success if (state == ExecutionStatus.RUNNING_EXECUTION) { List<TaskInstance> killTasks = getCompleteTaskByState(ExecutionStatus.KILL); if (readyToSubmitTaskQueue.size() > 0) { //tasks currently pending submission, no retries, indicating that depend is waiting to complete return ExecutionStatus.RUNNING_EXECUTION; } else if (CollectionUtils.isNotEmpty(killTasks)) { // tasks maybe killed manually return ExecutionStatus.FAILURE; } else { // if the waiting queue is empty and the status is in progress, then success return ExecutionStatus.SUCCESS; } } return state; } /** * whether standby task list have retry tasks */ private boolean retryTaskExists() { boolean result = false; for (Iterator<TaskInstance> iter = readyToSubmitTaskQueue.iterator(); iter.hasNext(); ) { TaskInstance task = iter.next(); if (task.getState().typeIsFailure()) { result = true; break; } } return result; } /** * whether complement end * * @return Boolean whether is complement end */ private boolean isComplementEnd() { if (!processInstance.isComplementData()) { return true; } try { Map<String, String> cmdParam = JSONUtils.toMap(processInstance.getCommandParam()); Date endTime = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); return processInstance.getScheduleTime().equals(endTime); } catch (Exception e) { logger.error("complement end failed ", e); return false; } } /** * updateProcessInstance process instance state * after each batch of tasks is executed, the status of the process instance is updated */ private void updateProcessInstanceState() { ExecutionStatus state = getProcessInstanceState(); if (processInstance.getState() != state) { logger.info( "work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}", processInstance.getId(), processInstance.getName(), processInstance.getState(), state, processInstance.getCommandType()); ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId()); instance.setState(state); instance.setProcessDefinition(processInstance.getProcessDefinition()); processService.updateProcessInstance(instance); processInstance = instance; } } /** * get task dependency result * * @param taskInstance task instance * @return DependResult */ private DependResult getDependResultForTask(TaskInstance taskInstance) { return isTaskDepsComplete(taskInstance.getName()); } /** * add task to standby list * * @param taskInstance task instance */ private void addTaskToStandByList(TaskInstance taskInstance) { logger.info("add task to stand by list: {}", taskInstance.getName()); try 
{ readyToSubmitTaskQueue.put(taskInstance); } catch (Exception e) { logger.error("add task instance to readyToSubmitTaskQueue error, taskName: {}", taskInstance.getName(), e); } } /** * remove task from stand by list * * @param taskInstance task instance */ private void removeTaskFromStandbyList(TaskInstance taskInstance) { logger.info("remove task from stand by list: {}", taskInstance.getName()); try { readyToSubmitTaskQueue.remove(taskInstance); } catch (Exception e) { logger.error("remove task instance from readyToSubmitTaskQueue error, taskName: {}", taskInstance.getName(), e); } } /** * has retry task in standby * * @return Boolean whether has retry task in standby */ private boolean hasRetryTaskInStandBy() { for (Iterator<TaskInstance> iter = readyToSubmitTaskQueue.iterator(); iter.hasNext(); ) { if (iter.next().getState().typeIsFailure()) { return true; } } return false; } /** * submit and watch the tasks, until the work flow stop */ private void runProcess() { // submit start node submitPostNode(null); boolean sendTimeWarning = false; while (!processInstance.isProcessInstanceStop() && Stopper.isRunning()) { // send warning email if process time out. if (!sendTimeWarning && checkProcessTimeOut(processInstance)) { alertManager.sendProcessTimeoutAlert(processInstance, processService.findProcessDefineById(processInstance.getProcessDefinitionId())); sendTimeWarning = true; } for (Map.Entry<MasterBaseTaskExecThread, Future<Boolean>> entry : activeTaskNode.entrySet()) { Future<Boolean> future = entry.getValue(); TaskInstance task = entry.getKey().getTaskInstance(); if (!future.isDone()) { continue; } // node monitor thread complete task = this.processService.findTaskInstanceById(task.getId()); if (task == null) { this.taskFailedSubmit = true; activeTaskNode.remove(entry.getKey()); continue; } // node monitor thread complete if (task.getState().typeIsFinished()) { activeTaskNode.remove(entry.getKey()); } logger.info("task :{}, id:{} complete, state is {} ", task.getName(), task.getId(), task.getState()); // node success , post node submit if (task.getState() == ExecutionStatus.SUCCESS) { processInstance = processService.findProcessInstanceById(processInstance.getId()); processInstance.setVarPool(task.getVarPool()); processService.updateProcessInstance(processInstance); completeTaskList.put(task.getName(), task); submitPostNode(task.getName()); continue; } // node fails, retry first, and then execute the failure process if (task.getState().typeIsFailure()) { if (task.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE) { this.recoverToleranceFaultTaskList.add(task); } if (task.taskCanRetry()) { addTaskToStandByList(task); } else { completeTaskList.put(task.getName(), task); if (task.isConditionsTask() || DagHelper.haveConditionsAfterNode(task.getName(), dag)) { submitPostNode(task.getName()); } else { errorTaskList.put(task.getName(), task); if (processInstance.getFailureStrategy() == FailureStrategy.END) { killTheOtherTasks(); } } } continue; } // other status stop/pause completeTaskList.put(task.getName(), task); } // send alert if (CollectionUtils.isNotEmpty(this.recoverToleranceFaultTaskList)) { alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList); this.recoverToleranceFaultTaskList.clear(); } // updateProcessInstance completed task status // failure priority is higher than pause // if a task fails, other suspended tasks need to be reset kill // check if there exists forced success nodes in errorTaskList if (errorTaskList.size() > 0) { for 
(Map.Entry<String, TaskInstance> entry : completeTaskList.entrySet()) { TaskInstance completeTask = entry.getValue(); if (completeTask.getState() == ExecutionStatus.PAUSE) { completeTask.setState(ExecutionStatus.KILL); completeTaskList.put(entry.getKey(), completeTask); processService.updateTaskInstance(completeTask); } } for (Map.Entry<String, TaskInstance> entry : errorTaskList.entrySet()) { TaskInstance errorTask = entry.getValue(); TaskInstance currentTask = processService.findTaskInstanceById(errorTask.getId()); if (currentTask == null) { continue; } // for nodes that have been forced success if (errorTask.getState().typeIsFailure() && currentTask.getState().equals(ExecutionStatus.FORCED_SUCCESS)) { // update state in this thread and remove from errorTaskList errorTask.setState(currentTask.getState()); logger.info("task: {} has been forced success, remove it from error task list", errorTask.getName()); errorTaskList.remove(errorTask.getName()); // submit post nodes submitPostNode(errorTask.getName()); } } } if (canSubmitTaskToQueue()) { submitStandByTask(); } try { Thread.sleep(Constants.SLEEP_TIME_MILLIS); } catch (InterruptedException e) { logger.error(e.getMessage(), e); Thread.currentThread().interrupt(); } updateProcessInstanceState(); } logger.info("process:{} end, state :{}", processInstance.getId(), processInstance.getState()); } /** * whether check process time out * * @param processInstance task instance * @return true if time out of process instance > running time of process instance */ private boolean checkProcessTimeOut(ProcessInstance processInstance) { if (processInstance.getTimeout() == 0) { return false; } Date now = new Date(); long runningTime = DateUtils.diffMin(now, processInstance.getStartTime()); return runningTime > processInstance.getTimeout(); } /** * whether can submit task to queue * * @return boolean */ private boolean canSubmitTaskToQueue() { return OSUtils.checkResource(masterConfig.getMasterMaxCpuloadAvg(), masterConfig.getMasterReservedMemory()); } /** * close the on going tasks */ private void killTheOtherTasks() { logger.info("kill called on process instance id: {}, num: {}", processInstance.getId(), activeTaskNode.size()); for (Map.Entry<MasterBaseTaskExecThread, Future<Boolean>> entry : activeTaskNode.entrySet()) { MasterBaseTaskExecThread taskExecThread = entry.getKey(); Future<Boolean> future = entry.getValue(); TaskInstance taskInstance = taskExecThread.getTaskInstance(); taskInstance = processService.findTaskInstanceById(taskInstance.getId()); if (taskInstance != null && taskInstance.getState().typeIsFinished()) { continue; } if (!future.isDone()) { // record kill info logger.info("kill process instance, id: {}, task: {}", processInstance.getId(), taskExecThread.getTaskInstance().getId()); // kill node taskExecThread.kill(); } } } /** * whether the retry interval is timed out * * @param taskInstance task instance * @return Boolean */ private boolean retryTaskIntervalOverTime(TaskInstance taskInstance) { if (taskInstance.getState() != ExecutionStatus.FAILURE) { return true; } if (taskInstance.getId() == 0 || taskInstance.getMaxRetryTimes() == 0 || taskInstance.getRetryInterval() == 0) { return true; } Date now = new Date(); long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime()); // task retry does not over time, return false return taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT < failedTimeInterval; } /** * handling the list of tasks to be submitted */ private void submitStandByTask() { try { int length = 
readyToSubmitTaskQueue.size(); for (int i = 0; i < length; i++) { TaskInstance task = readyToSubmitTaskQueue.peek(); // stop tasks which is retrying if forced success happens if (task.taskCanRetry()) { TaskInstance retryTask = processService.findTaskInstanceById(task.getId()); if (retryTask != null && retryTask.getState().equals(ExecutionStatus.FORCED_SUCCESS)) { task.setState(retryTask.getState()); logger.info("task: {} has been forced success, put it into complete task list and stop retrying", task.getName()); removeTaskFromStandbyList(task); completeTaskList.put(task.getName(), task); submitPostNode(task.getName()); continue; } } DependResult dependResult = getDependResultForTask(task); if (DependResult.SUCCESS == dependResult) { if (retryTaskIntervalOverTime(task)) { submitTaskExec(task); removeTaskFromStandbyList(task); } } else if (DependResult.FAILED == dependResult) { // if the dependency fails, the current node is not submitted and the state changes to failure. dependFailedTask.put(task.getName(), task); removeTaskFromStandbyList(task); logger.info("task {},id:{} depend result : {}", task.getName(), task.getId(), dependResult); } } } catch (Exception e) { logger.error("submit standby task error", e); } } /** * get recovery task instance * * @param taskId task id * @return recovery task instance */ private TaskInstance getRecoveryTaskInstance(String taskId) { if (!StringUtils.isNotEmpty(taskId)) { return null; } try { Integer intId = Integer.valueOf(taskId); TaskInstance task = processService.findTaskInstanceById(intId); if (task == null) { logger.error("start node id cannot be found: {}", taskId); } else { return task; } } catch (Exception e) { logger.error("get recovery task instance failed ", e); } return null; } /** * get start task instance list * * @param cmdParam command param * @return task instance list */ private List<TaskInstance> getStartTaskInstanceList(String cmdParam) { List<TaskInstance> instanceList = new ArrayList<>(); Map<String, String> paramMap = JSONUtils.toMap(cmdParam); if (paramMap != null && paramMap.containsKey(CMD_PARAM_RECOVERY_START_NODE_STRING)) { String[] idList = paramMap.get(CMD_PARAM_RECOVERY_START_NODE_STRING).split(Constants.COMMA); for (String nodeId : idList) { TaskInstance task = getRecoveryTaskInstance(nodeId); if (task != null) { instanceList.add(task); } } } return instanceList; } /** * parse "StartNodeNameList" from cmd param * * @param cmdParam command param * @return start node name list */ private List<String> parseStartNodeName(String cmdParam) { List<String> startNodeNameList = new ArrayList<>(); Map<String, String> paramMap = JSONUtils.toMap(cmdParam); if (paramMap == null) { return startNodeNameList; } if (paramMap.containsKey(CMD_PARAM_START_NODE_NAMES)) { startNodeNameList = Arrays.asList(paramMap.get(CMD_PARAM_START_NODE_NAMES).split(Constants.COMMA)); } return startNodeNameList; } /** * generate start node name list from parsing command param; * if "StartNodeIdList" exists in command param, return StartNodeIdList * * @return recovery node name list */ private List<String> getRecoveryNodeNameList() { List<String> recoveryNodeNameList = new ArrayList<>(); if (CollectionUtils.isNotEmpty(recoverNodeIdList)) { for (TaskInstance task : recoverNodeIdList) { recoveryNodeNameList.add(task.getName()); } } return recoveryNodeNameList; } /** * generate flow dag * * @param processDefinitionJson process definition json * @param startNodeNameList start node name list * @param recoveryNodeNameList recovery node name list * @param depNodeType 
depend node type * @return ProcessDag process dag * @throws Exception exception */ public ProcessDag generateFlowDag(String processDefinitionJson, List<String> startNodeNameList, List<String> recoveryNodeNameList, TaskDependType depNodeType) throws Exception { return DagHelper.generateFlowDag(processDefinitionJson, startNodeNameList, recoveryNodeNameList, depNodeType); } }
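One subtle detail in `retryTaskIntervalOverTime` above is the unit mismatch it reconciles: the retry interval is configured in minutes while the elapsed time since failure is measured in seconds, hence the multiplication by `SEC_2_MINUTES_TIME_UNIT`. A standalone sketch of the same arithmetic, assuming the constant's value is 60:

```java
/**
 * Standalone sketch of the retry-interval check in MasterExecThread:
 * the retry interval is configured in minutes, elapsed time is measured
 * in seconds, so the interval is scaled by 60 before comparing.
 */
public class RetryIntervalSketch {

    private static final int SEC_2_MINUTES_TIME_UNIT = 60; // assumed value of the constant

    static boolean retryIntervalOverTime(int retryIntervalMinutes, long secondsSinceFailure) {
        return (long) retryIntervalMinutes * SEC_2_MINUTES_TIME_UNIT < secondsSinceFailure;
    }

    public static void main(String[] args) {
        // a task configured to retry after 1 minute, which failed 90 seconds ago
        System.out.println(retryIntervalOverTime(1, 90)); // true: the retry is due
        System.out.println(retryIntervalOverTime(1, 30)); // false: keep waiting
    }
}
```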
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,357
[Improvement][DAO] CreateDolphinScheduler
**Describe the question**
When we run `sh script/create-dolphinscheduler.sh` to initialize the DolphinScheduler database environment, it executes the old script in `sql/create` and then upgrades the schema to the latest version. It would be better to execute the new SQL script directly.

**Which version of DolphinScheduler:**
- [1.3.6]
- [dev]

**Describe alternatives you've considered**
Change the SQL init file to `sql/dolphinscheduler_mysql.sql`
https://github.com/apache/dolphinscheduler/issues/5357
https://github.com/apache/dolphinscheduler/pull/5358
75d1ad645c994feadf157fb2343141b3d9ec49bf
b108ac43b8489ad1d744afd0321d4ec150880548
"2021-04-22T11:13:59Z"
java
"2021-04-23T14:03:47Z"
dolphinscheduler-dao/pom.xml
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>${revision}</version> </parent> <artifactId>dolphinscheduler-dao</artifactId> <name>${project.artifactId}</name> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties> <dependencies> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <scope>test</scope> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> <exclusions> <exclusion> <groupId>org.apache.logging.log4j</groupId> <artifactId>log4j-to-slf4j</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-test</artifactId> <scope>test</scope> <exclusions> <exclusion> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> </exclusion> <exclusion> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot</artifactId> </exclusion> <exclusion> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-autoconfigure</artifactId> </exclusion> <exclusion> <artifactId>log4j-api</artifactId> <groupId>org.apache.logging.log4j</groupId> </exclusion> <exclusion> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-tomcat</artifactId> </exclusion> <exclusion> <groupId>org.apache.logging.log4j</groupId> <artifactId>log4j-to-slf4j</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> </dependency> 
<dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <exclusions> <exclusion> <artifactId>protobuf-java</artifactId> <groupId>com.google.protobuf</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <scope>test</scope> </dependency> <dependency> <groupId>org.yaml</groupId> <artifactId>snakeyaml</artifactId> </dependency> </dependencies> </project>
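The module descriptor above centers on MyBatis-Plus, which supplies the generic CRUD layer that the mappers and upgrade code in this module build on. As a brief, hedged orientation (not part of this PR), a mapper typically just extends `BaseMapper`; the `Demo` entity and `t_ds_demo` table below are hypothetical, invented for this sketch:

```java
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;

// Hypothetical entity/mapper pair illustrating the MyBatis-Plus pattern the
// dao module's dependencies exist to support; t_ds_demo is not a real table.
@TableName("t_ds_demo")
class Demo {
    private Integer id;
    private String name;
    // getters/setters omitted for brevity
}

interface DemoMapper extends BaseMapper<Demo> {
    // inherits insert/deleteById/updateById/selectById and more from BaseMapper
}
```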
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,357
[Improvement][DAO] CreateDolphinScheduler
**Describe the question** When we run `sh script/create-dolphinscheduler.sh` to initialize the DolphinScheduler database environment, it executes the old scripts under `sql/create` and then upgrades them to the latest version. It would be better to execute the new SQL script directly. **Which version of DolphinScheduler:** -[1.3.6] -[dev] **Describe alternatives you've considered** Change the SQL init file to `sql/dolphinscheduler_mysql.sql`
https://github.com/apache/dolphinscheduler/issues/5357
https://github.com/apache/dolphinscheduler/pull/5358
75d1ad645c994feadf157fb2343141b3d9ec49bf
b108ac43b8489ad1d744afd0321d4ec150880548
"2021-04-22T11:13:59Z"
java
"2021-04-23T14:03:47Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao.upgrade;

import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.SchemaUtils;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;

/**
 * upgrade manager
 */
public class DolphinSchedulerManager {

    private static final Logger logger = LoggerFactory.getLogger(DolphinSchedulerManager.class);

    UpgradeDao upgradeDao;

    /**
     * init upgrade dao
     */
    private void initUpgradeDao() {
        DbType dbType = UpgradeDao.getDbType();
        if (dbType != null) {
            switch (dbType) {
                case MYSQL:
                    upgradeDao = MysqlUpgradeDao.getInstance();
                    break;
                case POSTGRESQL:
                    upgradeDao = PostgresqlUpgradeDao.getInstance();
                    break;
                default:
                    logger.error("not support sql type: {},can't upgrade", dbType);
                    throw new IllegalArgumentException("not support sql type,can't upgrade");
            }
        }
    }

    /**
     * constructor init
     */
    public DolphinSchedulerManager() {
        initUpgradeDao();
    }

    /**
     * init DolphinScheduler
     */
    public void initDolphinScheduler() {
        // Determines whether the dolphinscheduler table structure has been init
        if (upgradeDao.isExistsTable("t_escheduler_version")
                || upgradeDao.isExistsTable("t_ds_version")
                || upgradeDao.isExistsTable("t_escheduler_queue")) {
            logger.info("The database has been initialized. Skip the initialization step");
            return;
        }
        this.initDolphinSchedulerSchema();
    }

    /**
     * init DolphinScheduler Schema
     */
    public void initDolphinSchedulerSchema() {
        logger.info("Start initializing the DolphinScheduler manager table structure");
        upgradeDao.initSchema();
    }

    /**
     * upgrade DolphinScheduler
     * @throws Exception if error throws Exception
     */
    public void upgradeDolphinScheduler() throws Exception {
        // Gets a list of all upgrades
        List<String> schemaList = SchemaUtils.getAllSchemaList();
        if (schemaList == null || schemaList.size() == 0) {
            logger.info("There is no schema to upgrade!");
        } else {
            String version = "";
            // Gets the version of the current system
            if (upgradeDao.isExistsTable("t_escheduler_version")) {
                version = upgradeDao.getCurrentVersion("t_escheduler_version");
            } else if (upgradeDao.isExistsTable("t_ds_version")) {
                version = upgradeDao.getCurrentVersion("t_ds_version");
            } else if (upgradeDao.isExistsColumn("t_escheduler_queue", "create_time")) {
                version = "1.0.1";
            } else if (upgradeDao.isExistsTable("t_escheduler_queue")) {
                version = "1.0.0";
            } else {
                logger.error("Unable to determine current software version, so cannot upgrade");
                throw new RuntimeException("Unable to determine current software version, so cannot upgrade");
            }
            // The target version of the upgrade
            String schemaVersion = "";
            for (String schemaDir : schemaList) {
                schemaVersion = schemaDir.split("_")[0];
                if (SchemaUtils.isAGreatVersion(schemaVersion, version)) {
                    logger.info("upgrade DolphinScheduler metadata version from {} to {}", version, schemaVersion);
                    logger.info("Begin upgrading DolphinScheduler's table structure");
                    upgradeDao.upgradeDolphinScheduler(schemaDir);
                    if ("1.3.0".equals(schemaVersion)) {
                        upgradeDao.upgradeDolphinSchedulerWorkerGroup();
                    } else if ("1.3.2".equals(schemaVersion)) {
                        upgradeDao.upgradeDolphinSchedulerResourceList();
                    }
                    version = schemaVersion;
                }
            }
        }
        // Assign the value of the version field in the version table to the version of the product
        upgradeDao.updateVersion(SchemaUtils.getSoftVersion());
    }
}
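The upgrade loop in `DolphinSchedulerManager#upgradeDolphinScheduler` derives the installed version from marker tables, then walks the schema directories in order and applies every one whose version is newer. A self-contained sketch of that selection step, assuming directories named `<version>_schema` as under `sql/upgrade`; `isGreaterVersion` is a stand-in written for this example, not the project's `SchemaUtils.isAGreatVersion`:

```java
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Stand-alone sketch of the upgrade-selection walk; it mirrors the loop in
// DolphinSchedulerManager#upgradeDolphinScheduler but uses no DAO classes.
public class UpgradePlanSketch {

    // Compare dotted versions numerically, e.g. "1.3.2" > "1.3.0" (assumption:
    // this matches what SchemaUtils.isAGreatVersion is meant to do).
    static boolean isGreaterVersion(String candidate, String current) {
        String[] a = candidate.split("\\.");
        String[] b = current.split("\\.");
        for (int i = 0; i < Math.max(a.length, b.length); i++) {
            int x = i < a.length ? Integer.parseInt(a[i]) : 0;
            int y = i < b.length ? Integer.parseInt(b[i]) : 0;
            if (x != y) {
                return x > y;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        // Schema directories are named "<version>_schema", as under sql/upgrade/.
        List<String> schemaDirs = Arrays.asList(
                "1.2.0_schema", "1.3.0_schema", "1.3.2_schema", "1.3.6_schema");
        String installed = "1.3.0";

        // Pick every schema strictly newer than the installed version, in order.
        List<String> toApply = schemaDirs.stream()
                .filter(dir -> isGreaterVersion(dir.split("_")[0], installed))
                .collect(Collectors.toList());

        System.out.println("would apply: " + toApply); // [1.3.2_schema, 1.3.6_schema]
    }
}
```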
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,357
[Improvement][DAO] CreateDolphinScheduler
**Describe the question** When we run `sh script/create-dolphinscheduler.sh` to initialize the DolphinScheduler database environment, it executes the old scripts under `sql/create` and then upgrades them to the latest version. It would be better to execute the new SQL script directly. **Which version of DolphinScheduler:** -[1.3.6] -[dev] **Describe alternatives you've considered** Change the SQL init file to `sql/dolphinscheduler_mysql.sql`
https://github.com/apache/dolphinscheduler/issues/5357
https://github.com/apache/dolphinscheduler/pull/5358
75d1ad645c994feadf157fb2343141b3d9ec49bf
b108ac43b8489ad1d744afd0321d4ec150880548
"2021-04-22T11:13:59Z"
java
"2021-04-23T14:03:47Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao.upgrade;

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.AbstractBaseDao;
import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.sql.DataSource;
import java.io.*;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public abstract class UpgradeDao extends AbstractBaseDao {

    public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class);
    private static final String T_VERSION_NAME = "t_escheduler_version";
    private static final String T_NEW_VERSION_NAME = "t_ds_version";
    private static final String rootDir = System.getProperty("user.dir");
    protected static final DataSource dataSource = getDataSource();
    private static final DbType dbType = getCurrentDbType();

    @Override
    protected void init() {
    }

    /**
     * get datasource
     * @return DruidDataSource
     */
    public static DataSource getDataSource() {
        return ConnectionFactory.getInstance().getDataSource();
    }

    /**
     * get db type
     * @return dbType
     */
    public static DbType getDbType() {
        return dbType;
    }

    /**
     * get current dbType
     * @return dbType of the connected database, or null if it cannot be determined
     */
    private static DbType getCurrentDbType() {
        Connection conn = null;
        try {
            conn = dataSource.getConnection();
            String name = conn.getMetaData().getDatabaseProductName().toUpperCase();
            return DbType.valueOf(name);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            return null;
        } finally {
            ConnectionUtils.releaseResource(conn);
        }
    }

    /**
     * init schema
     */
    public void initSchema() {
        DbType dbType = getDbType();
        String initSqlPath = "";
        if (dbType != null) {
            switch (dbType) {
                case MYSQL:
                    initSqlPath = "/sql/create/release-1.0.0_schema/mysql/";
                    initSchema(initSqlPath);
                    break;
                case POSTGRESQL:
                    initSqlPath = "/sql/create/release-1.2.0_schema/postgresql/";
                    initSchema(initSqlPath);
                    break;
                default:
                    logger.error("not support sql type: {}, can't upgrade", dbType);
                    throw new IllegalArgumentException("not support sql type, can't upgrade");
            }
        }
    }

    /**
     * init schema
     *
     * @param initSqlPath initSqlPath
     */
    public void initSchema(String initSqlPath) {
        // Execute the dolphinscheduler DDL, it cannot be rolled back
        runInitDDL(initSqlPath);
        // Execute the dolphinscheduler DML, it can be rolled back
        runInitDML(initSqlPath);
    }

    /**
     * run DML
     *
     * @param initSqlPath initSqlPath
     */
    private void runInitDML(String initSqlPath) {
        Connection conn = null;
        if (StringUtils.isEmpty(rootDir)) {
            throw new RuntimeException("Environment variable user.dir not found");
        }
        String mysqlSQLFilePath = rootDir + initSqlPath + "dolphinscheduler_dml.sql";
        try {
            conn = dataSource.getConnection();
            conn.setAutoCommit(false);
            // Execute the dolphinscheduler_dml.sql script to import related data of dolphinscheduler
            ScriptRunner initScriptRunner = new ScriptRunner(conn, false, true);
            Reader initSqlReader = new FileReader(new File(mysqlSQLFilePath));
            initScriptRunner.runScript(initSqlReader);
            conn.commit();
        } catch (IOException e) {
            try {
                conn.rollback();
            } catch (SQLException e1) {
                logger.error(e1.getMessage(), e1);
            }
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } catch (Exception e) {
            try {
                if (null != conn) {
                    conn.rollback();
                }
            } catch (SQLException e1) {
                logger.error(e1.getMessage(), e1);
            }
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            ConnectionUtils.releaseResource(conn);
        }
    }

    /**
     * run DDL
     *
     * @param initSqlPath initSqlPath
     */
    private void runInitDDL(String initSqlPath) {
        Connection conn = null;
        if (StringUtils.isEmpty(rootDir)) {
            throw new RuntimeException("Environment variable user.dir not found");
        }
        //String mysqlSQLFilePath = rootDir + "/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql";
        String mysqlSQLFilePath = rootDir + initSqlPath + "dolphinscheduler_ddl.sql";
        try {
            conn = dataSource.getConnection();
            // Execute the dolphinscheduler_ddl.sql script to create the table structure of dolphinscheduler
            ScriptRunner initScriptRunner = new ScriptRunner(conn, true, true);
            Reader initSqlReader = new FileReader(new File(mysqlSQLFilePath));
            initScriptRunner.runScript(initSqlReader);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            ConnectionUtils.releaseResource(conn);
        }
    }

    /**
     * determines whether a table exists
     *
     * @param tableName tableName
     * @return if table exist return true, else return false
     */
    public abstract boolean isExistsTable(String tableName);

    /**
     * determines whether a field exists in the specified table
     *
     * @param tableName  tableName
     * @param columnName columnName
     * @return if column name exist return true, else return false
     */
    public abstract boolean isExistsColumn(String tableName, String columnName);

    /**
     * get current version
     *
     * @param versionName versionName
     * @return version
     */
    public String getCurrentVersion(String versionName) {
        String sql = String.format("select version from %s", versionName);
        Connection conn = null;
        ResultSet rs = null;
        PreparedStatement pstmt = null;
        String version = null;
        try {
            conn = dataSource.getConnection();
            pstmt = conn.prepareStatement(sql);
            rs = pstmt.executeQuery();
            if (rs.next()) {
                version = rs.getString(1);
            }
            return version;
        } catch (SQLException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException("sql: " + sql, e);
        } finally {
            ConnectionUtils.releaseResource(rs, pstmt, conn);
        }
    }

    /**
     * upgrade DolphinScheduler
     *
     * @param schemaDir schema dir
     */
    public void upgradeDolphinScheduler(String schemaDir) {
        upgradeDolphinSchedulerDDL(schemaDir);
        upgradeDolphinSchedulerDML(schemaDir);
    }

    /**
     * upgrade DolphinScheduler worker group
     * ds-1.3.0 modify the worker group for process definition json
     */
    public void upgradeDolphinSchedulerWorkerGroup() {
        updateProcessDefinitionJsonWorkerGroup();
    }

    /**
     * upgrade DolphinScheduler resource list
     * ds-1.3.2 modify the resource list for process definition json
     */
    public void upgradeDolphinSchedulerResourceList() {
        updateProcessDefinitionJsonResourceList();
    }

    /**
     * updateProcessDefinitionJsonWorkerGroup
     */
    protected void updateProcessDefinitionJsonWorkerGroup() {
        WorkerGroupDao workerGroupDao = new WorkerGroupDao();
        ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao();
        Map<Integer, String> replaceProcessDefinitionMap = new HashMap<>();
        try {
            Map<Integer, String> oldWorkerGroupMap = workerGroupDao.queryAllOldWorkerGroup(dataSource.getConnection());
            Map<Integer, String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
            for (Map.Entry<Integer, String> entry : processDefinitionJsonMap.entrySet()) {
                ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue());
                ArrayNode tasks = JSONUtils.parseArray(jsonObject.get("tasks").toString());
                for (int i = 0; i < tasks.size(); i++) {
                    ObjectNode task = (ObjectNode) tasks.path(i);
                    ObjectNode workerGroupNode = (ObjectNode) task.path("workerGroupId");
                    Integer workerGroupId = -1;
                    if (workerGroupNode != null && workerGroupNode.canConvertToInt()) {
                        workerGroupId = workerGroupNode.asInt(-1);
                    }
                    if (workerGroupId == -1) {
                        task.put("workerGroup", "default");
                    } else {
                        task.put("workerGroup", oldWorkerGroupMap.get(workerGroupId));
                    }
                }
                jsonObject.remove("task");
                jsonObject.put("tasks", tasks);
                replaceProcessDefinitionMap.put(entry.getKey(), jsonObject.toString());
            }
            if (replaceProcessDefinitionMap.size() > 0) {
                processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(), replaceProcessDefinitionMap);
            }
        } catch (Exception e) {
            logger.error("update process definition json workergroup error", e);
        }
    }

    /**
     * updateProcessDefinitionJsonResourceList
     */
    protected void updateProcessDefinitionJsonResourceList() {
        ResourceDao resourceDao = new ResourceDao();
        ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao();
        Map<Integer, String> replaceProcessDefinitionMap = new HashMap<>();
        try {
            Map<String, Integer> resourcesMap = resourceDao.listAllResources(dataSource.getConnection());
            Map<Integer, String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
            for (Map.Entry<Integer, String> entry : processDefinitionJsonMap.entrySet()) {
                ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue());
                ArrayNode tasks = JSONUtils.parseArray(jsonObject.get("tasks").toString());
                for (int i = 0; i < tasks.size(); i++) {
                    ObjectNode task = (ObjectNode) tasks.get(i);
                    ObjectNode param = (ObjectNode) task.get("params");
                    if (param != null) {
                        List<ResourceInfo> resourceList = JSONUtils.toList(param.get("resourceList").toString(), ResourceInfo.class);
                        ResourceInfo mainJar = JSONUtils.parseObject(param.get("mainJar").toString(), ResourceInfo.class);
                        if (mainJar != null && mainJar.getId() == 0) {
                            String fullName = mainJar.getRes().startsWith("/") ? mainJar.getRes() : String.format("/%s", mainJar.getRes());
                            if (resourcesMap.containsKey(fullName)) {
                                mainJar.setId(resourcesMap.get(fullName));
                                param.put("mainJar", JSONUtils.parseObject(JSONUtils.toJsonString(mainJar)));
                            }
                        }
                        if (CollectionUtils.isNotEmpty(resourceList)) {
                            List<ResourceInfo> newResourceList = resourceList.stream().map(resInfo -> {
                                String fullName = resInfo.getRes().startsWith("/") ? resInfo.getRes() : String.format("/%s", resInfo.getRes());
                                if (resInfo.getId() == 0 && resourcesMap.containsKey(fullName)) {
                                    resInfo.setId(resourcesMap.get(fullName));
                                }
                                return resInfo;
                            }).collect(Collectors.toList());
                            param.put("resourceList", JSONUtils.parseObject(JSONUtils.toJsonString(newResourceList)));
                        }
                    }
                    task.put("params", param);
                }
                jsonObject.remove("tasks");
                jsonObject.put("tasks", tasks);
                replaceProcessDefinitionMap.put(entry.getKey(), jsonObject.toString());
            }
            if (replaceProcessDefinitionMap.size() > 0) {
                processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(), replaceProcessDefinitionMap);
            }
        } catch (Exception e) {
            logger.error("update process definition json resource list error", e);
        }
    }

    /**
     * upgradeDolphinScheduler DML
     *
     * @param schemaDir schemaDir
     */
    private void upgradeDolphinSchedulerDML(String schemaDir) {
        String schemaVersion = schemaDir.split("_")[0];
        if (StringUtils.isEmpty(rootDir)) {
            throw new RuntimeException("Environment variable user.dir not found");
        }
        String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_dml.sql", rootDir, schemaDir, getDbType().name().toLowerCase());
        logger.info("sqlSQLFilePath: {}", sqlFilePath);
        Connection conn = null;
        PreparedStatement pstmt = null;
        try {
            conn = dataSource.getConnection();
            conn.setAutoCommit(false);
            // Execute the upgraded dolphinscheduler dml
            ScriptRunner scriptRunner = new ScriptRunner(conn, false, true);
            Reader sqlReader = new FileReader(new File(sqlFilePath));
            scriptRunner.runScript(sqlReader);
            if (isExistsTable(T_VERSION_NAME)) {
                // Change version in the version table to the new version
                String upgradeSQL = String.format("update %s set version = ?", T_VERSION_NAME);
                pstmt = conn.prepareStatement(upgradeSQL);
                pstmt.setString(1, schemaVersion);
                pstmt.executeUpdate();
            } else if (isExistsTable(T_NEW_VERSION_NAME)) {
                // Change version in the version table to the new version
                String upgradeSQL = String.format("update %s set version = ?", T_NEW_VERSION_NAME);
                pstmt = conn.prepareStatement(upgradeSQL);
                pstmt.setString(1, schemaVersion);
                pstmt.executeUpdate();
            }
            conn.commit();
        } catch (FileNotFoundException e) {
            try {
                conn.rollback();
            } catch (SQLException e1) {
                logger.error(e1.getMessage(), e1);
            }
            logger.error(e.getMessage(), e);
            throw new RuntimeException("sql file not found ", e);
        } catch (IOException e) {
            try {
                conn.rollback();
            } catch (SQLException e1) {
                logger.error(e1.getMessage(), e1);
            }
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } catch (SQLException e) {
            try {
                if (null != conn) {
                    conn.rollback();
                }
            } catch (SQLException e1) {
                logger.error(e1.getMessage(), e1);
            }
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } catch (Exception e) {
            try {
                if (null != conn) {
                    conn.rollback();
                }
            } catch (SQLException e1) {
                logger.error(e1.getMessage(), e1);
            }
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            ConnectionUtils.releaseResource(pstmt, conn);
        }
    }

    /**
     * upgradeDolphinScheduler DDL
     *
     * @param schemaDir schemaDir
     */
    private void upgradeDolphinSchedulerDDL(String schemaDir) {
        if (StringUtils.isEmpty(rootDir)) {
            throw new RuntimeException("Environment variable user.dir not found");
        }
        String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_ddl.sql", rootDir, schemaDir, getDbType().name().toLowerCase());
        Connection conn = null;
        PreparedStatement pstmt = null;
        try {
            conn = dataSource.getConnection();
            String dbName = conn.getCatalog();
            logger.info(dbName);
            conn.setAutoCommit(true);
            // Execute the dolphinscheduler ddl.sql for the upgrade
            ScriptRunner scriptRunner = new ScriptRunner(conn, true, true);
            Reader sqlReader = new FileReader(new File(sqlFilePath));
            scriptRunner.runScript(sqlReader);
        } catch (FileNotFoundException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException("sql file not found ", e);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } catch (SQLException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            ConnectionUtils.releaseResource(pstmt, conn);
        }
    }

    /**
     * update version
     *
     * @param version version
     */
    public void updateVersion(String version) {
        // Change version in the version table to the new version
        String versionName = T_VERSION_NAME;
        if (!SchemaUtils.isAGreatVersion("1.2.0", version)) {
            versionName = "t_ds_version";
        }
        String upgradeSQL = String.format("update %s set version = ?", versionName);
        PreparedStatement pstmt = null;
        Connection conn = null;
        try {
            conn = dataSource.getConnection();
            pstmt = conn.prepareStatement(upgradeSQL);
            pstmt.setString(1, version);
            pstmt.executeUpdate();
        } catch (SQLException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException("sql: " + upgradeSQL, e);
        } finally {
            ConnectionUtils.releaseResource(pstmt, conn);
        }
    }
}
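`UpgradeDao#initSchema` splits a fresh install into a DDL pass run auto-committed (most databases cannot roll DDL back) and a DML pass run inside one transaction so a failed init leaves no partial seed data. Below is a plain-JDBC sketch of that split, with a naive statement splitter standing in for the project's `ScriptRunner` and an illustrative in-memory H2 URL; the two file names mirror the ones used above:

```java
import java.io.BufferedReader;
import java.io.FileReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Plain-JDBC sketch of the DDL/DML split: DDL runs auto-committed, DML runs in
// a single transaction with rollback on failure. The H2 URL is illustrative.
public class InitSchemaSketch {

    static void runScript(Connection conn, String path) throws Exception {
        try (BufferedReader reader = new BufferedReader(new FileReader(path));
             Statement stmt = conn.createStatement()) {
            StringBuilder sql = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                sql.append(line).append('\n');
                if (line.trim().endsWith(";")) {           // naive delimiter handling
                    stmt.execute(sql.toString());
                    sql.setLength(0);
                }
            }
        }
    }

    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:ds")) {
            conn.setAutoCommit(true);                      // DDL: apply immediately
            runScript(conn, "dolphinscheduler_ddl.sql");

            conn.setAutoCommit(false);                     // DML: all-or-nothing
            try {
                runScript(conn, "dolphinscheduler_dml.sql");
                conn.commit();
            } catch (Exception e) {
                conn.rollback();                           // undo partial seed data
                throw e;
            }
        }
    }
}
```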
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,357
[Improvement][DAO] CreateDolphinScheduler
**Describe the question** When we run `sh script/create-dolphinscheduler.sh` to initialize the DolphinScheduler database environment, it executes the old scripts under `sql/create` and then upgrades them to the latest version. It would be better to execute the new SQL script directly. **Which version of DolphinScheduler:** -[1.3.6] -[dev] **Describe alternatives you've considered** Change the SQL init file to `sql/dolphinscheduler_mysql.sql`
https://github.com/apache/dolphinscheduler/issues/5357
https://github.com/apache/dolphinscheduler/pull/5358
75d1ad645c994feadf157fb2343141b3d9ec49bf
b108ac43b8489ad1d744afd0321d4ec150880548
"2021-04-22T11:13:59Z"
java
"2021-04-23T14:03:47Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinScheduler.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao.upgrade.shell;

import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * create DolphinScheduler
 */
public class CreateDolphinScheduler {

    private static final Logger logger = LoggerFactory.getLogger(CreateDolphinScheduler.class);

    /**
     * create dolphin scheduler db
     * @param args args
     */
    public static void main(String[] args) {
        DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager();
        try {
            dolphinSchedulerManager.initDolphinScheduler();
            logger.info("init DolphinScheduler finished");
            dolphinSchedulerManager.upgradeDolphinScheduler();
            logger.info("upgrade DolphinScheduler finished");
            logger.info("create DolphinScheduler success");
        } catch (Exception e) {
            logger.error("create DolphinScheduler failed", e);
        }
    }
}
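Under the change proposed in the issue, this entry point keeps its shape, but a fresh install would run the consolidated per-database script once instead of replaying `release-1.0.0` plus every upgrade directory. A hedged sketch of what the script selection could look like; only the MySQL file name comes from the issue text, and the PostgreSQL name is an assumed counterpart:

```java
// Hedged sketch of the improvement from the issue: on a fresh install, run the
// consolidated per-database script once rather than the old release-1.0.0
// scripts followed by every upgrade. Local enum used so the sketch stands alone.
public class InitScriptChooser {

    enum Db { MYSQL, POSTGRESQL }

    static String chooseInitScript(Db db) {
        switch (db) {
            case MYSQL:
                return "/sql/dolphinscheduler_mysql.sql";   // named in the issue text
            case POSTGRESQL:
                return "/sql/dolphinscheduler_postgre.sql"; // assumed counterpart
            default:
                throw new IllegalArgumentException("unsupported db: " + db);
        }
    }

    public static void main(String[] args) {
        System.out.println(chooseInitScript(Db.MYSQL));
    }
}
```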
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,357
[Improvement][DAO] CreateDolphinScheduler
**Describe the question** When we run `sh script/create-dolphinscheduler.sh` to initialize the DolphinScheduler database environment, it executes the old scripts under `sql/create` and then upgrades them to the latest version. It would be better to execute the new SQL script directly. **Which version of DolphinScheduler:** -[1.3.6] -[dev] **Describe alternatives you've considered** Change the SQL init file to `sql/dolphinscheduler_mysql.sql`
https://github.com/apache/dolphinscheduler/issues/5357
https://github.com/apache/dolphinscheduler/pull/5358
75d1ad645c994feadf157fb2343141b3d9ec49bf
b108ac43b8489ad1d744afd0321d4ec150880548
"2021-04-22T11:13:59Z"
java
"2021-04-23T14:03:47Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinScheduler.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao.upgrade.shell;

import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * init DolphinScheduler
 */
public class InitDolphinScheduler {

    private static final Logger logger = LoggerFactory.getLogger(InitDolphinScheduler.class);

    /**
     * init dolphin scheduler db
     * @param args args
     */
    public static void main(String[] args) {
        Thread.currentThread().setName("manager-InitDolphinScheduler");
        DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager();
        dolphinSchedulerManager.initDolphinScheduler();
        logger.info("init DolphinScheduler finished");
    }
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,357
[Improvement][DAO] CreateDolphinScheduler
**Describe the question** When we run `sh script/create-dolphinscheduler.sh` to initialize the DolphinScheduler database environment, it executes the old scripts under `sql/create` and then upgrades them to the latest version. It would be better to execute the new SQL script directly. **Which version of DolphinScheduler:** -[1.3.6] -[dev] **Describe alternatives you've considered** Change the SQL init file to `sql/dolphinscheduler_mysql.sql`
https://github.com/apache/dolphinscheduler/issues/5357
https://github.com/apache/dolphinscheduler/pull/5358
75d1ad645c994feadf157fb2343141b3d9ec49bf
b108ac43b8489ad1d744afd0321d4ec150880548
"2021-04-22T11:13:59Z"
java
"2021-04-23T14:03:47Z"
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDaoTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao.upgrade;

import org.junit.Test;

import javax.sql.DataSource;

import java.util.Map;

import static org.apache.dolphinscheduler.dao.upgrade.UpgradeDao.getDataSource;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.junit.Assert.assertThat;

public class UpgradeDaoTest {

    PostgresqlUpgradeDao postgresqlUpgradeDao = PostgresqlUpgradeDao.getInstance();

    @Test
    public void testQueryQueryAllOldWorkerGroup() throws Exception {
        postgresqlUpgradeDao.updateProcessDefinitionJsonWorkerGroup();
    }
}
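The test above is effectively an integration test: it needs a live PostgreSQL instance behind `PostgresqlUpgradeDao`. The version bookkeeping itself can be covered without a database; in the JUnit 4 sketch below, `chooseVersionTable` is a local stand-in for the branch in `UpgradeDao#updateVersion` (pre-1.2.0 installs use `t_escheduler_version`, later ones `t_ds_version`), not project API:

```java
import static org.junit.Assert.assertEquals;

import org.junit.Test;

// Database-free unit-test sketch of the version-table choice. chooseVersionTable
// is written for this example and mirrors, but is not, UpgradeDao#updateVersion.
public class VersionTableChoiceTest {

    static String chooseVersionTable(String version) {
        String[] v = version.split("\\.");
        int major = Integer.parseInt(v[0]);
        int minor = v.length > 1 ? Integer.parseInt(v[1]) : 0;
        // t_ds_version exists from 1.2.0 on; older installs use t_escheduler_version
        return (major > 1 || (major == 1 && minor >= 2)) ? "t_ds_version" : "t_escheduler_version";
    }

    @Test
    public void oldVersionsUseEschedulerTable() {
        assertEquals("t_escheduler_version", chooseVersionTable("1.0.1"));
    }

    @Test
    public void newVersionsUseDsTable() {
        assertEquals("t_ds_version", chooseVersionTable("1.3.6"));
    }
}
```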
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,357
[Improvement][DAO] CreateDolphinScheduler
**Describe the question** When we run `sh script/create-dolphinscheduler.sh` to initialize the DolphinScheduler database environment, it executes the old scripts under `sql/create` and then upgrades them to the latest version. It would be better to execute the new SQL script directly. **Which version of DolphinScheduler:** -[1.3.6] -[dev] **Describe alternatives you've considered** Change the SQL init file to `sql/dolphinscheduler_mysql.sql`
https://github.com/apache/dolphinscheduler/issues/5357
https://github.com/apache/dolphinscheduler/pull/5358
75d1ad645c994feadf157fb2343141b3d9ec49bf
b108ac43b8489ad1d744afd0321d4ec150880548
"2021-04-22T11:13:59Z"
java
"2021-04-23T14:03:47Z"
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinSchedulerTest.java
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,357
[Improvement][DAO] CreateDolphinScheduler
**Describe the question** When we run `sh script/create-dolphinscheduler.sh` to initialize the DolphinScheduler database environment, it executes the old scripts under `sql/create` and then upgrades them to the latest version. It would be better to execute the new SQL script directly. **Which version of DolphinScheduler:** -[1.3.6] -[dev] **Describe alternatives you've considered** Change the SQL init file to `sql/dolphinscheduler_mysql.sql`
https://github.com/apache/dolphinscheduler/issues/5357
https://github.com/apache/dolphinscheduler/pull/5358
75d1ad645c994feadf157fb2343141b3d9ec49bf
b108ac43b8489ad1d744afd0321d4ec150880548
"2021-04-22T11:13:59Z"
java
"2021-04-23T14:03:47Z"
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinSchedulerTest.java
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,357
[Improvement][DAO] CreateDolphinScheduler
**Describe the question** When we run `sh script/create-dolphinscheduler.sh` to initialize the DolphinScheduler database environment, it executes the old scripts under `sql/create` and then upgrades them to the latest version. It would be better to execute the new SQL script directly. **Which version of DolphinScheduler:** -[1.3.6] -[dev] **Describe alternatives you've considered** Change the SQL init file to `sql/dolphinscheduler_mysql.sql`
https://github.com/apache/dolphinscheduler/issues/5357
https://github.com/apache/dolphinscheduler/pull/5358
75d1ad645c994feadf157fb2343141b3d9ec49bf
b108ac43b8489ad1d744afd0321d4ec150880548
"2021-04-22T11:13:59Z"
java
"2021-04-23T14:03:47Z"
pom.xml
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>${revision}</version> <packaging>pom</packaging> <name>${project.artifactId}</name> <url>http://dolphinscheduler.apache.org</url> <description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. </description> <licenses> <license> <name>Apache License 2.0</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> <distribution>repo</distribution> </license> </licenses> <scm> <connection>scm:git:https://github.com/apache/dolphinscheduler.git</connection> <developerConnection>scm:git:https://github.com/apache/dolphinscheduler.git</developerConnection> <url>https://github.com/apache/dolphinscheduler</url> <tag>HEAD</tag> </scm> <mailingLists> <mailingList> <name>DolphinScheduler Developer List</name> <post>dev@dolphinscheduler.apache.org</post> <subscribe>dev-subscribe@dolphinscheduler.apache.org</subscribe> <unsubscribe>dev-unsubscribe@dolphinscheduler.apache.org</unsubscribe> </mailingList> </mailingLists> <parent> <groupId>org.apache</groupId> <artifactId>apache</artifactId> <version>21</version> </parent> <properties> <revision>1.3.6-SNAPSHOT</revision> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <curator.version>4.3.0</curator.version> <spring.version>5.1.18.RELEASE</spring.version> <spring.boot.version>2.1.17.RELEASE</spring.boot.version> <java.version>1.8</java.version> <logback.version>1.2.3</logback.version> <hadoop.version>2.7.3</hadoop.version> <quartz.version>2.3.0</quartz.version> <jackson.version>2.10.5</jackson.version> <mybatis-plus.version>3.2.0</mybatis-plus.version> <mybatis.spring.version>2.0.1</mybatis.spring.version> <cron.utils.version>5.0.5</cron.utils.version> <druid.version>1.1.22</druid.version> <h2.version>1.4.200</h2.version> <commons.codec.version>1.11</commons.codec.version> <commons.logging.version>1.1.1</commons.logging.version> <httpclient.version>4.4.1</httpclient.version> <httpcore.version>4.4.1</httpcore.version> <junit.version>4.12</junit.version> <mysql.connector.version>5.1.34</mysql.connector.version> <slf4j.api.version>1.7.5</slf4j.api.version> <slf4j.log4j12.version>1.7.5</slf4j.log4j12.version> 
<commons.collections.version>3.2.2</commons.collections.version> <commons.httpclient>3.0.1</commons.httpclient> <commons.beanutils.version>1.9.4</commons.beanutils.version> <commons.configuration.version>1.10</commons.configuration.version> <commons.email.version>1.5</commons.email.version> <poi.version>3.17</poi.version> <javax.servlet.api.version>3.1.0</javax.servlet.api.version> <commons.collections4.version>4.1</commons.collections4.version> <guava.version>24.1-jre</guava.version> <postgresql.version>42.2.5</postgresql.version> <hive.jdbc.version>2.1.0</hive.jdbc.version> <commons.io.version>2.4</commons.io.version> <oshi.core.version>3.9.1</oshi.core.version> <clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version> <mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version> <presto.jdbc.version>0.238.1</presto.jdbc.version> <spotbugs.version>3.1.12</spotbugs.version> <checkstyle.version>3.0.0</checkstyle.version> <zookeeper.version>3.4.14</zookeeper.version> <frontend-maven-plugin.version>1.6</frontend-maven-plugin.version> <maven-compiler-plugin.version>3.3</maven-compiler-plugin.version> <maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version> <maven-release-plugin.version>2.5.3</maven-release-plugin.version> <maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version> <maven-source-plugin.version>2.4</maven-source-plugin.version> <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version> <maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version> <rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version> <jacoco.version>0.8.4</jacoco.version> <jcip.version>1.0</jcip.version> <maven.deploy.skip>false</maven.deploy.skip> <cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version> <mockito.version>2.21.0</mockito.version> <powermock.version>2.0.2</powermock.version> <servlet-api.version>2.5</servlet-api.version> <swagger.version>1.9.3</swagger.version> <springfox.version>2.9.2</springfox.version> <swagger-models.version>1.5.24</swagger-models.version> <guava-retry.version>2.0.0</guava-retry.version> <dep.airlift.version>0.184</dep.airlift.version> <dep.packaging.version>${dep.airlift.version}</dep.packaging.version> <protostuff.version>1.7.2</protostuff.version> <reflections.version>0.9.12</reflections.version> <byte-buddy.version>1.9.16</byte-buddy.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <!-- quartz--> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz-jobs</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> <version>${cron.utils.version}</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>${druid.version}</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-parent</artifactId> <version>${spring.boot.version}</version> <type>pom</type> <scope>import</scope> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> 
<version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-tx</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-jdbc</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <version>${spring.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-server</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert-plugin</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-dao</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-api</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-remote</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-service</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-spi</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-framework</artifactId> <version>${curator.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-recipes</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </exclusion> <exclusion> <artifactId>netty</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-annotations</artifactId> </exclusion> </exclusions> <version>${zookeeper.version}</version> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec.version}</version> </dependency> <dependency> <groupId>commons-logging</groupId> <artifactId>commons-logging</artifactId> <version>${commons.logging.version}</version> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>${httpclient.version}</version> </dependency> <dependency> 
<groupId>org.apache.httpcomponents</groupId> <artifactId>httpcore</artifactId> <version>${httpcore.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-core</artifactId> <version>${jackson.version}</version> </dependency> <!--protostuff--> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-core --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-core</artifactId> <version>${protostuff.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-runtime --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-runtime</artifactId> <version>${protostuff.version}</version> </dependency> <dependency> <groupId>net.bytebuddy</groupId> <artifactId>byte-buddy</artifactId> <version>${byte-buddy.version}</version> </dependency> <dependency> <groupId>org.reflections</groupId> <artifactId>reflections</artifactId> <version>${reflections.version}</version> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockito.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-module-junit4</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-api-mockito2</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> <exclusions> <exclusion> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>${mysql.connector.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> <version>${h2.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${slf4j.api.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>${slf4j.log4j12.version}</version> </dependency> <dependency> <groupId>commons-collections</groupId> <artifactId>commons-collections</artifactId> <version>${commons.collections.version}</version> </dependency> <dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> <version>${commons.httpclient}</version> </dependency> <dependency> <groupId>commons-beanutils</groupId> <artifactId>commons-beanutils</artifactId> <version>${commons.beanutils.version}</version> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> <version>${commons.configuration.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> 
<version>${logback.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-email</artifactId> <version>${commons.email.version}</version> </dependency> <!--excel poi--> <dependency> <groupId>org.apache.poi</groupId> <artifactId>poi</artifactId> <version>${poi.version}</version> </dependency> <!-- hadoop --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> <exclusions> <exclusion> <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <artifactId>com.sun.jersey</artifactId> <groupId>jersey-json</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-yarn-common</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-aws</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-collections4</artifactId> <version>${commons.collections4.version}</version> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> <version>${postgresql.version}</version> </dependency> <dependency> <groupId>org.apache.hive</groupId> <artifactId>hive-jdbc</artifactId> <version>${hive.jdbc.version}</version> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>${commons.io.version}</version> </dependency> <dependency> <groupId>com.github.oshi</groupId> <artifactId>oshi-core</artifactId> <version>${oshi.core.version}</version> </dependency> <dependency> <groupId>ru.yandex.clickhouse</groupId> <artifactId>clickhouse-jdbc</artifactId> <version>${clickhouse.jdbc.version}</version> </dependency> <dependency> <groupId>com.microsoft.sqlserver</groupId> <artifactId>mssql-jdbc</artifactId> <version>${mssql.jdbc.version}</version> </dependency> <dependency> <groupId>com.facebook.presto</groupId> <artifactId>presto-jdbc</artifactId> <version>${presto.jdbc.version}</version> </dependency> <dependency> <groupId>net.jcip</groupId> <artifactId>jcip-annotations</artifactId> <version>${jcip.version}</version> <optional>true</optional> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>servlet-api</artifactId> <version>${servlet-api.version}</version> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>javax.servlet-api</artifactId> <version>${javax.servlet.api.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger2</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger-ui</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.swagger</groupId> <artifactId>swagger-models</artifactId> <version>${swagger-models.version}</version> </dependency> <dependency> <groupId>com.github.xiaoymin</groupId> <artifactId>swagger-bootstrap-ui</artifactId> 
<version>${swagger.version}</version> </dependency> <dependency> <groupId>com.github.rholder</groupId> <artifactId>guava-retrying</artifactId> <version>${guava-retry.version}</version> </dependency> <dependency> <groupId>org.sonatype.aether</groupId> <artifactId>aether-api</artifactId> <version>1.13.1</version> </dependency> <dependency> <groupId>io.airlift.resolver</groupId> <artifactId>resolver</artifactId> <version>1.5</version> </dependency> <dependency> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> <version>6.2.1</version> </dependency> <dependency> <groupId>javax.activation</groupId> <artifactId>activation</artifactId> <version>1.1</version> </dependency> <dependency> <groupId>com.sun.mail</groupId> <artifactId>javax.mail</artifactId> <version>1.6.2</version> </dependency> </dependencies> </dependencyManagement> <build> <finalName>apache-dolphinscheduler-${project.version}</finalName> <pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <version>1.0.0</version> <extensions>true</extensions> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <version>1.0.4</version> <extensions>true</extensions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>rpm-maven-plugin</artifactId> <version>${rpm-maven-plugion.version}</version> <inherited>false</inherited> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>${java.version}</source> <target>${java.version}</target> <testSource>${java.version}</testSource> <testTarget>${java.version}</testTarget> </configuration> <version>${maven-compiler-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <tagNameFormat>@{project.version}</tagNameFormat> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-assembly-plugin</artifactId> <version>${maven-assembly-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <configuration> <source>8</source> <failOnError>false</failOnError> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>${maven-source-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> <version>${maven-dependency-plugin.version}</version> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <extensions>true</extensions> <!--<configuration>--> <!--<allowedProvidedDependencies>--> <!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>--> <!--</allowedProvidedDependencies>--> <!--</configuration>--> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <extensions>true</extensions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> 
<executions> <execution> <id>attach-sources</id> <phase>verify</phase> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <executions> <execution> <id>attach-javadocs</id> <goals> <goal>jar</goal> </goals> </execution> </executions> <configuration> <aggregate>true</aggregate> <charset>${project.build.sourceEncoding}</charset> <encoding>${project.build.sourceEncoding}</encoding> <docencoding>${project.build.sourceEncoding}</docencoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <autoVersionSubmodules>true</autoVersionSubmodules> <tagNameFormat>@{project.version}</tagNameFormat> <tagBase>${project.version}</tagBase> <!--<goals>-f pom.xml deploy</goals>--> </configuration> <dependencies> <dependency> <groupId>org.apache.maven.scm</groupId> <artifactId>maven-scm-provider-jgit</artifactId> <version>1.9.5</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>${maven-compiler-plugin.version}</version> <configuration> <source>${java.version}</source> <target>${java.version}</target> <encoding>${project.build.sourceEncoding}</encoding> <skip>false</skip><!--not skip compile test classes--> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>${maven-surefire-plugin.version}</version> <configuration> <includes> <include>**/api/controller/ProjectControllerTest.java</include> <include>**/api/controller/QueueControllerTest.java</include> <include>**/api/configuration/TrafficConfigurationTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TenantControllerTest.java</include> <include>**/api/dto/resources/filter/ResourceFilterTest.java</include> <include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include> <includeDataxTaskTest>**/api/enums/testGetEnum.java</includeDataxTaskTest> <include>**/api/enums/StatusTest.java</include> <include>**/api/exceptions/ApiExceptionHandlerTest.java</include> <include>**/api/exceptions/ServiceExceptionTest.java</include> <include>**/api/interceptor/LocaleChangeInterceptorTest.java</include> <include>**/api/interceptor/LoginHandlerInterceptorTest.java</include> <include>**/api/interceptor/RateLimitInterceptorTest.java</include> <include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include> <include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include> <include>**/api/security/SecurityConfigLDAPTest.java</include> <include>**/api/security/SecurityConfigPasswordTest.java</include> <include>**/api/service/AccessTokenServiceTest.java</include> <include>**/api/service/AlertGroupServiceTest.java</include> <include>**/api/service/BaseDAGServiceTest.java</include> <include>**/api/service/BaseServiceTest.java</include> <include>**/api/service/DataAnalysisServiceTest.java</include> <include>**/api/service/AlertPluginInstanceServiceTest.java</include> <include>**/api/service/DataSourceServiceTest.java</include> <include>**/api/service/ExecutorService2Test.java</include> <include>**/api/service/ExecutorServiceTest.java</include> 
<include>**/api/service/LoggerServiceTest.java</include> <include>**/api/service/MonitorServiceTest.java</include> <include>**/api/service/ProcessDefinitionServiceTest.java</include> <include>**/api/service/ProcessDefinitionVersionServiceTest.java</include> <include>**/api/service/ProcessInstanceServiceTest.java</include> <include>**/api/service/ProjectServiceTest.java</include> <include>**/api/service/QueueServiceTest.java</include> <include>**/api/service/ResourcesServiceTest.java</include> <include>**/api/service/SchedulerServiceTest.java</include> <include>**/api/service/SessionServiceTest.java</include> <include>**/api/service/TaskInstanceServiceTest.java</include> <include>**/api/service/TenantServiceTest.java</include> <include>**/api/service/UdfFuncServiceTest.java</include> <include>**/api/service/UiPluginServiceTest.java</include> <include>**/api/service/UserAlertGroupServiceTest.java</include> <include>**/api/service/UsersServiceTest.java</include> <include>**/api/service/WorkerGroupServiceTest.java</include> <include>**/api/service/WorkFlowLineageServiceTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TaskInstanceControllerTest.java</include> <include>**/api/controller/WorkFlowLineageControllerTest.java</include> <include>**/api/utils/exportprocess/DataSourceParamTest.java</include> <include>**/api/utils/exportprocess/DependentParamTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/FileUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/ResultTest.java</include> <include>**/common/graph/DAGTest.java</include> <include>**/common/os/OshiTest.java</include> <include>**/common/os/OSUtilsTest.java</include> <include>**/common/shell/ShellExecutorTest.java</include> <include>**/common/task/DataxParametersTest.java</include> <include>**/common/task/EntityTestUtils.java</include> <include>**/common/task/FlinkParametersTest.java</include> <include>**/common/task/HttpParametersTest.java</include> <include>**/common/task/SqlParametersTest.java</include> <include>**/common/task/SqoopParameterEntityTest.java</include> <include>**/common/threadutils/ThreadPoolExecutorsTest.java</include> <include>**/common/threadutils/ThreadUtilsTest.java</include> <include>**/common/utils/CollectionUtilsTest.java</include> <include>**/common/utils/CommonUtilsTest.java</include> <include>**/common/utils/DateUtilsTest.java</include> <include>**/common/utils/DependentUtilsTest.java</include> <include>**/common/utils/EncryptionUtilsTest.java</include> <include>**/common/utils/FileUtilsTest.java</include> <include>**/common/utils/JSONUtilsTest.java</include> <include>**/common/utils/LoggerUtilsTest.java</include> <include>**/common/utils/NetUtilsTest.java</include> <include>**/common/utils/OSUtilsTest.java</include> <include>**/common/utils/ParameterUtilsTest.java</include> <include>**/common/utils/TimePlaceholderUtilsTest.java</include> <include>**/common/utils/PreconditionsTest.java</include> <include>**/common/utils/PropertyUtilsTest.java</include> <include>**/common/utils/SchemaUtilsTest.java</include> <include>**/common/utils/ScriptRunnerTest.java</include> <include>**/common/utils/SensitiveLogUtilsTest.java</include> <include>**/common/utils/StringTest.java</include> <include>**/common/utils/StringUtilsTest.java</include> <include>**/common/utils/TaskParametersUtilsTest.java</include> 
<include>**/common/utils/VarPoolUtilsTest.java</include> <include>**/common/utils/HadoopUtilsTest.java</include> <include>**/common/utils/HttpUtilsTest.java</include> <include>**/common/utils/KerberosHttpClientTest.java</include> <include>**/common/utils/HiveConfUtilsTest.java</include> <include>**/common/ConstantsTest.java</include> <include>**/common/utils/RetryerUtilsTest.java</include> <include>**/common/plugin/DolphinSchedulerPluginLoaderTest.java</include> <include>**/common/enums/ExecutionStatusTest.java</include> <include>**/dao/mapper/AccessTokenMapperTest.java</include> <include>**/dao/mapper/AlertGroupMapperTest.java</include> <include>**/dao/mapper/CommandMapperTest.java</include> <include>**/dao/mapper/ConnectionFactoryTest.java</include> <include>**/dao/mapper/DataSourceMapperTest.java</include> <include>**/dao/datasource/MySQLDataSourceTest.java</include> <include>**/dao/entity/TaskInstanceTest.java</include> <include>**/dao/entity/UdfFuncTest.java</include> <include>**/remote/command/alert/AlertSendRequestCommandTest.java</include> <include>**/remote/command/alert/AlertSendResponseCommandTest.java</include> <include>**/remote/command/future/ResponseFutureTest.java</include> <include>**/remote/command/log/RemoveTaskLogRequestCommandTest.java</include> <include>**/remote/command/log/RemoveTaskLogResponseCommandTest.java</include> <include>**/remote/utils/HostTest.java</include> <include>**/remote/utils/NettyUtilTest.java</include> <include>**/remote/NettyRemotingClientTest.java</include> <include>**/rpc/RpcTest.java</include> <include>**/server/log/LoggerServerTest.java</include> <include>**/server/entity/SQLTaskExecutionContextTest.java</include> <include>**/server/log/MasterLogFilterTest.java</include> <include>**/server/log/SensitiveDataConverterTest.java</include> <include>**/server/log/LoggerRequestProcessorTest.java</include> <!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>--> <include>**/server/log/TaskLogFilterTest.java</include> <include>**/server/log/WorkerLogFilterTest.java</include> <include>**/server/master/config/MasterConfigTest.java</include> <include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include> <include>**/server/master/runner/MasterTaskExecThreadTest.java</include> <!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>--> <include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include> <include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/HostWorkerTest.java</include> <include>**/server/master/register/MasterRegistryTest.java</include> <include>**/server/master/registry/ServerNodeManagerTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include> <include>**/server/master/AlertManagerTest.java</include> <include>**/server/master/MasterCommandTest.java</include> <include>**/server/master/DependentTaskTest.java</include> <include>**/server/master/ConditionsTaskTest.java</include> <include>**/server/master/MasterExecThreadTest.java</include> <include>**/server/master/ParamsTest.java</include> <include>**/server/master/SubProcessTaskTest.java</include> <include>**/server/master/processor/TaskAckProcessorTest.java</include> <include>**/server/master/processor/TaskKillResponseProcessorTest.java</include> 
<include>**/server/master/processor/queue/TaskResponseServiceTest.java</include> <include>**/server/master/zk/ZKMasterClientTest.java</include> <include>**/server/register/ZookeeperRegistryCenterTest.java</include> <include>**/server/utils/DataxUtilsTest.java</include> <include>**/server/utils/ExecutionContextTestUtils.java</include> <include>**/server/utils/FlinkArgsUtilsTest.java</include> <include>**/server/utils/LogUtilsTest.java</include> <include>**/server/utils/MapReduceArgsUtilsTest.java</include> <include>**/server/utils/ParamUtilsTest.java</include> <include>**/server/utils/ProcessUtilsTest.java</include> <include>**/server/utils/SparkArgsUtilsTest.java</include> <include>**/server/worker/processor/TaskCallbackServiceTest.java</include> <include>**/server/worker/processor/TaskExecuteProcessorTest.java</include> <include>**/server/worker/registry/WorkerRegistryTest.java</include> <include>**/server/worker/shell/ShellCommandExecutorTest.java</include> <include>**/server/worker/sql/SqlExecutorTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/EnvFileTest.java</include> <include>**/server/worker/task/datax/DataxTaskTest.java</include> <!--<include>**/server/worker/task/http/HttpTaskTest.java</include>--> <include>**/server/worker/task/sqoop/SqoopTaskTest.java</include> <include>**/server/worker/task/shell/ShellTaskTest.java</include> <include>**/server/worker/task/TaskManagerTest.java</include> <include>**/server/worker/task/AbstractCommandExecutorTest.java</include> <include>**/server/worker/task/ShellTaskReturnTest.java</include> <include>**/server/worker/task/sql/SqlTaskTest.java</include> <include>**/server/worker/EnvFileTest.java</include> <include>**/server/worker/runner/TaskExecuteThreadTest.java</include> <include>**/server/worker/runner/WorkerManagerThreadTest.java</include> <include>**/service/quartz/cron/CronUtilsTest.java</include> <include>**/service/process/ProcessServiceTest.java</include> <include>**/service/zk/DefaultEnsembleProviderTest.java</include> <include>**/service/zk/ZKServerTest.java</include> <include>**/service/zk/CuratorZookeeperClientTest.java</include> <include>**/service/zk/RegisterOperatorTest.java</include> <include>**/service/queue/TaskUpdateQueueTest.java</include> <include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include> <include>**/service/log/LogClientServiceTest.java</include> <include>**/service/alert/AlertClientServiceTest.java</include> <include>**/dao/mapper/DataSourceUserMapperTest.java</include> <!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>--> <include>**/dao/mapper/ProcessDefinitionMapperTest.java</include> <include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapperTest.java</include> <include>**/dao/mapper/ProjectMapperTest.java</include> <include>**/dao/mapper/ProjectUserMapperTest.java</include> <include>**/dao/mapper/QueueMapperTest.java</include> <include>**/dao/mapper/ResourceUserMapperTest.java</include> <include>**/dao/mapper/ScheduleMapperTest.java</include> <include>**/dao/mapper/SessionMapperTest.java</include> <include>**/dao/mapper/TaskInstanceMapperTest.java</include> <include>**/dao/mapper/TenantMapperTest.java</include> <include>**/dao/mapper/UdfFuncMapperTest.java</include> 
<include>**/dao/mapper/UDFUserMapperTest.java</include> <include>**/dao/mapper/UserMapperTest.java</include> <include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include> <include>**/dao/mapper/PluginDefineTest.java</include> <include>**/dao/utils/DagHelperTest.java</include> <include>**/dao/AlertDaoTest.java</include> <include>**/dao/datasource/OracleDataSourceTest.java</include> <include>**/dao/datasource/HiveDataSourceTest.java</include> <include>**/dao/datasource/BaseDataSourceTest.java</include> <include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include> <include>**/dao/upgrade/WokrerGrouopDaoTest.java</include> <include>**/dao/upgrade/UpgradeDaoTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelTest.java</include> <include>**/plugin/alert/email/ExcelUtilsTest.java</include> <include>**/plugin/alert/email/MailUtilsTest.java</include> <include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include> <include>**/plugin/alert/wechat/WeChatSenderTest.java</include> <include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ProcessUtilsTest.java</include> <include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ScriptSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelTest.java</include> <include>**/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java</include> <include>**/plugin/alert/feishu/FeiShuSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertPluginTest.java</include> <include>**/plugin/alert/http/HttpSenderTest.java</include> <include>**/spi/params/PluginParamsTransferTest.java</include> <include>**/alert/plugin/EmailAlertPluginTest.java</include> <include>**/alert/plugin/AlertPluginManagerTest.java</include> <include>**/alert/plugin/DolphinPluginLoaderTest.java</include> <include>**/alert/utils/DingTalkUtilsTest.java</include> <include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include> <include>**/alert/utils/FuncUtilsTest.java</include> <include>**/alert/processor/AlertRequestProcessorTest.java</include> <include>**/alert/runner/AlertSenderTest.java</include> <include>**/alert/AlertServerTest.java</include> </includes> <!-- <skip>true</skip> --> </configuration> </plugin> <!-- jenkins plugin jacoco report--> <plugin> <groupId>org.jacoco</groupId> <artifactId>jacoco-maven-plugin</artifactId> <version>${jacoco.version}</version> <configuration> <destFile>target/jacoco.exec</destFile> <dataFile>target/jacoco.exec</dataFile> </configuration> <executions> <execution> <id>jacoco-initialize</id> <goals> <goal>prepare-agent</goal> </goals> </execution> <execution> <id>jacoco-site</id> <phase>test</phase> <goals> <goal>report</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-maven-plugin</artifactId> <version>${spotbugs.version}</version> <configuration> <xmlOutput>true</xmlOutput> <threshold>medium</threshold> <effort>default</effort> <excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile> <failOnError>true</failOnError> </configuration> <dependencies> <dependency> <groupId>com.github.spotbugs</groupId> 
<artifactId>spotbugs</artifactId> <version>4.0.0-beta4</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-checkstyle-plugin</artifactId> <version>${checkstyle.version}</version> <dependencies> <dependency> <groupId>com.puppycrawl.tools</groupId> <artifactId>checkstyle</artifactId> <version>8.18</version> </dependency> </dependencies> <configuration> <consoleOutput>true</consoleOutput> <encoding>UTF-8</encoding> <configLocation>style/checkstyle.xml</configLocation> <suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation> <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression> <failOnViolation>true</failOnViolation> <violationSeverity>warning</violationSeverity> <includeTestSourceDirectory>true</includeTestSourceDirectory> <sourceDirectories> <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory> </sourceDirectories> <excludes>**\/generated-sources\/</excludes> <skip>true</skip> </configuration> <executions> <execution> <phase>compile</phase> <goals> <goal>check</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${cobertura-maven-plugin.version}</version> <configuration> <check> </check> <aggregate>true</aggregate> <outputDirectory>./target/cobertura</outputDirectory> <encoding>${project.build.sourceEncoding}</encoding> <quiet>true</quiet> <format>xml</format> <instrumentation> <ignoreTrivial>true</ignoreTrivial> </instrumentation> </configuration> </plugin> </plugins> </build> <modules> <module>dolphinscheduler-alert-plugin</module> <module>dolphinscheduler-ui</module> <module>dolphinscheduler-server</module> <module>dolphinscheduler-common</module> <module>dolphinscheduler-api</module> <module>dolphinscheduler-dao</module> <module>dolphinscheduler-alert</module> <module>dolphinscheduler-dist</module> <module>dolphinscheduler-remote</module> <module>dolphinscheduler-service</module> <module>dolphinscheduler-spi</module> <module>dolphinscheduler-microbench</module> </modules> </project>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,381
[Bug][API] Upload file failure with PostgreSQL
**To Reproduce** Steps to reproduce the behavior, for example: 1. Upload file in `Resource Manage` 2. See error **Expected behavior** Bug fixed **Screenshots** If applicable, add screenshots to help explain your problem. ![image](https://user-images.githubusercontent.com/4902714/115995032-28d81400-a60c-11eb-9f4a-4cdeac626d79.png) **API logs:** ``` [INFO] 2021-04-25 21:14:51.550 org.apache.dolphinscheduler.api.controller.ResourcesController:[157] - login user admin, create resource, type: FILE, resource alias: license.lic, desc: , file: file,license.lic [ERROR] 2021-04-25 21:14:51.711 org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl:[233] - resource already exists, can't recreate org.springframework.jdbc.BadSqlGrammarException: ### Error updating database. Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ### The error may exist in org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java (best guess) ### The error may involve org.apache.dolphinscheduler.dao.mapper.ResourceMapper.insert-Inline ### The error occurred while setting parameters ### SQL: INSERT INTO t_ds_resources ( file_name, size, create_time, description, full_name, alias, update_time, pid, type, user_id, is_directory ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ) ### Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ; bad SQL grammar []; nested exception is org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator.doTranslate(SQLErrorCodeSQLExceptionTranslator.java:239) at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:72) at org.mybatis.spring.MyBatisExceptionTranslator.translateExceptionIfPossible(MyBatisExceptionTranslator.java:74) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:440) at com.sun.proxy.$Proxy97.insert(Unknown Source) at org.mybatis.spring.SqlSessionTemplate.insert(SqlSessionTemplate.java:271) at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:58) at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:61) at com.sun.proxy.$Proxy111.insert(Unknown Source) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:222) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at 
javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at 
org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) Caused by: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440) at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:2183) at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:308) at org.postgresql.jdbc.PgStatement.executeInternal(PgStatement.java:441) at org.postgresql.jdbc.PgStatement.execute(PgStatement.java:365) at org.postgresql.jdbc.PgPreparedStatement.executeWithFlags(PgPreparedStatement.java:143) at org.postgresql.jdbc.PgPreparedStatement.execute(PgPreparedStatement.java:132) at com.alibaba.druid.pool.DruidPooledPreparedStatement.execute(DruidPooledPreparedStatement.java:497) at org.apache.ibatis.executor.statement.PreparedStatementHandler.update(PreparedStatementHandler.java:47) at org.apache.ibatis.executor.statement.RoutingStatementHandler.update(RoutingStatementHandler.java:74) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:63) at com.sun.proxy.$Proxy159.update(Unknown Source) at com.baomidou.mybatisplus.core.executor.MybatisSimpleExecutor.doUpdate(MybatisSimpleExecutor.java:54) at org.apache.ibatis.executor.BaseExecutor.update(BaseExecutor.java:117) at org.apache.ibatis.session.defaults.DefaultSqlSession.update(DefaultSqlSession.java:197) at org.apache.ibatis.session.defaults.DefaultSqlSession.insert(DefaultSqlSession.java:184) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:426) ... 
93 common frames omitted [ERROR] 2021-04-25 21:14:51.720 org.apache.dolphinscheduler.api.exceptions.ApiExceptionHandler:[46] - 创建资源错误 org.apache.dolphinscheduler.api.exceptions.ServiceException: resource already exists, can't recreate at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:234) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at 
org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at 
org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) ``` **Which version of Dolphin Scheduler:** -[dev] **Additional context** Add any other context about the problem here. **Requirement or improvement** - Please describe about your requirements or improvement suggestions.
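The PSQLException above pins down the root cause: the resource insert binds `is_directory` as a Java boolean while the PostgreSQL column is declared as an integer. Below is a minimal JDBC sketch of that failure mode, not the project's actual mapper code; the JDBC URL, credentials, and class name are placeholders, and only the table and column names come from the error message.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class IsDirectoryMismatchSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details -- point these at a real PostgreSQL instance.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/dolphinscheduler", "user", "password");
             PreparedStatement ps = conn.prepareStatement(
                     "INSERT INTO t_ds_resources (alias, is_directory) VALUES (?, ?)")) {
            ps.setString(1, "license.lic");
            // Binding a boolean parameter against an integer column makes the server
            // reject the statement at planning time with: column "is_directory" is of
            // type integer but expression is of type boolean.
            ps.setBoolean(2, false);
            ps.executeUpdate();
        }
    }
}
```

Either side of the mismatch can be aligned: bind an integer (or cast) in the mapper, or alter the column type (for example `ALTER TABLE t_ds_resources ALTER COLUMN is_directory TYPE boolean USING is_directory <> 0`); which side PR #5383 actually changed is not shown in this record.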
https://github.com/apache/dolphinscheduler/issues/5381
https://github.com/apache/dolphinscheduler/pull/5383
e6d8da484f8d3eff528b206ab4b159b451865df3
6b8461e901de65000f4f8f34ebfe011946e787d3
"2021-04-25T13:21:43Z"
java
"2021-04-25T15:29:00Z"
dolphinscheduler-dao/pom.xml
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>${revision}</version> </parent> <artifactId>dolphinscheduler-dao</artifactId> <name>${project.artifactId}</name> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties> <dependencies> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <scope>test</scope> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> <exclusions> <exclusion> <groupId>org.apache.logging.log4j</groupId> <artifactId>log4j-to-slf4j</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-test</artifactId> <scope>test</scope> <exclusions> <exclusion> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> </exclusion> <exclusion> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot</artifactId> </exclusion> <exclusion> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-autoconfigure</artifactId> </exclusion> <exclusion> <artifactId>log4j-api</artifactId> <groupId>org.apache.logging.log4j</groupId> </exclusion> <exclusion> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-tomcat</artifactId> </exclusion> <exclusion> <groupId>org.apache.logging.log4j</groupId> <artifactId>log4j-to-slf4j</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> </dependency> 
<dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <exclusions> <exclusion> <artifactId>protobuf-java</artifactId> <groupId>com.google.protobuf</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <scope>test</scope> </dependency> <dependency> <groupId>org.yaml</groupId> <artifactId>snakeyaml</artifactId> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-module-junit4</artifactId> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-api-mockito2</artifactId> <scope>test</scope> <exclusions> <exclusion> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> </exclusion> </exclusions> </dependency> </dependencies> </project>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,381
[Bug][API] Upload file failure with PostgreSQL
**To Reproduce** Steps to reproduce the behavior, for example: 1. Upload file in `Resource Manage` 2. See error **Expected behavior** Bug fixed **Screenshots** If applicable, add screenshots to help explain your problem. ![image](https://user-images.githubusercontent.com/4902714/115995032-28d81400-a60c-11eb-9f4a-4cdeac626d79.png) **API logs:** ``` [INFO] 2021-04-25 21:14:51.550 org.apache.dolphinscheduler.api.controller.ResourcesController:[157] - login user admin, create resource, type: FILE, resource alias: license.lic, desc: , file: file,license.lic [ERROR] 2021-04-25 21:14:51.711 org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl:[233] - resource already exists, can't recreate org.springframework.jdbc.BadSqlGrammarException: ### Error updating database. Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ### The error may exist in org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java (best guess) ### The error may involve org.apache.dolphinscheduler.dao.mapper.ResourceMapper.insert-Inline ### The error occurred while setting parameters ### SQL: INSERT INTO t_ds_resources ( file_name, size, create_time, description, full_name, alias, update_time, pid, type, user_id, is_directory ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ) ### Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ; bad SQL grammar []; nested exception is org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator.doTranslate(SQLErrorCodeSQLExceptionTranslator.java:239) at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:72) at org.mybatis.spring.MyBatisExceptionTranslator.translateExceptionIfPossible(MyBatisExceptionTranslator.java:74) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:440) at com.sun.proxy.$Proxy97.insert(Unknown Source) at org.mybatis.spring.SqlSessionTemplate.insert(SqlSessionTemplate.java:271) at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:58) at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:61) at com.sun.proxy.$Proxy111.insert(Unknown Source) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:222) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at 
javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at 
org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) Caused by: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440) at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:2183) at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:308) at org.postgresql.jdbc.PgStatement.executeInternal(PgStatement.java:441) at org.postgresql.jdbc.PgStatement.execute(PgStatement.java:365) at org.postgresql.jdbc.PgPreparedStatement.executeWithFlags(PgPreparedStatement.java:143) at org.postgresql.jdbc.PgPreparedStatement.execute(PgPreparedStatement.java:132) at com.alibaba.druid.pool.DruidPooledPreparedStatement.execute(DruidPooledPreparedStatement.java:497) at org.apache.ibatis.executor.statement.PreparedStatementHandler.update(PreparedStatementHandler.java:47) at org.apache.ibatis.executor.statement.RoutingStatementHandler.update(RoutingStatementHandler.java:74) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:63) at com.sun.proxy.$Proxy159.update(Unknown Source) at com.baomidou.mybatisplus.core.executor.MybatisSimpleExecutor.doUpdate(MybatisSimpleExecutor.java:54) at org.apache.ibatis.executor.BaseExecutor.update(BaseExecutor.java:117) at org.apache.ibatis.session.defaults.DefaultSqlSession.update(DefaultSqlSession.java:197) at org.apache.ibatis.session.defaults.DefaultSqlSession.insert(DefaultSqlSession.java:184) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:426) ... 
93 common frames omitted [ERROR] 2021-04-25 21:14:51.720 org.apache.dolphinscheduler.api.exceptions.ApiExceptionHandler:[46] - 创建资源错误 org.apache.dolphinscheduler.api.exceptions.ServiceException: resource already exists, can't recreate at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:234) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at 
org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at 
org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) ``` **Which version of Dolphin Scheduler:** -[dev] **Additional context** Add any other context about the problem here. **Requirement or improvement** - Please describe about your requirements or improvement suggestions.
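The PSQLException quoted above reduces to a parameter-type mismatch: the PostgreSQL schema defines `is_directory` as an integer column, while the JDBC layer binds the entity's Java `boolean` as a PostgreSQL `boolean`. A minimal standalone reproduction follows (the connection URL, credentials, and the `resources_demo` table name are placeholders, and the PostgreSQL JDBC driver must be on the classpath):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

// Sketch only: reproduces the reported error outside DolphinScheduler.
public class IsDirectoryTypeMismatchDemo {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/dolphinscheduler", "postgres", "postgres")) {
            // Mirror the problematic schema: the flag column is an integer.
            try (PreparedStatement ddl = conn.prepareStatement(
                    "CREATE TEMP TABLE resources_demo (is_directory int)")) {
                ddl.execute();
            }
            // setBoolean() sends a PostgreSQL boolean parameter, so this fails with:
            // ERROR: column "is_directory" is of type integer but expression is of type boolean
            try (PreparedStatement ps = conn.prepareStatement(
                    "INSERT INTO resources_demo (is_directory) VALUES (?)")) {
                ps.setBoolean(1, true);
                ps.executeUpdate();
            }
        }
    }
}
```

The hint in the error message names the two possible repairs: cast the bound expression to `int` in the SQL, or redefine the column as `boolean` so it matches what the driver sends.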
https://github.com/apache/dolphinscheduler/issues/5381
https://github.com/apache/dolphinscheduler/pull/5383
e6d8da484f8d3eff528b206ab4b159b451865df3
6b8461e901de65000f4f8f34ebfe011946e787d3
"2021-04-25T13:21:43Z"
java
"2021-04-25T15:29:00Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao.upgrade;

import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.SchemaUtils;

import java.io.IOException;
import java.sql.SQLException;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * upgrade manager
 */
public class DolphinSchedulerManager {
    private static final Logger logger = LoggerFactory.getLogger(DolphinSchedulerManager.class);

    UpgradeDao upgradeDao;

    /**
     * init upgrade dao
     */
    private void initUpgradeDao() {
        DbType dbType = UpgradeDao.getDbType();
        if (dbType != null) {
            switch (dbType) {
                case MYSQL:
                    upgradeDao = MysqlUpgradeDao.getInstance();
                    break;
                case POSTGRESQL:
                    upgradeDao = PostgresqlUpgradeDao.getInstance();
                    break;
                default:
                    logger.error("not support sql type: {},can't upgrade", dbType);
                    throw new IllegalArgumentException("not support sql type,can't upgrade");
            }
        }
    }

    /**
     * constructor init
     */
    public DolphinSchedulerManager() {
        initUpgradeDao();
    }

    /**
     * init DolphinScheduler
     */
    public void initDolphinScheduler() throws SQLException, IOException {
        // Determines whether the dolphinscheduler table structure has been init
        if (upgradeDao.isExistsTable("t_escheduler_version")
                || upgradeDao.isExistsTable("t_ds_version")
                || upgradeDao.isExistsTable("t_escheduler_queue")) {
            logger.info("The database has been initialized. Skip the initialization step");
            return;
        }
        this.initDolphinSchedulerSchema();
    }

    /**
     * init DolphinScheduler Schema
     */
    public void initDolphinSchedulerSchema() throws SQLException, IOException {
        logger.info("Start initializing the DolphinScheduler manager table structure");
        upgradeDao.initSchema();
    }

    /**
     * upgrade DolphinScheduler
     * @throws Exception if error throws Exception
     */
    public void upgradeDolphinScheduler() throws Exception {
        // Gets a list of all upgrades
        List<String> schemaList = SchemaUtils.getAllSchemaList();
        if (schemaList == null || schemaList.size() == 0) {
            logger.info("There is no schema to upgrade!");
        } else {
            String version = "";
            // Gets the version of the current system
            if (upgradeDao.isExistsTable("t_escheduler_version")) {
                version = upgradeDao.getCurrentVersion("t_escheduler_version");
            } else if (upgradeDao.isExistsTable("t_ds_version")) {
                version = upgradeDao.getCurrentVersion("t_ds_version");
            } else if (upgradeDao.isExistsColumn("t_escheduler_queue", "create_time")) {
                version = "1.0.1";
            } else if (upgradeDao.isExistsTable("t_escheduler_queue")) {
                version = "1.0.0";
            } else {
                logger.error("Unable to determine current software version, so cannot upgrade");
                throw new RuntimeException("Unable to determine current software version, so cannot upgrade");
            }
            // The target version of the upgrade
            String schemaVersion = "";
            for (String schemaDir : schemaList) {
                schemaVersion = schemaDir.split("_")[0];
                if (SchemaUtils.isAGreatVersion(schemaVersion, version)) {
                    logger.info("upgrade DolphinScheduler metadata version from {} to {}", version, schemaVersion);
                    logger.info("Begin upgrading DolphinScheduler's table structure");
                    upgradeDao.upgradeDolphinScheduler(schemaDir);
                    if ("1.3.0".equals(schemaVersion)) {
                        upgradeDao.upgradeDolphinSchedulerWorkerGroup();
                    } else if ("1.3.2".equals(schemaVersion)) {
                        upgradeDao.upgradeDolphinSchedulerResourceList();
                    }
                    version = schemaVersion;
                }
            }
        }
        // Assign the value of the version field in the version table to the version of the product
        upgradeDao.updateVersion(SchemaUtils.getSoftVersion());
    }
}
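The upgrade loop above applies only schema directories whose version prefix is greater than the version currently recorded in the metadata tables, advancing the current version after each applied step. A self-contained sketch of that gating logic, assuming directories named like `1.3.0_schema` and using a hypothetical `isGreaterVersion` stand-in for `SchemaUtils.isAGreatVersion`:

```java
import java.util.Arrays;
import java.util.List;

// Sketch only: illustrates the version-gated upgrade walk, not the real DAO.
public class UpgradePathDemo {

    // Hypothetical comparator: dotted version strings compared segment by segment.
    static boolean isGreaterVersion(String candidate, String current) {
        String[] a = candidate.split("\\.");
        String[] b = current.split("\\.");
        for (int i = 0; i < Math.max(a.length, b.length); i++) {
            int x = i < a.length ? Integer.parseInt(a[i]) : 0;
            int y = i < b.length ? Integer.parseInt(b[i]) : 0;
            if (x != y) {
                return x > y;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        // The target version is the prefix before the first underscore.
        List<String> schemaDirs = Arrays.asList("1.2.0_schema", "1.3.0_schema", "1.3.2_schema");
        String version = "1.2.0"; // current metadata version, e.g. read from t_ds_version
        for (String schemaDir : schemaDirs) {
            String schemaVersion = schemaDir.split("_")[0];
            if (isGreaterVersion(schemaVersion, version)) {
                System.out.printf("upgrade metadata from %s to %s%n", version, schemaVersion);
                version = schemaVersion; // only strictly newer schemas are applied
            }
        }
    }
}
```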
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,381
[Bug][API] Upload file failure with PostgreSQL
**To Reproduce** Steps to reproduce the behavior, for example: 1. Upload file in `Resource Manage` 2. See error **Expected behavior** Bug fixed **Screenshots** If applicable, add screenshots to help explain your problem. ![image](https://user-images.githubusercontent.com/4902714/115995032-28d81400-a60c-11eb-9f4a-4cdeac626d79.png) **API logs:** ``` [INFO] 2021-04-25 21:14:51.550 org.apache.dolphinscheduler.api.controller.ResourcesController:[157] - login user admin, create resource, type: FILE, resource alias: license.lic, desc: , file: file,license.lic [ERROR] 2021-04-25 21:14:51.711 org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl:[233] - resource already exists, can't recreate org.springframework.jdbc.BadSqlGrammarException: ### Error updating database. Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ### The error may exist in org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java (best guess) ### The error may involve org.apache.dolphinscheduler.dao.mapper.ResourceMapper.insert-Inline ### The error occurred while setting parameters ### SQL: INSERT INTO t_ds_resources ( file_name, size, create_time, description, full_name, alias, update_time, pid, type, user_id, is_directory ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ) ### Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ; bad SQL grammar []; nested exception is org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator.doTranslate(SQLErrorCodeSQLExceptionTranslator.java:239) at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:72) at org.mybatis.spring.MyBatisExceptionTranslator.translateExceptionIfPossible(MyBatisExceptionTranslator.java:74) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:440) at com.sun.proxy.$Proxy97.insert(Unknown Source) at org.mybatis.spring.SqlSessionTemplate.insert(SqlSessionTemplate.java:271) at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:58) at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:61) at com.sun.proxy.$Proxy111.insert(Unknown Source) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:222) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at 
javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at 
org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) Caused by: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440) at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:2183) at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:308) at org.postgresql.jdbc.PgStatement.executeInternal(PgStatement.java:441) at org.postgresql.jdbc.PgStatement.execute(PgStatement.java:365) at org.postgresql.jdbc.PgPreparedStatement.executeWithFlags(PgPreparedStatement.java:143) at org.postgresql.jdbc.PgPreparedStatement.execute(PgPreparedStatement.java:132) at com.alibaba.druid.pool.DruidPooledPreparedStatement.execute(DruidPooledPreparedStatement.java:497) at org.apache.ibatis.executor.statement.PreparedStatementHandler.update(PreparedStatementHandler.java:47) at org.apache.ibatis.executor.statement.RoutingStatementHandler.update(RoutingStatementHandler.java:74) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:63) at com.sun.proxy.$Proxy159.update(Unknown Source) at com.baomidou.mybatisplus.core.executor.MybatisSimpleExecutor.doUpdate(MybatisSimpleExecutor.java:54) at org.apache.ibatis.executor.BaseExecutor.update(BaseExecutor.java:117) at org.apache.ibatis.session.defaults.DefaultSqlSession.update(DefaultSqlSession.java:197) at org.apache.ibatis.session.defaults.DefaultSqlSession.insert(DefaultSqlSession.java:184) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:426) ... 
93 common frames omitted [ERROR] 2021-04-25 21:14:51.720 org.apache.dolphinscheduler.api.exceptions.ApiExceptionHandler:[46] - 创建资源错误 org.apache.dolphinscheduler.api.exceptions.ServiceException: resource already exists, can't recreate at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:234) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at 
org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at 
org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) ``` **Which version of Dolphin Scheduler:** -[dev] **Additional context** Add any other context about the problem here. **Requirement or improvement** - Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5381
https://github.com/apache/dolphinscheduler/pull/5383
e6d8da484f8d3eff528b206ab4b159b451865df3
6b8461e901de65000f4f8f34ebfe011946e787d3
"2021-04-25T13:21:43Z"
java
"2021-04-25T15:29:00Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.dao.upgrade;

import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.ConnectionUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.SchemaUtils;
import org.apache.dolphinscheduler.common.utils.ScriptRunner;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.AbstractBaseDao;
import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import javax.sql.DataSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public abstract class UpgradeDao extends AbstractBaseDao {

    public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class);
    private static final String T_VERSION_NAME = "t_escheduler_version";
    private static final String T_NEW_VERSION_NAME = "t_ds_version";
    private static final String rootDir = System.getProperty("user.dir");
    protected static final DataSource dataSource = getDataSource();
    private static final DbType dbType = getCurrentDbType();

    private static final String MYSQL_CREATE_SCRIPT = rootDir + "/sql/dolphinscheduler_mysql.sql";
    private static final String POSTGRE_CREATE_SCRIPT = rootDir + "/sql/dolphinscheduler_postgre.sql";

    @Override
    protected void init() {
    }

    /**
     * get datasource
     * @return DruidDataSource
     */
    public static DataSource getDataSource() {
        return ConnectionFactory.getInstance().getDataSource();
    }

    /**
     * get db type
     * @return dbType
     */
    public static DbType getDbType() {
        return dbType;
    }

    /**
     * get current dbType
     * @return
     */
    private static DbType getCurrentDbType() {
        Connection conn = null;
        try {
            conn = dataSource.getConnection();
            String name = conn.getMetaData().getDatabaseProductName().toUpperCase();
            return DbType.valueOf(name);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            return null;
        } finally {
            ConnectionUtils.releaseResource(conn);
        }
    }

    /**
     * init schema
     */
    public void initSchema() throws SQLException, IOException {
        DbType dbType = getDbType();
        String initSqlPath = "";
        if (dbType != null) {
            switch (dbType) {
                case MYSQL:
                    initSqlPath = MYSQL_CREATE_SCRIPT;
                    break;
                case POSTGRESQL:
                    initSqlPath = POSTGRE_CREATE_SCRIPT;
                    break;
                default:
                    logger.error("not support sql type: {},can't upgrade", dbType);
                    throw new IllegalArgumentException("not support sql type,can't upgrade");
            }
        }
        if (StringUtils.isEmpty(rootDir)) {
            throw new RuntimeException("Environment variable user.dir not found");
        }
        logger.info("Init sql filePath: {}", initSqlPath);
        try (Connection conn = dataSource.getConnection()) {
            try {
                conn.setAutoCommit(false);
                ScriptRunner initScriptRunner = new ScriptRunner(conn, false, true);
                Reader initSqlReader = new FileReader(initSqlPath);
                initScriptRunner.runScript(initSqlReader);
                conn.commit();
            } catch (IOException | SQLException e) {
                conn.rollback();
                logger.error("execute init script error.", e);
                throw e;
            }
        } finally {
            // ignore
        }
    }

    /**
     * determines whether a table exists
     *
     * @param tableName tableName
     * @return if table exist return true,else return false
     */
    public abstract boolean isExistsTable(String tableName);

    /**
     * determines whether a field exists in the specified table
     *
     * @param tableName tableName
     * @param columnName columnName
     * @return if column name exist return true,else return false
     */
    public abstract boolean isExistsColumn(String tableName, String columnName);

    /**
     * get current version
     *
     * @param versionName versionName
     * @return version
     */
    public String getCurrentVersion(String versionName) {
        String sql = String.format("select version from %s", versionName);
        Connection conn = null;
        ResultSet rs = null;
        PreparedStatement pstmt = null;
        String version = null;
        try {
            conn = dataSource.getConnection();
            pstmt = conn.prepareStatement(sql);
            rs = pstmt.executeQuery();
            if (rs.next()) {
                version = rs.getString(1);
            }
            return version;
        } catch (SQLException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException("sql: " + sql, e);
        } finally {
            ConnectionUtils.releaseResource(rs, pstmt, conn);
        }
    }

    /**
     * upgrade DolphinScheduler
     *
     * @param schemaDir schema dir
     */
    public void upgradeDolphinScheduler(String schemaDir) {
        upgradeDolphinSchedulerDDL(schemaDir);
        upgradeDolphinSchedulerDML(schemaDir);
    }

    /**
     * upgrade DolphinScheduler worker group
     * ds-1.3.0 modify the worker group for process definition json
     */
    public void upgradeDolphinSchedulerWorkerGroup() {
        updateProcessDefinitionJsonWorkerGroup();
    }

    /**
     * upgrade DolphinScheduler resource list
     * ds-1.3.2 modify the resource list for process definition json
     */
    public void upgradeDolphinSchedulerResourceList() {
        updateProcessDefinitionJsonResourceList();
    }

    /**
     * updateProcessDefinitionJsonWorkerGroup
     */
    protected void updateProcessDefinitionJsonWorkerGroup() {
        WorkerGroupDao workerGroupDao = new WorkerGroupDao();
        ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao();
        Map<Integer, String> replaceProcessDefinitionMap = new HashMap<>();
        try {
            Map<Integer, String> oldWorkerGroupMap = workerGroupDao.queryAllOldWorkerGroup(dataSource.getConnection());
            Map<Integer, String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
            for (Map.Entry<Integer, String> entry : processDefinitionJsonMap.entrySet()) {
                ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue());
                ArrayNode tasks = JSONUtils.parseArray(jsonObject.get("tasks").toString());
                for (int i = 0; i < tasks.size(); i++) {
                    ObjectNode task = (ObjectNode) tasks.path(i);
                    ObjectNode workerGroupNode = (ObjectNode) task.path("workerGroupId");
                    Integer workerGroupId = -1;
                    if (workerGroupNode != null && workerGroupNode.canConvertToInt()) {
                        workerGroupId = workerGroupNode.asInt(-1);
                    }
                    if (workerGroupId == -1) {
                        task.put("workerGroup", "default");
                    } else {
                        task.put("workerGroup", oldWorkerGroupMap.get(workerGroupId));
                    }
                }
                jsonObject.remove("task");
                jsonObject.put("tasks", tasks);
                replaceProcessDefinitionMap.put(entry.getKey(), jsonObject.toString());
            }
            if (replaceProcessDefinitionMap.size() > 0) {
                processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(), replaceProcessDefinitionMap);
            }
        } catch (Exception e) {
            logger.error("update process definition json workergroup error", e);
        }
    }

    /**
     * updateProcessDefinitionJsonResourceList
     */
    protected void updateProcessDefinitionJsonResourceList() {
        ResourceDao resourceDao = new ResourceDao();
        ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao();
        Map<Integer, String> replaceProcessDefinitionMap = new HashMap<>();
        try {
            Map<String, Integer> resourcesMap = resourceDao.listAllResources(dataSource.getConnection());
            Map<Integer, String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
            for (Map.Entry<Integer, String> entry : processDefinitionJsonMap.entrySet()) {
                ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue());
                ArrayNode tasks = JSONUtils.parseArray(jsonObject.get("tasks").toString());
                for (int i = 0; i < tasks.size(); i++) {
                    ObjectNode task = (ObjectNode) tasks.get(i);
                    ObjectNode param = (ObjectNode) task.get("params");
                    if (param != null) {
                        List<ResourceInfo> resourceList = JSONUtils.toList(param.get("resourceList").toString(), ResourceInfo.class);
                        ResourceInfo mainJar = JSONUtils.parseObject(param.get("mainJar").toString(), ResourceInfo.class);
                        if (mainJar != null && mainJar.getId() == 0) {
                            String fullName = mainJar.getRes().startsWith("/") ? mainJar.getRes() : String.format("/%s", mainJar.getRes());
                            if (resourcesMap.containsKey(fullName)) {
                                mainJar.setId(resourcesMap.get(fullName));
                                param.put("mainJar", JSONUtils.parseObject(JSONUtils.toJsonString(mainJar)));
                            }
                        }
                        if (CollectionUtils.isNotEmpty(resourceList)) {
                            List<ResourceInfo> newResourceList = resourceList.stream().map(resInfo -> {
                                String fullName = resInfo.getRes().startsWith("/") ? resInfo.getRes() : String.format("/%s", resInfo.getRes());
                                if (resInfo.getId() == 0 && resourcesMap.containsKey(fullName)) {
                                    resInfo.setId(resourcesMap.get(fullName));
                                }
                                return resInfo;
                            }).collect(Collectors.toList());
                            param.put("resourceList", JSONUtils.parseObject(JSONUtils.toJsonString(newResourceList)));
                        }
                    }
                    task.put("params", param);
                }
                jsonObject.remove("tasks");
                jsonObject.put("tasks", tasks);
                replaceProcessDefinitionMap.put(entry.getKey(), jsonObject.toString());
            }
            if (replaceProcessDefinitionMap.size() > 0) {
                processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(), replaceProcessDefinitionMap);
            }
        } catch (Exception e) {
            logger.error("update process definition json resource list error", e);
        }
    }

    /**
     * upgradeDolphinScheduler DML
     *
     * @param schemaDir schemaDir
     */
    private void upgradeDolphinSchedulerDML(String schemaDir) {
        String schemaVersion = schemaDir.split("_")[0];
        if (StringUtils.isEmpty(rootDir)) {
            throw new RuntimeException("Environment variable user.dir not found");
        }
        String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_dml.sql", rootDir, schemaDir, getDbType().name().toLowerCase());
        logger.info("sqlSQLFilePath" + sqlFilePath);
        Connection conn = null;
        PreparedStatement pstmt = null;
        try {
            conn = dataSource.getConnection();
            conn.setAutoCommit(false);
            // Execute the upgraded dolphinscheduler dml
            ScriptRunner scriptRunner = new ScriptRunner(conn, false, true);
            Reader sqlReader = new FileReader(new File(sqlFilePath));
            scriptRunner.runScript(sqlReader);
            if (isExistsTable(T_VERSION_NAME)) {
                // Change version in the version table to the new version
                String upgradeSQL = String.format("update %s set version = ?", T_VERSION_NAME);
                pstmt = conn.prepareStatement(upgradeSQL);
                pstmt.setString(1, schemaVersion);
                pstmt.executeUpdate();
            } else if (isExistsTable(T_NEW_VERSION_NAME)) {
                // Change version in the version table to the new version
                String upgradeSQL = String.format("update %s set version = ?", T_NEW_VERSION_NAME);
                pstmt = conn.prepareStatement(upgradeSQL);
                pstmt.setString(1, schemaVersion);
                pstmt.executeUpdate();
            }
            conn.commit();
        } catch (FileNotFoundException e) {
            try {
                conn.rollback();
            } catch (SQLException e1) {
                logger.error(e1.getMessage(), e1);
            }
            logger.error(e.getMessage(), e);
            throw new RuntimeException("sql file not found ", e);
        } catch (IOException e) {
            try {
                conn.rollback();
            } catch (SQLException e1) {
                logger.error(e1.getMessage(), e1);
            }
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } catch (SQLException e) {
            try {
                if (null != conn) {
                    conn.rollback();
                }
            } catch (SQLException e1) {
                logger.error(e1.getMessage(), e1);
            }
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } catch (Exception e) {
            try {
                if (null != conn) {
                    conn.rollback();
                }
            } catch (SQLException e1) {
                logger.error(e1.getMessage(), e1);
            }
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            ConnectionUtils.releaseResource(pstmt, conn);
        }
    }

    /**
     * upgradeDolphinScheduler DDL
     *
     * @param schemaDir schemaDir
     */
    private void upgradeDolphinSchedulerDDL(String schemaDir) {
        if (StringUtils.isEmpty(rootDir)) {
            throw new RuntimeException("Environment variable user.dir not found");
        }
        String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_ddl.sql", rootDir, schemaDir, getDbType().name().toLowerCase());
        Connection conn = null;
        PreparedStatement pstmt = null;
        try {
            conn = dataSource.getConnection();
            String dbName = conn.getCatalog();
            logger.info(dbName);
            conn.setAutoCommit(true);
            // Execute the dolphinscheduler ddl.sql for the upgrade
            ScriptRunner scriptRunner = new ScriptRunner(conn, true, true);
            Reader sqlReader = new FileReader(new File(sqlFilePath));
            scriptRunner.runScript(sqlReader);
        } catch (FileNotFoundException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException("sql file not found ", e);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } catch (SQLException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            ConnectionUtils.releaseResource(pstmt, conn);
        }
    }

    /**
     * update version
     *
     * @param version version
     */
    public void updateVersion(String version) {
        // Change version in the version table to the new version
        String versionName = T_VERSION_NAME;
        if (!SchemaUtils.isAGreatVersion("1.2.0", version)) {
            versionName = "t_ds_version";
        }
        String upgradeSQL = String.format("update %s set version = ?", versionName);
        PreparedStatement pstmt = null;
        Connection conn = null;
        try {
            conn = dataSource.getConnection();
            pstmt = conn.prepareStatement(upgradeSQL);
            pstmt.setString(1, version);
            pstmt.executeUpdate();
        } catch (SQLException e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException("sql: " + upgradeSQL, e);
        } finally {
            ConnectionUtils.releaseResource(pstmt, conn);
        }
    }
}
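The `getCurrentDbType()` method above decides between the MySQL and PostgreSQL code paths from nothing more than the JDBC product name. The same probe in isolation (the URL and credentials are placeholders):

```java
import java.sql.Connection;
import java.sql.DriverManager;

// Sketch only: detects the database flavor the way getCurrentDbType() does.
public class DbTypeProbe {
    enum DbType { MYSQL, POSTGRESQL }

    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/dolphinscheduler", "postgres", "postgres")) {
            // "PostgreSQL" -> "POSTGRESQL", "MySQL" -> "MYSQL"
            String name = conn.getMetaData().getDatabaseProductName().toUpperCase();
            DbType dbType = DbType.valueOf(name); // throws for unsupported products
            System.out.println("detected database type: " + dbType);
        }
    }
}
```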
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,381
[Bug][API] Upload file failure with PostgreSQL
**To Reproduce** Steps to reproduce the behavior, for example: 1. Upload file in `Resource Manage` 2. See error **Expected behavior** Bug fixed **Screenshots** If applicable, add screenshots to help explain your problem. ![image](https://user-images.githubusercontent.com/4902714/115995032-28d81400-a60c-11eb-9f4a-4cdeac626d79.png) **API logs:** ``` [INFO] 2021-04-25 21:14:51.550 org.apache.dolphinscheduler.api.controller.ResourcesController:[157] - login user admin, create resource, type: FILE, resource alias: license.lic, desc: , file: file,license.lic [ERROR] 2021-04-25 21:14:51.711 org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl:[233] - resource already exists, can't recreate org.springframework.jdbc.BadSqlGrammarException: ### Error updating database. Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ### The error may exist in org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java (best guess) ### The error may involve org.apache.dolphinscheduler.dao.mapper.ResourceMapper.insert-Inline ### The error occurred while setting parameters ### SQL: INSERT INTO t_ds_resources ( file_name, size, create_time, description, full_name, alias, update_time, pid, type, user_id, is_directory ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ) ### Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ; bad SQL grammar []; nested exception is org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator.doTranslate(SQLErrorCodeSQLExceptionTranslator.java:239) at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:72) at org.mybatis.spring.MyBatisExceptionTranslator.translateExceptionIfPossible(MyBatisExceptionTranslator.java:74) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:440) at com.sun.proxy.$Proxy97.insert(Unknown Source) at org.mybatis.spring.SqlSessionTemplate.insert(SqlSessionTemplate.java:271) at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:58) at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:61) at com.sun.proxy.$Proxy111.insert(Unknown Source) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:222) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at 
javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at 
org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) Caused by: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440) at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:2183) at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:308) at org.postgresql.jdbc.PgStatement.executeInternal(PgStatement.java:441) at org.postgresql.jdbc.PgStatement.execute(PgStatement.java:365) at org.postgresql.jdbc.PgPreparedStatement.executeWithFlags(PgPreparedStatement.java:143) at org.postgresql.jdbc.PgPreparedStatement.execute(PgPreparedStatement.java:132) at com.alibaba.druid.pool.DruidPooledPreparedStatement.execute(DruidPooledPreparedStatement.java:497) at org.apache.ibatis.executor.statement.PreparedStatementHandler.update(PreparedStatementHandler.java:47) at org.apache.ibatis.executor.statement.RoutingStatementHandler.update(RoutingStatementHandler.java:74) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:63) at com.sun.proxy.$Proxy159.update(Unknown Source) at com.baomidou.mybatisplus.core.executor.MybatisSimpleExecutor.doUpdate(MybatisSimpleExecutor.java:54) at org.apache.ibatis.executor.BaseExecutor.update(BaseExecutor.java:117) at org.apache.ibatis.session.defaults.DefaultSqlSession.update(DefaultSqlSession.java:197) at org.apache.ibatis.session.defaults.DefaultSqlSession.insert(DefaultSqlSession.java:184) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:426) ... 
93 common frames omitted [ERROR] 2021-04-25 21:14:51.720 org.apache.dolphinscheduler.api.exceptions.ApiExceptionHandler:[46] - 创建资源错误 org.apache.dolphinscheduler.api.exceptions.ServiceException: resource already exists, can't recreate at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:234) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at 
org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at 
org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) ``` **Which version of Dolphin Scheduler:** -[dev] **Additional context** Add any other context about the problem here. **Requirement or improvement** - Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5381
https://github.com/apache/dolphinscheduler/pull/5383
e6d8da484f8d3eff528b206ab4b159b451865df3
6b8461e901de65000f4f8f34ebfe011946e787d3
"2021-04-25T13:21:43Z"
java
"2021-04-25T15:29:00Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinScheduler.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.dao.upgrade.shell;

import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * create DolphinScheduler
 */
public class CreateDolphinScheduler {

    private static final Logger logger = LoggerFactory.getLogger(CreateDolphinScheduler.class);

    /**
     * create dolphin scheduler db
     *
     * @param args args
     */
    public static void main(String[] args) {
        DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager();
        try {
            logger.info("create DolphinScheduler begin");
            dolphinSchedulerManager.initDolphinScheduler();
            logger.info("create DolphinScheduler success");
        } catch (Exception e) {
            logger.error("create DolphinScheduler failed", e);
        }
    }
}
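The PSQLException quoted in the record above is the crux of the bug: the JDBC driver binds the Java `boolean` for `is_directory` as a PostgreSQL `boolean`, while the column is declared as an integer, and PostgreSQL refuses the implicit cast. Below is a minimal plain-JDBC sketch of the mismatch and of the explicit cast the error hint asks for; the table and column names come from the logged SQL, while the connection details and the surrounding class are illustrative placeholders, not the project's actual mapper code:

```
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class IsDirectoryBindingSketch {

    public static void main(String[] args) throws Exception {
        // Placeholder connection details; not the project's configuration.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/dolphinscheduler", "user", "password")) {

            boolean isDirectory = false;

            // ps.setBoolean(1, isDirectory) against an int column reproduces the logged
            // "column \"is_directory\" is of type integer but expression is of type boolean".
            // Binding the flag as an int is the rewrite/cast the hint suggests:
            try (PreparedStatement ps = conn.prepareStatement(
                    "UPDATE t_ds_resources SET is_directory = ? WHERE id = ?")) {
                ps.setInt(1, isDirectory ? 1 : 0);
                ps.setInt(2, 1);
                ps.executeUpdate();
            }
        }
    }
}
```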
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,381
[Bug][API] Upload file failure with PostgreSQL
**To Reproduce** Steps to reproduce the behavior, for example: 1. Upload file in `Resource Manage` 2. See error **Expected behavior** Bug fixed **Screenshots** If applicable, add screenshots to help explain your problem. ![image](https://user-images.githubusercontent.com/4902714/115995032-28d81400-a60c-11eb-9f4a-4cdeac626d79.png) **API logs:** ``` [INFO] 2021-04-25 21:14:51.550 org.apache.dolphinscheduler.api.controller.ResourcesController:[157] - login user admin, create resource, type: FILE, resource alias: license.lic, desc: , file: file,license.lic [ERROR] 2021-04-25 21:14:51.711 org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl:[233] - resource already exists, can't recreate org.springframework.jdbc.BadSqlGrammarException: ### Error updating database. Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ### The error may exist in org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java (best guess) ### The error may involve org.apache.dolphinscheduler.dao.mapper.ResourceMapper.insert-Inline ### The error occurred while setting parameters ### SQL: INSERT INTO t_ds_resources ( file_name, size, create_time, description, full_name, alias, update_time, pid, type, user_id, is_directory ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ) ### Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ; bad SQL grammar []; nested exception is org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator.doTranslate(SQLErrorCodeSQLExceptionTranslator.java:239) at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:72) at org.mybatis.spring.MyBatisExceptionTranslator.translateExceptionIfPossible(MyBatisExceptionTranslator.java:74) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:440) at com.sun.proxy.$Proxy97.insert(Unknown Source) at org.mybatis.spring.SqlSessionTemplate.insert(SqlSessionTemplate.java:271) at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:58) at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:61) at com.sun.proxy.$Proxy111.insert(Unknown Source) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:222) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at 
javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at 
org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) Caused by: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440) at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:2183) at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:308) at org.postgresql.jdbc.PgStatement.executeInternal(PgStatement.java:441) at org.postgresql.jdbc.PgStatement.execute(PgStatement.java:365) at org.postgresql.jdbc.PgPreparedStatement.executeWithFlags(PgPreparedStatement.java:143) at org.postgresql.jdbc.PgPreparedStatement.execute(PgPreparedStatement.java:132) at com.alibaba.druid.pool.DruidPooledPreparedStatement.execute(DruidPooledPreparedStatement.java:497) at org.apache.ibatis.executor.statement.PreparedStatementHandler.update(PreparedStatementHandler.java:47) at org.apache.ibatis.executor.statement.RoutingStatementHandler.update(RoutingStatementHandler.java:74) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:63) at com.sun.proxy.$Proxy159.update(Unknown Source) at com.baomidou.mybatisplus.core.executor.MybatisSimpleExecutor.doUpdate(MybatisSimpleExecutor.java:54) at org.apache.ibatis.executor.BaseExecutor.update(BaseExecutor.java:117) at org.apache.ibatis.session.defaults.DefaultSqlSession.update(DefaultSqlSession.java:197) at org.apache.ibatis.session.defaults.DefaultSqlSession.insert(DefaultSqlSession.java:184) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:426) ... 
93 common frames omitted [ERROR] 2021-04-25 21:14:51.720 org.apache.dolphinscheduler.api.exceptions.ApiExceptionHandler:[46] - 创建资源错误 org.apache.dolphinscheduler.api.exceptions.ServiceException: resource already exists, can't recreate at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:234) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at 
org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at 
org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) ``` **Which version of Dolphin Scheduler:** -[dev] **Additional context** Add any other context about the problem here. **Requirement or improvement** - Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5381
https://github.com/apache/dolphinscheduler/pull/5383
e6d8da484f8d3eff528b206ab4b159b451865df3
6b8461e901de65000f4f8f34ebfe011946e787d3
"2021-04-25T13:21:43Z"
java
"2021-04-25T15:29:00Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinScheduler.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.dao.upgrade.shell;

import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * init DolphinScheduler
 */
public class InitDolphinScheduler {

    private static final Logger logger = LoggerFactory.getLogger(InitDolphinScheduler.class);

    /**
     * init dolphin scheduler db
     * @param args args
     */
    public static void main(String[] args) {
        Thread.currentThread().setName("manager-InitDolphinScheduler");
        DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager();
        try {
            dolphinSchedulerManager.initDolphinScheduler();
            logger.info("init DolphinScheduler finished");
        } catch (Exception ex) {
            logger.error("init DolphinScheduler error", ex);
        }
    }
}
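Since every record for this issue carries the same `is_directory` error, it is worth sketching one generic way such a Java `Boolean` / integer-column mismatch can be reconciled in a MyBatis stack like the one visible in the stack traces: a custom type handler that always writes 0/1. This is a sketch only; whether PR #5383 took this route or aligned the PostgreSQL schema instead is not stated in the record, and the class name below is hypothetical:

```
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;

// Hypothetical handler: maps Boolean <-> INTEGER (0/1) so PostgreSQL never
// receives a boolean expression where an int column is expected.
public class BooleanToIntTypeHandler extends BaseTypeHandler<Boolean> {

    @Override
    public void setNonNullParameter(PreparedStatement ps, int i, Boolean parameter, JdbcType jdbcType)
            throws SQLException {
        ps.setInt(i, Boolean.TRUE.equals(parameter) ? 1 : 0);
    }

    @Override
    public Boolean getNullableResult(ResultSet rs, String columnName) throws SQLException {
        return rs.getInt(columnName) != 0;
    }

    @Override
    public Boolean getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
        return rs.getInt(columnIndex) != 0;
    }

    @Override
    public Boolean getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
        return cs.getInt(columnIndex) != 0;
    }
}
```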
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,381
[Bug][API] Upload file failure with PostgreSQL
**To Reproduce** Steps to reproduce the behavior, for example: 1. Upload file in `Resource Manage` 2. See error **Expected behavior** Bug fixed **Screenshots** If applicable, add screenshots to help explain your problem. ![image](https://user-images.githubusercontent.com/4902714/115995032-28d81400-a60c-11eb-9f4a-4cdeac626d79.png) **API logs:** ``` [INFO] 2021-04-25 21:14:51.550 org.apache.dolphinscheduler.api.controller.ResourcesController:[157] - login user admin, create resource, type: FILE, resource alias: license.lic, desc: , file: file,license.lic [ERROR] 2021-04-25 21:14:51.711 org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl:[233] - resource already exists, can't recreate org.springframework.jdbc.BadSqlGrammarException: ### Error updating database. Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ### The error may exist in org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java (best guess) ### The error may involve org.apache.dolphinscheduler.dao.mapper.ResourceMapper.insert-Inline ### The error occurred while setting parameters ### SQL: INSERT INTO t_ds_resources ( file_name, size, create_time, description, full_name, alias, update_time, pid, type, user_id, is_directory ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ) ### Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ; bad SQL grammar []; nested exception is org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator.doTranslate(SQLErrorCodeSQLExceptionTranslator.java:239) at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:72) at org.mybatis.spring.MyBatisExceptionTranslator.translateExceptionIfPossible(MyBatisExceptionTranslator.java:74) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:440) at com.sun.proxy.$Proxy97.insert(Unknown Source) at org.mybatis.spring.SqlSessionTemplate.insert(SqlSessionTemplate.java:271) at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:58) at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:61) at com.sun.proxy.$Proxy111.insert(Unknown Source) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:222) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at 
javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at 
org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) Caused by: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440) at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:2183) at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:308) at org.postgresql.jdbc.PgStatement.executeInternal(PgStatement.java:441) at org.postgresql.jdbc.PgStatement.execute(PgStatement.java:365) at org.postgresql.jdbc.PgPreparedStatement.executeWithFlags(PgPreparedStatement.java:143) at org.postgresql.jdbc.PgPreparedStatement.execute(PgPreparedStatement.java:132) at com.alibaba.druid.pool.DruidPooledPreparedStatement.execute(DruidPooledPreparedStatement.java:497) at org.apache.ibatis.executor.statement.PreparedStatementHandler.update(PreparedStatementHandler.java:47) at org.apache.ibatis.executor.statement.RoutingStatementHandler.update(RoutingStatementHandler.java:74) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:63) at com.sun.proxy.$Proxy159.update(Unknown Source) at com.baomidou.mybatisplus.core.executor.MybatisSimpleExecutor.doUpdate(MybatisSimpleExecutor.java:54) at org.apache.ibatis.executor.BaseExecutor.update(BaseExecutor.java:117) at org.apache.ibatis.session.defaults.DefaultSqlSession.update(DefaultSqlSession.java:197) at org.apache.ibatis.session.defaults.DefaultSqlSession.insert(DefaultSqlSession.java:184) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:426) ... 
93 common frames omitted [ERROR] 2021-04-25 21:14:51.720 org.apache.dolphinscheduler.api.exceptions.ApiExceptionHandler:[46] - 创建资源错误 org.apache.dolphinscheduler.api.exceptions.ServiceException: resource already exists, can't recreate at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:234) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at 
org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at 
org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) ``` **Which version of Dolphin Scheduler:** -[dev] **Additional context** Add any other context about the problem here. **Requirement or improvement** - Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5381
https://github.com/apache/dolphinscheduler/pull/5383
e6d8da484f8d3eff528b206ab4b159b451865df3
6b8461e901de65000f4f8f34ebfe011946e787d3
"2021-04-25T13:21:43Z"
java
"2021-04-25T15:29:00Z"
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDaoTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.dao.upgrade;

import org.apache.dolphinscheduler.common.utils.ScriptRunner;
import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory;

import java.io.FileReader;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import javax.sql.DataSource;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

@RunWith(PowerMockRunner.class)
@PrepareForTest({ConnectionFactory.class, ScriptRunner.class, FileReader.class})
public class UpgradeDaoTest {

    @Test
    public void testGetCurrentVersion() throws SQLException {
        PowerMockito.mockStatic(ConnectionFactory.class);
        ConnectionFactory mockConnectionFactory = PowerMockito.mock(ConnectionFactory.class);
        PowerMockito.when(ConnectionFactory.getInstance()).thenReturn(mockConnectionFactory);
        DataSource mockDatasource = PowerMockito.mock(DataSource.class);
        PowerMockito.when(mockConnectionFactory.getDataSource()).thenReturn(mockDatasource);
        Connection mockConnection = PowerMockito.mock(Connection.class);
        PowerMockito.when(mockDatasource.getConnection()).thenReturn(mockConnection);
        PreparedStatement mockPrepareStatement = PowerMockito.mock(PreparedStatement.class);
        PowerMockito.when(mockConnection.prepareStatement(Mockito.any())).thenReturn(mockPrepareStatement);
        ResultSet mockResultSet = PowerMockito.mock(ResultSet.class);
        PowerMockito.when(mockPrepareStatement.executeQuery()).thenReturn(mockResultSet);
        DatabaseMetaData mockMetaData = PowerMockito.mock(DatabaseMetaData.class);
        PowerMockito.when(mockConnection.getMetaData()).thenReturn(mockMetaData);
        PowerMockito.when(mockMetaData.getDatabaseProductName()).thenReturn("mysql");

        UpgradeDao upgradeDao = MysqlUpgradeDao.getInstance();
        upgradeDao.getCurrentVersion("xx");
        Assert.assertTrue(true);
    }

    @Test(expected = IOException.class)
    public void testInitSchema() throws Exception {
        PowerMockito.mockStatic(ConnectionFactory.class);
        ConnectionFactory mockConnectionFactory = PowerMockito.mock(ConnectionFactory.class);
        PowerMockito.when(ConnectionFactory.getInstance()).thenReturn(mockConnectionFactory);
        DataSource mockDatasource = PowerMockito.mock(DataSource.class);
        PowerMockito.when(mockConnectionFactory.getDataSource()).thenReturn(mockDatasource);
        Connection mockConnection = PowerMockito.mock(Connection.class);
        PowerMockito.when(mockDatasource.getConnection()).thenReturn(mockConnection);
        DatabaseMetaData mockMetaData = PowerMockito.mock(DatabaseMetaData.class);
        PowerMockito.when(mockConnection.getMetaData()).thenReturn(mockMetaData);
        PowerMockito.when(mockMetaData.getDatabaseProductName()).thenReturn("mysql");

        UpgradeDao upgradeDao = MysqlUpgradeDao.getInstance();
        upgradeDao.initSchema();
        Assert.assertTrue(true);
    }
}
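UpgradeDaoTest above stubs `getDatabaseProductName()` to return "mysql", which is how the upgrade code decides which dialect-specific DAO (`MysqlUpgradeDao` in the test) to drive. Here is a hedged sketch of that metadata-based dispatch, assuming only standard JDBC; the `postgresql` branch and the selector class itself are illustrative and may not match the project's exact factory shape:

```
import java.sql.Connection;
import java.sql.SQLException;

import javax.sql.DataSource;

// Illustrative dialect detection keyed on JDBC metadata, modeled on the
// stubbing in UpgradeDaoTest; not the project's actual factory.
public final class DialectDetector {

    private DialectDetector() {
    }

    public static String detect(DataSource dataSource) throws SQLException {
        try (Connection connection = dataSource.getConnection()) {
            String product = connection.getMetaData().getDatabaseProductName().toLowerCase();
            if (product.contains("mysql")) {
                return "mysql";
            }
            if (product.contains("postgresql")) {
                return "postgresql";
            }
            throw new IllegalStateException("unsupported database: " + product);
        }
    }
}
```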
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,381
[Bug][API] Upload file failure with PostgreSQL
**To Reproduce** Steps to reproduce the behavior, for example: 1. Upload file in `Resource Manage` 2. See error **Expected behavior** Bug fixed **Screenshots** If applicable, add screenshots to help explain your problem. ![image](https://user-images.githubusercontent.com/4902714/115995032-28d81400-a60c-11eb-9f4a-4cdeac626d79.png) **API logs:** ``` [INFO] 2021-04-25 21:14:51.550 org.apache.dolphinscheduler.api.controller.ResourcesController:[157] - login user admin, create resource, type: FILE, resource alias: license.lic, desc: , file: file,license.lic [ERROR] 2021-04-25 21:14:51.711 org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl:[233] - resource already exists, can't recreate org.springframework.jdbc.BadSqlGrammarException: ### Error updating database. Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ### The error may exist in org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java (best guess) ### The error may involve org.apache.dolphinscheduler.dao.mapper.ResourceMapper.insert-Inline ### The error occurred while setting parameters ### SQL: INSERT INTO t_ds_resources ( file_name, size, create_time, description, full_name, alias, update_time, pid, type, user_id, is_directory ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ) ### Cause: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. Position: 192 ; bad SQL grammar []; nested exception is org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator.doTranslate(SQLErrorCodeSQLExceptionTranslator.java:239) at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:72) at org.mybatis.spring.MyBatisExceptionTranslator.translateExceptionIfPossible(MyBatisExceptionTranslator.java:74) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:440) at com.sun.proxy.$Proxy97.insert(Unknown Source) at org.mybatis.spring.SqlSessionTemplate.insert(SqlSessionTemplate.java:271) at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:58) at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:61) at com.sun.proxy.$Proxy111.insert(Unknown Source) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:222) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at 
javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at 
org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) Caused by: org.postgresql.util.PSQLException: ERROR: column "is_directory" is of type integer but expression is of type boolean Hint: You will need to rewrite or cast the expression. 
Position: 192 at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440) at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:2183) at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:308) at org.postgresql.jdbc.PgStatement.executeInternal(PgStatement.java:441) at org.postgresql.jdbc.PgStatement.execute(PgStatement.java:365) at org.postgresql.jdbc.PgPreparedStatement.executeWithFlags(PgPreparedStatement.java:143) at org.postgresql.jdbc.PgPreparedStatement.execute(PgPreparedStatement.java:132) at com.alibaba.druid.pool.DruidPooledPreparedStatement.execute(DruidPooledPreparedStatement.java:497) at org.apache.ibatis.executor.statement.PreparedStatementHandler.update(PreparedStatementHandler.java:47) at org.apache.ibatis.executor.statement.RoutingStatementHandler.update(RoutingStatementHandler.java:74) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:63) at com.sun.proxy.$Proxy159.update(Unknown Source) at com.baomidou.mybatisplus.core.executor.MybatisSimpleExecutor.doUpdate(MybatisSimpleExecutor.java:54) at org.apache.ibatis.executor.BaseExecutor.update(BaseExecutor.java:117) at org.apache.ibatis.session.defaults.DefaultSqlSession.update(DefaultSqlSession.java:197) at org.apache.ibatis.session.defaults.DefaultSqlSession.insert(DefaultSqlSession.java:184) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:426) ... 
93 common frames omitted [ERROR] 2021-04-25 21:14:51.720 org.apache.dolphinscheduler.api.exceptions.ApiExceptionHandler:[46] - create resource error org.apache.dolphinscheduler.api.exceptions.ServiceException: resource already exists, can't recreate at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl.createResource(ResourcesServiceImpl.java:234) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$FastClassBySpringCGLIB$$d2d5d7f6.invoke(<generated>) at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:752) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:295) at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98) at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) at org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl$$EnhancerBySpringCGLIB$$22f4defc.createResource(<generated>) at org.apache.dolphinscheduler.api.controller.ResourcesController.createResource(ResourcesController.java:159) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190) at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138) at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:892) at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:797) at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87) at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040) at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943) at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006) at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909) at javax.servlet.http.HttpServlet.service(HttpServlet.java:707) at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883) at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) at org.eclipse.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1633) at org.springframework.web.filter.CorsFilter.doFilterInternal(CorsFilter.java:97) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at 
org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.SecurityBasicAuthFilter.doFilter(SecurityBasicAuthFilter.java:84) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at com.github.xiaoymin.swaggerbootstrapui.filter.ProductionSecurityFilter.doFilter(ProductionSecurityFilter.java:53) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:94) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201) at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119) at org.eclipse.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193) at org.eclipse.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1609) at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:561) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:143) at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:602) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:235) at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1612) at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1434) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501) at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1582) at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1349) at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:766) at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) at org.eclipse.jetty.server.Server.handle(Server.java:516) at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383) at 
org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) ``` **Which version of Dolphin Scheduler:** -[dev]
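The PSQLException above pins down the root cause: the PostgreSQL schema declares `t_ds_resources.is_directory` as an integer column, while the mapper binds the entity's flag as a JDBC boolean, and PostgreSQL will not cast boolean to integer implicitly. One way to bridge such a mismatch is a custom MyBatis type handler that persists the flag as 0/1. The sketch below is illustrative only (the handler name is invented here, and the linked PR may instead have aligned the column or field types):

```
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;

// Hypothetical handler: stores a Java Boolean in an INTEGER column as 1/0.
public class BooleanToIntTypeHandler extends BaseTypeHandler<Boolean> {

    @Override
    public void setNonNullParameter(PreparedStatement ps, int i, Boolean parameter, JdbcType jdbcType)
            throws SQLException {
        // Bind an int instead of a boolean so PostgreSQL sees a matching type.
        ps.setInt(i, Boolean.TRUE.equals(parameter) ? 1 : 0);
    }

    @Override
    public Boolean getNullableResult(ResultSet rs, String columnName) throws SQLException {
        return rs.getInt(columnName) == 1;
    }

    @Override
    public Boolean getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
        return rs.getInt(columnIndex) == 1;
    }

    @Override
    public Boolean getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
        return cs.getInt(columnIndex) == 1;
    }
}
```

Registering such a handler for the field (or, equivalently, declaring the column as `boolean` in the PostgreSQL DDL, or typing the entity field as `int`) removes the mismatch on both the INSERT and the read path.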
https://github.com/apache/dolphinscheduler/issues/5381
https://github.com/apache/dolphinscheduler/pull/5383
e6d8da484f8d3eff528b206ab4b159b451865df3
6b8461e901de65000f4f8f34ebfe011946e787d3
"2021-04-25T13:21:43Z"
java
"2021-04-25T15:29:00Z"
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinSchedulerTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.dao.upgrade.shell;

import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

@RunWith(PowerMockRunner.class)
@PrepareForTest({DolphinSchedulerManager.class, CreateDolphinScheduler.class})
public class CreateDolphinSchedulerTest {

    @Test
    public void mainTest() throws Exception {
        DolphinSchedulerManager mockManager = PowerMockito.mock(DolphinSchedulerManager.class);
        PowerMockito.whenNew(DolphinSchedulerManager.class).withNoArguments().thenReturn(mockManager);

        CreateDolphinScheduler.main(null);
        Assert.assertTrue(true);
    }
}
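The test above only asserts `true` unconditionally once `main` returns. If a stronger check is wanted, PowerMock can also verify that the intercepted constructor was actually invoked; a minimal follow-up, assuming the same mocks as in the test:

```
// After CreateDolphinScheduler.main(null) has run:
PowerMockito.verifyNew(DolphinSchedulerManager.class).withNoArguments();
```

Note that `whenNew` interception only takes effect because `CreateDolphinScheduler`, the class that executes the `new`, is itself listed in `@PrepareForTest` alongside `DolphinSchedulerManager`.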
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,381
[Bug][API] Upload file failure with PostgreSQL
https://github.com/apache/dolphinscheduler/issues/5381
https://github.com/apache/dolphinscheduler/pull/5383
e6d8da484f8d3eff528b206ab4b159b451865df3
6b8461e901de65000f4f8f34ebfe011946e787d3
"2021-04-25T13:21:43Z"
java
"2021-04-25T15:29:00Z"
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinSchedulerTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.dao.upgrade.shell;

import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

@RunWith(PowerMockRunner.class)
@PrepareForTest({DolphinSchedulerManager.class, InitDolphinScheduler.class})
public class InitDolphinSchedulerTest {

    @Test
    public void main() throws Exception {
        DolphinSchedulerManager mockManager = PowerMockito.mock(DolphinSchedulerManager.class);
        PowerMockito.whenNew(DolphinSchedulerManager.class).withNoArguments().thenReturn(mockManager);

        InitDolphinScheduler.main(null);
        Assert.assertTrue(true);
    }
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,381
[Bug][API] Upload file failure with PostgreSQL
org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:556) at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375) at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:273) at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171) at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129) at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375) at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:773) at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:905) at java.lang.Thread.run(Thread.java:748) ``` **Which version of Dolphin Scheduler:** -[dev] **Additional context** Add any other context about the problem here. **Requirement or improvement** - Please describe about your requirements or improvement suggestions.
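Editor's note on the `PSQLException` quoted above: PostgreSQL does not implicitly coerce a boolean expression into an `integer` column, so binding a Java `boolean` against an `int` column such as `is_directory` fails exactly as the hint describes. Below is a minimal JDBC sketch of the mismatch and two possible workarounds; the table shape, column list, and connection details are illustrative assumptions, not the project's actual fix.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class BooleanIntColumnDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection details; adjust to your environment.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/dolphinscheduler", "ds", "ds")) {

            // This shape fails when is_directory is declared as integer,
            // because pgjdbc sends the ? parameter typed as boolean:
            //   ERROR: column "is_directory" is of type integer
            //          but expression is of type boolean
            // PreparedStatement bad = conn.prepareStatement(
            //     "insert into t_ds_resources(alias, is_directory) values (?, ?)");
            // bad.setBoolean(2, true);

            // Workaround 1: cast the parameter in SQL. boolean -> int is a
            // valid PostgreSQL cast (true::int evaluates to 1).
            try (PreparedStatement ok = conn.prepareStatement(
                    "insert into t_ds_resources(alias, is_directory) values (?, ?::int)")) {
                ok.setString(1, "dir1");
                ok.setBoolean(2, true);
                ok.executeUpdate();
            }

            // Workaround 2: convert on the Java side and bind an int.
            boolean isDirectory = true;
            try (PreparedStatement ok2 = conn.prepareStatement(
                    "insert into t_ds_resources(alias, is_directory) values (?, ?)")) {
                ok2.setString(1, "dir2");
                ok2.setInt(2, isDirectory ? 1 : 0);
                ok2.executeUpdate();
            }
        }
    }
}
```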
https://github.com/apache/dolphinscheduler/issues/5381
https://github.com/apache/dolphinscheduler/pull/5383
e6d8da484f8d3eff528b206ab4b159b451865df3
6b8461e901de65000f4f8f34ebfe011946e787d3
"2021-04-25T13:21:43Z"
java
"2021-04-25T15:29:00Z"
pom.xml
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>${revision}</version> <packaging>pom</packaging> <name>${project.artifactId}</name> <url>http://dolphinscheduler.apache.org</url> <description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. </description> <licenses> <license> <name>Apache License 2.0</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> <distribution>repo</distribution> </license> </licenses> <scm> <connection>scm:git:https://github.com/apache/dolphinscheduler.git</connection> <developerConnection>scm:git:https://github.com/apache/dolphinscheduler.git</developerConnection> <url>https://github.com/apache/dolphinscheduler</url> <tag>HEAD</tag> </scm> <mailingLists> <mailingList> <name>DolphinScheduler Developer List</name> <post>dev@dolphinscheduler.apache.org</post> <subscribe>dev-subscribe@dolphinscheduler.apache.org</subscribe> <unsubscribe>dev-unsubscribe@dolphinscheduler.apache.org</unsubscribe> </mailingList> </mailingLists> <parent> <groupId>org.apache</groupId> <artifactId>apache</artifactId> <version>21</version> </parent> <properties> <revision>1.3.6-SNAPSHOT</revision> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <curator.version>4.3.0</curator.version> <spring.version>5.1.19.RELEASE</spring.version> <spring.boot.version>2.1.18.RELEASE</spring.boot.version> <java.version>1.8</java.version> <logback.version>1.2.3</logback.version> <hadoop.version>2.7.3</hadoop.version> <quartz.version>2.3.0</quartz.version> <jackson.version>2.10.5</jackson.version> <mybatis-plus.version>3.2.0</mybatis-plus.version> <mybatis.spring.version>2.0.1</mybatis.spring.version> <cron.utils.version>5.0.5</cron.utils.version> <druid.version>1.1.22</druid.version> <h2.version>1.4.200</h2.version> <commons.codec.version>1.11</commons.codec.version> <commons.logging.version>1.1.1</commons.logging.version> <httpclient.version>4.4.1</httpclient.version> <httpcore.version>4.4.1</httpcore.version> <junit.version>4.12</junit.version> <mysql.connector.version>5.1.34</mysql.connector.version> <slf4j.api.version>1.7.5</slf4j.api.version> <slf4j.log4j12.version>1.7.5</slf4j.log4j12.version> 
<commons.collections.version>3.2.2</commons.collections.version> <commons.httpclient>3.0.1</commons.httpclient> <commons.beanutils.version>1.9.4</commons.beanutils.version> <commons.configuration.version>1.10</commons.configuration.version> <commons.email.version>1.5</commons.email.version> <poi.version>3.17</poi.version> <javax.servlet.api.version>3.1.0</javax.servlet.api.version> <commons.collections4.version>4.1</commons.collections4.version> <guava.version>24.1-jre</guava.version> <postgresql.version>42.2.5</postgresql.version> <hive.jdbc.version>2.1.0</hive.jdbc.version> <commons.io.version>2.4</commons.io.version> <oshi.core.version>3.9.1</oshi.core.version> <clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version> <mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version> <presto.jdbc.version>0.238.1</presto.jdbc.version> <spotbugs.version>3.1.12</spotbugs.version> <checkstyle.version>3.0.0</checkstyle.version> <zookeeper.version>3.4.14</zookeeper.version> <frontend-maven-plugin.version>1.6</frontend-maven-plugin.version> <maven-compiler-plugin.version>3.3</maven-compiler-plugin.version> <maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version> <maven-release-plugin.version>2.5.3</maven-release-plugin.version> <maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version> <maven-source-plugin.version>2.4</maven-source-plugin.version> <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version> <maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version> <rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version> <jacoco.version>0.8.4</jacoco.version> <jcip.version>1.0</jcip.version> <maven.deploy.skip>false</maven.deploy.skip> <cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version> <mockito.version>2.21.0</mockito.version> <powermock.version>2.0.2</powermock.version> <servlet-api.version>2.5</servlet-api.version> <swagger.version>1.9.3</swagger.version> <springfox.version>2.9.2</springfox.version> <swagger-models.version>1.5.24</swagger-models.version> <guava-retry.version>2.0.0</guava-retry.version> <dep.airlift.version>0.184</dep.airlift.version> <dep.packaging.version>${dep.airlift.version}</dep.packaging.version> <protostuff.version>1.7.2</protostuff.version> <reflections.version>0.9.12</reflections.version> <byte-buddy.version>1.9.16</byte-buddy.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <!-- quartz--> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz-jobs</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> <version>${cron.utils.version}</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>${druid.version}</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-parent</artifactId> <version>${spring.boot.version}</version> <type>pom</type> <scope>import</scope> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> 
<version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-tx</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-jdbc</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <version>${spring.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-server</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert-plugin</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-dao</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-api</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-remote</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-service</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-spi</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-framework</artifactId> <version>${curator.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-recipes</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </exclusion> <exclusion> <artifactId>netty</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-annotations</artifactId> </exclusion> </exclusions> <version>${zookeeper.version}</version> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec.version}</version> </dependency> <dependency> <groupId>commons-logging</groupId> <artifactId>commons-logging</artifactId> <version>${commons.logging.version}</version> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>${httpclient.version}</version> </dependency> <dependency> 
<groupId>org.apache.httpcomponents</groupId> <artifactId>httpcore</artifactId> <version>${httpcore.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-core</artifactId> <version>${jackson.version}</version> </dependency> <!--protostuff--> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-core --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-core</artifactId> <version>${protostuff.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-runtime --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-runtime</artifactId> <version>${protostuff.version}</version> </dependency> <dependency> <groupId>net.bytebuddy</groupId> <artifactId>byte-buddy</artifactId> <version>${byte-buddy.version}</version> </dependency> <dependency> <groupId>org.reflections</groupId> <artifactId>reflections</artifactId> <version>${reflections.version}</version> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockito.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-module-junit4</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-api-mockito2</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> <exclusions> <exclusion> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>${mysql.connector.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> <version>${h2.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${slf4j.api.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>${slf4j.log4j12.version}</version> </dependency> <dependency> <groupId>commons-collections</groupId> <artifactId>commons-collections</artifactId> <version>${commons.collections.version}</version> </dependency> <dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> <version>${commons.httpclient}</version> </dependency> <dependency> <groupId>commons-beanutils</groupId> <artifactId>commons-beanutils</artifactId> <version>${commons.beanutils.version}</version> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> <version>${commons.configuration.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> 
<version>${logback.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-email</artifactId> <version>${commons.email.version}</version> </dependency> <!--excel poi--> <dependency> <groupId>org.apache.poi</groupId> <artifactId>poi</artifactId> <version>${poi.version}</version> </dependency> <!-- hadoop --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> <exclusions> <exclusion> <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <artifactId>com.sun.jersey</artifactId> <groupId>jersey-json</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-yarn-common</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-aws</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-collections4</artifactId> <version>${commons.collections4.version}</version> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> <version>${postgresql.version}</version> </dependency> <dependency> <groupId>org.apache.hive</groupId> <artifactId>hive-jdbc</artifactId> <version>${hive.jdbc.version}</version> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>${commons.io.version}</version> </dependency> <dependency> <groupId>com.github.oshi</groupId> <artifactId>oshi-core</artifactId> <version>${oshi.core.version}</version> </dependency> <dependency> <groupId>ru.yandex.clickhouse</groupId> <artifactId>clickhouse-jdbc</artifactId> <version>${clickhouse.jdbc.version}</version> </dependency> <dependency> <groupId>com.microsoft.sqlserver</groupId> <artifactId>mssql-jdbc</artifactId> <version>${mssql.jdbc.version}</version> </dependency> <dependency> <groupId>com.facebook.presto</groupId> <artifactId>presto-jdbc</artifactId> <version>${presto.jdbc.version}</version> </dependency> <dependency> <groupId>net.jcip</groupId> <artifactId>jcip-annotations</artifactId> <version>${jcip.version}</version> <optional>true</optional> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>servlet-api</artifactId> <version>${servlet-api.version}</version> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>javax.servlet-api</artifactId> <version>${javax.servlet.api.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger2</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger-ui</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.swagger</groupId> <artifactId>swagger-models</artifactId> <version>${swagger-models.version}</version> </dependency> <dependency> <groupId>com.github.xiaoymin</groupId> <artifactId>swagger-bootstrap-ui</artifactId> 
<version>${swagger.version}</version> </dependency> <dependency> <groupId>com.github.rholder</groupId> <artifactId>guava-retrying</artifactId> <version>${guava-retry.version}</version> </dependency> <dependency> <groupId>org.sonatype.aether</groupId> <artifactId>aether-api</artifactId> <version>1.13.1</version> </dependency> <dependency> <groupId>io.airlift.resolver</groupId> <artifactId>resolver</artifactId> <version>1.5</version> </dependency> <dependency> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> <version>6.2.1</version> </dependency> <dependency> <groupId>javax.activation</groupId> <artifactId>activation</artifactId> <version>1.1</version> </dependency> <dependency> <groupId>com.sun.mail</groupId> <artifactId>javax.mail</artifactId> <version>1.6.2</version> </dependency> </dependencies> </dependencyManagement> <build> <finalName>apache-dolphinscheduler-${project.version}</finalName> <pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <version>1.0.0</version> <extensions>true</extensions> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <version>1.0.4</version> <extensions>true</extensions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>rpm-maven-plugin</artifactId> <version>${rpm-maven-plugion.version}</version> <inherited>false</inherited> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>${java.version}</source> <target>${java.version}</target> <testSource>${java.version}</testSource> <testTarget>${java.version}</testTarget> </configuration> <version>${maven-compiler-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <tagNameFormat>@{project.version}</tagNameFormat> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-assembly-plugin</artifactId> <version>${maven-assembly-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <configuration> <source>8</source> <failOnError>false</failOnError> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>${maven-source-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> <version>${maven-dependency-plugin.version}</version> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <extensions>true</extensions> <!--<configuration>--> <!--<allowedProvidedDependencies>--> <!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>--> <!--</allowedProvidedDependencies>--> <!--</configuration>--> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <extensions>true</extensions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> 
<executions> <execution> <id>attach-sources</id> <phase>verify</phase> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <executions> <execution> <id>attach-javadocs</id> <goals> <goal>jar</goal> </goals> </execution> </executions> <configuration> <aggregate>true</aggregate> <charset>${project.build.sourceEncoding}</charset> <encoding>${project.build.sourceEncoding}</encoding> <docencoding>${project.build.sourceEncoding}</docencoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <autoVersionSubmodules>true</autoVersionSubmodules> <tagNameFormat>@{project.version}</tagNameFormat> <tagBase>${project.version}</tagBase> <!--<goals>-f pom.xml deploy</goals>--> </configuration> <dependencies> <dependency> <groupId>org.apache.maven.scm</groupId> <artifactId>maven-scm-provider-jgit</artifactId> <version>1.9.5</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>${maven-compiler-plugin.version}</version> <configuration> <source>${java.version}</source> <target>${java.version}</target> <encoding>${project.build.sourceEncoding}</encoding> <skip>false</skip><!--not skip compile test classes--> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>${maven-surefire-plugin.version}</version> <configuration> <includes> <include>**/api/controller/ProjectControllerTest.java</include> <include>**/api/controller/QueueControllerTest.java</include> <include>**/api/configuration/TrafficConfigurationTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TenantControllerTest.java</include> <include>**/api/dto/resources/filter/ResourceFilterTest.java</include> <include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include> <includeDataxTaskTest>**/api/enums/testGetEnum.java</includeDataxTaskTest> <include>**/api/enums/StatusTest.java</include> <include>**/api/exceptions/ApiExceptionHandlerTest.java</include> <include>**/api/exceptions/ServiceExceptionTest.java</include> <include>**/api/interceptor/LocaleChangeInterceptorTest.java</include> <include>**/api/interceptor/LoginHandlerInterceptorTest.java</include> <include>**/api/interceptor/RateLimitInterceptorTest.java</include> <include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include> <include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include> <include>**/api/security/SecurityConfigLDAPTest.java</include> <include>**/api/security/SecurityConfigPasswordTest.java</include> <include>**/api/service/AccessTokenServiceTest.java</include> <include>**/api/service/AlertGroupServiceTest.java</include> <include>**/api/service/BaseDAGServiceTest.java</include> <include>**/api/service/BaseServiceTest.java</include> <include>**/api/service/DataAnalysisServiceTest.java</include> <include>**/api/service/AlertPluginInstanceServiceTest.java</include> <include>**/api/service/DataSourceServiceTest.java</include> <include>**/api/service/ExecutorService2Test.java</include> <include>**/api/service/ExecutorServiceTest.java</include> 
<include>**/api/service/LoggerServiceTest.java</include> <include>**/api/service/MonitorServiceTest.java</include> <include>**/api/service/ProcessDefinitionServiceTest.java</include> <include>**/api/service/ProcessDefinitionVersionServiceTest.java</include> <include>**/api/service/ProcessInstanceServiceTest.java</include> <include>**/api/service/ProjectServiceTest.java</include> <include>**/api/service/QueueServiceTest.java</include> <include>**/api/service/ResourcesServiceTest.java</include> <include>**/api/service/SchedulerServiceTest.java</include> <include>**/api/service/SessionServiceTest.java</include> <include>**/api/service/TaskInstanceServiceTest.java</include> <include>**/api/service/TenantServiceTest.java</include> <include>**/api/service/UdfFuncServiceTest.java</include> <include>**/api/service/UiPluginServiceTest.java</include> <include>**/api/service/UserAlertGroupServiceTest.java</include> <include>**/api/service/UsersServiceTest.java</include> <include>**/api/service/WorkerGroupServiceTest.java</include> <include>**/api/service/WorkFlowLineageServiceTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TaskInstanceControllerTest.java</include> <include>**/api/controller/WorkFlowLineageControllerTest.java</include> <include>**/api/utils/exportprocess/DataSourceParamTest.java</include> <include>**/api/utils/exportprocess/DependentParamTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/FileUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/ResultTest.java</include> <include>**/common/graph/DAGTest.java</include> <include>**/common/os/OshiTest.java</include> <include>**/common/os/OSUtilsTest.java</include> <include>**/common/shell/ShellExecutorTest.java</include> <include>**/common/task/DataxParametersTest.java</include> <include>**/common/task/EntityTestUtils.java</include> <include>**/common/task/FlinkParametersTest.java</include> <include>**/common/task/HttpParametersTest.java</include> <include>**/common/task/SqlParametersTest.java</include> <include>**/common/task/SqoopParameterEntityTest.java</include> <include>**/common/threadutils/ThreadPoolExecutorsTest.java</include> <include>**/common/threadutils/ThreadUtilsTest.java</include> <include>**/common/utils/CollectionUtilsTest.java</include> <include>**/common/utils/CommonUtilsTest.java</include> <include>**/common/utils/DateUtilsTest.java</include> <include>**/common/utils/DependentUtilsTest.java</include> <include>**/common/utils/EncryptionUtilsTest.java</include> <include>**/common/utils/FileUtilsTest.java</include> <include>**/common/utils/JSONUtilsTest.java</include> <include>**/common/utils/LoggerUtilsTest.java</include> <include>**/common/utils/NetUtilsTest.java</include> <include>**/common/utils/OSUtilsTest.java</include> <include>**/common/utils/ParameterUtilsTest.java</include> <include>**/common/utils/TimePlaceholderUtilsTest.java</include> <include>**/common/utils/PreconditionsTest.java</include> <include>**/common/utils/PropertyUtilsTest.java</include> <include>**/common/utils/SchemaUtilsTest.java</include> <include>**/common/utils/ScriptRunnerTest.java</include> <include>**/common/utils/SensitiveLogUtilsTest.java</include> <include>**/common/utils/StringTest.java</include> <include>**/common/utils/StringUtilsTest.java</include> <include>**/common/utils/TaskParametersUtilsTest.java</include> 
<include>**/common/utils/VarPoolUtilsTest.java</include> <include>**/common/utils/HadoopUtilsTest.java</include> <include>**/common/utils/HttpUtilsTest.java</include> <include>**/common/utils/KerberosHttpClientTest.java</include> <include>**/common/utils/HiveConfUtilsTest.java</include> <include>**/common/ConstantsTest.java</include> <include>**/common/utils/HadoopUtils.java</include> <include>**/common/utils/RetryerUtilsTest.java</include> <include>**/common/plugin/DolphinSchedulerPluginLoaderTest.java</include> <include>**/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java</include> <include>**/common/datasource/db2/Db2DatasourceProcessorTest.java</include> <include>**/common/datasource/hive/HiveDatasourceProcessorTest.java</include> <include>**/common/datasource/mysql/MysqlDatasourceProcessorTest.java</include> <include>**/common/datasource/oracle/OracleDatasourceProcessorTest.java</include> <include>**/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java</include> <include>**/common/datasource/presto/PrestoDatasourceProcessorTest.java</include> <include>**/common/datasource/spark/SparkDatasourceProcessorTest.java</include> <include>**/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java</include> <include>**/common/datasource/DatasourceUtilTest.java</include> <include>**/common/enums/ExecutionStatusTest</include> <include>**/dao/mapper/AccessTokenMapperTest.java</include> <include>**/dao/mapper/AlertGroupMapperTest.java</include> <include>**/dao/mapper/CommandMapperTest.java</include> <include>**/dao/mapper/ConnectionFactoryTest.java</include> <include>**/dao/mapper/DataSourceMapperTest.java</include> <include>**/dao/datasource/MySQLDataSourceTest.java</include> <include>**/dao/entity/TaskInstanceTest.java</include> <include>**/dao/entity/UdfFuncTest.java</include> <include>**/dao/upgrade/shell/CreateDolphinSchedulerTest.java</include> <include>**/dao/upgrade/shell/InitDolphinSchedulerTest.java</include> <include>**/remote/command/alert/AlertSendRequestCommandTest.java</include> <include>**/remote/command/alert/AlertSendResponseCommandTest.java</include> <include>**/remote/command/future/ResponseFutureTest.java</include> <include>**/remote/command/log/RemoveTaskLogRequestCommandTest.java</include> <include>**/remote/command/log/RemoveTaskLogResponseCommandTest.java</include> <include>**/remote/utils/HostTest.java</include> <include>**/remote/utils/NettyUtilTest.java</include> <include>**/remote/NettyRemotingClientTest.java</include> <include>**/rpc/RpcTest.java</include> <include>**/server/log/LoggerServerTest.java</include> <include>**/server/entity/SQLTaskExecutionContextTest.java</include> <include>**/server/log/MasterLogFilterTest.java</include> <include>**/server/log/SensitiveDataConverterTest.java</include> <include>**/server/log/LoggerRequestProcessorTest.java</include> <!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>--> <include>**/server/log/TaskLogFilterTest.java</include> <include>**/server/log/WorkerLogFilterTest.java</include> <include>**/server/master/config/MasterConfigTest.java</include> <include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include> <include>**/server/master/runner/MasterTaskExecThreadTest.java</include> <!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>--> <include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include> <include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include> 
<include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/HostWorkerTest.java</include> <include>**/server/master/register/MasterRegistryTest.java</include> <include>**/server/master/registry/ServerNodeManagerTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include> <include>**/server/master/AlertManagerTest.java</include> <include>**/server/master/MasterCommandTest.java</include> <include>**/server/master/DependentTaskTest.java</include> <include>**/server/master/ConditionsTaskTest.java</include> <include>**/server/master/MasterExecThreadTest.java</include> <include>**/server/master/ParamsTest.java</include> <include>**/server/master/SubProcessTaskTest.java</include> <include>**/server/master/processor/TaskAckProcessorTest.java</include> <include>**/server/master/processor/TaskKillResponseProcessorTest.java</include> <include>**/server/master/processor/queue/TaskResponseServiceTest.java</include> <include>**/server/master/zk/ZKMasterClientTest.java</include> <include>**/server/register/ZookeeperRegistryCenterTest.java</include> <include>**/server/utils/DataxUtilsTest.java</include> <include>**/server/utils/ExecutionContextTestUtils.java</include> <include>**/server/utils/FlinkArgsUtilsTest.java</include> <include>**/server/utils/LogUtilsTest.java</include> <include>**/server/utils/MapReduceArgsUtilsTest.java</include> <include>**/server/utils/ParamUtilsTest.java</include> <include>**/server/utils/ProcessUtilsTest.java</include> <include>**/server/utils/SparkArgsUtilsTest.java</include> <include>**/server/worker/processor/TaskCallbackServiceTest.java</include> <include>**/server/worker/processor/TaskExecuteProcessorTest.java</include> <include>**/server/worker/registry/WorkerRegistryTest.java</include> <include>**/server/worker/shell/ShellCommandExecutorTest.java</include> <include>**/server/worker/sql/SqlExecutorTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/EnvFileTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/datax/DataxTaskTest.java</include> <!--<include>**/server/worker/task/http/HttpTaskTest.java</include>--> <include>**/server/worker/task/sqoop/SqoopTaskTest.java</include> <include>**/server/worker/task/shell/ShellTaskTest.java</include> <include>**/server/worker/task/TaskManagerTest.java</include> <include>**/server/worker/task/AbstractCommandExecutorTest.java</include> <include>**/server/worker/task/ShellTaskReturnTest.java</include> <include>**/server/worker/task/sql/SqlTaskTest.java</include> <include>**/server/worker/EnvFileTest.java</include> <include>**/server/worker/runner/TaskExecuteThreadTest.java</include> <include>**/server/worker/runner/WorkerManagerThreadTest.java</include> <include>**/service/quartz/cron/CronUtilsTest.java</include> <include>**/service/process/ProcessServiceTest.java</include> <include>**/service/zk/DefaultEnsembleProviderTest.java</include> <include>**/service/zk/ZKServerTest.java</include> <include>**/service/zk/CuratorZookeeperClientTest.java</include> <include>**/service/zk/RegisterOperatorTest.java</include> <include>**/service/queue/TaskUpdateQueueTest.java</include> <include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include> <include>**/service/log/LogClientServiceTest.java</include> <include>**/service/alert/AlertClientServiceTest.java</include> 
<include>**/dao/mapper/DataSourceUserMapperTest.java</include> <!--<iTaskUpdateQueueConsumerThreadnclude>**/dao/mapper/ErrorCommandMapperTest.java</iTaskUpdateQueueConsumerThreadnclude>--> <include>**/dao/mapper/ProcessDefinitionMapperTest.java</include> <include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapperTest.java</include> <include>**/dao/mapper/ProjectMapperTest.java</include> <include>**/dao/mapper/ProjectUserMapperTest.java</include> <include>**/dao/mapper/QueueMapperTest.java</include> <include>**/dao/mapper/ResourceUserMapperTest.java</include> <include>**/dao/mapper/ScheduleMapperTest.java</include> <include>**/dao/mapper/SessionMapperTest.java</include> <include>**/dao/mapper/TaskInstanceMapperTest.java</include> <include>**/dao/mapper/TenantMapperTest.java</include> <include>**/dao/mapper/UdfFuncMapperTest.java</include> <include>**/dao/mapper/UDFUserMapperTest.java</include> <include>**/dao/mapper/UserMapperTest.java</include> <include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include> <include>**/dao/mapper/PluginDefineTest.java</include> <include>**/dao/utils/DagHelperTest.java</include> <include>**/dao/AlertDaoTest.java</include> <include>**/dao/datasource/OracleDataSourceTest.java</include> <include>**/dao/datasource/HiveDataSourceTest.java</include> <include>**/dao/datasource/BaseDataSourceTest.java</include> <include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include> <include>**/dao/upgrade/WokrerGrouopDaoTest.java</include> <include>**/dao/upgrade/UpgradeDaoTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelTest.java</include> <include>**/plugin/alert/email/ExcelUtilsTest.java</include> <include>**/plugin/alert/email/MailUtilsTest.java</include> <include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include> <include>**/plugin/alert/wechat/WeChatSenderTest.java</include> <include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ProcessUtilsTest.java</include> <include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ScriptSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelTest.java</include> <include>**/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java</include> <include>**/plugin/alert/feishu/FeiShuSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertPluginTest.java</include> <include>**/plugin/alert/http/HttpSenderTest.java</include> <include>**/spi/params/PluginParamsTransferTest.java</include> <include>**/alert/plugin/EmailAlertPluginTest.java</include> <include>**/alert/plugin/AlertPluginManagerTest.java</include> <include>**/alert/plugin/DolphinPluginLoaderTest.java</include> <include>**/alert/utils/DingTalkUtilsTest.java</include> <include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include> <include>**/alert/utils/FuncUtilsTest.java</include> <include>**/alert/processor/AlertRequestProcessorTest.java</include> <include>**/alert/runner/AlertSenderTest.java</include> <include>**/alert/AlertServerTest.java</include> </includes> <!-- <skip>true</skip> --> 
</configuration> </plugin> <!-- jenkins plugin jacoco report--> <plugin> <groupId>org.jacoco</groupId> <artifactId>jacoco-maven-plugin</artifactId> <version>${jacoco.version}</version> <configuration> <destFile>target/jacoco.exec</destFile> <dataFile>target/jacoco.exec</dataFile> </configuration> <executions> <execution> <id>jacoco-initialize</id> <goals> <goal>prepare-agent</goal> </goals> </execution> <execution> <id>jacoco-site</id> <phase>test</phase> <goals> <goal>report</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-maven-plugin</artifactId> <version>${spotbugs.version}</version> <configuration> <xmlOutput>true</xmlOutput> <threshold>medium</threshold> <effort>default</effort> <excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile> <failOnError>true</failOnError> </configuration> <dependencies> <dependency> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs</artifactId> <version>4.0.0-beta4</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-checkstyle-plugin</artifactId> <version>${checkstyle.version}</version> <dependencies> <dependency> <groupId>com.puppycrawl.tools</groupId> <artifactId>checkstyle</artifactId> <version>8.18</version> </dependency> </dependencies> <configuration> <consoleOutput>true</consoleOutput> <encoding>UTF-8</encoding> <configLocation>style/checkstyle.xml</configLocation> <suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation> <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression> <failOnViolation>true</failOnViolation> <violationSeverity>warning</violationSeverity> <includeTestSourceDirectory>true</includeTestSourceDirectory> <sourceDirectories> <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory> </sourceDirectories> <excludes>**\/generated-sources\/</excludes> <skip>true</skip> </configuration> <executions> <execution> <phase>compile</phase> <goals> <goal>check</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${cobertura-maven-plugin.version}</version> <configuration> <check> </check> <aggregate>true</aggregate> <outputDirectory>./target/cobertura</outputDirectory> <encoding>${project.build.sourceEncoding}</encoding> <quiet>true</quiet> <format>xml</format> <instrumentation> <ignoreTrivial>true</ignoreTrivial> </instrumentation> </configuration> </plugin> </plugins> </build> <modules> <module>dolphinscheduler-alert-plugin</module> <module>dolphinscheduler-ui</module> <module>dolphinscheduler-server</module> <module>dolphinscheduler-common</module> <module>dolphinscheduler-api</module> <module>dolphinscheduler-dao</module> <module>dolphinscheduler-alert</module> <module>dolphinscheduler-dist</module> <module>dolphinscheduler-remote</module> <module>dolphinscheduler-service</module> <module>dolphinscheduler-spi</module> <module>dolphinscheduler-microbench</module> </modules> </project>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,326
[Bug][api-server] After a user is deleted, querying projects by joining t_ds_user returns null.
After a user is deleted, querying a project by joining the t_ds_user table returns null, because the deleted user no longer has a matching row. Suggested change: [ from t_ds_project p join t_ds_user u on p.user_id = u.id ] should become [ from t_ds_project p left join t_ds_user u on p.user_id = u.id ], as sketched below.
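A hedged sketch of the reporter's suggestion, with the two query strings side by side (the column list is abbreviated and the constant names are illustrative; the real mapper statement differs):

```java
public class ProjectQuerySketch {

    // Inner join: a project whose creating user was deleted has no matching
    // t_ds_user row, so the project row is dropped from the result set.
    static final String INNER_JOIN =
            "select p.*, u.user_name"
          + " from t_ds_project p"
          + " join t_ds_user u on p.user_id = u.id";

    // Left join: the project row survives and u.user_name comes back as NULL,
    // which the caller can render as an unknown/deleted user instead of
    // losing the project entirely.
    static final String LEFT_JOIN =
            "select p.*, u.user_name"
          + " from t_ds_project p"
          + " left join t_ds_user u on p.user_id = u.id";
}
```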
https://github.com/apache/dolphinscheduler/issues/5326
https://github.com/apache/dolphinscheduler/pull/5394
6b8461e901de65000f4f8f34ebfe011946e787d3
cbbe9c333c39a0163c100c5ced270f76301c97b3
"2021-04-20T03:12:14Z"
java
"2021-04-29T06:50:05Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROJECT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROJECT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.IMPORT_PROCESS_DEFINE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_AUTHORIZED_PROJECT; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROJECT_DETAILS_BY_ID_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_UNAUTHORIZED_PROJECT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROJECT_ERROR; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; import springfox.documentation.annotations.ApiIgnore; /** * project controller */ @Api(tags = "PROJECT_TAG") @RestController @RequestMapping("projects") public class ProjectController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(ProjectController.class); @Autowired private ProjectService projectService; @Autowired private ProcessDefinitionService processDefinitionService; /** * create project * * @param loginUser login user * @param projectName project name * @param description 
description * @return returns an error if it exists */ @ApiOperation(value = "createProject", notes = "CREATE_PROJECT_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "projectName", value = "PROJECT_NAME", dataType = "String"), @ApiImplicitParam(name = "description", value = "PROJECT_DESC", dataType = "String") }) @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_PROJECT_ERROR) public Result createProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("projectName") String projectName, @RequestParam(value = "description", required = false) String description) { logger.info("login user {}, create project name: {}, desc: {}", loginUser.getUserName(), projectName, description); Map<String, Object> result = projectService.createProject(loginUser, projectName, description); return returnDataList(result); } /** * updateProcessInstance project * * @param loginUser login user * @param projectId project id * @param projectName project name * @param description description * @return update result code */ @ApiOperation(value = "updateProject", notes = "UPDATE_PROJECT_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "projectName", value = "PROJECT_NAME", dataType = "String"), @ApiImplicitParam(name = "description", value = "PROJECT_DESC", dataType = "String") }) @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_PROJECT_ERROR) public Result updateProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("projectId") Integer projectId, @RequestParam("projectName") String projectName, @RequestParam(value = "description", required = false) String description) { logger.info("login user {} , updateProcessInstance project name: {}, desc: {}", loginUser.getUserName(), projectName, description); Map<String, Object> result = projectService.update(loginUser, projectId, projectName, description); return returnDataList(result); } /** * query project details by id * * @param loginUser login user * @param projectId project id * @return project detail information */ @ApiOperation(value = "queryProjectById", notes = "QUERY_PROJECT_BY_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100") }) @GetMapping(value = "/query-by-id") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_PROJECT_DETAILS_BY_ID_ERROR) public Result queryProjectById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("projectId") Integer projectId) { logger.info("login user {}, query project by id: {}", loginUser.getUserName(), projectId); Map<String, Object> result = projectService.queryById(projectId); return returnDataList(result); } /** * query project list paging * * @param loginUser login user * @param searchVal search value * @param pageSize page size * @param pageNo page number * @return project list which the login user have permission to see */ @ApiOperation(value = "queryProjectListPaging", notes = "QUERY_PROJECT_LIST_PAGING_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"), @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "20"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "1") }) @GetMapping(value = 
"/list-paging") @ResponseStatus(HttpStatus.OK) @ApiException(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR) public Result queryProjectListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageSize") Integer pageSize, @RequestParam("pageNo") Integer pageNo ) { logger.info("login user {}, query project list paging", loginUser.getUserName()); Map<String, Object> result = checkPageParams(pageNo, pageSize); if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); } searchVal = ParameterUtils.handleEscapes(searchVal); result = projectService.queryProjectListPaging(loginUser, pageSize, pageNo, searchVal); return returnDataListPaging(result); } /** * delete project by id * * @param loginUser login user * @param projectId project id * @return delete result code */ @ApiOperation(value = "deleteProjectById", notes = "DELETE_PROJECT_BY_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100") }) @GetMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_PROJECT_ERROR) public Result deleteProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("projectId") Integer projectId ) { logger.info("login user {}, delete project: {}.", loginUser.getUserName(), projectId); Map<String, Object> result = projectService.deleteProject(loginUser, projectId); return returnDataList(result); } /** * query unauthorized project * * @param loginUser login user * @param userId user id * @return the projects which user have not permission to see */ @ApiOperation(value = "queryUnauthorizedProject", notes = "QUERY_UNAUTHORIZED_PROJECT_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100") }) @GetMapping(value = "/unauth-project") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_UNAUTHORIZED_PROJECT_ERROR) public Result queryUnauthorizedProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { logger.info("login user {}, query unauthorized project by user id: {}.", loginUser.getUserName(), userId); Map<String, Object> result = projectService.queryUnauthorizedProject(loginUser, userId); return returnDataList(result); } /** * query authorized project * * @param loginUser login user * @param userId user id * @return projects which the user have permission to see, Except for items created by this user */ @ApiOperation(value = "queryAuthorizedProject", notes = "QUERY_AUTHORIZED_PROJECT_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100") }) @GetMapping(value = "/authed-project") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_AUTHORIZED_PROJECT) public Result queryAuthorizedProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { logger.info("login user {}, query authorized project by user id: {}.", loginUser.getUserName(), userId); Map<String, Object> result = projectService.queryAuthorizedProject(loginUser, userId); return returnDataList(result); } /** * query authorized and user created project * * @param loginUser login user * @return projects which the user create and authorized */ @ApiOperation(value = "queryProjectCreatedAndAuthorizedByUser", notes = 
"QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_NOTES") @GetMapping(value = "/created-and-authorized-project") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR) public Result queryProjectCreatedAndAuthorizedByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user {}, query authorized and user created project by user id: {}.", RegexUtils.escapeNRT(loginUser.getUserName()), RegexUtils.escapeNRT(String.valueOf(loginUser.getId()))); Map<String, Object> result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser); return returnDataList(result); } /** * import process definition * * @param loginUser login user * @param file resource file * @param projectName project name * @return import result code */ @ApiOperation(value = "importProcessDefinition", notes= "EXPORT_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") }) @PostMapping(value = "/import-definition") @ApiException(IMPORT_PROCESS_DEFINE_ERROR) public Result importProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("file") MultipartFile file, @RequestParam("projectName") String projectName) { logger.info("import process definition by id, login user:{}, project: {}", loginUser.getUserName(), projectName); Map<String, Object> result = processDefinitionService.importProcessDefinition(loginUser, file, projectName); return returnDataList(result); } /** * query all project list * * @param loginUser login user * @return all project list */ @ApiOperation(value = "queryAllProjectList", notes = "QUERY_ALL_PROJECT_LIST_NOTES") @GetMapping(value = "/query-project-list") @ResponseStatus(HttpStatus.OK) @ApiException(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR) public Result queryAllProjectList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user {}, query all project list", loginUser.getUserName()); Map<String, Object> result = projectService.queryAllProjectList(); return returnDataList(result); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,326
[Bug][api-server] After deleting a user, querying the project by joining the t_ds_user table returns null.
After a user is deleted, querying the project by joining the t_ds_user table returns null. Suggested that [ from t_ds_project p join t_ds_user u on p.user_id = u.id ] be changed to [ from t_ds_project p left join t_ds_user u on p.user_id = u.id ].
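To make the suggested fix concrete, here is a minimal MyBatis-annotation sketch of the two join variants. Only the table names, the join condition, and the join -> left join change come from the issue body; the mapper interface, method names, and selected columns are hypothetical, and the real fix may well live in an XML mapper instead.

import java.util.List;

import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.ibatis.annotations.Select;

// Hypothetical mapper for illustration; see the lead-in for what is assumed.
public interface ProjectJoinSketchMapper {

    // Inner join: projects whose creating user was deleted are dropped from
    // the result set entirely, which surfaces as the "returns null" symptom.
    @Select("select p.*, u.user_name as user_name"
            + " from t_ds_project p join t_ds_user u on p.user_id = u.id")
    List<Project> queryProjectsInnerJoin();

    // Left join: those projects are kept, with u.user_name coming back as
    // null, so the caller can still see and handle the orphaned project.
    @Select("select p.*, u.user_name as user_name"
            + " from t_ds_project p left join t_ds_user u on p.user_id = u.id")
    List<Project> queryProjectsLeftJoin();
}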
https://github.com/apache/dolphinscheduler/issues/5326
https://github.com/apache/dolphinscheduler/pull/5394
6b8461e901de65000f4f8f34ebfe011946e787d3
cbbe9c333c39a0163c100c5ced270f76301c97b3
"2021-04-20T03:12:14Z"
java
"2021-04-29T06:50:05Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.enums; import java.util.Locale; import org.springframework.context.i18n.LocaleContextHolder; /** * status enum // todo #4855 One category one interval */ public enum Status { SUCCESS(0, "success", "成功"), INTERNAL_SERVER_ERROR_ARGS(10000, "Internal Server Error: {0}", "服务端异常: {0}"), REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效"), TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid", "任务超时参数无效"), USER_NAME_EXIST(10003, "user name already exists", "用户名已存在"), USER_NAME_NULL(10004, "user name is null", "用户名不能为空"), HDFS_OPERATION_ERROR(10006, "hdfs operation error", "hdfs操作错误"), TASK_INSTANCE_NOT_FOUND(10008, "task instance not found", "任务实例不存在"), OS_TENANT_CODE_EXIST(10009, "os tenant code {0} already exists", "操作系统租户[{0}]已存在"), USER_NOT_EXIST(10010, "user {0} not exists", "用户[{0}]不存在"), ALERT_GROUP_NOT_EXIST(10011, "alarm group not found", "告警组不存在"), ALERT_GROUP_EXIST(10012, "alarm group already exists", "告警组名称已存在"), USER_NAME_PASSWD_ERROR(10013, "user name or password error", "用户名或密码错误"), LOGIN_SESSION_FAILED(10014, "create session failed!", "创建session失败"), DATASOURCE_EXIST(10015, "data source name already exists", "数据源名称已存在"), DATASOURCE_CONNECT_FAILED(10016, "data source connection failed", "建立数据源连接失败"), TENANT_NOT_EXIST(10017, "tenant not exists", "租户不存在"), PROJECT_NOT_FOUNT(10018, "project {0} not found ", "项目[{0}]不存在"), PROJECT_ALREADY_EXISTS(10019, "project {0} already exists", "项目名称[{0}]已存在"), TASK_INSTANCE_NOT_EXISTS(10020, "task instance {0} does not exist", "任务实例[{0}]不存在"), TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE(10021, "task instance {0} is not sub process instance", "任务实例[{0}]不是子流程实例"), SCHEDULE_CRON_NOT_EXISTS(10022, "scheduler crontab {0} does not exist", "调度配置定时表达式[{0}]不存在"), SCHEDULE_CRON_ONLINE_FORBID_UPDATE(10023, "online status does not allow update operations", "调度配置上线状态不允许修改"), SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}", "调度配置定时表达式验证失败: {0}"), MASTER_NOT_EXISTS(10025, "master does not exist", "无可用master节点"), SCHEDULE_STATUS_UNKNOWN(10026, "unknown status: {0}", "未知状态: {0}"), CREATE_ALERT_GROUP_ERROR(10027, "create alert group error", "创建告警组错误"), QUERY_ALL_ALERTGROUP_ERROR(10028, "query all alertgroup error", "查询告警组错误"), LIST_PAGING_ALERT_GROUP_ERROR(10029, "list paging alert group error", "分页查询告警组错误"), UPDATE_ALERT_GROUP_ERROR(10030, "update alert group error", "更新告警组错误"), DELETE_ALERT_GROUP_ERROR(10031, "delete alert group error", "删除告警组错误"), ALERT_GROUP_GRANT_USER_ERROR(10032, "alert group grant user error", "告警组授权用户错误"), CREATE_DATASOURCE_ERROR(10033, "create datasource error", "创建数据源错误"), UPDATE_DATASOURCE_ERROR(10034, "update datasource error", "更新数据源错误"), 
QUERY_DATASOURCE_ERROR(10035, "query datasource error", "查询数据源错误"), CONNECT_DATASOURCE_FAILURE(10036, "connect datasource failure", "建立数据源连接失败"), CONNECTION_TEST_FAILURE(10037, "connection test failure", "测试数据源连接失败"), DELETE_DATA_SOURCE_FAILURE(10038, "delete data source failure", "删除数据源失败"), VERIFY_DATASOURCE_NAME_FAILURE(10039, "verify datasource name failure", "验证数据源名称失败"), UNAUTHORIZED_DATASOURCE(10040, "unauthorized datasource", "未经授权的数据源"), AUTHORIZED_DATA_SOURCE(10041, "authorized data source", "授权数据源失败"), LOGIN_SUCCESS(10042, "login success", "登录成功"), USER_LOGIN_FAILURE(10043, "user login failure", "用户登录失败"), LIST_WORKERS_ERROR(10044, "list workers error", "查询worker列表错误"), LIST_MASTERS_ERROR(10045, "list masters error", "查询master列表错误"), UPDATE_PROJECT_ERROR(10046, "update project error", "更新项目信息错误"), QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047, "query project details by id error", "查询项目详细信息错误"), CREATE_PROJECT_ERROR(10048, "create project error", "创建项目错误"), LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049, "login user query project list paging error", "分页查询项目列表错误"), DELETE_PROJECT_ERROR(10050, "delete project error", "删除项目错误"), QUERY_UNAUTHORIZED_PROJECT_ERROR(10051, "query unauthorized project error", "查询未授权项目错误"), QUERY_AUTHORIZED_PROJECT(10052, "query authorized project", "查询授权项目错误"), QUERY_QUEUE_LIST_ERROR(10053, "query queue list error", "查询队列列表错误"), CREATE_RESOURCE_ERROR(10054, "create resource error", "创建资源错误"), UPDATE_RESOURCE_ERROR(10055, "update resource error", "更新资源错误"), QUERY_RESOURCES_LIST_ERROR(10056, "query resources list error", "查询资源列表错误"), QUERY_RESOURCES_LIST_PAGING(10057, "query resources list paging", "分页查询资源列表错误"), DELETE_RESOURCE_ERROR(10058, "delete resource error", "删除资源错误"), VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059, "verify resource by name and type error", "资源名称或类型验证错误"), VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060, "view resource file online error", "查看资源文件错误"), CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061, "create resource file online error", "创建资源文件错误"), RESOURCE_FILE_IS_EMPTY(10062, "resource file is empty", "资源文件内容不能为空"), EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063, "edit resource file online error", "更新资源文件错误"), DOWNLOAD_RESOURCE_FILE_ERROR(10064, "download resource file error", "下载资源文件错误"), CREATE_UDF_FUNCTION_ERROR(10065, "create udf function error", "创建UDF函数错误"), VIEW_UDF_FUNCTION_ERROR(10066, "view udf function error", "查询UDF函数错误"), UPDATE_UDF_FUNCTION_ERROR(10067, "update udf function error", "更新UDF函数错误"), QUERY_UDF_FUNCTION_LIST_PAGING_ERROR(10068, "query udf function list paging error", "分页查询UDF函数列表错误"), QUERY_DATASOURCE_BY_TYPE_ERROR(10069, "query datasource by type error", "查询数据源信息错误"), VERIFY_UDF_FUNCTION_NAME_ERROR(10070, "verify udf function name error", "UDF函数名称验证错误"), DELETE_UDF_FUNCTION_ERROR(10071, "delete udf function error", "删除UDF函数错误"), AUTHORIZED_FILE_RESOURCE_ERROR(10072, "authorized file resource error", "授权资源文件错误"), AUTHORIZE_RESOURCE_TREE(10073, "authorize resource tree display error", "授权资源目录树错误"), UNAUTHORIZED_UDF_FUNCTION_ERROR(10074, "unauthorized udf function error", "查询未授权UDF函数错误"), AUTHORIZED_UDF_FUNCTION_ERROR(10075, "authorized udf function error", "授权UDF函数错误"), CREATE_SCHEDULE_ERROR(10076, "create schedule error", "创建调度配置错误"), UPDATE_SCHEDULE_ERROR(10077, "update schedule error", "更新调度配置错误"), PUBLISH_SCHEDULE_ONLINE_ERROR(10078, "publish schedule online error", "上线调度配置错误"), OFFLINE_SCHEDULE_ERROR(10079, "offline schedule error", "下线调度配置错误"), QUERY_SCHEDULE_LIST_PAGING_ERROR(10080, "query schedule list paging error", 
"分页查询调度配置列表错误"), QUERY_SCHEDULE_LIST_ERROR(10081, "query schedule list error", "查询调度配置列表错误"), QUERY_TASK_LIST_PAGING_ERROR(10082, "query task list paging error", "分页查询任务列表错误"), QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083, "query task record list paging error", "分页查询任务记录错误"), CREATE_TENANT_ERROR(10084, "create tenant error", "创建租户错误"), QUERY_TENANT_LIST_PAGING_ERROR(10085, "query tenant list paging error", "分页查询租户列表错误"), QUERY_TENANT_LIST_ERROR(10086, "query tenant list error", "查询租户列表错误"), UPDATE_TENANT_ERROR(10087, "update tenant error", "更新租户错误"), DELETE_TENANT_BY_ID_ERROR(10088, "delete tenant by id error", "删除租户错误"), VERIFY_OS_TENANT_CODE_ERROR(10089, "verify os tenant code error", "操作系统租户验证错误"), CREATE_USER_ERROR(10090, "create user error", "创建用户错误"), QUERY_USER_LIST_PAGING_ERROR(10091, "query user list paging error", "分页查询用户列表错误"), UPDATE_USER_ERROR(10092, "update user error", "更新用户错误"), DELETE_USER_BY_ID_ERROR(10093, "delete user by id error", "删除用户错误"), GRANT_PROJECT_ERROR(10094, "grant project error", "授权项目错误"), GRANT_RESOURCE_ERROR(10095, "grant resource error", "授权资源错误"), GRANT_UDF_FUNCTION_ERROR(10096, "grant udf function error", "授权UDF函数错误"), GRANT_DATASOURCE_ERROR(10097, "grant datasource error", "授权数据源错误"), GET_USER_INFO_ERROR(10098, "get user info error", "获取用户信息错误"), USER_LIST_ERROR(10099, "user list error", "查询用户列表错误"), VERIFY_USERNAME_ERROR(10100, "verify username error", "用户名验证错误"), UNAUTHORIZED_USER_ERROR(10101, "unauthorized user error", "查询未授权用户错误"), AUTHORIZED_USER_ERROR(10102, "authorized user error", "查询授权用户错误"), QUERY_TASK_INSTANCE_LOG_ERROR(10103, "view task instance log error", "查询任务实例日志错误"), DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104, "download task instance log file error", "下载任务日志文件错误"), CREATE_PROCESS_DEFINITION(10105, "create process definition", "创建工作流错误"), VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106, "verify process definition name unique error", "工作流定义名称验证错误"), UPDATE_PROCESS_DEFINITION_ERROR(10107, "update process definition error", "更新工作流定义错误"), RELEASE_PROCESS_DEFINITION_ERROR(10108, "release process definition error", "上线工作流错误"), QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109, "query datail of process definition error", "查询工作流详细信息错误"), QUERY_PROCESS_DEFINITION_LIST(10110, "query process definition list", "查询工作流列表错误"), ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111, "encapsulation treeview structure error", "查询工作流树形图数据错误"), GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112, "get tasks list by process definition id error", "查询工作流定义节点信息错误"), QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113, "query process instance list paging error", "分页查询工作流实例列表错误"), QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114, "query task list by process instance id error", "查询任务实例列表错误"), UPDATE_PROCESS_INSTANCE_ERROR(10115, "update process instance error", "更新工作流实例错误"), QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116, "query process instance by id error", "查询工作流实例错误"), DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117, "delete process instance by id error", "删除工作流实例错误"), QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118, "query sub process instance detail info by task id error", "查询子流程任务实例错误"), QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119, "query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"), QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120, "query process instance all variables error", "查询工作流自定义变量信息错误"), ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121, "encapsulation process instance 
gantt structure error", "查询工作流实例甘特图数据错误"), QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122, "query process definition list paging error", "分页查询工作流定义列表错误"), SIGN_OUT_ERROR(10123, "sign out error", "退出错误"), OS_TENANT_CODE_HAS_ALREADY_EXISTS(10124, "os tenant code has already exists", "操作系统租户已存在"), IP_IS_EMPTY(10125, "ip is empty", "IP地址不能为空"), SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}", "调度配置上线错误[{0}]"), CREATE_QUEUE_ERROR(10127, "create queue error", "创建队列错误"), QUEUE_NOT_EXIST(10128, "queue {0} not exists", "队列ID[{0}]不存在"), QUEUE_VALUE_EXIST(10129, "queue value {0} already exists", "队列值[{0}]已存在"), QUEUE_NAME_EXIST(10130, "queue name {0} already exists", "队列名称[{0}]已存在"), UPDATE_QUEUE_ERROR(10131, "update queue error", "更新队列信息错误"), NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required", "数据未变更,不需要更新队列信息"), VERIFY_QUEUE_ERROR(10133, "verify queue error", "验证队列信息错误"), NAME_NULL(10134, "name must be not null", "名称不能为空"), NAME_EXIST(10135, "name {0} already exists", "名称[{0}]已存在"), SAVE_ERROR(10136, "save error", "保存错误"), DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!", "请先删除全部工作流定义"), BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117, "batch delete process instance by ids {0} error", "批量删除工作流实例错误"), PREVIEW_SCHEDULE_ERROR(10139, "preview schedule error", "预览调度配置错误"), PARSE_TO_CRON_EXPRESSION_ERROR(10140, "parse cron to cron expression error", "解析调度表达式错误"), SCHEDULE_START_TIME_END_TIME_SAME(10141, "The start time must not be the same as the end", "开始时间不能和结束时间一样"), DELETE_TENANT_BY_ID_FAIL(10142, "delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"), DELETE_TENANT_BY_ID_FAIL_DEFINES(10143, "delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"), DELETE_TENANT_BY_ID_FAIL_USERS(10144, "delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"), DELETE_WORKER_GROUP_BY_ID_FAIL(10145, "delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"), QUERY_WORKER_GROUP_FAIL(10146, "query worker group fail ", "查询worker分组失败"), DELETE_WORKER_GROUP_FAIL(10147, "delete worker group fail ", "删除worker分组失败"), USER_DISABLED(10148, "The current user is disabled", "当前用户已停用"), COPY_PROCESS_DEFINITION_ERROR(10149, "copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"), MOVE_PROCESS_DEFINITION_ERROR(10150, "move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"), SWITCH_PROCESS_DEFINITION_VERSION_ERROR(10151, "Switch process definition version error", "切换工作流版本出错"), SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR(10152 , "Switch process definition version error: not exists process definition, [process definition id {0}]", "切换工作流版本出错:工作流不存在,[工作流id {0}]"), SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR(10153 , "Switch process definition version error: not exists process definition version, [process definition id {0}] [version number {1}]", "切换工作流版本出错:工作流版本信息不存在,[工作流id {0}] [版本号 {1}]"), QUERY_PROCESS_DEFINITION_VERSIONS_ERROR(10154, "query process definition versions error", "查询工作流历史版本信息出错"), QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR(10155 , "query process definition versions error: [page number:{0}] < 1 or [page size:{1}] < 1", "查询工作流历史版本出错:[pageNo:{0}] < 1 或 [pageSize:{1}] < 1"), 
DELETE_PROCESS_DEFINITION_VERSION_ERROR(10156, "delete process definition version error", "删除工作流历史版本出错"), QUERY_USER_CREATED_PROJECT_ERROR(10157, "query user created project error error", "查询用户创建的项目错误"), PROCESS_DEFINITION_IDS_IS_EMPTY(10158, "process definition ids is empty", "工作流IDS不能为空"), BATCH_COPY_PROCESS_DEFINITION_ERROR(10159, "batch copy process definition error", "复制工作流错误"), BATCH_MOVE_PROCESS_DEFINITION_ERROR(10160, "batch move process definition error", "移动工作流错误"), QUERY_WORKFLOW_LINEAGE_ERROR(10161, "query workflow lineage error", "查询血缘失败"), QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR(10162, "query authorized and user created project error error", "查询授权的和用户创建的项目错误"), DELETE_PROCESS_DEFINITION_BY_ID_FAIL(10163, "delete process definition by id fail, for there are {0} process instances in executing using it", "删除工作流定义失败,有[{0}]个运行中的工作流实例正在使用"), CHECK_OS_TENANT_CODE_ERROR(10164, "Please enter the English os tenant code", "请输入英文操作系统租户"), FORCE_TASK_SUCCESS_ERROR(10165, "force task success error", "强制成功任务实例错误"), TASK_INSTANCE_STATE_OPERATION_ERROR(10166, "the status of task instance {0} is {1},Cannot perform force success operation", "任务实例[{0}]的状态是[{1}],无法执行强制成功操作"), DATASOURCE_TYPE_NOT_EXIST(10167, "data source type not exist", "数据源类型不存在"), PROCESS_DEFINITION_NAME_EXIST(10168, "process definition name {0} already exists", "工作流定义名称[{0}]已存在"), DATASOURCE_DB_TYPE_ILLEGAL(10169, "datasource type illegal", "数据源类型参数不合法"), DATASOURCE_PORT_ILLEGAL(10170, "datasource port illegal", "数据源端口参数不合法"), DATASOURCE_OTHER_PARAMS_ILLEGAL(10171, "datasource other params illegal", "数据源其他参数不合法"), DATASOURCE_NAME_ILLEGAL(10172, "datasource name illegal", "数据源名称不合法"), DATASOURCE_HOST_ILLEGAL(10173, "datasource host illegal", "数据源HOST不合法"), DELETE_WORKER_GROUP_NOT_EXIST(10174, "delete worker group not exist ", "删除worker分组不存在"), CREATE_WORKER_GROUP_FORBIDDEN_IN_DOCKER(10175, "create worker group forbidden in docker ", "创建worker分组在docker中禁止"), DELETE_WORKER_GROUP_FORBIDDEN_IN_DOCKER(10176, "delete worker group forbidden in docker ", "删除worker分组在docker中禁止"), WORKER_ADDRESS_INVALID(10177, "worker address {0} invalid", "worker地址[{0}]无效"), QUERY_WORKER_ADDRESS_LIST_FAIL(10178, "query worker address list fail ", "查询worker地址列表失败"), UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"), UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"), RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在"), RESOURCE_EXIST(20005, "resource already exists", "资源已存在"), RESOURCE_SUFFIX_NOT_SUPPORT_VIEW(20006, "resource suffix do not support online viewing", "资源文件后缀不支持查看"), RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit", "上传资源文件大小超过限制"), RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified", "资源文件后缀不支持修改"), UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar", "UDF资源文件后缀名只支持[jar]"), HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"), RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"), RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists in hdfs!", "资源文件[{0}]在hdfs中不存在"), UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"), RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"), PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"), RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no 
permission,please view the task node and remove error resource", "请检查任务节点并移除无权限或者已删除的资源"), RESOURCE_IS_AUTHORIZED(20017, "resource is authorized to user {0},suffix not allowed to be modified", "资源文件已授权其他用户[{0}],后缀不允许修改"), USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"), USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} is not has project {1} permission", "当前用户[{0}]没有[{1}]项目的操作权限"), PROCESS_INSTANCE_NOT_EXIST(50001, "process instance {0} does not exist", "工作流实例[{0}]不存在"), PROCESS_INSTANCE_EXIST(50002, "process instance {0} already exists", "工作流实例[{0}]已存在"), PROCESS_DEFINE_NOT_EXIST(50003, "process definition {0} does not exist", "工作流定义[{0}]不存在"), PROCESS_DEFINE_NOT_RELEASE(50004, "process definition {0} not on line", "工作流定义[{0}]不是上线状态"), PROCESS_INSTANCE_ALREADY_CHANGED(50005, "the status of process instance {0} is already {1}", "工作流实例[{0}]的状态已经是[{1}]"), PROCESS_INSTANCE_STATE_OPERATION_ERROR(50006, "the status of process instance {0} is {1},Cannot perform {2} operation", "工作流实例[{0}]的状态是[{1}],无法执行[{2}]操作"), SUB_PROCESS_INSTANCE_NOT_EXIST(50007, "the task belong to process instance does not exist", "子工作流实例不存在"), PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit", "工作流定义[{0}]不允许修改"), PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ...", "工作流实例[{0}]正在执行命令,请稍等..."), PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance", "工作流实例[{0}]不是子工作流实例"), TASK_INSTANCE_STATE_COUNT_ERROR(50011, "task instance state count error", "查询各状态任务实例数错误"), COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error", "查询各状态流程实例数错误"), COUNT_PROCESS_DEFINITION_USER_ERROR(50013, "count process definition user error", "查询各用户流程定义数错误"), START_PROCESS_INSTANCE_ERROR(50014, "start process instance error", "运行工作流实例错误"), EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error", "操作工作流实例错误"), CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error", "工作流定义错误"), QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017, "query recipients and copyers by process definition error", "查询收件人和抄送人错误"), DATA_IS_NOT_VALID(50017, "data {0} not valid", "数据[{0}]无效"), DATA_IS_NULL(50018, "data {0} is null", "数据[{0}]不能为空"), PROCESS_NODE_HAS_CYCLE(50019, "process node has cycle", "流程节点间存在循环依赖"), PROCESS_NODE_S_PARAMETER_INVALID(50020, "process node {0} parameter invalid", "流程节点[{0}]参数无效"), PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"), DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022, "delete process definition by id error", "删除工作流定义错误"), SCHEDULE_CRON_STATE_ONLINE(50023, "the status of schedule {0} is already on line", "调度配置[{0}]已上线"), DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024, "delete schedule by id error", "删除调度配置错误"), BATCH_DELETE_PROCESS_DEFINE_ERROR(50025, "batch delete process definition error", "批量删除工作流定义错误"), BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026, "batch delete process definition by ids {0} error", "批量删除工作流定义[{0}]错误"), TENANT_NOT_SUITABLE(50027, "there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"), EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028, "export process definition by id error", "导出工作流定义错误"), BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028, "batch export process definition by ids error", "批量导出工作流定义错误"), IMPORT_PROCESS_DEFINE_ERROR(50029, "import process definition error", "导入工作流定义错误"), PROCESS_DAG_IS_EMPTY(50030, "process dag can not be empty", 
"工作流dag不能为空"), HDFS_NOT_STARTUP(60001, "hdfs not startup", "hdfs未启用"), /** * for monitor */ QUERY_DATABASE_STATE_ERROR(70001, "query database state error", "查询数据库状态错误"), QUERY_ZOOKEEPER_STATE_ERROR(70002, "query zookeeper state error", "查询zookeeper状态错误"), CREATE_ACCESS_TOKEN_ERROR(70010, "create access token error", "创建访问token错误"), GENERATE_TOKEN_ERROR(70011, "generate token error", "生成token错误"), QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012, "query access token list paging error", "分页查询访问token列表错误"), UPDATE_ACCESS_TOKEN_ERROR(70013, "update access token error", "更新访问token错误"), DELETE_ACCESS_TOKEN_ERROR(70014, "delete access token error", "删除访问token错误"), ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"), COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error", "查询各状态任务实例数错误"), NEGTIVE_SIZE_NUMBER_ERROR(80002, "query size number error", "查询size错误"), START_TIME_BIGGER_THAN_END_TIME_ERROR(80003, "start time bigger than end time error", "开始时间在结束时间之后错误"), QUEUE_COUNT_ERROR(90001, "queue count error", "查询队列数据错误"), KERBEROS_STARTUP_STATE(100001, "get kerberos startup state error", "获取kerberos启动状态错误"), //plugin PLUGIN_NOT_A_UI_COMPONENT(110001, "query plugin error, this plugin has no UI component", "查询插件错误,此插件无UI组件"), QUERY_PLUGINS_RESULT_IS_NULL(110002, "query plugins result is null", "查询插件为空"), QUERY_PLUGINS_ERROR(110003, "query plugins error", "查询插件错误"), QUERY_PLUGIN_DETAIL_RESULT_IS_NULL(110004, "query plugin detail result is null", "查询插件详情结果为空"), UPDATE_ALERT_PLUGIN_INSTANCE_ERROR(110005, "update alert plugin instance error", "更新告警组和告警组插件实例错误"), DELETE_ALERT_PLUGIN_INSTANCE_ERROR(110006, "delete alert plugin instance error", "删除告警组和告警组插件实例错误"), GET_ALERT_PLUGIN_INSTANCE_ERROR(110007, "get alert plugin instance error", "获取告警组和告警组插件实例错误"), CREATE_ALERT_PLUGIN_INSTANCE_ERROR(110008, "create alert plugin instance error", "创建告警组和告警组插件实例错误"), QUERY_ALL_ALERT_PLUGIN_INSTANCE_ERROR(110009, "query all alert plugin instance error", "查询所有告警实例失败"), PLUGIN_INSTANCE_ALREADY_EXIT(110010, "plugin instance already exit", "该告警插件实例已存在"), LIST_PAGING_ALERT_PLUGIN_INSTANCE_ERROR(110011, "query plugin instance page error", "分页查询告警实例失败"), DELETE_ALERT_PLUGIN_INSTANCE_ERROR_HAS_ALERT_GROUP_ASSOCIATED(110012, "failed to delete the alert instance, there is an alarm group associated with this alert instance", "删除告警实例失败,存在与此告警实例关联的警报组"), PROCESS_DEFINITION_VERSION_IS_USED(110013,"this process definition version is used","此工作流定义版本被使用"); private final int code; private final String enMsg; private final String zhMsg; Status(int code, String enMsg, String zhMsg) { this.code = code; this.enMsg = enMsg; this.zhMsg = zhMsg; } public int getCode() { return this.code; } public String getMsg() { if (Locale.SIMPLIFIED_CHINESE.getLanguage().equals(LocaleContextHolder.getLocale().getLanguage())) { return this.zhMsg; } else { return this.enMsg; } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,326
[Bug][api-server] After deleting a user, querying the project by joining the t_ds_user table returns null.
After a user is deleted, querying the project by joining the t_ds_user table returns null. Suggested that [ from t_ds_project p join t_ds_user u on p.user_id = u.id ] be changed to [ from t_ds_project p left join t_ds_user u on p.user_id = u.id ].
https://github.com/apache/dolphinscheduler/issues/5326
https://github.com/apache/dolphinscheduler/pull/5394
6b8461e901de65000f4f8f34ebfe011946e787d3
cbbe9c333c39a0163c100c5ced270f76301c97b3
"2021-04-20T03:12:14Z"
java
"2021-04-29T06:50:05Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; /** * project service **/ public interface ProjectService { /** * create project * * @param loginUser login user * @param name project name * @param desc description * @return returns an error if it exists */ Map<String, Object> createProject(User loginUser, String name, String desc); /** * query project details by id * * @param projectId project id * @return project detail information */ Map<String, Object> queryById(Integer projectId); /** * check project and authorization * * @param loginUser login user * @param project project * @param projectName project name * @return true if the login user have permission to see the project */ Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName); boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result); /** * admin can view all projects * * @param loginUser login user * @param searchVal search value * @param pageSize page size * @param pageNo page number * @return project list which the login user have permission to see */ Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal); /** * delete project by id * * @param loginUser login user * @param projectId project id * @return delete result code */ Map<String, Object> deleteProject(User loginUser, Integer projectId); /** * updateProcessInstance project * * @param loginUser login user * @param projectId project id * @param projectName project name * @param desc description * @return update result code */ Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc); /** * query unauthorized project * * @param loginUser login user * @param userId user id * @return the projects which user have not permission to see */ Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId); /** * query authorized project * * @param loginUser login user * @param userId user id * @return projects which the user have permission to see, Except for items created by this user */ Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId); /** * query authorized project * * @param loginUser login user * @return projects which the user have permission to see, Except for items created by this user */ Map<String, Object> queryProjectCreatedByUser(User loginUser); /** * query all project list that have one or more process definitions. 
* * @return project list */ Map<String, Object> queryAllProjectList(); /** * query authorized and user-created project list by user id * * @param loginUser login user * @return projects which the user created or is authorized to see */ Map<String, Object> queryProjectCreatedAndAuthorizedByUser(User loginUser); }
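For readers new to this service layer, a short hypothetical caller showing the Map-based result convention the interface methods above share. Constants.STATUS and Constants.DATA_LIST are the keys the implementations in this codebase populate; the surrounding wiring (projectService, loginUser) is assumed to be injected elsewhere:

// Illustrative caller; not part of the interface above.
Map<String, Object> result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser);
if (result.get(Constants.STATUS) == Status.SUCCESS) {
    @SuppressWarnings("unchecked")
    List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
    // ... use the projects the user created or is authorized to see
}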
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,326
[Bug][api-server] After deleting a user, querying the project by joining the t_ds_user table returns null.
After a user is deleted, querying the project by joining the t_ds_user table returns null. Suggested that [ from t_ds_project p join t_ds_user u on p.user_id = u.id ] be changed to [ from t_ds_project p left join t_ds_user u on p.user_id = u.id ].
https://github.com/apache/dolphinscheduler/issues/5326
https://github.com/apache/dolphinscheduler/pull/5394
6b8461e901de65000f4f8f34ebfe011946e787d3
cbbe9c333c39a0163c100c5ced270f76301c97b3
"2021-04-20T03:12:14Z"
java
"2021-04-29T06:50:05Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service.impl; import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * project service impl **/ @Service public class ProjectServiceImpl extends BaseServiceImpl implements ProjectService { @Autowired private ProjectMapper projectMapper; @Autowired private ProjectUserMapper projectUserMapper; @Autowired private ProcessDefinitionMapper processDefinitionMapper; /** * create project * * @param loginUser login user * @param name project name * @param desc description * @return returns an error if it exists */ @Override public Map<String, Object> createProject(User loginUser, String name, String desc) { Map<String, Object> result = new HashMap<>(); Map<String, Object> descCheck = checkDesc(desc); if (descCheck.get(Constants.STATUS) != Status.SUCCESS) { return descCheck; } Project project = projectMapper.queryByName(name); if (project != null) { putMsg(result, Status.PROJECT_ALREADY_EXISTS, name); return result; } Date now = new Date(); project = Project .newBuilder() .name(name) .description(desc) .userId(loginUser.getId()) .userName(loginUser.getUserName()) .createTime(now) .updateTime(now) .build(); if (projectMapper.insert(project) > 0) { result.put(Constants.DATA_LIST, project.getId()); putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.CREATE_PROJECT_ERROR); } return result; } /** * query project details by id * * @param projectId project id * @return project detail information */ @Override public Map<String, Object> queryById(Integer projectId) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.selectById(projectId); if (project != 
null) { result.put(Constants.DATA_LIST, project); putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.PROJECT_NOT_FOUNT, projectId); } return result; } /** * check project and authorization * * @param loginUser login user * @param project project * @param projectName project name * @return true if the login user have permission to see the project */ @Override public Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName) { Map<String, Object> result = new HashMap<>(); if (project == null) { putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); } else if (!checkReadPermission(loginUser, project)) { // check read permission putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName); } else { putMsg(result, Status.SUCCESS); } return result; } @Override public boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result) { boolean checkResult = false; if (project == null) { putMsg(result, Status.PROJECT_NOT_FOUNT, ""); } else if (!checkReadPermission(loginUser, project)) { putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName()); } else { checkResult = true; } return checkResult; } /** * admin can view all projects * * @param loginUser login user * @param searchVal search value * @param pageSize page size * @param pageNo page number * @return project list which the login user have permission to see */ @Override public Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) { Map<String, Object> result = new HashMap<>(); PageInfo<Project> pageInfo = new PageInfo<>(pageNo, pageSize); Page<Project> page = new Page<>(pageNo, pageSize); int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId(); IPage<Project> projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal); List<Project> projectList = projectIPage.getRecords(); if (userId != 0) { for (Project project : projectList) { project.setPerm(Constants.DEFAULT_ADMIN_PERMISSION); } } pageInfo.setTotalCount((int) projectIPage.getTotal()); pageInfo.setLists(projectList); result.put(Constants.COUNT, (int) projectIPage.getTotal()); result.put(Constants.DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); return result; } /** * delete project by id * * @param loginUser login user * @param projectId project id * @return delete result code */ @Override public Map<String, Object> deleteProject(User loginUser, Integer projectId) { Map<String, Object> result = new HashMap<>(); Project project = projectMapper.selectById(projectId); Map<String, Object> checkResult = getCheckResult(loginUser, project); if (checkResult != null) { return checkResult; } if (!hasPerm(loginUser, project.getUserId())) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryAllDefinitionList(projectId); if (!processDefinitionList.isEmpty()) { putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL); return result; } int delete = projectMapper.deleteById(projectId); if (delete > 0) { putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.DELETE_PROJECT_ERROR); } return result; } /** * get check result * * @param loginUser login user * @param project project * @return check result */ private Map<String, Object> getCheckResult(User loginUser, Project project) { String projectName = project == null ? 
null : project.getName(); Map<String, Object> checkResult = checkProjectAndAuth(loginUser, project, projectName); Status status = (Status) checkResult.get(Constants.STATUS); if (status != Status.SUCCESS) { return checkResult; } return null; } /** * updateProcessInstance project * * @param loginUser login user * @param projectId project id * @param projectName project name * @param desc description * @return update result code */ @Override public Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc) { Map<String, Object> result = new HashMap<>(); Map<String, Object> descCheck = checkDesc(desc); if (descCheck.get(Constants.STATUS) != Status.SUCCESS) { return descCheck; } Project project = projectMapper.selectById(projectId); boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result); if (!hasProjectAndPerm) { return result; } Project tempProject = projectMapper.queryByName(projectName); if (tempProject != null && tempProject.getId() != projectId) { putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName); return result; } project.setName(projectName); project.setDescription(desc); project.setUpdateTime(new Date()); int update = projectMapper.updateById(project); if (update > 0) { putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.UPDATE_PROJECT_ERROR); } return result; } /** * query unauthorized project * * @param loginUser login user * @param userId user id * @return the projects which user have not permission to see */ @Override public Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId) { Map<String, Object> result = new HashMap<>(); if (loginUser.getId() != userId && isNotAdmin(loginUser, result)) { return result; } /** * query all project list except specified userId */ List<Project> projectList = projectMapper.queryProjectExceptUserId(userId); List<Project> resultList = new ArrayList<>(); Set<Project> projectSet = null; if (projectList != null && !projectList.isEmpty()) { projectSet = new HashSet<>(projectList); List<Project> authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId); resultList = getUnauthorizedProjects(projectSet, authedProjectList); } result.put(Constants.DATA_LIST, resultList); putMsg(result, Status.SUCCESS); return result; } /** * get unauthorized project * * @param projectSet project set * @param authedProjectList authed project list * @return project list that authorization */ private List<Project> getUnauthorizedProjects(Set<Project> projectSet, List<Project> authedProjectList) { List<Project> resultList; Set<Project> authedProjectSet = null; if (authedProjectList != null && !authedProjectList.isEmpty()) { authedProjectSet = new HashSet<>(authedProjectList); projectSet.removeAll(authedProjectSet); } resultList = new ArrayList<>(projectSet); return resultList; } /** * query authorized project * * @param loginUser login user * @param userId user id * @return projects which the user have permission to see, Except for items created by this user */ @Override public Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId) { Map<String, Object> result = new HashMap<>(); if (loginUser.getId() != userId && isNotAdmin(loginUser, result)) { return result; } List<Project> projects = projectMapper.queryAuthedProjectListByUserId(userId); result.put(Constants.DATA_LIST, projects); putMsg(result, Status.SUCCESS); return result; } /** * query authorized project * * @param loginUser login user * @return projects which the user have permission to see, 
Except for items created by this user */ @Override public Map<String, Object> queryProjectCreatedByUser(User loginUser) { Map<String, Object> result = new HashMap<>(); List<Project> projects = projectMapper.queryProjectCreatedByUser(loginUser.getId()); result.put(Constants.DATA_LIST, projects); putMsg(result, Status.SUCCESS); return result; } /** * query authorized and user create project list by user * * @param loginUser login user * @return */ @Override public Map<String, Object> queryProjectCreatedAndAuthorizedByUser(User loginUser) { Map<String, Object> result = new HashMap<>(); List<Project> projects = null; if (loginUser.getUserType() == UserType.ADMIN_USER) { projects = projectMapper.selectList(null); } else { projects = projectMapper.queryProjectCreatedAndAuthorizedByUserId(loginUser.getId()); } result.put(Constants.DATA_LIST, projects); putMsg(result, Status.SUCCESS); return result; } /** * check whether have read permission * * @param user user * @param project project * @return true if the user have permission to see the project, otherwise return false */ private boolean checkReadPermission(User user, Project project) { int permissionId = queryPermission(user, project); return (permissionId & Constants.READ_PERMISSION) != 0; } /** * query permission id * * @param user user * @param project project * @return permission */ private int queryPermission(User user, Project project) { if (user.getUserType() == UserType.ADMIN_USER) { return Constants.READ_PERMISSION; } if (project.getUserId() == user.getId()) { return Constants.ALL_PERMISSIONS; } ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId()); if (projectUser == null) { return 0; } return projectUser.getPerm(); } /** * query all project list * * @return project list */ @Override public Map<String, Object> queryAllProjectList() { Map<String, Object> result = new HashMap<>(); List<Project> projects = projectMapper.queryAllProject(); result.put(Constants.DATA_LIST, projects); putMsg(result, Status.SUCCESS); return result; } }
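The checkReadPermission/queryPermission pair above reduces to a bitmask test. A minimal sketch, assuming illustrative bit values (the real values live in Constants; grantProject elsewhere in this codebase stores perm = 7 for full access):

// Assumed bit layout for illustration only; consult Constants for the real values.
static final int READ_PERMISSION = 1;          // assumption
static final int WRITE_PERMISSION = 1 << 1;    // assumption
static final int EXECUTE_PERMISSION = 1 << 2;  // assumption
static final int ALL_PERMISSIONS =
        READ_PERMISSION | WRITE_PERMISSION | EXECUTE_PERMISSION; // == 7, matching setPerm(7)

static boolean canRead(int perm) {
    // Mirrors (permissionId & Constants.READ_PERMISSION) != 0 in checkReadPermission.
    return (perm & READ_PERMISSION) != 0;
}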
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,326
[Bug][api-server] After deleting a user, querying the project by joining the t_ds_user table returns null.
After a user is deleted, querying the project by joining the t_ds_user table returns null. Suggested that [ from t_ds_project p join t_ds_user u on p.user_id = u.id ] be changed to [ from t_ds_project p left join t_ds_user u on p.user_id = u.id ].
https://github.com/apache/dolphinscheduler/issues/5326
https://github.com/apache/dolphinscheduler/pull/5394
6b8461e901de65000f4f8f34ebfe011946e787d3
cbbe9c333c39a0163c100c5ced270f76301c97b3
"2021-04-20T03:12:14Z"
java
"2021-04-29T06:50:05Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.service.UsersService; import org.apache.dolphinscheduler.api.utils.CheckUtils; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.EncryptionUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.AlertGroup; import org.apache.dolphinscheduler.dao.entity.DatasourceUser; import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.ResourcesUser; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UDFUser; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; import java.io.IOException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import com.baomidou.mybatisplus.core.metadata.IPage; import 
com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * users service impl */ @Service public class UsersServiceImpl extends BaseServiceImpl implements UsersService { private static final Logger logger = LoggerFactory.getLogger(UsersServiceImpl.class); @Autowired private UserMapper userMapper; @Autowired private TenantMapper tenantMapper; @Autowired private ProjectUserMapper projectUserMapper; @Autowired private ResourceUserMapper resourceUserMapper; @Autowired private ResourceMapper resourceMapper; @Autowired private DataSourceUserMapper datasourceUserMapper; @Autowired private UDFUserMapper udfUserMapper; @Autowired private AlertGroupMapper alertGroupMapper; @Autowired private ProcessDefinitionMapper processDefinitionMapper; /** * create user, only system admin have permission * * @param loginUser login user * @param userName user name * @param userPassword user password * @param email email * @param tenantId tenant id * @param phone phone * @param queue queue * @return create result code * @throws Exception exception */ @Override @Transactional(rollbackFor = Exception.class) public Map<String, Object> createUser(User loginUser, String userName, String userPassword, String email, int tenantId, String phone, String queue, int state) throws IOException { Map<String, Object> result = new HashMap<>(); //check all user params String msg = this.checkUserParams(userName, userPassword, email, phone); if (!StringUtils.isEmpty(msg)) { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, msg); return result; } if (!isAdmin(loginUser)) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } if (!checkTenantExists(tenantId)) { putMsg(result, Status.TENANT_NOT_EXIST); return result; } User user = createUser(userName, userPassword, email, tenantId, phone, queue, state); Tenant tenant = tenantMapper.queryById(tenantId); // resource upload startup if (PropertyUtils.getResUploadStartupState()) { // if tenant not exists if (!HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(tenant.getTenantCode()))) { createTenantDirIfNotExists(tenant.getTenantCode()); } String userPath = HadoopUtils.getHdfsUserDir(tenant.getTenantCode(), user.getId()); HadoopUtils.getInstance().mkdir(userPath); } putMsg(result, Status.SUCCESS); return result; } @Override @Transactional(rollbackFor = RuntimeException.class) public User createUser(String userName, String userPassword, String email, int tenantId, String phone, String queue, int state) { User user = new User(); Date now = new Date(); user.setUserName(userName); user.setUserPassword(EncryptionUtils.getMd5(userPassword)); user.setEmail(email); user.setTenantId(tenantId); user.setPhone(phone); user.setState(state); // create general users, administrator users are currently built-in user.setUserType(UserType.GENERAL_USER); user.setCreateTime(now); user.setUpdateTime(now); if (StringUtils.isEmpty(queue)) { queue = ""; } user.setQueue(queue); // save user userMapper.insert(user); return user; } /*** * create User for ldap login */ @Override @Transactional(rollbackFor = Exception.class) public User createUser(UserType userType, String userId, String email) { User user = new User(); Date now = new Date(); user.setUserName(userId); user.setEmail(email); // create general users, administrator users are currently built-in user.setUserType(userType); user.setCreateTime(now); user.setUpdateTime(now); user.setQueue(""); // save user userMapper.insert(user); return user; } /** * get user by user name * * @param userName user name * @return exist user or null 
*/ @Override public User getUserByUserName(String userName) { return userMapper.queryByUserNameAccurately(userName); } /** * query user by id * * @param id id * @return user info */ @Override public User queryUser(int id) { return userMapper.selectById(id); } @Override public List<User> queryUser(List<Integer> ids) { if (CollectionUtils.isEmpty(ids)) { return new ArrayList<>(); } return userMapper.selectByIds(ids); } /** * query user * * @param name name * @return user info */ @Override public User queryUser(String name) { return userMapper.queryByUserNameAccurately(name); } /** * query user * * @param name name * @param password password * @return user info */ @Override public User queryUser(String name, String password) { String md5 = EncryptionUtils.getMd5(password); return userMapper.queryUserByNamePassword(name, md5); } /** * get user id by user name * * @param name user name * @return if name empty 0, user not exists -1, user exist user id */ @Override public int getUserIdByName(String name) { //executor name query int executorId = 0; if (StringUtils.isNotEmpty(name)) { User executor = queryUser(name); if (null != executor) { executorId = executor.getId(); } else { executorId = -1; } } return executorId; } /** * query user list * * @param loginUser login user * @param pageNo page number * @param searchVal search avlue * @param pageSize page size * @return user list page */ @Override public Map<String, Object> queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { Map<String, Object> result = new HashMap<>(); if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; } Page<User> page = new Page<>(pageNo, pageSize); IPage<User> scheduleList = userMapper.queryUserPaging(page, searchVal); PageInfo<User> pageInfo = new PageInfo<>(pageNo, pageSize); pageInfo.setTotalCount((int) scheduleList.getTotal()); pageInfo.setLists(scheduleList.getRecords()); result.put(Constants.DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); return result; } /** * updateProcessInstance user * * * @param loginUser * @param userId user id * @param userName user name * @param userPassword user password * @param email email * @param tenantId tennat id * @param phone phone * @param queue queue * @return update result code * @throws Exception exception */ @Override public Map<String, Object> updateUser(User loginUser, int userId, String userName, String userPassword, String email, int tenantId, String phone, String queue, int state) throws IOException { Map<String, Object> result = new HashMap<>(); result.put(Constants.STATUS, false); if (check(result, !hasPerm(loginUser, userId), Status.USER_NO_OPERATION_PERM)) { return result; } User user = userMapper.selectById(userId); if (user == null) { putMsg(result, Status.USER_NOT_EXIST, userId); return result; } if (StringUtils.isNotEmpty(userName)) { if (!CheckUtils.checkUserName(userName)) { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); return result; } User tempUser = userMapper.queryByUserNameAccurately(userName); if (tempUser != null && tempUser.getId() != userId) { putMsg(result, Status.USER_NAME_EXIST); return result; } user.setUserName(userName); } if (StringUtils.isNotEmpty(userPassword)) { if (!CheckUtils.checkPassword(userPassword)) { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userPassword); return result; } user.setUserPassword(EncryptionUtils.getMd5(userPassword)); } if (StringUtils.isNotEmpty(email)) { if (!CheckUtils.checkEmail(email)) { putMsg(result, 
Status.REQUEST_PARAMS_NOT_VALID_ERROR, email); return result; } user.setEmail(email); } if (StringUtils.isNotEmpty(phone) && !CheckUtils.checkPhone(phone)) { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, phone); return result; } user.setPhone(phone); user.setQueue(queue); user.setState(state); Date now = new Date(); user.setUpdateTime(now); //if user switches the tenant, the user's resources need to be copied to the new tenant if (user.getTenantId() != tenantId) { Tenant oldTenant = tenantMapper.queryById(user.getTenantId()); //query tenant Tenant newTenant = tenantMapper.queryById(tenantId); if (newTenant != null) { // if hdfs startup if (PropertyUtils.getResUploadStartupState() && oldTenant != null) { String newTenantCode = newTenant.getTenantCode(); String oldResourcePath = HadoopUtils.getHdfsResDir(oldTenant.getTenantCode()); String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode()); // if old tenant dir exists if (HadoopUtils.getInstance().exists(oldResourcePath)) { String newResourcePath = HadoopUtils.getHdfsResDir(newTenantCode); String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode); //file resources list List<Resource> fileResourcesList = resourceMapper.queryResourceList( null, userId, ResourceType.FILE.ordinal()); if (CollectionUtils.isNotEmpty(fileResourcesList)) { ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(fileResourcesList); ResourceComponent resourceComponent = resourceTreeVisitor.visit(); copyResourceFiles(resourceComponent, oldResourcePath, newResourcePath); } //udf resources List<Resource> udfResourceList = resourceMapper.queryResourceList( null, userId, ResourceType.UDF.ordinal()); if (CollectionUtils.isNotEmpty(udfResourceList)) { ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(udfResourceList); ResourceComponent resourceComponent = resourceTreeVisitor.visit(); copyResourceFiles(resourceComponent, oldUdfsPath, newUdfsPath); } //Delete the user from the old tenant directory String oldUserPath = HadoopUtils.getHdfsUserDir(oldTenant.getTenantCode(), userId); HadoopUtils.getInstance().delete(oldUserPath, true); } else { // if old tenant dir not exists , create createTenantDirIfNotExists(oldTenant.getTenantCode()); } if (HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(newTenant.getTenantCode()))) { //create user in the new tenant directory String newUserPath = HadoopUtils.getHdfsUserDir(newTenant.getTenantCode(), user.getId()); HadoopUtils.getInstance().mkdir(newUserPath); } else { // if new tenant dir not exists , create createTenantDirIfNotExists(newTenant.getTenantCode()); } } } user.setTenantId(tenantId); } // updateProcessInstance user userMapper.updateById(user); putMsg(result, Status.SUCCESS); return result; } /** * delete user * * @param loginUser login user * @param id user id * @return delete result code * @throws Exception exception when operate hdfs */ @Override public Map<String, Object> deleteUserById(User loginUser, int id) throws IOException { Map<String, Object> result = new HashMap<>(); //only admin can operate if (!isAdmin(loginUser)) { putMsg(result, Status.USER_NO_OPERATION_PERM, id); return result; } //check exist User tempUser = userMapper.selectById(id); if (tempUser == null) { putMsg(result, Status.USER_NOT_EXIST, id); return result; } // delete user User user = userMapper.queryTenantCodeByUserId(id); if (user != null) { if (PropertyUtils.getResUploadStartupState()) { String userPath = HadoopUtils.getHdfsUserDir(user.getTenantCode(), id); if 
(HadoopUtils.getInstance().exists(userPath)) { HadoopUtils.getInstance().delete(userPath, true); } } } userMapper.deleteById(id); putMsg(result, Status.SUCCESS); return result; } /** * grant project * * @param loginUser login user * @param userId user id * @param projectIds project id array * @return grant result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) { Map<String, Object> result = new HashMap<>(); result.put(Constants.STATUS, false); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; } //check exist User tempUser = userMapper.selectById(userId); if (tempUser == null) { putMsg(result, Status.USER_NOT_EXIST, userId); return result; } //if the selected projectIds are empty, delete all items associated with the user projectUserMapper.deleteProjectRelation(0, userId); if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS)) { return result; } String[] projectIdArr = projectIds.split(","); for (String projectId : projectIdArr) { Date now = new Date(); ProjectUser projectUser = new ProjectUser(); projectUser.setUserId(userId); projectUser.setProjectId(Integer.parseInt(projectId)); projectUser.setPerm(7); projectUser.setCreateTime(now); projectUser.setUpdateTime(now); projectUserMapper.insert(projectUser); } putMsg(result, Status.SUCCESS); return result; } /** * grant resource * * @param loginUser login user * @param userId user id * @param resourceIds resource id array * @return grant result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) { Map<String, Object> result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; } User user = userMapper.selectById(userId); if (user == null) { putMsg(result, Status.USER_NOT_EXIST, userId); return result; } Set<Integer> needAuthorizeResIds = new HashSet<>(); if (StringUtils.isNotBlank(resourceIds)) { String[] resourceFullIdArr = resourceIds.split(","); // need authorize resource id set for (String resourceFullId : resourceFullIdArr) { String[] resourceIdArr = resourceFullId.split("-"); for (int i = 0; i <= resourceIdArr.length - 1; i++) { int resourceIdValue = Integer.parseInt(resourceIdArr[i]); needAuthorizeResIds.add(resourceIdValue); } } } //get the authorized resource id list by user id List<Integer> resIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, Constants.AUTHORIZE_WRITABLE_PERM); List<Resource> oldAuthorizedRes = CollectionUtils.isEmpty(resIds) ? 
new ArrayList<>() : resourceMapper.queryResourceListById(resIds); //if resource type is UDF,need check whether it is bound by UDF function Set<Integer> oldAuthorizedResIds = oldAuthorizedRes.stream().map(Resource::getId).collect(Collectors.toSet()); //get the unauthorized resource id list oldAuthorizedResIds.removeAll(needAuthorizeResIds); if (CollectionUtils.isNotEmpty(oldAuthorizedResIds)) { // get all resource id of process definitions those is released List<Map<String, Object>> list = processDefinitionMapper.listResourcesByUser(userId); Map<Integer, Set<Integer>> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list); Set<Integer> resourceIdSet = resourceProcessMap.keySet(); resourceIdSet.retainAll(oldAuthorizedResIds); if (CollectionUtils.isNotEmpty(resourceIdSet)) { logger.error("can't be deleted,because it is used of process definition"); for (Integer resId : resourceIdSet) { logger.error("resource id:{} is used of process definition {}", resId, resourceProcessMap.get(resId)); } putMsg(result, Status.RESOURCE_IS_USED); return result; } } resourceUserMapper.deleteResourceUser(userId, 0); if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) { return result; } for (int resourceIdValue : needAuthorizeResIds) { Resource resource = resourceMapper.selectById(resourceIdValue); if (resource == null) { putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } Date now = new Date(); ResourcesUser resourcesUser = new ResourcesUser(); resourcesUser.setUserId(userId); resourcesUser.setResourcesId(resourceIdValue); if (resource.isDirectory()) { resourcesUser.setPerm(Constants.AUTHORIZE_READABLE_PERM); } else { resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM); } resourcesUser.setCreateTime(now); resourcesUser.setUpdateTime(now); resourceUserMapper.insert(resourcesUser); } putMsg(result, Status.SUCCESS); return result; } /** * grant udf function * * @param loginUser login user * @param userId user id * @param udfIds udf id array * @return grant result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) { Map<String, Object> result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; } User user = userMapper.selectById(userId); if (user == null) { putMsg(result, Status.USER_NOT_EXIST, userId); return result; } udfUserMapper.deleteByUserId(userId); if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) { return result; } String[] resourcesIdArr = udfIds.split(","); for (String udfId : resourcesIdArr) { Date now = new Date(); UDFUser udfUser = new UDFUser(); udfUser.setUserId(userId); udfUser.setUdfId(Integer.parseInt(udfId)); udfUser.setPerm(7); udfUser.setCreateTime(now); udfUser.setUpdateTime(now); udfUserMapper.insert(udfUser); } putMsg(result, Status.SUCCESS); return result; } /** * grant datasource * * @param loginUser login user * @param userId user id * @param datasourceIds data source id array * @return grant result code */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) { Map<String, Object> result = new HashMap<>(); result.put(Constants.STATUS, false); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; } User user = userMapper.selectById(userId); if (user == null) { 
putMsg(result, Status.USER_NOT_EXIST, userId); return result; } datasourceUserMapper.deleteByUserId(userId); if (check(result, StringUtils.isEmpty(datasourceIds), Status.SUCCESS)) { return result; } String[] datasourceIdArr = datasourceIds.split(","); for (String datasourceId : datasourceIdArr) { Date now = new Date(); DatasourceUser datasourceUser = new DatasourceUser(); datasourceUser.setUserId(userId); datasourceUser.setDatasourceId(Integer.parseInt(datasourceId)); datasourceUser.setPerm(7); datasourceUser.setCreateTime(now); datasourceUser.setUpdateTime(now); datasourceUserMapper.insert(datasourceUser); } putMsg(result, Status.SUCCESS); return result; } /** * query user info * * @param loginUser login user * @return user info */ @Override public Map<String, Object> getUserInfo(User loginUser) { Map<String, Object> result = new HashMap<>(); User user = null; if (loginUser.getUserType() == UserType.ADMIN_USER) { user = loginUser; } else { user = userMapper.queryDetailsById(loginUser.getId()); List<AlertGroup> alertGroups = alertGroupMapper.queryByUserId(loginUser.getId()); StringBuilder sb = new StringBuilder(); if (alertGroups != null && !alertGroups.isEmpty()) { for (int i = 0; i < alertGroups.size() - 1; i++) { sb.append(alertGroups.get(i).getGroupName() + ","); } sb.append(alertGroups.get(alertGroups.size() - 1)); user.setAlertGroup(sb.toString()); } } result.put(Constants.DATA_LIST, user); putMsg(result, Status.SUCCESS); return result; } /** * query user list * * @param loginUser login user * @return user list */ @Override public Map<String, Object> queryAllGeneralUsers(User loginUser) { Map<String, Object> result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; } List<User> userList = userMapper.queryAllGeneralUser(); result.put(Constants.DATA_LIST, userList); putMsg(result, Status.SUCCESS); return result; } /** * query user list * * @param loginUser login user * @return user list */ @Override public Map<String, Object> queryUserList(User loginUser) { Map<String, Object> result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; } List<User> userList = userMapper.selectList(null); result.put(Constants.DATA_LIST, userList); putMsg(result, Status.SUCCESS); return result; } /** * verify user name exists * * @param userName user name * @return true if user name not exists, otherwise return false */ @Override public Result<Object> verifyUserName(String userName) { Result<Object> result = new Result<>(); User user = userMapper.queryByUserNameAccurately(userName); if (user != null) { putMsg(result, Status.USER_NAME_EXIST); } else { putMsg(result, Status.SUCCESS); } return result; } /** * unauthorized user * * @param loginUser login user * @param alertgroupId alert group id * @return unauthorize result code */ @Override public Map<String, Object> unauthorizedUser(User loginUser, Integer alertgroupId) { Map<String, Object> result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; } List<User> userList = userMapper.selectList(null); List<User> resultUsers = new ArrayList<>(); Set<User> userSet = null; if (userList != null && !userList.isEmpty()) { userSet = new HashSet<>(userList); List<User> authedUserList = userMapper.queryUserListByAlertGroupId(alertgroupId); Set<User> authedUserSet = null; if (authedUserList != null && !authedUserList.isEmpty()) { 
authedUserSet = new HashSet<>(authedUserList); userSet.removeAll(authedUserSet); } resultUsers = new ArrayList<>(userSet); } result.put(Constants.DATA_LIST, resultUsers); putMsg(result, Status.SUCCESS); return result; } /** * authorized user * * @param loginUser login user * @param alertgroupId alert group id * @return authorized result code */ @Override public Map<String, Object> authorizedUser(User loginUser, Integer alertgroupId) { Map<String, Object> result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; } List<User> userList = userMapper.queryUserListByAlertGroupId(alertgroupId); result.put(Constants.DATA_LIST, userList); putMsg(result, Status.SUCCESS); return result; } /** * @param tenantId tenant id * @return true if tenant exists, otherwise return false */ private boolean checkTenantExists(int tenantId) { return tenantMapper.queryById(tenantId) != null; } /** * @return if check failed return the field, otherwise return null */ private String checkUserParams(String userName, String password, String email, String phone) { String msg = null; if (!CheckUtils.checkUserName(userName)) { msg = userName; } else if (!CheckUtils.checkPassword(password)) { msg = password; } else if (!CheckUtils.checkEmail(email)) { msg = email; } else if (!CheckUtils.checkPhone(phone)) { msg = phone; } return msg; } /** * copy resource files * * @param resourceComponent resource component * @param srcBasePath src base path * @param dstBasePath dst base path * @throws IOException io exception */ private void copyResourceFiles(ResourceComponent resourceComponent, String srcBasePath, String dstBasePath) throws IOException { List<ResourceComponent> components = resourceComponent.getChildren(); if (CollectionUtils.isNotEmpty(components)) { for (ResourceComponent component : components) { // verify whether exist if (!HadoopUtils.getInstance().exists(String.format("%s/%s", srcBasePath, component.getFullName()))) { logger.error("resource file: {} not exist,copy error", component.getFullName()); throw new ServiceException(Status.RESOURCE_NOT_EXIST); } if (!component.isDirctory()) { // copy it to dst HadoopUtils.getInstance().copy(String.format("%s/%s", srcBasePath, component.getFullName()), String.format("%s/%s", dstBasePath, component.getFullName()), false, true); continue; } if (CollectionUtils.isEmpty(component.getChildren())) { // if not exist,need create it if (!HadoopUtils.getInstance().exists(String.format("%s/%s", dstBasePath, component.getFullName()))) { HadoopUtils.getInstance().mkdir(String.format("%s/%s", dstBasePath, component.getFullName())); } } else { copyResourceFiles(component, srcBasePath, dstBasePath); } } } } /** * register user, default state is 0, default tenant_id is 1, no phone, no queue * * @param userName user name * @param userPassword user password * @param repeatPassword repeat password * @param email email * @return register result code * @throws Exception exception */ @Override @Transactional(rollbackFor = RuntimeException.class) public Map<String, Object> registerUser(String userName, String userPassword, String repeatPassword, String email) { Map<String, Object> result = new HashMap<>(); //check user params String msg = this.checkUserParams(userName, userPassword, email, ""); if (!StringUtils.isEmpty(msg)) { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, msg); return result; } if (!userPassword.equals(repeatPassword)) { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "two passwords are not 
same"); return result; } User user = createUser(userName, userPassword, email, 1, "", "", Flag.NO.ordinal()); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, user); return result; } /** * activate user, only system admin have permission, change user state code 0 to 1 * * @param loginUser login user * @param userName user name * @return create result code */ @Override public Map<String, Object> activateUser(User loginUser, String userName) { Map<String, Object> result = new HashMap<>(); result.put(Constants.STATUS, false); if (!isAdmin(loginUser)) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } if (!CheckUtils.checkUserName(userName)) { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); return result; } User user = userMapper.queryByUserNameAccurately(userName); if (user == null) { putMsg(result, Status.USER_NOT_EXIST, userName); return result; } if (user.getState() != Flag.NO.ordinal()) { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); return result; } user.setState(Flag.YES.ordinal()); Date now = new Date(); user.setUpdateTime(now); userMapper.updateById(user); User responseUser = userMapper.queryByUserNameAccurately(userName); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, responseUser); return result; } /** * activate user, only system admin have permission, change users state code 0 to 1 * * @param loginUser login user * @param userNames user name * @return create result code */ @Override public Map<String, Object> batchActivateUser(User loginUser, List<String> userNames) { Map<String, Object> result = new HashMap<>(); if (!isAdmin(loginUser)) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } int totalSuccess = 0; List<String> successUserNames = new ArrayList<>(); Map<String, Object> successRes = new HashMap<>(); int totalFailed = 0; List<Map<String, String>> failedInfo = new ArrayList<>(); Map<String, Object> failedRes = new HashMap<>(); for (String userName : userNames) { Map<String, Object> tmpResult = activateUser(loginUser, userName); if (tmpResult.get(Constants.STATUS) != Status.SUCCESS) { totalFailed++; Map<String, String> failedBody = new HashMap<>(); failedBody.put("userName", userName); Status status = (Status) tmpResult.get(Constants.STATUS); String errorMessage = MessageFormat.format(status.getMsg(), userName); failedBody.put("msg", errorMessage); failedInfo.add(failedBody); } else { totalSuccess++; successUserNames.add(userName); } } successRes.put("sum", totalSuccess); successRes.put("userName", successUserNames); failedRes.put("sum", totalFailed); failedRes.put("info", failedInfo); Map<String, Object> res = new HashMap<>(); res.put("success", successRes); res.put("failed", failedRes); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, res); return result; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,326
[Bug][api-server] After deleting a user, querying a project by joining the t_ds_user table returns null.
After deleting a user, querying a project by joining the t_ds_user table returns null. Suggested that [ from t_ds_project p join t_ds_user u on p.user_id = u.id ] be changed to [ from t_ds_project p left join t_ds_user u on p.user_id = u.id ].
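A minimal sketch of the suggested fix, written here as a MyBatis annotation mapper purely for illustration — DolphinScheduler's project queries actually live in mapper XML, the interface and method names below are hypothetical, and only the FROM/JOIN clauses come from the issue text:

import java.util.List;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.ibatis.annotations.Select;

public interface ProjectJoinSketchMapper {

    // Before: the INNER JOIN silently drops projects whose owner row was deleted.
    @Select("select p.* from t_ds_project p join t_ds_user u on p.user_id = u.id")
    List<Project> queryProjectsInnerJoin();

    // After (the issue's suggestion): the LEFT JOIN keeps the project row and
    // yields NULL user columns when the owning user no longer exists.
    @Select("select p.* from t_ds_project p left join t_ds_user u on p.user_id = u.id")
    List<Project> queryProjectsLeftJoin();
}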
https://github.com/apache/dolphinscheduler/issues/5326
https://github.com/apache/dolphinscheduler/pull/5394
6b8461e901de65000f4f8f34ebfe011946e787d3
cbbe9c333c39a0163c100c5ced270f76301c97b3
"2021-04-20T03:12:14Z"
java
"2021-04-29T06:50:05Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import com.fasterxml.jackson.core.type.TypeReference; /** * project controller test */ public class ProjectControllerTest extends AbstractControllerTest { private static Logger logger = LoggerFactory.getLogger(ProjectControllerTest.class); private String projectId; @Before public void before() throws Exception { projectId = testCreateProject("project_test1", "the test project"); } @After public void after() throws Exception { testDeleteProject(projectId); } private String testCreateProject(String projectName, String description) throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("projectName",projectName); paramsMap.add("description",description); MvcResult mvcResult = mockMvc.perform(post("/projects/create") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), new TypeReference<Result<String>>() {}); Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); Assert.assertNotNull(result.getData()); logger.info("create project return result:{}", mvcResult.getResponse().getContentAsString()); return (String)result.getData(); } @Test public void testUpdateProject() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("projectId", projectId); paramsMap.add("projectName","project_test_update"); paramsMap.add("desc","the test project update"); MvcResult mvcResult = mockMvc.perform(post("/projects/update") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = 
JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info("update project return result:{}", mvcResult.getResponse().getContentAsString()); } @Test public void testQueryProjectById() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("projectId", projectId); MvcResult mvcResult = mockMvc.perform(get("/projects/query-by-id") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); logger.info("query project by id :{}, return result:{}", projectId, mvcResult.getResponse().getContentAsString()); } @Test public void testQueryProjectListPaging() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("searchVal","test"); paramsMap.add("pageSize","2"); paramsMap.add("pageNo","2"); MvcResult mvcResult = mockMvc.perform(get("/projects/list-paging") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info("query list-paging project return result:{}", mvcResult.getResponse().getContentAsString()); } @Test public void testQueryUnauthorizedProject() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("userId","2"); MvcResult mvcResult = mockMvc.perform(get("/projects/unauth-project") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info("query unauth project return result:{}", mvcResult.getResponse().getContentAsString()); } @Test public void testQueryAuthorizedProject() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("userId",String.valueOf(user.getId())); MvcResult mvcResult = mockMvc.perform(get("/projects/authed-project") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info("query authed project return result:{}", mvcResult.getResponse().getContentAsString()); } @Test public void testQueryAllProjectList() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); MvcResult mvcResult = mockMvc.perform(get("/projects/query-project-list") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = 
JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info("query all project return result:{}", mvcResult.getResponse().getContentAsString()); } @Ignore @Test public void testImportProcessDefinition() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("file","test"); MvcResult mvcResult = mockMvc.perform(post("/projects/import-definition") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.TEXT_PLAIN)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.IMPORT_PROCESS_DEFINE_ERROR.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } private void testDeleteProject(String projectId) throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("projectId", projectId); MvcResult mvcResult = mockMvc.perform(get("/projects/delete") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info("delete project return result:{}", mvcResult.getResponse().getContentAsString()); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,326
[Bug][api-server] After deleting a user, querying a project by joining the t_ds_user table returns null.
After deleting a user, querying a project by joining the t_ds_user table returns null. Suggested that [ from t_ds_project p join t_ds_user u on p.user_id = u.id ] be changed to [ from t_ds_project p left join t_ds_user u on p.user_id = u.id ].
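A hypothetical regression test for this scenario, in the Mockito style of the ProjectServiceTest record below; it only verifies that the service layer passes an orphaned project through — the join itself lives in the mapper SQL — and the owner id 999 and project name are illustrative values:

@Test
public void testQueryAllProjectListWithDeletedOwner() {
    // An orphaned project: its user_id points at a user row that was deleted.
    Project orphaned = new Project();
    orphaned.setId(1);
    orphaned.setName("project_orphaned");
    orphaned.setUserId(999);
    Mockito.when(projectMapper.queryAllProject()).thenReturn(Collections.singletonList(orphaned));

    Map<String, Object> result = projectService.queryAllProjectList();
    List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
    // With the LEFT JOIN fix the orphaned project is still returned rather than dropped.
    Assert.assertEquals(1, projects.size());
}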
https://github.com/apache/dolphinscheduler/issues/5326
https://github.com/apache/dolphinscheduler/pull/5394
6b8461e901de65000f4f8f34ebfe011946e787d3
cbbe9c333c39a0163c100c5ced270f76301c97b3
"2021-04-20T03:12:14Z"
java
"2021-04-29T06:50:05Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * project service test **/ @RunWith(MockitoJUnitRunner.class) public class ProjectServiceTest { private static final Logger logger = LoggerFactory.getLogger(ProjectServiceTest.class); @InjectMocks private ProjectServiceImpl projectService; @Mock private ProjectMapper projectMapper; @Mock private ProjectUserMapper projectUserMapper; @Mock private ProcessDefinitionMapper processDefinitionMapper; private String projectName = "ProjectServiceTest"; private String userName = "ProjectServiceTest"; @Test public void testCreateProject() { User loginUser = getLoginUser(); loginUser.setId(1); Map<String, Object> result = projectService.createProject(loginUser, projectName, getDesc()); logger.info(result.toString()); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); //project name exist Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject()); result = projectService.createProject(loginUser, projectName, projectName); logger.info(result.toString()); Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS, result.get(Constants.STATUS)); //success Mockito.when(projectMapper.insert(Mockito.any(Project.class))).thenReturn(1); result = projectService.createProject(loginUser, "test", "test"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test public void testQueryById() { //not exist Map<String, Object> 
result = projectService.queryById(Integer.MAX_VALUE); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS)); logger.info(result.toString()); //success Mockito.when(projectMapper.selectById(1)).thenReturn(getProject()); result = projectService.queryById(1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test public void testCheckProjectAndAuth() { Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser()); User loginUser = getLoginUser(); Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, null, projectName); logger.info(result.toString()); Status status = (Status) result.get(Constants.STATUS); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS)); Project project = getProject(); //USER_NO_OPERATION_PROJECT_PERM project.setUserId(2); result = projectService.checkProjectAndAuth(loginUser, project, projectName); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS)); //success project.setUserId(1); result = projectService.checkProjectAndAuth(loginUser, project, projectName); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); Map<String, Object> result2 = new HashMap<>(); result2 = projectService.checkProjectAndAuth(loginUser, null, projectName); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result2.get(Constants.STATUS)); Project project1 = getProject(); // USER_NO_OPERATION_PROJECT_PERM project1.setUserId(2); result2 = projectService.checkProjectAndAuth(loginUser, project1, projectName); Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result2.get(Constants.STATUS)); //success project1.setUserId(1); projectService.checkProjectAndAuth(loginUser, project1, projectName); } @Test public void testHasProjectAndPerm() { // Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser()); User loginUser = getLoginUser(); Project project = getProject(); Map<String, Object> result = new HashMap<>(); // not exist user User tempUser = new User(); tempUser.setId(Integer.MAX_VALUE); boolean checkResult = projectService.hasProjectAndPerm(tempUser, project, result); logger.info(result.toString()); Assert.assertFalse(checkResult); //success result = new HashMap<>(); project.setUserId(1); checkResult = projectService.hasProjectAndPerm(loginUser, project, result); logger.info(result.toString()); Assert.assertTrue(checkResult); } @Test public void testQueryProjectListPaging() { IPage<Project> page = new Page<>(1, 10); page.setRecords(getList()); page.setTotal(1L); Mockito.when(projectMapper.queryProjectListPaging(Mockito.any(Page.class), Mockito.eq(1), Mockito.eq(projectName))).thenReturn(page); User loginUser = getLoginUser(); // project owner Map<String, Object> result = projectService.queryProjectListPaging(loginUser, 10, 1, projectName); logger.info(result.toString()); PageInfo<Project> pageInfo = (PageInfo<Project>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getLists())); //admin Mockito.when(projectMapper.queryProjectListPaging(Mockito.any(Page.class), Mockito.eq(0), Mockito.eq(projectName))).thenReturn(page); loginUser.setUserType(UserType.ADMIN_USER); result = projectService.queryProjectListPaging(loginUser, 10, 1, projectName); logger.info(result.toString()); pageInfo = (PageInfo<Project>) result.get(Constants.DATA_LIST); 
Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getLists())); } @Test public void testDeleteProject() { Mockito.when(projectMapper.selectById(1)).thenReturn(getProject()); User loginUser = getLoginUser(); //PROJECT_NOT_FOUNT Map<String, Object> result = projectService.deleteProject(loginUser, 12); logger.info(result.toString()); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS)); loginUser.setId(2); //USER_NO_OPERATION_PROJECT_PERM result = projectService.deleteProject(loginUser, 1); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS)); //DELETE_PROJECT_ERROR_DEFINES_NOT_NULL Mockito.when(processDefinitionMapper.queryAllDefinitionList(1)).thenReturn(getProcessDefinitions()); loginUser.setUserType(UserType.ADMIN_USER); result = projectService.deleteProject(loginUser, 1); logger.info(result.toString()); Assert.assertEquals(Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL, result.get(Constants.STATUS)); //success Mockito.when(projectMapper.deleteById(1)).thenReturn(1); Mockito.when(processDefinitionMapper.queryAllDefinitionList(1)).thenReturn(new ArrayList<>()); result = projectService.deleteProject(loginUser, 1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test public void testUpdate() { User loginUser = getLoginUser(); Project project = getProject(); project.setId(2); Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project); Mockito.when(projectMapper.selectById(1)).thenReturn(getProject()); // PROJECT_NOT_FOUNT Map<String, Object> result = projectService.update(loginUser, 12, projectName, "desc"); logger.info(result.toString()); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS)); //PROJECT_ALREADY_EXISTS result = projectService.update(loginUser, 1, projectName, "desc"); logger.info(result.toString()); Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS, result.get(Constants.STATUS)); //success project.setUserId(1); Mockito.when(projectMapper.updateById(Mockito.any(Project.class))).thenReturn(1); result = projectService.update(loginUser, 1, "test", "desc"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test public void testQueryAuthorizedProject() { User loginUser = getLoginUser(); Mockito.when(projectMapper.queryAuthedProjectListByUserId(1)).thenReturn(getList()); //USER_NO_OPERATION_PERM Map<String, Object> result = projectService.queryAuthorizedProject(loginUser, 3); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); //success loginUser.setUserType(UserType.ADMIN_USER); result = projectService.queryAuthorizedProject(loginUser, 1); logger.info(result.toString()); List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); loginUser.setUserType(UserType.GENERAL_USER); result = projectService.queryAuthorizedProject(loginUser, loginUser.getId()); projects = (List<Project>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); } @Test public void testQueryCreatedProject() { User loginUser = getLoginUser(); Mockito.when(projectMapper.queryProjectCreatedByUser(1)).thenReturn(getList()); //success loginUser.setUserType(UserType.ADMIN_USER); Map<String, Object> result = projectService.queryProjectCreatedByUser(loginUser); logger.info(result.toString()); List<Project> projects = 
(List<Project>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); } @Test public void testQueryProjectCreatedAndAuthorizedByUser() { Map<String, Object> result = null; User loginUser = getLoginUser(); // not admin user Mockito.when(projectMapper.queryProjectCreatedAndAuthorizedByUserId(1)).thenReturn(getList()); result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser); List<Project> notAdminUserResult = (List<Project>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(notAdminUserResult)); //admin user loginUser.setUserType(UserType.ADMIN_USER); Mockito.when(projectMapper.selectList(null)).thenReturn(getList()); result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser); List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); } @Test public void testQueryAllProjectList() { Mockito.when(projectMapper.queryAllProject()).thenReturn(getList()); Map<String, Object> result = projectService.queryAllProjectList(); logger.info(result.toString()); List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); } @Test public void testQueryUnauthorizedProject() { // Mockito.when(projectMapper.queryAuthedProjectListByUserId(1)).thenReturn(getList()); Mockito.when(projectMapper.queryProjectExceptUserId(2)).thenReturn(getList()); User loginUser = new User(); loginUser.setUserType(UserType.ADMIN_USER); Map<String, Object> result = projectService.queryUnauthorizedProject(loginUser, 2); logger.info(result.toString()); List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); } private Project getProject() { Project project = new Project(); project.setId(1); project.setName(projectName); project.setUserId(1); return project; } private List<Project> getList() { List<Project> list = new ArrayList<>(); list.add(getProject()); return list; } /** * create admin user */ private User getLoginUser() { User loginUser = new User(); loginUser.setUserType(UserType.GENERAL_USER); loginUser.setUserName(userName); loginUser.setId(1); return loginUser; } /** * get project user */ private ProjectUser getProjectUser() { ProjectUser projectUser = new ProjectUser(); projectUser.setProjectId(1); projectUser.setUserId(1); return projectUser; } private List<ProcessDefinition> getProcessDefinitions() { List<ProcessDefinition> list = new ArrayList<>(); ProcessDefinition processDefinition = new ProcessDefinition(); processDefinition.setProjectId(1); list.add(processDefinition); return list; } private List<Integer> getProjectIds() { return Collections.singletonList(1); } private String getDesc() { return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe" + ".deleteProjectRelation(projectId,userId)projectUserMappe" + "r.deleteProjectRelation(projectId,userId)projectUserMapper" + ".deleteProjectRelation(projectId,userId)projectUserMapper.deleteProjectRelation(projectId,userId)"; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,326
[Bug][api-server] After deleting a user, querying a project by joining the t_ds_user table returns null.
After deleting a user, querying a project by joining the t_ds_user table returns null. Suggested that [ from t_ds_project p join t_ds_user u on p.user_id = u.id ] be changed to [ from t_ds_project p left join t_ds_user u on p.user_id = u.id ].
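For context, a hypothetical fragment showing how such orphaned rows arise in the first place, using only calls that appear in the UsersServiceImpl source earlier in this document; the admin setup and the ownerId parameter are illustrative:

import java.io.IOException;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.User;

public class OrphanedProjectDemo {
    // deleteUserById removes only the t_ds_user row; t_ds_project rows keep their
    // user_id, so any later INNER JOIN on t_ds_user no longer sees those projects.
    public static void orphanOwner(UsersService usersService, int ownerId) throws IOException {
        User admin = new User();
        admin.setUserType(UserType.ADMIN_USER); // only admins may delete users
        usersService.deleteUserById(admin, ownerId);
        // Projects owned by ownerId are now orphaned until the LEFT JOIN fix lands.
    }
}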
https://github.com/apache/dolphinscheduler/issues/5326
https://github.com/apache/dolphinscheduler/pull/5394
6b8461e901de65000f4f8f34ebfe011946e787d3
cbbe9c333c39a0163c100c5ced270f76301c97b3
"2021-04-20T03:12:14Z"
java
"2021-04-29T06:50:05Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.UsersServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.EncryptionUtils; import org.apache.dolphinscheduler.dao.entity.AlertGroup; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * users service test */ @RunWith(MockitoJUnitRunner.class) public class UsersServiceTest { private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class); @InjectMocks private UsersServiceImpl usersService; @Mock private UserMapper userMapper; @Mock private TenantMapper tenantMapper; @Mock private ResourceMapper resourceMapper; @Mock private AlertGroupMapper alertGroupMapper; @Mock private DataSourceUserMapper datasourceUserMapper; @Mock private ProjectUserMapper projectUserMapper; @Mock private ResourceUserMapper resourceUserMapper; @Mock private UDFUserMapper udfUserMapper; private String queueName = "UsersServiceTestQueue"; @Before public void before() { } @After public void after() { } @Test public void testCreateUserForLdap() { String userName = "user1"; String email = 
"user1@ldap.com"; User user = usersService.createUser(UserType.ADMIN_USER, userName, email); Assert.assertNotNull(user); } @Test public void testCreateUser() { User user = new User(); user.setUserType(UserType.ADMIN_USER); String userName = "userTest0001~"; String userPassword = "userTest"; String email = "123@qq.com"; int tenantId = Integer.MAX_VALUE; String phone = "13456432345"; int state = 1; try { //userName error Map<String, Object> result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); logger.info(result.toString()); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); userName = "userTest0001"; userPassword = "userTest000111111111111111"; //password error result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); logger.info(result.toString()); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); userPassword = "userTest0001"; email = "1q.com"; //email error result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); logger.info(result.toString()); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); email = "122222@qq.com"; phone = "2233"; //phone error result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); logger.info(result.toString()); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); phone = "13456432345"; //tenantId not exists result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); logger.info(result.toString()); Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS)); //success Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); result = usersService.createUser(user, userName, userPassword, email, 1, phone, queueName, state); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } catch (Exception e) { logger.error(Status.CREATE_USER_ERROR.getMsg(), e); Assert.assertTrue(false); } } @Test public void testQueryUser() { String userName = "userTest0001"; String userPassword = "userTest0001"; when(userMapper.queryUserByNamePassword(userName, EncryptionUtils.getMd5(userPassword))).thenReturn(getGeneralUser()); User queryUser = usersService.queryUser(userName, userPassword); logger.info(queryUser.toString()); Assert.assertTrue(queryUser != null); } @Test public void testSelectByIds() { List<Integer> ids = new ArrayList<>(); List<User> users = usersService.queryUser(ids); Assert.assertTrue(users.isEmpty()); ids.add(1); List<User> userList = new ArrayList<>(); userList.add(new User()); when(userMapper.selectByIds(ids)).thenReturn(userList); List<User> userList1 = usersService.queryUser(ids); Assert.assertFalse(userList1.isEmpty()); } @Test public void testGetUserIdByName() { User user = new User(); user.setId(1); user.setUserType(UserType.ADMIN_USER); user.setUserName("test_user"); //user name null int userId = usersService.getUserIdByName(""); Assert.assertEquals(0, userId); //user not exist when(usersService.queryUser(user.getUserName())).thenReturn(null); int userNotExistId = usersService.getUserIdByName(user.getUserName()); Assert.assertEquals(-1, userNotExistId); //user exist when(usersService.queryUser(user.getUserName())).thenReturn(user); int userExistId = usersService.getUserIdByName(user.getUserName()); 
        Assert.assertEquals(user.getId(), userExistId);
    }

    @Test
    public void testQueryUserList() {
        User user = new User();

        //no operate
        Map<String, Object> result = usersService.queryUserList(user);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

        //success
        user.setUserType(UserType.ADMIN_USER);
        when(userMapper.selectList(null)).thenReturn(getUserList());
        result = usersService.queryUserList(user);
        List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
        Assert.assertTrue(userList.size() > 0);
    }

    @Test
    public void testQueryUserListPage() {
        User user = new User();
        IPage<User> page = new Page<>(1, 10);
        page.setRecords(getUserList());
        when(userMapper.queryUserPaging(any(Page.class), eq("userTest"))).thenReturn(page);

        //no operate
        Map<String, Object> result = usersService.queryUserList(user, "userTest", 1, 10);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

        //success
        user.setUserType(UserType.ADMIN_USER);
        result = usersService.queryUserList(user, "userTest", 1, 10);
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        PageInfo<User> pageInfo = (PageInfo<User>) result.get(Constants.DATA_LIST);
        Assert.assertTrue(pageInfo.getLists().size() > 0);
    }

    @Test
    public void testUpdateUser() {
        String userName = "userTest0001";
        String userPassword = "userTest0001";
        try {
            //user not exist
            Map<String, Object> result = usersService.updateUser(getLoginUser(), 0, userName, userPassword, "3443@qq.com", 1, "13457864543", "queue", 1);
            Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
            logger.info(result.toString());

            //success
            when(userMapper.selectById(1)).thenReturn(getUser());
            result = usersService.updateUser(getLoginUser(), 1, userName, userPassword, "32222s@qq.com", 1, "13457864543", "queue", 1);
            logger.info(result.toString());
            Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        } catch (Exception e) {
            logger.error("update user error", e);
            Assert.assertTrue(false);
        }
    }

    @Test
    public void testDeleteUserById() {
        User loginUser = new User();
        try {
            when(userMapper.queryTenantCodeByUserId(1)).thenReturn(getUser());
            when(userMapper.selectById(1)).thenReturn(getUser());

            //no operate
            Map<String, Object> result = usersService.deleteUserById(loginUser, 3);
            logger.info(result.toString());
            Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

            // user not exist
            loginUser.setUserType(UserType.ADMIN_USER);
            result = usersService.deleteUserById(loginUser, 3);
            logger.info(result.toString());
            Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));

            //success
            result = usersService.deleteUserById(loginUser, 1);
            logger.info(result.toString());
            Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        } catch (Exception e) {
            logger.error("delete user error", e);
            Assert.assertTrue(false);
        }
    }

    @Test
    public void testGrantProject() {
        when(userMapper.selectById(1)).thenReturn(getUser());
        User loginUser = new User();
        String projectIds = "100000,120000";
        Map<String, Object> result = usersService.grantProject(loginUser, 1, projectIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

        //user not exist
        loginUser.setUserType(UserType.ADMIN_USER);
        result = usersService.grantProject(loginUser, 2, projectIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));

        //success
        when(projectUserMapper.deleteProjectRelation(Mockito.anyInt(), Mockito.anyInt())).thenReturn(1);
        result = usersService.grantProject(loginUser, 1, projectIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    @Test
    public void testGrantResources() {
        String resourceIds = "100000,120000";
        when(userMapper.selectById(1)).thenReturn(getUser());
        User loginUser = new User();
        Map<String, Object> result = usersService.grantResources(loginUser, 1, resourceIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

        //user not exist
        loginUser.setUserType(UserType.ADMIN_USER);
        result = usersService.grantResources(loginUser, 2, resourceIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));

        //success
        when(resourceMapper.selectById(Mockito.anyInt())).thenReturn(getResource());
        when(resourceUserMapper.deleteResourceUser(1, 0)).thenReturn(1);
        result = usersService.grantResources(loginUser, 1, resourceIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    @Test
    public void testGrantUDFFunction() {
        String udfIds = "100000,120000";
        when(userMapper.selectById(1)).thenReturn(getUser());
        User loginUser = new User();
        Map<String, Object> result = usersService.grantUDFFunction(loginUser, 1, udfIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

        //user not exist
        loginUser.setUserType(UserType.ADMIN_USER);
        result = usersService.grantUDFFunction(loginUser, 2, udfIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));

        //success
        when(udfUserMapper.deleteByUserId(1)).thenReturn(1);
        result = usersService.grantUDFFunction(loginUser, 1, udfIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    @Test
    public void testGrantDataSource() {
        String datasourceIds = "100000,120000";
        when(userMapper.selectById(1)).thenReturn(getUser());
        User loginUser = new User();
        Map<String, Object> result = usersService.grantDataSource(loginUser, 1, datasourceIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

        //user not exist
        loginUser.setUserType(UserType.ADMIN_USER);
        result = usersService.grantDataSource(loginUser, 2, datasourceIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));

        //success
        when(datasourceUserMapper.deleteByUserId(Mockito.anyInt())).thenReturn(1);
        result = usersService.grantDataSource(loginUser, 1, datasourceIds);
        logger.info(result.toString());
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    private User getLoginUser() {
        User loginUser = new User();
        loginUser.setId(1);
        loginUser.setUserType(UserType.ADMIN_USER);
        return loginUser;
    }

    @Test
    public void getUserInfo() {
        User loginUser = new User();
        loginUser.setUserName("admin");
        loginUser.setUserType(UserType.ADMIN_USER);

        // get admin user
        Map<String, Object> result = usersService.getUserInfo(loginUser);
        logger.info(result.toString());
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        User tempUser = (User) result.get(Constants.DATA_LIST);
        //check userName
        Assert.assertEquals("admin", tempUser.getUserName());

        //get general user
        loginUser.setUserType(null);
        loginUser.setId(1);
        when(userMapper.queryDetailsById(1)).thenReturn(getGeneralUser());
        when(alertGroupMapper.queryByUserId(1)).thenReturn(getAlertGroups());
        result = usersService.getUserInfo(loginUser);
        logger.info(result.toString());
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        tempUser = (User) result.get(Constants.DATA_LIST);
        //check userName
        Assert.assertEquals("userTest0001", tempUser.getUserName());
    }

    @Test
    public void testQueryAllGeneralUsers() {
        User loginUser = new User();

        //no operate
        Map<String, Object> result = usersService.queryAllGeneralUsers(loginUser);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

        //success
        loginUser.setUserType(UserType.ADMIN_USER);
        when(userMapper.queryAllGeneralUser()).thenReturn(getUserList());
        result = usersService.queryAllGeneralUsers(loginUser);
        logger.info(result.toString());
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
        Assert.assertTrue(CollectionUtils.isNotEmpty(userList));
    }

    @Test
    public void testVerifyUserName() {
        //not exist user
        Result result = usersService.verifyUserName("admin89899");
        logger.info(result.toString());
        Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());

        //exist user
        when(userMapper.queryByUserNameAccurately("userTest0001")).thenReturn(getUser());
        result = usersService.verifyUserName("userTest0001");
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NAME_EXIST.getMsg(), result.getMsg());
    }

    @Test
    public void testUnauthorizedUser() {
        User loginUser = new User();
        when(userMapper.selectList(null)).thenReturn(getUserList());
        when(userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList());

        //no operate
        Map<String, Object> result = usersService.unauthorizedUser(loginUser, 2);
        logger.info(result.toString());
        loginUser.setUserType(UserType.ADMIN_USER);
        Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

        //success
        result = usersService.unauthorizedUser(loginUser, 2);
        logger.info(result.toString());
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    @Test
    public void testAuthorizedUser() {
        User loginUser = new User();
        when(userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList());

        //no operate
        Map<String, Object> result = usersService.authorizedUser(loginUser, 2);
        logger.info(result.toString());
        Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

        //success
        loginUser.setUserType(UserType.ADMIN_USER);
        result = usersService.authorizedUser(loginUser, 2);
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
        logger.info(result.toString());
        Assert.assertTrue(CollectionUtils.isNotEmpty(userList));
    }

    @Test
    public void testRegisterUser() {
        String userName = "userTest0002~";
        String userPassword = "userTest";
        String repeatPassword = "userTest";
        String email = "123@qq.com";
        try {
            //userName error
            Map<String, Object> result = usersService.registerUser(userName, userPassword, repeatPassword, email);
            Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));

            userName = "userTest0002";
            userPassword = "userTest000111111111111111";
            //password error
            result = usersService.registerUser(userName, userPassword, repeatPassword, email);
            Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));

            userPassword = "userTest0002";
            email = "1q.com";
            //email error
            result = usersService.registerUser(userName, userPassword, repeatPassword, email);
            Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));

            //repeatPassword error
            email = "7400@qq.com";
            repeatPassword = "userPassword";
            result = usersService.registerUser(userName, userPassword, repeatPassword, email);
            Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));

            //success
            repeatPassword = "userTest0002";
            result = usersService.registerUser(userName, userPassword, repeatPassword, email);
            Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        } catch (Exception e) {
            Assert.assertTrue(false);
        }
    }

    @Test
    public void testActivateUser() {
        User user = new User();
        user.setUserType(UserType.GENERAL_USER);
        String userName = "userTest0002~";
        try {
            //not admin
            Map<String, Object> result = usersService.activateUser(user, userName);
            Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

            //userName error
            user.setUserType(UserType.ADMIN_USER);
            result = usersService.activateUser(user, userName);
            Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));

            //user not exist
            userName = "userTest10013";
            result = usersService.activateUser(user, userName);
            Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));

            //user state error
            userName = "userTest0001";
            when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getUser());
            result = usersService.activateUser(user, userName);
            Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));

            //success
            when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getDisabledUser());
            result = usersService.activateUser(user, userName);
            Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        } catch (Exception e) {
            Assert.assertTrue(false);
        }
    }

    @Test
    public void testBatchActivateUser() {
        User user = new User();
        user.setUserType(UserType.GENERAL_USER);
        List<String> userNames = new ArrayList<>();
        userNames.add("userTest0001");
        userNames.add("userTest0002");
        userNames.add("userTest0003~");
        userNames.add("userTest0004");
        try {
            //not admin
            Map<String, Object> result = usersService.batchActivateUser(user, userNames);
            Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));

            //batch activate user names
            user.setUserType(UserType.ADMIN_USER);
            when(userMapper.queryByUserNameAccurately("userTest0001")).thenReturn(getUser());
            when(userMapper.queryByUserNameAccurately("userTest0002")).thenReturn(getDisabledUser());
            result = usersService.batchActivateUser(user, userNames);
            Map<String, Object> responseData = (Map<String, Object>) result.get(Constants.DATA_LIST);
            Map<String, Object> successData = (Map<String, Object>) responseData.get("success");
            int totalSuccess = (Integer) successData.get("sum");
            Map<String, Object> failedData = (Map<String, Object>) responseData.get("failed");
            int totalFailed = (Integer) failedData.get("sum");
            Assert.assertEquals(1, totalSuccess);
            Assert.assertEquals(3, totalFailed);
            Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        } catch (Exception e) {
            Assert.assertTrue(false);
        }
    }

    /**
     * get disabled user
     */
    private User getDisabledUser() {
        User user = new User();
        user.setUserType(UserType.GENERAL_USER);
        user.setUserName("userTest0001");
        user.setUserPassword("userTest0001");
        user.setState(0);
        return user;
    }

    /**
     * get user
     */
    private User getGeneralUser() {
        User user = new User();
        user.setUserType(UserType.GENERAL_USER);
        user.setUserName("userTest0001");
        user.setUserPassword("userTest0001");
        return user;
    }

    private List<User> getUserList() {
        List<User> userList = new ArrayList<>();
        userList.add(getGeneralUser());
        return userList;
    }

    /**
     * get user
     */
    private User getUser() {
        User user = new User();
        user.setUserType(UserType.ADMIN_USER);
        user.setUserName("userTest0001");
        user.setUserPassword("userTest0001");
        user.setState(1);
        return user;
    }

    /**
     * get tenant
     *
     * @return tenant
     */
    private Tenant getTenant() {
        Tenant tenant = new Tenant();
        tenant.setId(1);
        return tenant;
    }

    /**
     * get resource
     *
     * @return resource
     */
    private Resource getResource() {
        Resource resource = new Resource();
        resource.setPid(-1);
        resource.setUserId(1);
        resource.setDescription("ResourcesServiceTest.jar");
        resource.setAlias("ResourcesServiceTest.jar");
        resource.setFullName("/ResourcesServiceTest.jar");
        resource.setType(ResourceType.FILE);
        return resource;
    }

    private List<AlertGroup> getAlertGroups() {
        List<AlertGroup> alertGroups = new ArrayList<>();
        AlertGroup alertGroup = new AlertGroup();
        alertGroups.add(alertGroup);
        return alertGroups;
    }

}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,326
[Bug][api-server] After deleting a user, querying a project by joining the t_ds_user table returns null.
After a user is deleted, any query that joins t_ds_project with t_ds_user returns null for that user's projects. Suggested fix: change [ from t_ds_project p join t_ds_user u on p.user_id = u.id ] to [ from t_ds_project p left join t_ds_user u on p.user_id = u.id ].
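A minimal sketch of the suggested change, assuming a MyBatis annotation-style mapper (the interface name, method names, and selected columns here are hypothetical; the project's real queries live in its XML mappers). The point is that an inner join drops the project row once its owner is gone from t_ds_user, while a left join keeps the row and simply returns null for the user columns:

```java
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

public interface ProjectJoinSketchMapper {

    // Inner join: returns no row at all for a project whose owner was deleted.
    @Select("select p.*, u.user_name from t_ds_project p "
            + "join t_ds_user u on p.user_id = u.id where p.id = #{projectId}")
    Project queryWithInnerJoin(@Param("projectId") int projectId);

    // Left join: still returns the project; user_name simply comes back null.
    @Select("select p.*, u.user_name from t_ds_project p "
            + "left join t_ds_user u on p.user_id = u.id where p.id = #{projectId}")
    Project queryWithLeftJoin(@Param("projectId") int projectId);
}
```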
https://github.com/apache/dolphinscheduler/issues/5326
https://github.com/apache/dolphinscheduler/pull/5394
6b8461e901de65000f4f8f34ebfe011946e787d3
cbbe9c333c39a0163c100c5ced270f76301c97b3
"2021-04-20T03:12:14Z"
java
"2021-04-29T06:50:05Z"
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/createProject.vue
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
<template>
  <m-popover
    ref="popover"
    :nameText="item ? $t('Edit') : $t('Create Project')"
    :ok-text="item ? $t('Edit') : $t('Submit')"
    @close="_close"
    @ok="_ok">
    <template slot="content">
      <div class="projects-create-model">
        <m-list-box-f>
          <template slot="name"><strong>*</strong>{{ $t('Project Name') }}</template>
          <template slot="content">
            <el-input
              v-model="projectName"
              :placeholder="$t('Please enter name')"
              maxlength="60"
              size="small"
              type="input">
            </el-input>
          </template>
        </m-list-box-f>
        <m-list-box-f>
          <template slot="name">{{ $t('Description') }}</template>
          <template slot="content">
            <el-input
              v-model="description"
              :placeholder="$t('Please enter description')"
              size="small"
              type="textarea">
            </el-input>
          </template>
        </m-list-box-f>
      </div>
    </template>
  </m-popover>
</template>
<script>
  import _ from 'lodash'
  import i18n from '@/module/i18n'
  import store from '@/conf/home/store'
  import mPopover from '@/module/components/popup/popover'
  import mListBoxF from '@/module/components/listBoxF/listBoxF'

  export default {
    name: 'projects-create',
    data () {
      return {
        store,
        description: '',
        projectName: ''
      }
    },
    props: {
      item: Object
    },
    methods: {
      _ok () {
        if (!this._verification()) {
          return
        }
        let param = {
          projectName: _.trim(this.projectName),
          description: _.trim(this.description)
        }
        // edit
        if (this.item) {
          param.projectId = this.item.id
        }
        this.$refs.popover.spinnerLoading = true
        this.store.dispatch(`projects/${this.item ? 'updateProjects' : 'createProjects'}`, param).then(res => {
          this.$emit('_onUpdate')
          this.$message({
            message: res.msg,
            type: 'success',
            offset: 70
          })
          this.$refs.popover.spinnerLoading = false
        }).catch(e => {
          this.$message.error(e.msg || '')
          this.$refs.popover.spinnerLoading = false
        })
      },
      _close () {
        this.$emit('close')
      },
      _verification () {
        if (!this.projectName) {
          this.$message.warning(`${i18n.$t('Please enter name')}`)
          return false
        }
        return true
      }
    },
    watch: {},
    created () {
      if (this.item) {
        this.projectName = this.item.name
        this.description = this.item.description
      }
    },
    mounted () {
    },
    components: { mPopover, mListBoxF }
  }
</script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,386
[Bug][Alert] ERROR: relation "t_ds_plugin_define" does not exist
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Run `docker-compose up -d`
2. Run `docker logs -f dolphinscheduler-alert`
3. See error

**Expected behavior**
Bug fixed

**Screenshots**
Alert logs:
```
[INFO] 2021-04-26 00:27:39.678 org.apache.dolphinscheduler.alert.plugin.AlertPluginManager:[104] - Registering Alert Plugin 'DingTalk'
[INFO] 2021-04-26 00:27:39.681 org.apache.dolphinscheduler.alert.plugin.AlertPluginManager:[86] - -- Loaded Alert Plugin DingTalk --
[INFO] 2021-04-26 00:27:41.901 com.alibaba.druid.pool.DruidDataSource:[994] - {dataSource-1} inited
Exception in thread "main" java.lang.RuntimeException: load Alert Plugin Failed !
    at org.apache.dolphinscheduler.alert.AlertServer.initPlugin(AlertServer.java:104)
    at org.apache.dolphinscheduler.alert.AlertServer.start(AlertServer.java:145)
    at org.apache.dolphinscheduler.alert.AlertServer.main(AlertServer.java:161)
Caused by: org.springframework.jdbc.BadSqlGrammarException:
### Error querying database. Cause: org.postgresql.util.PSQLException: ERROR: relation "t_ds_plugin_define" does not exist
  Position: 23
### The error may exist in org/apache/dolphinscheduler/dao/mapper/PluginDefineMapper.xml
### The error may involve defaultParameterMap
### The error occurred while setting parameters
### SQL: select * from t_ds_plugin_define where plugin_name = ? and plugin_type = ?
### Cause: org.postgresql.util.PSQLException: ERROR: relation "t_ds_plugin_define" does not exist
  Position: 23
; bad SQL grammar []; nested exception is org.postgresql.util.PSQLException: ERROR: relation "t_ds_plugin_define" does not exist
  Position: 23
    at org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator.doTranslate(SQLErrorCodeSQLExceptionTranslator.java:239)
    at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:72)
    at org.mybatis.spring.MyBatisExceptionTranslator.translateExceptionIfPossible(MyBatisExceptionTranslator.java:74)
    at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:440)
    at com.sun.proxy.$Proxy11.selectList(Unknown Source)
    at org.mybatis.spring.SqlSessionTemplate.selectList(SqlSessionTemplate.java:223)
    at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.executeForMany(MybatisMapperMethod.java:158)
    at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:76)
    at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:61)
    at com.sun.proxy.$Proxy15.queryByNameAndType(Unknown Source)
    at org.apache.dolphinscheduler.dao.PluginDao.addOrUpdatePluginDefine(PluginDao.java:67)
    at org.apache.dolphinscheduler.alert.plugin.AlertPluginManager.installPlugin(AlertPluginManager.java:111)
    at org.apache.dolphinscheduler.common.plugin.DolphinPluginLoader.loadPlugin(DolphinPluginLoader.java:111)
    at org.apache.dolphinscheduler.common.plugin.DolphinPluginLoader.loadPlugin(DolphinPluginLoader.java:99)
    at org.apache.dolphinscheduler.common.plugin.DolphinPluginLoader.loadPlugins(DolphinPluginLoader.java:85)
    at org.apache.dolphinscheduler.alert.AlertServer.initPlugin(AlertServer.java:102)
    ... 2 more
Caused by: org.postgresql.util.PSQLException: ERROR: relation "t_ds_plugin_define" does not exist
  Position: 23
    at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440)
    at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:2183)
    at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:308)
    at org.postgresql.jdbc.PgStatement.executeInternal(PgStatement.java:441)
    at org.postgresql.jdbc.PgStatement.execute(PgStatement.java:365)
    at org.postgresql.jdbc.PgPreparedStatement.executeWithFlags(PgPreparedStatement.java:143)
    at org.postgresql.jdbc.PgPreparedStatement.execute(PgPreparedStatement.java:132)
    at com.alibaba.druid.pool.DruidPooledPreparedStatement.execute(DruidPooledPreparedStatement.java:497)
    at org.apache.ibatis.executor.statement.PreparedStatementHandler.query(PreparedStatementHandler.java:64)
    at org.apache.ibatis.executor.statement.RoutingStatementHandler.query(RoutingStatementHandler.java:79)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:63)
    at com.sun.proxy.$Proxy23.query(Unknown Source)
    at com.baomidou.mybatisplus.core.executor.MybatisSimpleExecutor.doQuery(MybatisSimpleExecutor.java:67)
    at org.apache.ibatis.executor.BaseExecutor.queryFromDatabase(BaseExecutor.java:324)
    at org.apache.ibatis.executor.BaseExecutor.query(BaseExecutor.java:156)
    at org.apache.ibatis.executor.CachingExecutor.query(CachingExecutor.java:109)
    at org.apache.ibatis.executor.CachingExecutor.query(CachingExecutor.java:83)
    at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:147)
    at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:140)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:426)
    ... 14 more
End start alert-server.
2021-04-26 00:27:43,637 INFO exited: alert (exit status 0; expected)
```

**Which version of Dolphin Scheduler:**
-[dev]

**Additional context**
Add any other context about the problem here.

**Requirement or improvement**
- Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5386
https://github.com/apache/dolphinscheduler/pull/5387
d45b27ce2e54c972e64d6b00b359643e34c1d2a8
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
"2021-04-25T16:30:21Z"
java
"2021-04-29T08:17:01Z"
dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.alert;

import static org.apache.dolphinscheduler.alert.utils.Constants.ALERT_PROPERTIES_PATH;
import static org.apache.dolphinscheduler.common.Constants.ALERT_RPC_PORT;

import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager;
import org.apache.dolphinscheduler.alert.processor.AlertRequestProcessor;
import org.apache.dolphinscheduler.alert.runner.AlertSender;
import org.apache.dolphinscheduler.alert.utils.Constants;
import org.apache.dolphinscheduler.common.plugin.DolphinPluginLoader;
import org.apache.dolphinscheduler.common.plugin.DolphinPluginManagerConfig;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.DaoFactory;
import org.apache.dolphinscheduler.dao.entity.Alert;
import org.apache.dolphinscheduler.remote.NettyRemotingServer;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.config.NettyServerConfig;
import org.apache.dolphinscheduler.spi.utils.StringUtils;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.ImmutableList;

/**
 * alert of start
 */
public class AlertServer {

    private static final Logger logger = LoggerFactory.getLogger(AlertServer.class);

    /**
     * Alert Dao
     */
    private AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class);

    private AlertSender alertSender;

    private static AlertServer instance;

    private AlertPluginManager alertPluginManager;

    private DolphinPluginManagerConfig alertPluginManagerConfig;

    public static final String ALERT_PLUGIN_BINDING = "alert.plugin.binding";

    public static final String ALERT_PLUGIN_DIR = "alert.plugin.dir";

    public static final String MAVEN_LOCAL_REPOSITORY = "maven.local.repository";

    /**
     * netty server
     */
    private NettyRemotingServer server;

    private static class AlertServerHolder {
        private static final AlertServer INSTANCE = new AlertServer();
    }

    public static final AlertServer getInstance() {
        return AlertServerHolder.INSTANCE;
    }

    private AlertServer() {
    }

    private void initPlugin() {
        alertPluginManager = new AlertPluginManager();
        alertPluginManagerConfig = new DolphinPluginManagerConfig();
        alertPluginManagerConfig.setPlugins(PropertyUtils.getString(ALERT_PLUGIN_BINDING));
        if (StringUtils.isNotBlank(PropertyUtils.getString(ALERT_PLUGIN_DIR))) {
            alertPluginManagerConfig.setInstalledPluginsDir(PropertyUtils.getString(ALERT_PLUGIN_DIR, Constants.ALERT_PLUGIN_PATH).trim());
        }
        if (StringUtils.isNotBlank(PropertyUtils.getString(MAVEN_LOCAL_REPOSITORY))) {
            alertPluginManagerConfig.setMavenLocalRepository(PropertyUtils.getString(MAVEN_LOCAL_REPOSITORY).trim());
        }
        DolphinPluginLoader alertPluginLoader = new DolphinPluginLoader(alertPluginManagerConfig, ImmutableList.of(alertPluginManager));
        try {
            alertPluginLoader.loadPlugins();
        } catch (Exception e) {
            throw new RuntimeException("load Alert Plugin Failed !", e);
        }
    }

    /**
     * init netty remoting server
     */
    private void initRemoteServer() {
        NettyServerConfig serverConfig = new NettyServerConfig();
        serverConfig.setListenPort(ALERT_RPC_PORT);
        this.server = new NettyRemotingServer(serverConfig);
        this.server.registerProcessor(CommandType.ALERT_SEND_REQUEST, new AlertRequestProcessor(alertDao, alertPluginManager));
        this.server.start();
    }

    /**
     * Cyclic alert info sending alert
     */
    private void runSender() {
        while (Stopper.isRunning()) {
            try {
                Thread.sleep(Constants.ALERT_SCAN_INTERVAL);
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
                Thread.currentThread().interrupt();
            }
            if (alertPluginManager == null || alertPluginManager.getAlertChannelMap().size() == 0) {
                logger.warn("No Alert Plugin . Can not send alert info. ");
            } else {
                List<Alert> alerts = alertDao.listWaitExecutionAlert();
                alertSender = new AlertSender(alerts, alertDao, alertPluginManager);
                alertSender.run();
            }
        }
    }

    /**
     * start
     */
    public void start() {
        PropertyUtils.loadPropertyFile(ALERT_PROPERTIES_PATH);
        initPlugin();
        initRemoteServer();
        logger.info("alert server ready start ");
        runSender();
    }

    /**
     * stop
     */
    public void stop() {
        this.server.close();
        logger.info("alert server shut down");
    }

    public static void main(String[] args) {
        AlertServer alertServer = AlertServer.getInstance();
        alertServer.start();
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                alertServer.stop();
            }
        });
    }

}
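The `RuntimeException("load Alert Plugin Failed !")` thrown from `initPlugin()` above is exactly where the docker-compose startup in this issue dies when the schema has not been initialized yet. A minimal sketch of one way to guard against that race, assuming access to the alert server's `DataSource`; this is only an illustration of the idea, not necessarily the approach taken in PR #5387:

```java
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

import javax.sql.DataSource;

/**
 * Hypothetical startup guard: block until t_ds_plugin_define is queryable so a
 * fresh deployment does not crash while the database schema is still being created.
 */
public class PluginTableGuard {

    private final DataSource dataSource;

    public PluginTableGuard(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    /** Returns true once the plugin-define table is visible via JDBC metadata. */
    public boolean pluginTableExists() {
        try (Connection conn = dataSource.getConnection();
             ResultSet rs = conn.getMetaData().getTables(null, null, "t_ds_plugin_define", null)) {
            return rs.next();
        } catch (SQLException e) {
            // Treat any metadata failure as "not ready yet" and keep waiting.
            return false;
        }
    }

    /** Polls until the table exists, sleeping between attempts. */
    public void awaitPluginTable(long sleepMillis) throws InterruptedException {
        while (!pluginTableExists()) {
            Thread.sleep(sleepMillis);
        }
    }
}
```

Calling something like `awaitPluginTable(5000L)` before `initPlugin()` would let the alert server wait for schema initialization instead of exiting.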
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,386
[Bug][Alert] ERROR: relation "t_ds_plugin_define" does not exist
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Run `docker-compose up -d`
2. Run `docker logs -f dolphinscheduler-alert`
3. See error

**Expected behavior**
Bug fixed

**Screenshots**
Alert logs: the same `relation "t_ds_plugin_define" does not exist` stack trace reproduced in full in the first record of this issue above.

**Which version of Dolphin Scheduler:**
-[dev]

**Additional context**
Add any other context about the problem here.

**Requirement or improvement**
- Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5386
https://github.com/apache/dolphinscheduler/pull/5387
d45b27ce2e54c972e64d6b00b359643e34c1d2a8
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
"2021-04-25T16:30:21Z"
java
"2021-04-29T08:17:01Z"
dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/AlertServerTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.alert;

import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager;
import org.apache.dolphinscheduler.common.plugin.DolphinPluginLoader;
import org.apache.dolphinscheduler.common.plugin.DolphinPluginManagerConfig;
import org.apache.dolphinscheduler.alert.runner.AlertSender;
import org.apache.dolphinscheduler.alert.utils.Constants;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.DaoFactory;
import org.apache.dolphinscheduler.dao.PluginDao;
import org.apache.dolphinscheduler.remote.NettyRemotingServer;
import org.apache.dolphinscheduler.spi.alert.AlertChannel;

import java.util.concurrent.ConcurrentHashMap;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

@RunWith(PowerMockRunner.class)
@PrepareForTest({AlertServer.class, DaoFactory.class})
public class AlertServerTest {

    @Before
    public void before() {
    }

    @Test
    public void testMain() throws Exception {
        AlertDao alertDao = PowerMockito.mock(AlertDao.class);
        PowerMockito.mockStatic(DaoFactory.class);
        PowerMockito.when(DaoFactory.getDaoInstance(AlertDao.class)).thenReturn(alertDao);

        PluginDao pluginDao = PowerMockito.mock(PluginDao.class);
        PowerMockito.when(DaoFactory.getDaoInstance(PluginDao.class)).thenReturn(pluginDao);

        AlertChannel alertChannelMock = PowerMockito.mock(AlertChannel.class);
        AlertPluginManager alertPluginManager = PowerMockito.mock(AlertPluginManager.class);
        PowerMockito.whenNew(AlertPluginManager.class).withNoArguments().thenReturn(alertPluginManager);

        ConcurrentHashMap alertChannelMap = new ConcurrentHashMap<>();
        alertChannelMap.put("pluginName", alertChannelMock);
        PowerMockito.when(alertPluginManager.getPluginNameById(Mockito.anyInt())).thenReturn("pluginName");
        PowerMockito.when(alertPluginManager.getAlertChannelMap()).thenReturn(alertChannelMap);

        DolphinPluginManagerConfig alertPluginManagerConfig = PowerMockito.mock(DolphinPluginManagerConfig.class);
        PowerMockito.whenNew(DolphinPluginManagerConfig.class).withNoArguments().thenReturn(alertPluginManagerConfig);

        NettyRemotingServer nettyRemotingServer = PowerMockito.mock(NettyRemotingServer.class);
        PowerMockito.whenNew(NettyRemotingServer.class).withAnyArguments().thenReturn(nettyRemotingServer);

        AlertSender alertSender = PowerMockito.mock(AlertSender.class);
        PowerMockito.whenNew(AlertSender.class).withAnyArguments().thenReturn(alertSender);

        DolphinPluginLoader dolphinPluginLoader = PowerMockito.mock(DolphinPluginLoader.class);
        PowerMockito.whenNew(DolphinPluginLoader.class).withAnyArguments().thenReturn(dolphinPluginLoader);

        AlertServer alertServer = AlertServer.getInstance();
        Assert.assertNotNull(alertServer);

        new Thread(() -> {
            alertServer.start();
        }).start();

        Thread.sleep(5 * Constants.ALERT_SCAN_INTERVAL);
        alertServer.stop();
    }
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,386
[Bug][Alert] ERROR: relation "t_ds_plugin_define" does not exist
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Run `docker-compose up -d`
2. Run `docker logs -f dolphinscheduler-alert`
3. See error

**Expected behavior**
Bug fixed

**Screenshots**
Alert logs: the same `relation "t_ds_plugin_define" does not exist` stack trace reproduced in full in the first record of this issue above.

**Which version of Dolphin Scheduler:**
-[dev]

**Additional context**
Add any other context about the problem here.

**Requirement or improvement**
- Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5386
https://github.com/apache/dolphinscheduler/pull/5387
d45b27ce2e54c972e64d6b00b359643e34c1d2a8
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
"2021-04-25T16:30:21Z"
java
"2021-04-29T08:17:01Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/PluginDao.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.dao;

import static java.util.Objects.requireNonNull;

import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory;
import org.apache.dolphinscheduler.dao.entity.PluginDefine;
import org.apache.dolphinscheduler.dao.mapper.PluginDefineMapper;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class PluginDao extends AbstractBaseDao {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    @Autowired
    private PluginDefineMapper pluginDefineMapper;

    @Override
    protected void init() {
        pluginDefineMapper = ConnectionFactory.getInstance().getMapper(PluginDefineMapper.class);
    }

    /**
     * add pluginDefine
     *
     * @param pluginDefine plugin define entity
     * @return plugin define id
     */
    public int addPluginDefine(PluginDefine pluginDefine) {
        return pluginDefineMapper.insert(pluginDefine);
    }

    /**
     * add or update plugin define
     *
     * @param pluginDefine new pluginDefine
     */
    public int addOrUpdatePluginDefine(PluginDefine pluginDefine) {
        requireNonNull(pluginDefine, "pluginDefine is null");
        requireNonNull(pluginDefine.getPluginName(), "pluginName is null");
        requireNonNull(pluginDefine.getPluginType(), "pluginType is null");

        List<PluginDefine> pluginDefineList = pluginDefineMapper.queryByNameAndType(pluginDefine.getPluginName(), pluginDefine.getPluginType());
        if (CollectionUtils.isEmpty(pluginDefineList)) {
            return pluginDefineMapper.insert(pluginDefine);
        }
        PluginDefine currPluginDefine = pluginDefineList.get(0);
        if (!currPluginDefine.getPluginParams().equals(pluginDefine.getPluginParams())) {
            currPluginDefine.setUpdateTime(pluginDefine.getUpdateTime());
            currPluginDefine.setPluginParams(pluginDefine.getPluginParams());
            pluginDefineMapper.updateById(currPluginDefine);
        }
        return currPluginDefine.getId();
    }

    /**
     * query plugin define by id
     *
     * @param pluginDefineId plugin define id
     * @return PluginDefine
     */
    public PluginDefine getPluginDefineById(int pluginDefineId) {
        return pluginDefineMapper.selectById(pluginDefineId);
    }

    public PluginDefineMapper getPluginDefineMapper() {
        return pluginDefineMapper;
    }

    public void setPluginDefineMapper(PluginDefineMapper pluginDefineMapper) {
        this.pluginDefineMapper = pluginDefineMapper;
    }
}
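As a quick illustration of the add-or-update contract above — insert when absent, update `plugin_params` when they changed, and return the row id either way — here is a hypothetical registration call. The plugin name and params JSON are invented, and a `(pluginName, pluginType, pluginParams)` constructor on `PluginDefine` is assumed rather than confirmed:

```java
import org.apache.dolphinscheduler.dao.DaoFactory;
import org.apache.dolphinscheduler.dao.PluginDao;
import org.apache.dolphinscheduler.dao.entity.PluginDefine;

public class PluginDaoUsageSketch {
    public static void main(String[] args) {
        // Illustrative plugin metadata only; real values come from the loaded plugin jar.
        PluginDefine define = new PluginDefine("Email", "alert", "[{\"field\":\"receivers\"}]");

        PluginDao pluginDao = DaoFactory.getDaoInstance(PluginDao.class);

        // Inserts on first sight, updates plugin_params if they changed,
        // and returns the stable row id in both cases.
        int pluginId = pluginDao.addOrUpdatePluginDefine(define);
        System.out.println("plugin define id: " + pluginId);
    }
}
```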
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,386
[Bug][Alert] ERROR: relation "t_ds_plugin_define" does not exist
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Run `docker-compose up -d`
2. Run `docker logs -f dolphinscheduler-alert`
3. See error

**Expected behavior**
Bug fixed

**Screenshots**
Alert logs: the same `relation "t_ds_plugin_define" does not exist` stack trace reproduced in full in the first record of this issue above.

**Which version of Dolphin Scheduler:**
-[dev]

**Additional context**
Add any other context about the problem here.

**Requirement or improvement**
- Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5386
https://github.com/apache/dolphinscheduler/pull/5387
d45b27ce2e54c972e64d6b00b359643e34c1d2a8
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
"2021-04-25T16:30:21Z"
java
"2021-04-29T08:17:01Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/PluginDefineMapper.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.dao.mapper;

import org.apache.dolphinscheduler.dao.entity.PluginDefine;

import org.apache.ibatis.annotations.Param;

import java.util.List;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;

public interface PluginDefineMapper extends BaseMapper<PluginDefine> {

    /**
     * query all plugin define
     *
     * @return PluginDefine list
     */
    List<PluginDefine> queryAllPluginDefineList();

    /**
     * query by plugin type
     *
     * @param pluginType pluginType
     * @return PluginDefine list
     */
    List<PluginDefine> queryByPluginType(@Param("pluginType") String pluginType);

    /**
     * query detail by id
     *
     * @param id id
     * @return PluginDefineDetail
     */
    PluginDefine queryDetailById(@Param("id") int id);

    /**
     * query by name and type
     *
     * @param pluginName
     * @param pluginType
     * @return
     */
    List<PluginDefine> queryByNameAndType(@Param("pluginName") String pluginName, @Param("pluginType") String pluginType);
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,386
[Bug][Alert] ERROR: relation "t_ds_plugin_define" does not exist
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Run `docker-compose up -d`
2. Run `docker logs -f dolphinscheduler-alert`
3. See error

**Expected behavior**
Bug fixed

**Screenshots**
Alert logs: the same `relation "t_ds_plugin_define" does not exist` stack trace reproduced in full in the first record of this issue above.

**Which version of Dolphin Scheduler:**
-[dev]

**Additional context**
Add any other context about the problem here.

**Requirement or improvement**
- Please describe about your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/5386
https://github.com/apache/dolphinscheduler/pull/5387
d45b27ce2e54c972e64d6b00b359643e34c1d2a8
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
"2021-04-25T16:30:21Z"
java
"2021-04-29T08:17:01Z"
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/PluginDefineMapper.xml
<?xml version="1.0" encoding="UTF-8" ?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" > <mapper namespace="org.apache.dolphinscheduler.dao.mapper.PluginDefineMapper"> <select id="queryAllPluginDefineList" resultType="org.apache.dolphinscheduler.dao.entity.PluginDefine"> select * from t_ds_plugin_define where 1=1 </select> <select id="queryByPluginType" resultType="org.apache.dolphinscheduler.dao.entity.PluginDefine"> select id,plugin_name,plugin_type,create_time,update_time from t_ds_plugin_define where plugin_type = #{pluginType} </select> <select id="queryByNameAndType" resultType="org.apache.dolphinscheduler.dao.entity.PluginDefine"> select * from t_ds_plugin_define where plugin_name = #{pluginName} and plugin_type = #{pluginType} </select> <select id="queryDetailById" resultType="org.apache.dolphinscheduler.dao.entity.PluginDefine"> select id,plugin_name,plugin_type,plugin_params,create_time,update_time from t_ds_plugin_define where id = #{id} </select> </mapper>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,346
[Improvement][UI] Resource files support copy path operations
Resource files currently do not support copying their paths, so it is easy to make mistakes when entering a path by hand. Add a copy button to support the copy operation (a minimal copy-helper sketch follows this record). ![image](https://user-images.githubusercontent.com/39816903/115647469-6ca1f380-a356-11eb-9341-30bb2d8257a0.png)
https://github.com/apache/dolphinscheduler/issues/5346
https://github.com/apache/dolphinscheduler/pull/5348
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
"2021-04-22T02:35:20Z"
java
"2021-04-29T08:29:29Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/flink.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="flink-model"> <m-list-box> <div slot="text">{{$t('Program Type')}}</div> <div slot="content"> <el-select style="width: 130px;" size="small" v-model="programType" :disabled="isDetails"> <el-option v-for="city in programTypeList" :key="city.code" :value="city.code" :label="city.code"> </el-option> </el-select> </div> </m-list-box> <m-list-box v-if="programType !== 'PYTHON'"> <div slot="text">{{$t('Main Class')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="mainClass" :placeholder="$t('Please enter main class')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Main Jar Package')}}</div> <div slot="content"> <treeselect v-model="mainJar" maxHeight="200" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :disabled="isDetails" :placeholder="$t('Please enter main jar package')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Deploy Mode')}}</div> <div slot="content"> <el-radio-group v-model="deployMode" size="small"> <el-radio :label="'cluster'" :disabled="isDetails"></el-radio> <el-radio :label="'local'" :disabled="isDetails"></el-radio> </el-radio-group> </div> </m-list-box> <m-list-box v-if="deployMode === 'cluster'"> <div slot="text">{{$t('Flink Version')}}</div> <div slot="content"> <el-select style="width: 100px;" size="small" v-model="flinkVersion" :disabled="isDetails"> <el-option v-for="version in flinkVersionList" :key="version.code" :value="version.code" :label="version.code"> </el-option> </el-select> </div> </m-list-box> <m-list-box v-if="deployMode === 'cluster'"> <div slot="text">{{$t('App Name')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="appName" :placeholder="$t('Please enter app name(optional)')"> </el-input> </div> </m-list-box> <m-list-4-box v-if="deployMode === 'cluster'"> <div slot="text">{{$t('JobManager Memory')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="jobManagerMemory" :placeholder="$t('Please enter JobManager memory')"> </el-input> </div> <div slot="text-2">{{$t('TaskManager Memory')}}</div> <div slot="content-2"> <el-input :disabled="isDetails" type="input" size="small" v-model="taskManagerMemory" :placeholder="$t('Please enter TaskManager memory')"> </el-input> </div> </m-list-4-box> <m-list-4-box v-if="deployMode === 'cluster'"> <div slot="text">{{$t('Slot Number')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="slot" :placeholder="$t('Please enter Slot number')"> </el-input> </div> <div slot="text-2" v-if="flinkVersion === 
'<1.10'">{{$t('TaskManager Number')}}</div> <div slot="content-2" v-if="flinkVersion === '<1.10'"> <el-input :disabled="isDetails" type="input" size="small" v-model="taskManager" :placeholder="$t('Please enter TaskManager number')"> </el-input> </div> </m-list-4-box> <m-list-4-box> <div slot="text">{{$t('Parallelism')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="parallelism" :placeholder="$t('Please enter Parallelism')"> </el-input> </div> </m-list-4-box> <m-list-box> <div slot="text">{{$t('Main Arguments')}}</div> <div slot="content"> <el-input :autosize="{minRows:2}" :disabled="isDetails" type="textarea" size="small" v-model="mainArgs" :placeholder="$t('Please enter main arguments')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Option Parameters')}}</div> <div slot="content"> <el-input :disabled="isDetails" :autosize="{minRows:2}" type="textarea" size="small" v-model="others" :placeholder="$t('Please enter option parameters')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="mainJarList" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="false"> </m-local-params> </div> </m-list-box> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mLocalParams from './_source/localParams' import mListBox from './_source/listBox' import mList4Box from './_source/list4Box' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState from '@/module/mixin/disabledState' export default { name: 'flink', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // Main function class mainClass: '', // Master jar package mainJar: null, // Master jar package(List) mainJarLists: [], mainJarList: [], // Deployment method deployMode: 'cluster', // Resource(list) resourceList: [], // Cache ResourceList cacheResourceList: [], // Custom function localParams: [], // Slot number slot: 1, // Parallelism parallelism: 1, // TaskManager mumber taskManager: '2', // JobManager memory jobManagerMemory: '1G', // TaskManager memory taskManagerMemory: '2G', // Flink app name appName: '', // Main arguments mainArgs: '', // Option parameters others: '', // Program type programType: 'SCALA', // Program type(List) programTypeList: [{ code: 'JAVA' }, { code: 'SCALA' }, { code: 'PYTHON' }], flinkVersion: '<1.10', // Flink Versions(List) flinkVersionList: [{ code: '<1.10' }, { code: '>=1.10' }], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [] } }, props: { backfillItem: Object }, mixins: [disabledState], methods: { /** * getResourceId */ marjarId (name) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + name }).then(res => { this.mainJar = res.id }).catch(e => { this.$message.error(e.msg || '') }) }, /** * return localParams */ _onLocalParams (a) { this.localParams = a }, /** * return resourceList */ _onResourcesData (a) { this.resourceList = a }, /** * cache resourceList */ _onCacheResourcesData 
(a) { this.cacheResourceList = a }, /** * verification */ _verification () { if (this.programType !== 'PYTHON' && !this.mainClass) { this.$message.warning(`${i18n.$t('Please enter main class')}`) return false } if (!this.mainJar) { this.$message.warning(`${i18n.$t('Please enter main jar package')}`) return false } if (!this.jobManagerMemory) { this.$message.warning(`${i18n.$t('Please enter JobManager memory')}`) return false } if (!Number.isInteger(parseInt(this.jobManagerMemory))) { this.$message.warning(`${i18n.$t('Memory should be a positive integer')}`) return false } if (!this.taskManagerMemory) { this.$message.warning(`${i18n.$t('Please enter TaskManager memory')}`) return false } if (!Number.isInteger(parseInt(this.taskManagerMemory))) { this.$message.warning(`${i18n.$t('Memory should be a positive integer')}`) return false } if (!Number.isInteger(parseInt(this.slot))) { this.$message.warning(`${i18n.$t('Please enter Slot number')}`) return false } if (!Number.isInteger(parseInt(this.parallelism))) { this.$message.warning(`${i18n.$t('Please enter Parallelism')}`) return false } if (this.flinkVersion === '<1.10' && !Number.isInteger(parseInt(this.taskManager))) { this.$message.warning(`${i18n.$t('Please enter TaskManager number')}`) return false } // noRes if (this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } // storage this.$emit('on-params', { mainClass: this.mainClass, mainJar: { id: this.mainJar }, deployMode: this.deployMode, resourceList: _.map(this.resourceList, v => { return { id: v } }), localParams: this.localParams, flinkVersion: this.flinkVersion, slot: this.slot, parallelism: this.parallelism, taskManager: this.taskManager, jobManagerMemory: this.jobManagerMemory, taskManagerMemory: this.taskManagerMemory, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType }) return true }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === '' || item.children === undefined || item.children === null || item.children.length === 0 ? 
this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.mainJarList = this.mainJarList.concat(noResources) } } } }, watch: { // Listening type programType (type) { if (type === 'PYTHON') { this.mainClass = '' } }, // Watch the cacheParams cacheParams (val) { this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result } }, computed: { resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { mainClass: this.mainClass, mainJar: { id: this.mainJar }, deployMode: this.deployMode, resourceList: this.resourceIdArr, localParams: this.localParams, slot: this.slot, parallelism: this.parallelism, taskManager: this.taskManager, jobManagerMemory: this.jobManagerMemory, taskManagerMemory: this.taskManagerMemory, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType } } }, created () { let item = this.store.state.dag.resourcesListS let items = this.store.state.dag.resourcesListJar this.diGuiTree(item) this.diGuiTree(items) this.mainJarList = item this.mainJarLists = items let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.mainClass = o.params.mainClass || '' if (o.params.mainJar.res) { this.marjarId(o.params.mainJar.res) } else if (o.params.mainJar.res === '') { this.mainJar = '' } else { this.mainJar = o.params.mainJar.id || '' } this.deployMode = o.params.deployMode || '' this.flinkVersion = o.params.flinkVersion || '<1.10' this.slot = o.params.slot || 1 this.parallelism = o.params.parallelism || 1 this.taskManager = o.params.taskManager || '2' this.jobManagerMemory = 
o.params.jobManagerMemory || '1G' this.taskManagerMemory = o.params.taskManagerMemory || '2G' this.appName = o.params.appName || '' this.mainArgs = o.params.mainArgs || '' this.others = o.params.others this.programType = o.params.programType || 'SCALA' // backfill resourceList let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v => { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) this.cacheResourceList = resourceList } // backfill localParams let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { }, components: { mLocalParams, mListBox, mList4Box, Treeselect } } </script>
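As context for the record above: a minimal sketch of the clipboard behaviour the issue requests, assuming the copy button hands over the resource's `fullName`. The `copyPath` name and the fallback path are illustrative, not the project's actual implementation; note the UI already ships the 'Copy success' and 'The browser does not support automatic copying' strings (see the en_US.js record below), which a handler like this would surface via `this.$message`.

```
// Hypothetical clipboard helper for the requested copy button.
// Writes a resource path (e.g. node.raw.fullName) to the clipboard,
// falling back to a temporary <textarea> + execCommand('copy') in
// browsers without the async Clipboard API.
function copyPath (fullName) {
  if (navigator.clipboard && navigator.clipboard.writeText) {
    return navigator.clipboard.writeText(fullName)
  }
  const textarea = document.createElement('textarea')
  textarea.value = fullName
  document.body.appendChild(textarea)
  textarea.select()
  const ok = document.execCommand('copy')
  document.body.removeChild(textarea)
  return ok ? Promise.resolve() : Promise.reject(new Error('copy unsupported'))
}
```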
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,346
[Improvement][UI] Resource files support copy path operations
Resource files currently do not support copying their paths, so it is easy to make mistakes when entering a path by hand. Add a copy button to support the copy operation. ![image](https://user-images.githubusercontent.com/39816903/115647469-6ca1f380-a356-11eb-9341-30bb2d8257a0.png)
https://github.com/apache/dolphinscheduler/issues/5346
https://github.com/apache/dolphinscheduler/pull/5348
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
"2021-04-22T02:35:20Z"
java
"2021-04-29T08:29:29Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="mr-model"> <m-list-box> <div slot="text">{{$t('Program Type')}}</div> <div slot="content"> <el-select v-model="programType" :disabled="isDetails" style="width: 110px;" size="small"> <el-option v-for="city in programTypeList" :key="city.code" :value="city.code" :label="city.code"> </el-option> </el-select> </div> </m-list-box> <m-list-box v-if="programType !== 'PYTHON'"> <div slot="text">{{$t('Main Class')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="mainClass" :placeholder="$t('Please enter main class')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Main Jar Package')}}</div> <div slot="content"> <treeselect v-model="mainJar" maxHeight="200" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please enter main jar package')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('App Name')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="appName" :placeholder="$t('Please enter app name(optional)')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Main Arguments')}}</div> <div slot="content"> <el-input :autosize="{minRows:2}" :disabled="isDetails" type="textarea" size="small" v-model="mainArgs" :placeholder="$t('Please enter main arguments')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Option Parameters')}}</div> <div slot="content"> <el-input :disabled="isDetails" :autosize="{minRows:2}" type="textarea" size="small" v-model="others" :placeholder="$t('Please enter option parameters')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="mainJarList" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="false"> </m-local-params> </div> </m-list-box> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mListBox from './_source/listBox' import mLocalParams from './_source/localParams' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState 
from '@/module/mixin/disabledState' export default { name: 'mr', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // Main function class mainClass: '', // Master jar package mainJar: null, // Main jar package (List) mainJarLists: [], mainJarList: [], // Resource(list) resourceList: [], // Cache ResourceList cacheResourceList: [], // Custom parameter localParams: [], // MR app name appName: '', // Main arguments mainArgs: '', // Option parameters others: '', // Program type programType: 'JAVA', // Program type(List) programTypeList: [{ code: 'JAVA' }, { code: 'PYTHON' }], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [] } }, props: { backfillItem: Object }, mixins: [disabledState], methods: { /** * getResourceId */ marjarId (name) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + name }).then(res => { this.mainJar = res.id }).catch(e => { this.$message.error(e.msg || '') }) }, /** * return localParams */ _onLocalParams (a) { this.localParams = a }, /** * return resourceList */ _onResourcesData (a) { this.resourceList = a }, /** * cache resourceList */ _onCacheResourcesData (a) { this.cacheResourceList = a }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === '' || item.children === undefined || item.children === null || item.children.length === 0 ? this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.mainJarList = this.mainJarList.concat(noResources) } } }, /** * verification */ _verification () { if (this.programType !== 'PYTHON' && !this.mainClass) { this.$message.warning(`${i18n.$t('Please enter main class')}`) return false } if (!this.mainJar) { this.$message.warning(`${i18n.$t('Please enter main jar package')}`) return false } // noRes if (this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } // storage this.$emit('on-params', { mainClass: this.mainClass, mainJar: { id: this.mainJar }, 
resourceList: _.map(this.resourceList, v => { return { id: v } }), localParams: this.localParams, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType }) return true } }, watch: { /** * monitor */ programType (type) { if (type === 'PYTHON') { this.mainClass = '' } }, // Watch the cacheParams cacheParams (val) { this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result } }, computed: { resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { mainClass: this.mainClass, mainJar: { id: this.mainJar }, resourceList: this.resourceIdArr, localParams: this.localParams, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType } } }, created () { let item = this.store.state.dag.resourcesListS let items = this.store.state.dag.resourcesListJar this.diGuiTree(item) this.diGuiTree(items) this.mainJarList = item this.mainJarLists = items let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.mainClass = o.params.mainClass || '' if (o.params.mainJar.res) { this.marjarId(o.params.mainJar.res) } else if (o.params.mainJar.res === '') { this.mainJar = '' } else { this.mainJar = o.params.mainJar.id || '' } this.appName = o.params.appName || '' this.mainArgs = o.params.mainArgs || '' this.others = o.params.others this.programType = o.params.programType || 'JAVA' // backfill resourceList let resourceList = o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v => { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) this.cacheResourceList = resourceList } // backfill localParams let backResource = o.params.resourceList || [] let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { }, components: { mLocalParams, mListBox, Treeselect } } </script>
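Note on the mr.vue record above: in its created() hook, `this.dataProcess(backResource)` is called inside the resourceList backfill loop, but `let backResource = o.params.resourceList || []` is only declared after that loop, so the synchronous branch hits a temporal-dead-zone ReferenceError (the flink/python/shell/spark forms declare it before the loop). A minimal sketch of the corrected ordering, with the backfill logic simplified for illustration:

```
// Sketch: declare backResource before the loop that uses it, as the
// other task forms already do. Simplified; async id resolution and
// error handling omitted.
const o = this.backfillItem
const backResource = o.params.resourceList || [] // declared before use
const resourceList = o.params.resourceList || []
if (resourceList.length) {
  resourceList.forEach(v => {
    this.resourceList.push(v.id || v.res)
    this.dataProcess(backResource) // safe: no TDZ access
  })
  this.cacheResourceList = resourceList
}
```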
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,346
[Improvement][UI] Resource files support copy path operations
Resource files currently do not support copying their paths, so it is easy to make mistakes when entering a path by hand. Add a copy button to support the copy operation. ![image](https://user-images.githubusercontent.com/39816903/115647469-6ca1f380-a356-11eb-9341-30bb2d8257a0.png)
https://github.com/apache/dolphinscheduler/issues/5346
https://github.com/apache/dolphinscheduler/pull/5348
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
"2021-04-22T02:35:20Z"
java
"2021-04-29T08:29:29Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="python-model"> <m-list-box> <div slot="text">{{$t('Script')}}</div> <div slot="content"> <div class="form-mirror"> <textarea id="code-python-mirror" name="code-python-mirror" style="opacity: 0;"> </textarea> <a class="ans-modal-box-max"> <em class="el-icon-full-screen" @click="setEditorVal"></em> </a> </div> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="resourceOptions" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="false"> </m-local-params> </div> </m-list-box> <el-dialog :visible.sync="scriptBoxDialog" append-to-body="true" width="80%"> <m-script-box :item="item" @getSriptBoxValue="getSriptBoxValue" @closeAble="closeAble"></m-script-box> </el-dialog> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mListBox from './_source/listBox' import mScriptBox from './_source/scriptBox' import mLocalParams from './_source/localParams' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState from '@/module/mixin/disabledState' import codemirror from '@/conf/home/pages/resource/pages/file/pages/_source/codemirror' let editor export default { name: 'python', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // script rawScript: '', // Custom parameter localParams: [], // resource(list) resourceList: [], // Cache ResourceList cacheResourceList: [], resourceOptions: [], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [], item: '', scriptBoxDialog: false } }, mixins: [disabledState], props: { backfillItem: Object }, methods: { /** * return localParams */ _onLocalParams (a) { this.localParams = a }, setEditorVal () { this.item = editor.getValue() this.scriptBoxDialog = true }, getSriptBoxValue (val) { editor.setValue(val) }, /** * return resourceList */ // _onResourcesData (a) { // this.resourceList = a // }, /** * cache resourceList */ _onCacheResourcesData (a) { this.cacheResourceList = a }, /** * verification */ _verification () { // rawScript 验证 if (!editor.getValue()) { this.$message.warning(`${i18n.$t('Please enter script(required)')}`) return false } // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } // noRes if 
(this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // storage this.$emit('on-params', { resourceList: _.map(this.resourceList, v => { return { id: v } }), localParams: this.localParams, rawScript: editor.getValue() }) return true }, /** * Processing code highlighting */ _handlerEditor () { // editor editor = codemirror('code-python-mirror', { mode: 'python', readOnly: this.isDetails }) this.keypress = () => { if (!editor.getOption('readOnly')) { editor.showHint({ completeSingle: false }) } } // Monitor keyboard editor.on('keypress', this.keypress) editor.setValue(this.rawScript) return editor }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === '' || item.children === undefined || item.children === null || item.children.length === 0 ? this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.resourceOptions.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.resourceOptions = this.resourceOptions.concat(noResources) } } } }, watch: { // Watch the cacheParams cacheParams (val) { this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result } }, computed: { resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.resourceOptions.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { resourceList: this.resourceIdArr, localParams: this.localParams } } }, created () { let item = this.store.state.dag.resourcesListS this.diGuiTree(item) this.resourceOptions = item let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.rawScript = o.params.rawScript || '' // backfill resourceList let backResource = o.params.resourceList || [] let resourceList = 
o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v => { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) this.cacheResourceList = resourceList } // backfill localParams let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { setTimeout(() => { this._handlerEditor() }, 200) }, destroyed () { editor.toTextArea() // Uninstall editor.off($('.code-python-mirror'), 'keypress', this.keypress) }, components: { mLocalParams, mListBox, Treeselect, mScriptBox } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,346
[Improvement][UI] Resource files support copy path operations
Resource files currently do not support copying their paths, so it is easy to make mistakes when entering a path by hand. Add a copy button to support the copy operation. ![image](https://user-images.githubusercontent.com/39816903/115647469-6ca1f380-a356-11eb-9341-30bb2d8257a0.png)
https://github.com/apache/dolphinscheduler/issues/5346
https://github.com/apache/dolphinscheduler/pull/5348
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
"2021-04-22T02:35:20Z"
java
"2021-04-29T08:29:29Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="shell-model"> <m-list-box> <div slot="text">{{$t('Script')}}</div> <div slot="content"> <div class="form-mirror"> <textarea id="code-shell-mirror" name="code-shell-mirror" style="opacity: 0"> </textarea> <a class="ans-modal-box-max"> <em class="el-icon-full-screen" @click="setEditorVal"></em> </a> </div> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="options" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="true"> </m-local-params> </div> </m-list-box> <el-dialog :visible.sync="scriptBoxDialog" append-to-body="true" width="80%"> <m-script-box :item="item" @getSriptBoxValue="getSriptBoxValue" @closeAble="closeAble"></m-script-box> </el-dialog> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mListBox from './_source/listBox' import mScriptBox from './_source/scriptBox' import mLocalParams from './_source/localParams' import disabledState from '@/module/mixin/disabledState' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import codemirror from '@/conf/home/pages/resource/pages/file/pages/_source/codemirror' let editor export default { name: 'shell', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // script rawScript: '', // Custom parameter localParams: [], // resource(list) resourceList: [], // Cache ResourceList cacheResourceList: [], // define options options: [], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [], item: '', scriptBoxDialog: false } }, mixins: [disabledState], props: { backfillItem: Object }, methods: { /** * return localParams */ _onLocalParams (a) { this.localParams = a }, setEditorVal () { this.item = editor.getValue() this.scriptBoxDialog = true }, getSriptBoxValue (val) { editor.setValue(val) // this.scriptBoxDialog = false }, closeAble () { // this.scriptBoxDialog = false }, /** * return resourceList * */ _onResourcesData (a) { this.resourceList = a }, /** * cache resourceList */ _onCacheResourcesData (a) { this.cacheResourceList = a }, /** * verification */ _verification () { // rawScript verification if (!editor.getValue()) { this.$message.warning(`${i18n.$t('Please enter script(required)')}`) return false } // localParams Subcomponent 
verification if (!this.$refs.refLocalParams._verifProp()) { return false } // noRes if (this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // Process resourcelist let dataProcessing = _.map(this.resourceList, v => { return { id: v } }) // storage this.$emit('on-params', { resourceList: dataProcessing, localParams: this.localParams, rawScript: editor.getValue() }) return true }, /** * Processing code highlighting */ _handlerEditor () { // editor editor = codemirror('code-shell-mirror', { mode: 'shell', readOnly: this.isDetails }) this.keypress = () => { if (!editor.getOption('readOnly')) { editor.showHint({ completeSingle: false }) } } // Monitor keyboard editor.on('keypress', this.keypress) editor.setValue(this.rawScript) return editor }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === '' || item.children === undefined || item.children === null || item.children.length === 0 ? this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.options.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.options = this.options.concat(noResources) } } } }, watch: { // Watch the cacheParams cacheParams (val) { this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result } }, computed: { resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.options.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { resourceList: this.resourceIdArr, localParams: this.localParams } } }, created () { let item = this.store.state.dag.resourcesListS this.diGuiTree(item) this.options = item let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.rawScript = 
o.params.rawScript || '' // backfill resourceList let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v => { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) this.cacheResourceList = resourceList } // backfill localParams let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { setTimeout(() => { this._handlerEditor() }, 200) }, destroyed () { if (editor) { editor.toTextArea() // Uninstall editor.off($('.code-shell-mirror'), 'keypress', this.keypress) } }, components: { mLocalParams, mListBox, mScriptBox, Treeselect } } </script>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,346
[Improvement][UI] Resource files support copy path operations
Resource files currently do not support copying their paths, so it is easy to make mistakes when entering a path by hand. Add a copy button to support the copy operation. ![image](https://user-images.githubusercontent.com/39816903/115647469-6ca1f380-a356-11eb-9341-30bb2d8257a0.png)
https://github.com/apache/dolphinscheduler/issues/5346
https://github.com/apache/dolphinscheduler/pull/5348
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
"2021-04-22T02:35:20Z"
java
"2021-04-29T08:29:29Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ <template> <div class="spark-model"> <m-list-box> <div slot="text">{{$t('Program Type')}}</div> <div slot="content"> <el-select style="width: 130px;" size="small" v-model="programType" :disabled="isDetails"> <el-option v-for="city in programTypeList" :key="city.code" :value="city.code" :label="city.code"> </el-option> </el-select> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Spark Version')}}</div> <div slot="content"> <el-select style="width: 130px;" size="small" v-model="sparkVersion" :disabled="isDetails"> <el-option v-for="city in sparkVersionList" :key="city.code" :value="city.code" :label="city.code"> </el-option> </el-select> </div> </m-list-box> <m-list-box v-if="programType !== 'PYTHON'"> <div slot="text">{{$t('Main Class')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="mainClass" :placeholder="$t('Please enter main class')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Main Jar Package')}}</div> <div slot="content"> <treeselect v-model="mainJar" maxHeight="200" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :disabled="isDetails" :placeholder="$t('Please enter main jar package')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Deploy Mode')}}</div> <div slot="content"> <el-radio-group v-model="deployMode" size="small"> <el-radio :label="'cluster'" :disabled="isDetails"></el-radio> <el-radio :label="'client'" :disabled="isDetails"></el-radio> <el-radio :label="'local'" :disabled="isDetails"></el-radio> </el-radio-group> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('App Name')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="appName" :placeholder="$t('Please enter app name(optional)')"> </el-input> </div> </m-list-box> <m-list-4-box> <div slot="text">{{$t('Driver Cores')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="driverCores" :placeholder="$t('Please enter Driver cores')"> </el-input> </div> <div slot="text-2">{{$t('Driver Memory')}}</div> <div slot="content-2"> <el-input :disabled="isDetails" type="input" size="small" v-model="driverMemory" :placeholder="$t('Please enter Driver memory')"> </el-input> </div> </m-list-4-box> <m-list-4-box> <div slot="text">{{$t('Executor Number')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="numExecutors" :placeholder="$t('Please enter Executor number')"> </el-input> </div> <div slot="text-2">{{$t('Executor Memory')}}</div> <div slot="content-2"> <el-input :disabled="isDetails" type="input" size="small" 
v-model="executorMemory" :placeholder="$t('Please enter Executor memory')"> </el-input> </div> </m-list-4-box> <m-list-4-box> <div slot="text">{{$t('Executor Cores')}}</div> <div slot="content"> <el-input :disabled="isDetails" type="input" size="small" v-model="executorCores" :placeholder="$t('Please enter Executor cores')"> </el-input> </div> </m-list-4-box> <m-list-box> <div slot="text">{{$t('Main Arguments')}}</div> <div slot="content"> <el-input :autosize="{minRows:2}" :disabled="isDetails" type="textarea" size="small" v-model="mainArgs" :placeholder="$t('Please enter main arguments')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Option Parameters')}}</div> <div slot="content"> <el-input :disabled="isDetails" :autosize="{minRows:2}" type="textarea" size="small" v-model="others" :placeholder="$t('Please enter option parameters')"> </el-input> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Resources')}}</div> <div slot="content"> <treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="mainJarList" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please select resources')"> <div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div> </treeselect> </div> </m-list-box> <m-list-box> <div slot="text">{{$t('Custom Parameters')}}</div> <div slot="content"> <m-local-params ref="refLocalParams" @on-local-params="_onLocalParams" :udp-list="localParams" :hide="false"> </m-local-params> </div> </m-list-box> </div> </template> <script> import _ from 'lodash' import i18n from '@/module/i18n' import mLocalParams from './_source/localParams' import mListBox from './_source/listBox' import mList4Box from './_source/list4Box' import Treeselect from '@riophae/vue-treeselect' import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState from '@/module/mixin/disabledState' export default { name: 'spark', data () { return { valueConsistsOf: 'LEAF_PRIORITY', // Main function class mainClass: '', // Master jar package mainJar: null, // Master jar package(List) mainJarLists: [], mainJarList: [], // Deployment method deployMode: 'cluster', // Resource(list) resourceList: [], // Cache ResourceList cacheResourceList: [], // Custom function localParams: [], // Driver cores driverCores: 1, // Driver memory driverMemory: '512M', // Executor number numExecutors: 2, // Executor memory executorMemory: '2G', // Executor cores executorCores: 2, // Spark app name appName: '', // Main arguments mainArgs: '', // Option parameters others: '', // Program type programType: 'SCALA', // Program type(List) programTypeList: [{ code: 'JAVA' }, { code: 'SCALA' }, { code: 'PYTHON' }], // Spark version sparkVersion: 'SPARK2', // Spark version(LIst) sparkVersionList: [{ code: 'SPARK2' }, { code: 'SPARK1' }], normalizer (node) { return { label: node.name } }, allNoResources: [], noRes: [] } }, props: { backfillItem: Object }, mixins: [disabledState], methods: { /** * getResourceId */ marjarId (name) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + name }).then(res => { this.mainJar = res.id }).catch(e => { this.$message.error(e.msg || '') }) }, /** * return localParams */ _onLocalParams (a) { this.localParams = a }, /** * return resourceList */ _onResourcesData (a) { this.resourceList = a }, /** * cache resourceList */ _onCacheResourcesData (a) { this.cacheResourceList = a }, diGuiTree (item) { // Recursive convenience tree structure item.forEach(item => { item.children === 
'' || item.children === undefined || item.children === null || item.children.length === 0 ? this.operationTree(item) : this.diGuiTree(item.children) }) }, operationTree (item) { if (item.dirctory) { item.isDisabled = true } delete item.children }, searchTree (element, id) { // 根据id查找节点 if (element.id === id) { return element } else if (element.children !== null) { let i let result = null for (i = 0; result === null && i < element.children.length; i++) { result = this.searchTree(element.children[i], id) } return result } return null }, dataProcess (backResource) { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return item.id }) Array.prototype.diff = function (a) { return this.filter(function (i) { return a.indexOf(i) < 0 }) } let diffSet = this.resourceList.diff(resourceIdArr) let optionsCmp = [] if (diffSet.length > 0) { diffSet.forEach(item => { backResource.forEach(item1 => { if (item === item1.id || item === item1.res) { optionsCmp.push(item1) } }) }) } let noResources = [{ id: -1, name: $t('Unauthorized or deleted resources'), fullName: '/' + $t('Unauthorized or deleted resources'), children: [] }] if (optionsCmp.length > 0) { this.allNoResources = optionsCmp optionsCmp = optionsCmp.map(item => { return { id: item.id, name: item.name, fullName: item.res } }) optionsCmp.forEach(item => { item.isNew = true }) noResources[0].children = optionsCmp this.mainJarList = this.mainJarList.concat(noResources) } } }, /** * verification */ _verification () { if (this.programType !== 'PYTHON' && !this.mainClass) { this.$message.warning(`${i18n.$t('Please enter main class')}`) return false } if (!this.mainJar) { this.$message.warning(`${i18n.$t('Please enter main jar package')}`) return false } if (!this.driverCores) { this.$message.warning(`${i18n.$t('Please enter Driver cores')}`) return false } if (!Number.isInteger(parseInt(this.driverCores))) { this.$message.warning(`${i18n.$t('Core number should be positive integer')}`) return false } if (!this.driverMemory) { this.$message.warning(`${i18n.$t('Please enter Driver memory')}`) return false } if (!Number.isInteger(parseInt(this.driverMemory))) { this.$message.warning(`${i18n.$t('Memory should be a positive integer')}`) return false } if (!this.executorCores) { this.$message.warning(`${i18n.$t('Please enter Executor cores')}`) return false } if (!Number.isInteger(parseInt(this.executorCores))) { this.$message.warning(`${i18n.$t('Core number should be positive integer')}`) return false } if (!this.executorMemory) { this.$message.warning(`${i18n.$t('Please enter Executor memory')}`) return false } if (!Number.isInteger(parseInt(this.executorMemory))) { this.$message.warning(`${i18n.$t('Memory should be a positive integer')}`) return false } if (!this.numExecutors) { this.$message.warning(`${i18n.$t('Please enter Executor number')}`) return false } if (!Number.isInteger(parseInt(this.numExecutors))) { this.$message.warning(`${i18n.$t('The Executor number should be a positive integer')}`) return false } // noRes if (this.noRes.length > 0) { this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } // Process resourcelist let dataProcessing = _.map(this.resourceList, v => { return { id: v } }) // 
storage this.$emit('on-params', { mainClass: this.mainClass, mainJar: { id: this.mainJar }, deployMode: this.deployMode, resourceList: dataProcessing, localParams: this.localParams, driverCores: this.driverCores, driverMemory: this.driverMemory, numExecutors: this.numExecutors, executorMemory: this.executorMemory, executorCores: this.executorCores, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType, sparkVersion: this.sparkVersion }) return true } }, watch: { // Listening type programType (type) { if (type === 'PYTHON') { this.mainClass = '' } }, // Watch the cacheParams cacheParams (val) { this.$emit('on-cache-params', val) }, resourceIdArr (arr) { let result = [] arr.forEach(item => { this.allNoResources.forEach(item1 => { if (item.id === item1.id) { // resultBool = true result.push(item1) } }) }) this.noRes = result } }, computed: { resourceIdArr () { let isResourceId = [] let resourceIdArr = [] if (this.resourceList.length > 0) { this.resourceList.forEach(v => { this.mainJarList.forEach(v1 => { if (this.searchTree(v1, v)) { isResourceId.push(this.searchTree(v1, v)) } }) }) resourceIdArr = isResourceId.map(item => { return { id: item.id, name: item.name, res: item.fullName } }) } return resourceIdArr }, cacheParams () { return { mainClass: this.mainClass, mainJar: { id: this.mainJar }, deployMode: this.deployMode, resourceList: this.resourceIdArr, localParams: this.localParams, driverCores: this.driverCores, driverMemory: this.driverMemory, numExecutors: this.numExecutors, executorMemory: this.executorMemory, executorCores: this.executorCores, appName: this.appName, mainArgs: this.mainArgs, others: this.others, programType: this.programType, sparkVersion: this.sparkVersion } } }, created () { let item = this.store.state.dag.resourcesListS let items = this.store.state.dag.resourcesListJar this.diGuiTree(item) this.diGuiTree(items) this.mainJarList = item this.mainJarLists = items let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.mainClass = o.params.mainClass || '' if (o.params.mainJar.res) { this.marjarId(o.params.mainJar.res) } else if (o.params.mainJar.res === '') { this.mainJar = '' } else { this.mainJar = o.params.mainJar.id || '' } this.deployMode = o.params.deployMode || '' this.driverCores = o.params.driverCores || 1 this.driverMemory = o.params.driverMemory || '512M' this.numExecutors = o.params.numExecutors || 2 this.executorMemory = o.params.executorMemory || '2G' this.executorCores = o.params.executorCores || 2 this.appName = o.params.appName || '' this.mainArgs = o.params.mainArgs || '' this.others = o.params.others this.programType = o.params.programType || 'SCALA' this.sparkVersion = o.params.sparkVersion || 'SPARK2' // backfill resourceList let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { _.map(resourceList, v => { if (!v.id) { this.store.dispatch('dag/getResourceId', { type: 'FILE', fullName: '/' + v.res }).then(res => { this.resourceList.push(res.id) this.dataProcess(backResource) }).catch(e => { this.resourceList.push(v.res) this.dataProcess(backResource) }) } else { this.resourceList.push(v.id) this.dataProcess(backResource) } }) this.cacheResourceList = resourceList } // backfill localParams let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } }, mounted () { }, components: { mLocalParams, mListBox, mList4Box, Treeselect } } </script>
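The five task forms in the records above (flink, mr, python, shell, spark) duplicate the same resource-tree helpers (diGuiTree/operationTree/searchTree). One possible refactor, sketched here purely as an illustration and not part of the actual fix, is to share them via a Vue mixin; the method names below are invented, only the behaviour mirrors the duplicated code:

```
// Illustrative mixin extracting the duplicated tree helpers.
export default {
  methods: {
    // Walk the tree: recurse into non-empty children, otherwise mark
    // directories disabled and strip the empty children key
    // (same behaviour as diGuiTree/operationTree).
    normalizeResourceTree (nodes) {
      nodes.forEach(node => {
        if (Array.isArray(node.children) && node.children.length > 0) {
          this.normalizeResourceTree(node.children)
        } else {
          if (node.dirctory) node.isDisabled = true // (property name as in the store)
          delete node.children
        }
      })
    },
    // Depth-first lookup of a node by id; returns null when absent
    // (same behaviour as searchTree).
    findResourceById (node, id) {
      if (node.id === id) return node
      for (const child of node.children || []) {
        const hit = this.findResourceById(child, id)
        if (hit) return hit
      }
      return null
    }
  }
}
```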
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,346
[Improvement][UI] Resource files support copy path operations
The current resource file does not support copying its path, so it is very easy to make mistakes when the path has to be typed out by hand. Add a copy button to support the copy operation. ![image](https://user-images.githubusercontent.com/39816903/115647469-6ca1f380-a356-11eb-9341-30bb2d8257a0.png)
https://github.com/apache/dolphinscheduler/issues/5346
https://github.com/apache/dolphinscheduler/pull/5348
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
"2021-04-22T02:35:20Z"
java
"2021-04-29T08:29:29Z"
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ export default { 'User Name': 'User Name', 'Please enter user name': 'Please enter user name', Password: 'Password', 'Please enter your password': 'Please enter your password', 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22', Login: 'Login', Home: 'Home', 'Failed to create node to save': 'Failed to create node to save', 'Global parameters': 'Global parameters', 'Local parameters': 'Local parameters', 'Copy success': 'Copy success', 'The browser does not support automatic copying': 'The browser does not support automatic copying', 'Whether to save the DAG graph': 'Whether to save the DAG graph', 'Current node settings': 'Current node settings', 'View history': 'View history', 'View log': 'View log', 'Force success': 'Force success', 'Enter this child node': 'Enter this child node', 'Node name': 'Node name', 'Please enter name (required)': 'Please enter name (required)', 'Run flag': 'Run flag', Normal: 'Normal', 'Prohibition execution': 'Prohibition execution', 'Please enter description': 'Please enter description', 'Number of failed retries': 'Number of failed retries', Times: 'Times', 'Failed retry interval': 'Failed retry interval', Minute: 'Minute', 'Delay execution time': 'Delay execution time', 'Delay execution': 'Delay execution', 'Forced success': 'Forced success', Cancel: 'Cancel', 'Confirm add': 'Confirm add', 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process', 'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process', 'Name already exists': 'Name already exists', 'Download Log': 'Download Log', 'Refresh Log': 'Refresh Log', 'Enter full screen': 'Enter full screen', 'Cancel full screen': 'Cancel full screen', Close: 'Close', 'Update log success': 'Update log success', 'No more logs': 'No more logs', 'No log': 'No log', 'Loading Log...': 'Loading Log...', 'Set the DAG diagram name': 'Set the DAG diagram name', 'Please enter description(optional)': 'Please enter description(optional)', 'Set global': 'Set global', 'Whether to go online the process definition': 'Whether to go online the process definition', 'Whether to update the process definition': 'Whether to update the process definition', Add: 'Add', 'DAG graph name cannot be empty': 'DAG graph name cannot be empty', 'Create Datasource': 'Create Datasource', 'Project Home': 'Project Home', 'Project Manage': 'Project', 'Create Project': 'Create Project', 'Cron Manage': 'Cron 
Manage', 'Copy Workflow': 'Copy Workflow', 'Tenant Manage': 'Tenant Manage', 'Create Tenant': 'Create Tenant', 'User Manage': 'User Manage', 'Create User': 'Create User', 'User Information': 'User Information', 'Edit Password': 'Edit Password', Success: 'Success', Failed: 'Failed', Delete: 'Delete', 'Please choose': 'Please choose', 'Please enter a positive integer': 'Please enter a positive integer', 'Program Type': 'Program Type', 'Main Class': 'Main Class', 'Main Jar Package': 'Main Jar Package', 'Please enter main jar package': 'Please enter main jar package', 'Please enter main class': 'Please enter main class', 'Main Arguments': 'Main Arguments', 'Please enter main arguments': 'Please enter main arguments', 'Option Parameters': 'Option Parameters', 'Please enter option parameters': 'Please enter option parameters', Resources: 'Resources', 'Custom Parameters': 'Custom Parameters', 'Custom template': 'Custom template', Datasource: 'Datasource', methods: 'methods', 'Please enter method(optional)': 'Please enter method(optional)', Script: 'Script', 'Please enter script(required)': 'Please enter script(required)', 'Deploy Mode': 'Deploy Mode', 'Driver Cores': 'Driver Cores', 'Please enter Driver cores': 'Please enter Driver cores', 'Driver Memory': 'Driver Memory', 'Please enter Driver memory': 'Please enter Driver memory', 'Executor Number': 'Executor Number', 'Please enter Executor number': 'Please enter Executor number', 'The Executor number should be a positive integer': 'The Executor number should be a positive integer', 'Executor Memory': 'Executor Memory', 'Please enter Executor memory': 'Please enter Executor memory', 'Executor Cores': 'Executor Cores', 'Please enter Executor cores': 'Please enter Executor cores', 'Memory should be a positive integer': 'Memory should be a positive integer', 'Core number should be positive integer': 'Core number should be positive integer', 'Flink Version': 'Flink Version', 'JobManager Memory': 'JobManager Memory', 'Please enter JobManager memory': 'Please enter JobManager memory', 'TaskManager Memory': 'TaskManager Memory', 'Please enter TaskManager memory': 'Please enter TaskManager memory', 'Slot Number': 'Slot Number', 'Please enter Slot number': 'Please enter Slot number', Parallelism: 'Parallelism', 'Please enter Parallelism': 'Please enter Parallelism', 'TaskManager Number': 'TaskManager Number', 'Please enter TaskManager number': 'Please enter TaskManager number', 'App Name': 'App Name', 'Please enter app name(optional)': 'Please enter app name(optional)', 'SQL Type': 'SQL Type', 'Send Email': 'Send Email', 'Log display': 'Log display', 'rows of result': 'rows of result', Title: 'Title', 'Please enter the title of email': 'Please enter the title of email', Table: 'Table', TableMode: 'Table', Attachment: 'Attachment', 'SQL Parameter': 'SQL Parameter', 'SQL Statement': 'SQL Statement', 'UDF Function': 'UDF Function', 'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)', 'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)', 'One form or attachment must be selected': 'One form or attachment must be selected', 'Mail subject required': 'Mail subject required', 'Child Node': 'Child Node', 'Please select a sub-Process': 'Please select a sub-Process', Edit: 'Edit', 'Switch To This Version': 'Switch To This Version', 'Datasource Name': 'Datasource Name', 'Please enter datasource name': 'Please enter datasource name', IP: 'IP', 'Please enter IP': 'Please enter IP', Port: 'Port', 'Please 
enter port': 'Please enter port', 'Database Name': 'Database Name', 'Please enter database name': 'Please enter database name', 'Oracle Connect Type': 'ServiceName or SID', 'Oracle Service Name': 'ServiceName', 'Oracle SID': 'SID', 'jdbc connect parameters': 'jdbc connect parameters', 'Test Connect': 'Test Connect', 'Please enter resource name': 'Please enter resource name', 'Please enter resource folder name': 'Please enter resource folder name', 'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement', 'Please enter IP/hostname': 'Please enter IP/hostname', 'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format', '#': '#', 'Datasource Type': 'Datasource Type', 'Datasource Parameter': 'Datasource Parameter', 'Create Time': 'Create Time', 'Update Time': 'Update Time', Operation: 'Operation', 'Current Version': 'Current Version', 'Click to view': 'Click to view', 'Delete?': 'Delete?', 'Switch Version Successfully': 'Switch Version Successfully', 'Confirm Switch To This Version?': 'Confirm Switch To This Version?', Confirm: 'Confirm', 'Task status statistics': 'Task Status Statistics', Number: 'Number', State: 'State', 'Process Status Statistics': 'Process Status Statistics', 'Process Definition Statistics': 'Process Definition Statistics', 'Project Name': 'Project Name', 'Please enter name': 'Please enter name', 'Owned Users': 'Owned Users', 'Process Pid': 'Process Pid', 'Zk registration directory': 'Zk registration directory', cpuUsage: 'cpuUsage', memoryUsage: 'memoryUsage', 'Last heartbeat time': 'Last heartbeat time', 'Edit Tenant': 'Edit Tenant', 'OS Tenant Code': 'OS Tenant Code', 'Tenant Name': 'Tenant Name', Queue: 'Yarn Queue', 'Please select a queue': 'default is tenant association queue', 'Please enter the os tenant code in English': 'Please enter the os tenant code in English', 'Please enter os tenant code in English': 'Please enter os tenant code in English', 'Please enter os tenant code': 'Please enter os tenant code', 'Please enter tenant Name': 'Please enter tenant Name', 'The os tenant code. Only letters or a combination of letters and numbers are allowed': 'The os tenant code. 
Only letters or a combination of letters and numbers are allowed', 'Edit User': 'Edit User', Tenant: 'Tenant', Email: 'Email', Phone: 'Phone', 'User Type': 'User Type', 'Please enter phone number': 'Please enter phone number', 'Please enter email': 'Please enter email', 'Please enter the correct email format': 'Please enter the correct email format', 'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format', Project: 'Project', Authorize: 'Authorize', 'File resources': 'File resources', 'UDF resources': 'UDF resources', 'UDF resources directory': 'UDF resources directory', 'Please select UDF resources directory': 'Please select UDF resources directory', 'Alarm group': 'Alarm group', 'Alarm group required': 'Alarm group required', 'Edit alarm group': 'Edit alarm group', 'Create alarm group': 'Create alarm group', 'Create Alarm Instance': 'Create Alarm Instance', 'Edit Alarm Instance': 'Edit Alarm Instance', 'Group Name': 'Group Name', 'Alarm instance name': 'Alarm instance name', 'Alarm plugin name': 'Alarm plugin name', 'Select plugin': 'Select plugin', 'Please enter group name': 'Please enter group name', 'Instance parameter exception': 'Instance parameter exception', 'Group Type': 'Group Type', 'Alarm plugin instance': 'Alarm plugin instance', Remarks: 'Remarks', SMS: 'SMS', 'Managing Users': 'Managing Users', Permission: 'Permission', Administrator: 'Administrator', 'Confirm Password': 'Confirm Password', 'Please enter confirm password': 'Please enter confirm password', 'Password cannot be in Chinese': 'Password cannot be in Chinese', 'Please enter a password (6-22) character password': 'Please enter a 6-22 character password', 'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese', 'Please enter a confirmation password (6-22) character password': 'Please enter a 6-22 character confirmation password', 'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password', 'Please select the datasource': 'Please select the datasource', 'Please select resources': 'Please select resources', Query: 'Query', 'Non Query': 'Non Query', 'prop(required)': 'prop(required)', 'value(optional)': 'value(optional)', 'value(required)': 'value(required)', 'prop is empty': 'prop is empty', 'value is empty': 'value is empty', 'prop is repeat': 'prop is repeat', 'Start Time': 'Start Time', 'End Time': 'End Time', crontab: 'crontab', 'Failure Strategy': 'Failure Strategy', online: 'online', offline: 'offline', 'Task Status': 'Task Status', 'Process Instance': 'Process Instance', 'Task Instance': 'Task Instance', 'Select date range': 'Select date range', startDate: 'startDate', endDate: 'endDate', Date: 'Date', Waiting: 'Waiting', Execution: 'Execution', Finish: 'Finish', 'Create File': 'Create File', 'Create folder': 'Create folder', 'File Name': 'File Name', 'Folder Name': 'Folder Name', 'File Format': 'File Format', 'Folder Format': 'Folder Format', 'File Content': 'File Content', 'Upload File Size': 'Upload File size cannot exceed 1G', Create: 'Create', 'Please enter the resource content': 'Please enter the resource content', 'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines', 'File Details': 'File Details', 'Download Details': 'Download Details', Return: 'Return', Save: 'Save', 'File Manage': 'File Manage', 'Upload Files': 'Upload Files', 'Create UDF Function': 'Create UDF Function', 'Upload UDF Resources': 'Upload UDF 
Resources', 'Service-Master': 'Service-Master', 'Service-Worker': 'Service-Worker', 'Process Name': 'Process Name', Executor: 'Executor', 'Run Type': 'Run Type', 'Scheduling Time': 'Scheduling Time', 'Run Times': 'Run Times', host: 'host', 'fault-tolerant sign': 'fault-tolerant sign', Rerun: 'Rerun', 'Recovery Failed': 'Recovery Failed', Stop: 'Stop', Pause: 'Pause', 'Recovery Suspend': 'Recovery Suspend', Gantt: 'Gantt', 'Node Type': 'Node Type', 'Submit Time': 'Submit Time', Duration: 'Duration', 'Retry Count': 'Retry Count', 'Task Name': 'Task Name', 'Task Date': 'Task Date', 'Source Table': 'Source Table', 'Record Number': 'Record Number', 'Target Table': 'Target Table', 'Online viewing type is not supported': 'Online viewing type is not supported', Size: 'Size', Rename: 'Rename', Download: 'Download', Export: 'Export', 'Version Info': 'Version Info', Submit: 'Submit', 'Edit UDF Function': 'Edit UDF Function', type: 'type', 'UDF Function Name': 'UDF Function Name', FILE: 'FILE', UDF: 'UDF', 'File Subdirectory': 'File Subdirectory', 'Please enter a function name': 'Please enter a function name', 'Package Name': 'Package Name', 'Please enter a Package name': 'Please enter a Package name', Parameter: 'Parameter', 'Please enter a parameter': 'Please enter a parameter', 'UDF Resources': 'UDF Resources', 'Upload Resources': 'Upload Resources', Instructions: 'Instructions', 'Please enter a instructions': 'Please enter the instructions', 'Please enter a UDF function name': 'Please enter a UDF function name', 'Select UDF Resources': 'Select UDF Resources', 'Class Name': 'Class Name', 'Jar Package': 'Jar Package', 'Library Name': 'Library Name', 'UDF Resource Name': 'UDF Resource Name', 'File Size': 'File Size', Description: 'Description', 'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items', 'Select Line Connection': 'Select Line Connection', 'Delete selected lines or nodes': 'Delete selected lines or nodes', 'Full Screen': 'Full Screen', Unpublished: 'Unpublished', 'Start Process': 'Start Process', 'Execute from the current node': 'Execute from the current node', 'Recover tolerance fault process': 'Recover tolerance fault process', 'Resume the suspension process': 'Resume the suspension process', 'Execute from the failed nodes': 'Execute from the failed nodes', 'Complement Data': 'Complement Data', 'Scheduling execution': 'Scheduling execution', 'Recovery waiting thread': 'Recovery waiting thread', 'Submitted successfully': 'Submitted successfully', Executing: 'Executing', 'Ready to pause': 'Ready to pause', 'Ready to stop': 'Ready to stop', 'Need fault tolerance': 'Need fault tolerance', Kill: 'Kill', 'Waiting for thread': 'Waiting for thread', 'Waiting for dependence': 'Waiting for dependence', Start: 'Start', Copy: 'Copy', 'Copy name': 'Copy name', 'Please enter keyword': 'Please enter keyword', 'File Upload': 'File Upload', 'Drag the file into the current upload window': 'Drag the file into the current upload window', 'Drag area upload': 'Drag area upload', Upload: 'Upload', 'ReUpload File': 'ReUpload File', 'Please enter file name': 'Please enter file name', 'Please select the file to upload': 'Please select the file to upload', 'Resources manage': 'Resources', Security: 'Security', Logout: 'Logout', 'No data': 'No data', 'Uploading...': 'Uploading...', 'Loading...': 'Loading...', List: 'List', 'Unable to download without proper url': 'Unable to download without proper url', Process: 'Process', 'Process definition': 'Process definition', 'Task record': 'Task record', 'Warning 
group manage': 'Warning group manage', 'Warning instance manage': 'Warning instance manage', 'Servers manage': 'Servers manage', 'UDF manage': 'UDF manage', 'Resource manage': 'Resource manage', 'Function manage': 'Function manage', 'Edit password': 'Edit password', 'Ordinary users': 'Ordinary users', 'Create process': 'Create process', 'Import process': 'Import process', 'Timing state': 'Timing state', Timing: 'Timing', TreeView: 'TreeView', 'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and copyers cannot repeat', 'Mailbox input is illegal': 'Mailbox input is illegal', 'Please set the parameters before starting': 'Please set the parameters before starting', Continue: 'Continue', End: 'End', 'Node execution': 'Node execution', 'Backward execution': 'Backward execution', 'Forward execution': 'Forward execution', 'Execute only the current node': 'Execute only the current node', 'Notification strategy': 'Notification strategy', 'Notification group': 'Notification group', 'Please select a notification group': 'Please select a notification group', receivers: 'receivers', receiverCcs: 'receiverCcs', 'Whether it is a complement process?': 'Whether it is a complement process?', 'Schedule date': 'Schedule date', 'Mode of execution': 'Mode of execution', 'Serial execution': 'Serial execution', 'Parallel execution': 'Parallel execution', 'Set parameters before timing': 'Set parameters before timing', 'Start and stop time': 'Start and stop time', 'Please select time': 'Please select time', 'Please enter crontab': 'Please enter crontab', none_1: 'none', success_1: 'success', failure_1: 'failure', All_1: 'All', Toolbar: 'Toolbar', 'View variables': 'View variables', 'Format DAG': 'Format DAG', 'Refresh DAG status': 'Refresh DAG status', Return_1: 'Return', 'Please enter format': 'Please enter format', 'connection parameter': 'connection parameter', 'Process definition details': 'Process definition details', 'Create process definition': 'Create process definition', 'Scheduled task list': 'Scheduled task list', 'Process instance details': 'Process instance details', 'Create Resource': 'Create Resource', 'User Center': 'User Center', 'Please enter method': 'Please enter method', None: 'None', Name: 'Name', 'Process priority': 'Process priority', 'Task priority': 'Task priority', 'Task timeout alarm': 'Task timeout alarm', 'Timeout strategy': 'Timeout strategy', 'Timeout alarm': 'Timeout alarm', 'Timeout failure': 'Timeout failure', 'Timeout period': 'Timeout period', 'Waiting Dependent complete': 'Waiting Dependent complete', 'Waiting Dependent start': 'Waiting Dependent start', 'Check interval': 'Check interval', 'Timeout must be longer than check interval': 'Timeout must be longer than check interval', 'Timeout strategy must be selected': 'Timeout strategy must be selected', 'Timeout must be a positive integer': 'Timeout must be a positive integer', 'Add dependency': 'Add dependency', and: 'and', or: 'or', month: 'month', week: 'week', day: 'day', hour: 'hour', Running: 'Running', 'Waiting for dependency to complete': 'Waiting for dependency to complete', Selected: 'Selected', CurrentHour: 'CurrentHour', Last1Hour: 'Last1Hour', Last2Hours: 'Last2Hours', Last3Hours: 'Last3Hours', Last24Hours: 'Last24Hours', today: 'today', Last1Days: 'Last1Days', Last2Days: 'Last2Days', Last3Days: 'Last3Days', Last7Days: 'Last7Days', ThisWeek: 'ThisWeek', LastWeek: 'LastWeek', LastMonday: 'LastMonday', LastTuesday: 'LastTuesday', LastWednesday: 'LastWednesday', 
LastThursday: 'LastThursday', LastFriday: 'LastFriday', LastSaturday: 'LastSaturday', LastSunday: 'LastSunday', ThisMonth: 'ThisMonth', LastMonth: 'LastMonth', LastMonthBegin: 'LastMonthBegin', LastMonthEnd: 'LastMonthEnd', 'Refresh status succeeded': 'Refresh status succeeded', 'Queue manage': 'Yarn Queue manage', 'Create queue': 'Create queue', 'Edit queue': 'Edit queue', 'Datasource manage': 'Datasource', 'History task record': 'History task record', 'Please go online': 'Please go online', 'Queue value': 'Queue value', 'Please enter queue value': 'Please enter queue value', 'Worker group manage': 'Worker group manage', 'Create worker group': 'Create worker group', 'Edit worker group': 'Edit worker group', 'Token manage': 'Token manage', 'Create token': 'Create token', 'Edit token': 'Edit token', Addresses: 'Addresses', 'Worker Addresses': 'Worker Addresses', 'Please select the worker addresses': 'Please select the worker addresses', 'Failure time': 'Failure time', 'Expiration time': 'Expiration time', User: 'User', 'Please enter token': 'Please enter token', 'Generate token': 'Generate token', Monitor: 'Monitor', Group: 'Group', 'Queue statistics': 'Queue statistics', 'Command status statistics': 'Command status statistics', 'Task kill': 'Task Kill', 'Task queue': 'Task queue', 'Error command count': 'Error command count', 'Normal command count': 'Normal command count', Manage: ' Manage', 'Number of connections': 'Number of connections', Sent: 'Sent', Received: 'Received', 'Min latency': 'Min latency', 'Avg latency': 'Avg latency', 'Max latency': 'Max latency', 'Node count': 'Node count', 'Query time': 'Query time', 'Node self-test status': 'Node self-test status', 'Health status': 'Health status', 'Max connections': 'Max connections', 'Threads connections': 'Threads connections', 'Max used connections': 'Max used connections', 'Threads running connections': 'Threads running connections', 'Worker group': 'Worker group', 'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0', 'Pre Statement': 'Pre Statement', 'Post Statement': 'Post Statement', 'Statement cannot be empty': 'Statement cannot be empty', 'Process Define Count': 'Work flow Define Count', 'Process Instance Running Count': 'Process Instance Running Count', 'command number of waiting for running': 'command number of waiting for running', 'failure command number': 'failure command number', 'tasks number of waiting running': 'tasks number of waiting running', 'task number of ready to kill': 'task number of ready to kill', 'Statistics manage': 'Statistics Manage', statistics: 'Statistics', 'select tenant': 'select tenant', 'Please enter Principal': 'Please enter Principal', 'Please enter the kerberos authentication parameter java.security.krb5.conf': 'Please enter the kerberos authentication parameter java.security.krb5.conf', 'Please enter the kerberos authentication parameter login.user.keytab.username': 'Please enter the kerberos authentication parameter login.user.keytab.username', 'Please enter the kerberos authentication parameter login.user.keytab.path': 'Please enter the kerberos authentication parameter login.user.keytab.path', 'The start time must not be the same as the end': 'The start time must not be the same as the end', 'Startup parameter': 'Startup parameter', 'Startup type': 'Startup type', 'warning of timeout': 'warning of timeout', 'Next five execution times': 'Next five execution times', 'Execute time': 'Execute time', 'Complement range': 'Complement range', 'Http 
Url': 'Http Url', 'Http Method': 'Http Method', 'Http Parameters': 'Http Parameters', 'Http Parameters Key': 'Http Parameters Key', 'Http Parameters Position': 'Http Parameters Position', 'Http Parameters Value': 'Http Parameters Value', 'Http Check Condition': 'Http Check Condition', 'Http Condition': 'Http Condition', 'Please Enter Http Url': 'Please Enter Http Url(required)', 'Please Enter Http Condition': 'Please Enter Http Condition', 'There is no data for this period of time': 'There is no data for this period of time', 'Worker addresses cannot be empty': 'Worker addresses cannot be empty', 'Please generate token': 'Please generate token', 'Spark Version': 'Spark Version', TargetDataBase: 'target database', TargetTable: 'target table', 'Please enter the table of target': 'Please enter the table of target', 'Please enter a Target Table(required)': 'Please enter a Target Table(required)', SpeedByte: 'speed(byte count)', SpeedRecord: 'speed(record count)', '0 means unlimited by byte': '0 means unlimited', '0 means unlimited by count': '0 means unlimited', 'Modify User': 'Modify User', 'Whether directory': 'Whether directory', Yes: 'Yes', No: 'No', 'Hadoop Custom Params': 'Hadoop Params', 'Sqoop Advanced Parameters': 'Sqoop Params', 'Sqoop Job Name': 'Job Name', 'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)', 'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)', 'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)', 'Please enter Target Dir(required)': 'Please enter Target Dir(required)', 'Please enter Export Dir(required)': 'Please enter Export Dir(required)', 'Please enter Hive Database(required)': 'Please enter Hive Database(required)', 'Please enter Hive Table(required)': 'Please enter Hive Table(required)', 'Please enter Hive Partition Keys': 'Please enter Hive Partition Keys', 'Please enter Hive Partition Values': 'Please enter Hive Partition Values', 'Please enter Replace Delimiter': 'Please enter Replace Delimiter', 'Please enter Fields Terminated': 'Please enter Fields Terminated', 'Please enter Lines Terminated': 'Please enter Lines Terminated', 'Please enter Concurrency': 'Please enter Concurrency', 'Please enter Update Key': 'Please enter Update Key', 'Please enter Job Name(required)': 'Please enter Job Name(required)', 'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)', Direct: 'Direct', Type: 'Type', ModelType: 'ModelType', ColumnType: 'ColumnType', Database: 'Database', Column: 'Column', 'Map Column Hive': 'Map Column Hive', 'Map Column Java': 'Map Column Java', 'Export Dir': 'Export Dir', 'Hive partition Keys': 'Hive partition Keys', 'Hive partition Values': 'Hive partition Values', FieldsTerminated: 'FieldsTerminated', LinesTerminated: 'LinesTerminated', IsUpdate: 'IsUpdate', UpdateKey: 'UpdateKey', UpdateMode: 'UpdateMode', 'Target Dir': 'Target Dir', DeleteTargetDir: 'DeleteTargetDir', FileType: 'FileType', CompressionCodec: 'CompressionCodec', CreateHiveTable: 'CreateHiveTable', DropDelimiter: 'DropDelimiter', OverWriteSrc: 'OverWriteSrc', ReplaceDelimiter: 'ReplaceDelimiter', Concurrency: 'Concurrency', Form: 'Form', OnlyUpdate: 'OnlyUpdate', AllowInsert: 'AllowInsert', 'Data Source': 'Data Source', 'Data Target': 'Data Target', 'All Columns': 'All Columns', 'Some Columns': 'Some Columns', 'Branch flow': 'Branch flow', 'Custom Job': 'Custom Job', 'Custom Script': 'Custom Script', 'Cannot select the same node for successful branch flow and failed branch flow': 
'Cannot select the same node for successful branch flow and failed branch flow', 'Successful branch flow and failed branch flow are required': 'Successful and failed branch flows are required for the conditions node', 'No resources exist': 'No resources exist', 'Please delete all non-existing resources': 'Please delete all non-existing resources', 'Unauthorized or deleted resources': 'Unauthorized or deleted resources', 'Please delete all non-existent resources': 'Please delete all non-existent resources', Kinship: 'Workflow relationship', Reset: 'Reset', KinshipStateActive: 'Active', KinshipState1: 'Online', KinshipState0: 'Workflow is not online', KinshipState10: 'Scheduling is not online', 'Dag label display control': 'Dag label display control', Enable: 'Enable', Disable: 'Disable', 'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!', 'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading', 'User name length is between 3 and 39': 'User name length is between 3 and 39', 'Timeout Settings': 'Timeout Settings', 'Connect Timeout': 'Connect Timeout', 'Socket Timeout': 'Socket Timeout', 'Connect timeout be a positive integer': 'Connect timeout must be a positive integer', 'Socket Timeout be a positive integer': 'Socket timeout must be a positive integer', ms: 'ms', 'Please Enter Url': 'Please Enter Url, e.g. 127.0.0.1:7077', Master: 'Master', 'Please select the waterdrop resources': 'Please select the waterdrop resources', zkDirectory: 'zkDirectory', 'Directory detail': 'Directory detail', 'Connection name': 'Connection name', 'Current connection settings': 'Current connection settings', 'Please save the DAG before formatting': 'Please save the DAG before formatting', 'Batch copy': 'Batch copy', 'Related items': 'Related items', 'Project name is required': 'Project name is required', 'Batch move': 'Batch move', Version: 'Version', 'Pre tasks': 'Pre tasks', 'Running Memory': 'Running Memory', 'Max Memory': 'Max Memory', 'Min Memory': 'Min Memory', 'The workflow canvas is abnormal and cannot be saved, please recreate': 'The workflow canvas is abnormal and cannot be saved, please recreate', Info: 'Info', 'Datasource userName': 'owner', 'Resource userName': 'owner' }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,346
[Improvement][UI] Resource files support copy path operations
The current resource file does not support copying its path, so it is very easy to make mistakes when the path has to be typed out by hand. Add a copy button to support the copy operation. ![image](https://user-images.githubusercontent.com/39816903/115647469-6ca1f380-a356-11eb-9341-30bb2d8257a0.png)
https://github.com/apache/dolphinscheduler/issues/5346
https://github.com/apache/dolphinscheduler/pull/5348
0f3eed9d00e6b688342c7885b0d2fc2528481ce0
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
"2021-04-22T02:35:20Z"
java
"2021-04-29T08:29:29Z"
dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ export default { 'User Name': '用户名', 'Please enter user name': '请输入用户名', Password: '密码', 'Please enter your password': '请输入密码', 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间', Login: '登录', Home: '首页', 'Failed to create node to save': '未创建节点保存失败', 'Global parameters': '全局参数', 'Local parameters': '局部参数', 'Copy success': '复制成功', 'The browser does not support automatic copying': '该浏览器不支持自动复制', 'Whether to save the DAG graph': '是否保存DAG图', 'Current node settings': '当前节点设置', 'View history': '查看历史', 'View log': '查看日志', 'Force success': '强制成功', 'Enter this child node': '进入该子节点', 'Node name': '节点名称', 'Please enter name (required)': '请输入名称(必填)', 'Run flag': '运行标志', Normal: '正常', 'Prohibition execution': '禁止执行', 'Please enter description': '请输入描述', 'Number of failed retries': '失败重试次数', Times: '次', 'Failed retry interval': '失败重试间隔', Minute: '分', 'Delay execution time': '延时执行时间', 'Delay execution': '延时执行', 'Forced success': '强制成功', Cancel: '取消', 'Confirm add': '确认添加', 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流', 'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流', 'Name already exists': '名称已存在请重新输入', 'Download Log': '下载日志', 'Refresh Log': '刷新日志', 'Enter full screen': '进入全屏', 'Cancel full screen': '取消全屏', Close: '关闭', 'Update log success': '更新日志成功', 'No more logs': '暂无更多日志', 'No log': '暂无日志', 'Loading Log...': '正在努力请求日志中...', 'Set the DAG diagram name': '设置DAG图名称', 'Please enter description(optional)': '请输入描述(选填)', 'Set global': '设置全局', 'Whether to go online the process definition': '是否上线流程定义', 'Whether to update the process definition': '是否更新流程定义', Add: '添加', 'DAG graph name cannot be empty': 'DAG图名称不能为空', 'Create Datasource': '创建数据源', 'Project Home': '项目首页', 'Project Manage': '项目管理', 'Create Project': '创建项目', 'Cron Manage': '定时管理', 'Copy Workflow': '复制工作流', 'Tenant Manage': '租户管理', 'Create Tenant': '创建租户', 'User Manage': '用户管理', 'Create User': '创建用户', 'User Information': '用户信息', 'Edit Password': '密码修改', Success: '成功', Failed: '失败', Delete: '删除', 'Please choose': '请选择', 'Please enter a positive integer': '请输入正整数', 'Program Type': '程序类型', 'Main Class': '主函数的Class', 'Main Jar Package': '主Jar包', 'Please enter main jar package': '请选择主Jar包', 'Please enter main class': '请填写主函数的Class', 'Main Arguments': '主程序参数', 'Please enter main arguments': '请输入主程序参数', 'Option Parameters': '选项参数', 'Please enter option parameters': '请输入选项参数', Resources: '资源', 'Custom Parameters': '自定义参数', 'Custom template': '自定义模版', Datasource: '数据源', methods: '方法', 'Please enter method(optional)': '请输入方法(选填)', Script: '脚本', 'Please enter script(required)': '请输入脚本(必填)', 
'Deploy Mode': '部署方式', 'Driver Cores': 'Driver核心数', 'Please enter Driver cores': '请输入Driver核心数', 'Driver Memory': 'Driver内存数', 'Please enter Driver memory': '请输入Driver内存数', 'Executor Number': 'Executor数量', 'Please enter Executor number': '请输入Executor数量', 'The Executor number should be a positive integer': 'Executor数量为正整数', 'Executor Memory': 'Executor内存数', 'Please enter Executor memory': '请输入Executor内存数', 'Executor Cores': 'Executor核心数', 'Please enter Executor cores': '请输入Executor核心数', 'Memory should be a positive integer': '内存数为数字', 'Core number should be positive integer': '核心数为正整数', 'Flink Version': 'Flink版本', 'JobManager Memory': 'JobManager内存数', 'Please enter JobManager memory': '请输入JobManager内存数', 'TaskManager Memory': 'TaskManager内存数', 'Please enter TaskManager memory': '请输入TaskManager内存数', 'Slot Number': 'Slot数量', 'Please enter Slot number': '请输入Slot数量', Parallelism: '并行度', 'Please enter Parallelism': '请输入并行度', 'TaskManager Number': 'TaskManager数量', 'Please enter TaskManager number': '请输入TaskManager数量', 'App Name': '任务名称', 'Please enter app name(optional)': '请输入任务名称(选填)', 'SQL Type': 'sql类型', 'Send Email': '发送邮件', 'Log display': '日志显示', 'rows of result': '行查询结果', Title: '主题', 'Please enter the title of email': '请输入邮件主题', Table: '表名', TableMode: '表格', Attachment: '附件', 'SQL Parameter': 'sql参数', 'SQL Statement': 'sql语句', 'UDF Function': 'UDF函数', 'Please enter a SQL Statement(required)': '请输入sql语句(必填)', 'Please enter a JSON Statement(required)': '请输入json语句(必填)', 'One form or attachment must be selected': '表格、附件必须勾选一个', 'Mail subject required': '邮件主题必填', 'Child Node': '子节点', 'Please select a sub-Process': '请选择子工作流', Edit: '编辑', 'Switch To This Version': '切换到该版本', 'Datasource Name': '数据源名称', 'Please enter datasource name': '请输入数据源名称', IP: 'IP主机名', 'Please enter IP': '请输入IP主机名', Port: '端口', 'Please enter port': '请输入端口', 'Database Name': '数据库名', 'Please enter database name': '请输入数据库名', 'Oracle Connect Type': '服务名或SID', 'Oracle Service Name': '服务名', 'Oracle SID': 'SID', 'jdbc connect parameters': 'jdbc连接参数', 'Test Connect': '测试连接', 'Please enter resource name': '请输入数据源名称', 'Please enter resource folder name': '请输入资源文件夹名称', 'Please enter a non-query SQL statement': '请输入非查询sql语句', 'Please enter IP/hostname': '请输入IP/主机名', 'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式', '#': '编号', 'Datasource Type': '数据源类型', 'Datasource Parameter': '数据源参数', 'Create Time': '创建时间', 'Update Time': '更新时间', Operation: '操作', 'Current Version': '当前版本', 'Click to view': '点击查看', 'Delete?': '确定删除吗?', 'Switch Version Successfully': '切换版本成功', 'Confirm Switch To This Version?': '确定切换到该版本吗?', Confirm: '确定', 'Task status statistics': '任务状态统计', Number: '数量', State: '状态', 'Process Status Statistics': '流程状态统计', 'Process Definition Statistics': '流程定义统计', 'Project Name': '项目名称', 'Please enter name': '请输入名称', 'Owned Users': '所属用户', 'Process Pid': '进程Pid', 'Zk registration directory': 'zk注册目录', cpuUsage: 'cpuUsage', memoryUsage: 'memoryUsage', 'Last heartbeat time': '最后心跳时间', 'Edit Tenant': '编辑租户', 'OS Tenant Code': '操作系统租户', 'Tenant Name': '租户名称', Queue: '队列', 'Please select a queue': '默认为租户关联队列', 'Please enter the os tenant code in English': '请输入操作系统租户只允许英文', 'Please enter os tenant code in English': '请输入英文操作系统租户', 'Please enter os tenant code': '请输入操作系统租户', 'Please enter tenant Name': '请输入租户名称', 'The os tenant code. 
Only letters or a combination of letters and numbers are allowed': '操作系统租户只允许字母或字母与数字组合', 'Edit User': '编辑用户', Tenant: '租户', Email: '邮件', Phone: '手机', 'User Type': '用户类型', 'Please enter phone number': '请输入手机', 'Please enter email': '请输入邮箱', 'Please enter the correct email format': '请输入正确的邮箱格式', 'Please enter the correct mobile phone format': '请输入正确的手机格式', Project: '项目', Authorize: '授权', 'File resources': '文件资源', 'UDF resources': 'UDF资源', 'UDF resources directory': 'UDF资源目录', 'Please select UDF resources directory': '请选择UDF资源目录', 'Alarm group': '告警组', 'Alarm group required': '告警组必填', 'Edit alarm group': '编辑告警组', 'Create alarm group': '创建告警组', 'Create Alarm Instance': '创建告警实例', 'Edit Alarm Instance': '编辑告警实例', 'Group Name': '组名称', 'Alarm instance name': '告警实例名称', 'Alarm plugin name': '告警插件名称', 'Select plugin': '选择插件', 'Please enter group name': '请输入组名称', 'Instance parameter exception': '实例参数异常', 'Group Type': '组类型', 'Alarm plugin instance': '告警插件实例', Remarks: '备注', SMS: '短信', 'Managing Users': '管理用户', Permission: '权限', Administrator: '管理员', 'Confirm Password': '确认密码', 'Please enter confirm password': '请输入确认密码', 'Password cannot be in Chinese': '密码不能为中文', 'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码', 'Confirmation password cannot be in Chinese': '确认密码不能为中文', 'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码', 'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认', 'Please select the datasource': '请选择数据源', 'Please select resources': '请选择资源', Query: '查询', 'Non Query': '非查询', 'prop(required)': 'prop(必填)', 'value(optional)': 'value(选填)', 'value(required)': 'value(必填)', 'prop is empty': 'prop不能为空', 'value is empty': 'value不能为空', 'prop is repeat': 'prop中有重复', 'Start Time': '开始时间', 'End Time': '结束时间', crontab: 'crontab', 'Failure Strategy': '失败策略', online: '上线', offline: '下线', 'Task Status': '任务状态', 'Process Instance': '工作流实例', 'Task Instance': '任务实例', 'Select date range': '选择日期区间', startDate: '开始日期', endDate: '结束日期', Date: '日期', Waiting: '等待', Execution: '执行中', Finish: '完成', 'Create File': '创建文件', 'Create folder': '创建文件夹', 'File Name': '文件名称', 'Folder Name': '文件夹名称', 'File Format': '文件格式', 'Folder Format': '文件夹格式', 'File Content': '文件内容', 'Upload File Size': '文件大小不能超过1G', Create: '创建', 'Please enter the resource content': '请输入资源内容', 'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行', 'File Details': '文件详情', 'Download Details': '下载详情', Return: '返回', Save: '保存', 'File Manage': '文件管理', 'Upload Files': '上传文件', 'Create UDF Function': '创建UDF函数', 'Upload UDF Resources': '上传UDF资源', 'Service-Master': '服务管理-Master', 'Service-Worker': '服务管理-Worker', 'Process Name': '工作流名称', Executor: '执行用户', 'Run Type': '运行类型', 'Scheduling Time': '调度时间', 'Run Times': '运行次数', host: 'host', 'fault-tolerant sign': '容错标识', Rerun: '重跑', 'Recovery Failed': '恢复失败', Stop: '停止', Pause: '暂停', 'Recovery Suspend': '恢复运行', Gantt: '甘特图', 'Node Type': '节点类型', 'Submit Time': '提交时间', Duration: '运行时长', 'Retry Count': '重试次数', 'Task Name': '任务名称', 'Task Date': '任务日期', 'Source Table': '源表', 'Record Number': '记录数', 'Target Table': '目标表', 'Online viewing type is not supported': '不支持在线查看类型', Size: '大小', Rename: '重命名', Download: '下载', Export: '导出', 'Version Info': '版本信息', Submit: '提交', 'Edit UDF Function': '编辑UDF函数', type: '类型', 'UDF Function Name': 'UDF函数名称', FILE: '文件', UDF: 'UDF', 'File Subdirectory': '文件子目录', 'Please enter a function name': '请输入函数名', 'Package Name': '包名类名', 'Please enter a Package name': '请输入包名类名', Parameter: '参数', 'Please enter a 
parameter': '请输入参数', 'UDF Resources': 'UDF资源', 'Upload Resources': '上传资源', Instructions: '使用说明', 'Please enter a instructions': '请输入使用说明', 'Please enter a UDF function name': '请输入UDF函数名称', 'Select UDF Resources': '请选择UDF资源', 'Class Name': '类名', 'Jar Package': 'jar包', 'Library Name': '库名', 'UDF Resource Name': 'UDF资源名称', 'File Size': '文件大小', Description: '描述', 'Drag Nodes and Selected Items': '拖动节点和选中项', 'Select Line Connection': '选择线条连接', 'Delete selected lines or nodes': '删除选中的线或节点', 'Full Screen': '全屏', Unpublished: '未发布', 'Start Process': '启动工作流', 'Execute from the current node': '从当前节点开始执行', 'Recover tolerance fault process': '恢复被容错的工作流', 'Resume the suspension process': '恢复运行流程', 'Execute from the failed nodes': '从失败节点开始执行', 'Complement Data': '补数', 'Scheduling execution': '调度执行', 'Recovery waiting thread': '恢复等待线程', 'Submitted successfully': '提交成功', Executing: '正在执行', 'Ready to pause': '准备暂停', 'Ready to stop': '准备停止', 'Need fault tolerance': '需要容错', Kill: 'Kill', 'Waiting for thread': '等待线程', 'Waiting for dependence': '等待依赖', Start: '运行', Copy: '复制节点', 'Copy name': '复制名称', 'Please enter keyword': '请输入关键词', 'File Upload': '文件上传', 'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!', 'Drag area upload': '拖动区域上传', Upload: '上传', 'ReUpload File': '重新上传文件', 'Please enter file name': '请输入文件名', 'Please select the file to upload': '请选择要上传的文件', 'Resources manage': '资源中心', Security: '安全中心', Logout: '退出', 'No data': '查询无数据', 'Uploading...': '文件上传中', 'Loading...': '正在努力加载中...', List: '列表', 'Unable to download without proper url': '无下载url无法下载', Process: '工作流', 'Process definition': '工作流定义', 'Task record': '任务记录', 'Warning group manage': '告警组管理', 'Warning instance manage': '告警实例管理', 'Servers manage': '服务管理', 'UDF manage': 'UDF管理', 'Resource manage': '资源管理', 'Function manage': '函数管理', 'Edit password': '修改密码', 'Ordinary users': '普通用户', 'Create process': '创建工作流', 'Import process': '导入工作流', 'Timing state': '定时状态', Timing: '定时', TreeView: '树形图', 'Mailbox already exists! 
Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复', 'Mailbox input is illegal': '邮箱输入不合法', 'Please set the parameters before starting': '启动前请先设置参数', Continue: '继续', End: '结束', 'Node execution': '节点执行', 'Backward execution': '向后执行', 'Forward execution': '向前执行', 'Execute only the current node': '仅执行当前节点', 'Notification strategy': '通知策略', 'Notification group': '通知组', 'Please select a notification group': '请选择通知组', receivers: '收件人', receiverCcs: '抄送人', 'Whether it is a complement process?': '是否补数', 'Schedule date': '调度日期', 'Mode of execution': '执行方式', 'Serial execution': '串行执行', 'Parallel execution': '并行执行', 'Set parameters before timing': '定时前请先设置参数', 'Start and stop time': '起止时间', 'Please select time': '请选择时间', 'Please enter crontab': '请输入crontab', none_1: '都不发', success_1: '成功发', failure_1: '失败发', All_1: '成功或失败都发', Toolbar: '工具栏', 'View variables': '查看变量', 'Format DAG': '格式化DAG', 'Refresh DAG status': '刷新DAG状态', Return_1: '返回上一节点', 'Please enter format': '请输入格式为', 'connection parameter': '连接参数', 'Process definition details': '流程定义详情', 'Create process definition': '创建流程定义', 'Scheduled task list': '定时任务列表', 'Process instance details': '流程实例详情', 'Create Resource': '创建资源', 'User Center': '用户中心', 'Please enter method': '请输入方法', None: '无', Name: '名称', 'Process priority': '流程优先级', 'Task priority': '任务优先级', 'Task timeout alarm': '任务超时告警', 'Timeout strategy': '超时策略', 'Timeout alarm': '超时告警', 'Timeout failure': '超时失败', 'Timeout period': '超时时长', 'Waiting Dependent complete': '等待依赖完成', 'Waiting Dependent start': '等待依赖启动', 'Check interval': '检查间隔', 'Timeout must be longer than check interval': '超时时间必须比检查间隔长', 'Timeout strategy must be selected': '超时策略必须选一个', 'Timeout must be a positive integer': '超时时长必须为正整数', 'Add dependency': '添加依赖', and: '且', or: '或', month: '月', week: '周', day: '日', hour: '时', Running: '正在运行', 'Waiting for dependency to complete': '等待依赖完成', Selected: '已选', CurrentHour: '当前小时', Last1Hour: '前1小时', Last2Hours: '前2小时', Last3Hours: '前3小时', Last24Hours: '前24小时', today: '今天', Last1Days: '昨天', Last2Days: '前两天', Last3Days: '前三天', Last7Days: '前七天', ThisWeek: '本周', LastWeek: '上周', LastMonday: '上周一', LastTuesday: '上周二', LastWednesday: '上周三', LastThursday: '上周四', LastFriday: '上周五', LastSaturday: '上周六', LastSunday: '上周日', ThisMonth: '本月', LastMonth: '上月', LastMonthBegin: '上月初', LastMonthEnd: '上月末', 'Refresh status succeeded': '刷新状态成功', 'Queue manage': 'Yarn 队列管理', 'Create queue': '创建队列', 'Edit queue': '编辑队列', 'Datasource manage': '数据源中心', 'History task record': '历史任务记录', 'Please go online': '不要忘记上线', 'Queue value': '队列值', 'Please enter queue value': '请输入队列值', 'Worker group manage': 'Worker分组管理', 'Create worker group': '创建Worker分组', 'Edit worker group': '编辑Worker分组', 'Token manage': '令牌管理', 'Create token': '创建令牌', 'Edit token': '编辑令牌', Addresses: '地址', 'Worker Addresses': 'Worker地址', 'Please select the worker addresses': '请选择Worker地址', 'Failure time': '失效时间', 'Expiration time': '失效时间', User: '用户', 'Please enter token': '请输入令牌', 'Generate token': '生成令牌', Monitor: '监控中心', Group: '分组', 'Queue statistics': '队列统计', 'Command status statistics': '命令状态统计', 'Task kill': '等待kill任务', 'Task queue': '等待执行任务', 'Error command count': '错误指令数', 'Normal command count': '正确指令数', Manage: '管理', 'Number of connections': '连接数', Sent: '发送量', Received: '接收量', 'Min latency': '最低延时', 'Avg latency': '平均延时', 'Max latency': '最大延时', 'Node count': '节点数', 'Query time': '当前查询时间', 'Node self-test status': '节点自检状态', 'Health status': '健康状态', 'Max connections': '最大连接数', 'Threads connections': '当前连接数', 'Max used connections': 
'同时使用连接最大数', 'Threads running connections': '数据库当前活跃连接数', 'Worker group': 'Worker分组', 'Please enter a positive integer greater than 0': '请输入大于 0 的正整数', 'Pre Statement': '前置sql', 'Post Statement': '后置sql', 'Statement cannot be empty': '语句不能为空', 'Process Define Count': '工作流定义数', 'Process Instance Running Count': '正在运行的流程数', 'command number of waiting for running': '待执行的命令数', 'failure command number': '执行失败的命令数', 'tasks number of waiting running': '待运行任务数', 'task number of ready to kill': '待杀死任务数', 'Statistics manage': '统计管理', statistics: '统计', 'select tenant': '选择租户', 'Please enter Principal': '请输入Principal', 'Please enter the kerberos authentication parameter java.security.krb5.conf': '请输入kerberos认证参数 java.security.krb5.conf', 'Please enter the kerberos authentication parameter login.user.keytab.username': '请输入kerberos认证参数 login.user.keytab.username', 'Please enter the kerberos authentication parameter login.user.keytab.path': '请输入kerberos认证参数 login.user.keytab.path', 'The start time must not be the same as the end': '开始时间和结束时间不能相同', 'Startup parameter': '启动参数', 'Startup type': '启动类型', 'warning of timeout': '超时告警', 'Next five execution times': '接下来五次执行时间', 'Execute time': '执行时间', 'Complement range': '补数范围', 'Http Url': '请求地址', 'Http Method': '请求类型', 'Http Parameters': '请求参数', 'Http Parameters Key': '参数名', 'Http Parameters Position': '参数位置', 'Http Parameters Value': '参数值', 'Http Check Condition': '校验条件', 'Http Condition': '校验内容', 'Please Enter Http Url': '请填写请求地址(必填)', 'Please Enter Http Condition': '请填写校验内容', 'There is no data for this period of time': '该时间段无数据', 'Worker addresses cannot be empty': 'Worker地址不能为空', 'Please generate token': '请生成Token', 'Spark Version': 'Spark版本', TargetDataBase: '目标库', TargetTable: '目标表', 'Please enter the table of target': '请输入目标表名', 'Please enter a Target Table(required)': '请输入目标表(必填)', SpeedByte: '限流(字节数)', SpeedRecord: '限流(记录数)', '0 means unlimited by byte': 'KB,0代表不限制', '0 means unlimited by count': '0代表不限制', 'Modify User': '修改用户', 'Whether directory': '是否文件夹', Yes: '是', No: '否', 'Hadoop Custom Params': 'Hadoop参数', 'Sqoop Advanced Parameters': 'Sqoop参数', 'Sqoop Job Name': '任务名称', 'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)', 'Please enter Mysql Table(required)': '请输入Mysql表名(必填)', 'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开', 'Please enter Target Dir(required)': '请输入目标路径(必填)', 'Please enter Export Dir(required)': '请输入数据源路径(必填)', 'Please enter Hive Database(required)': '请输入Hive数据库(必填)', 'Please enter Hive Table(required)': '请输入Hive表名(必填)', 'Please enter Hive Partition Keys': '请输入分区键', 'Please enter Hive Partition Values': '请输入分区值', 'Please enter Replace Delimiter': '请输入替换分隔符', 'Please enter Fields Terminated': '请输入列分隔符', 'Please enter Lines Terminated': '请输入行分隔符', 'Please enter Concurrency': '请输入并发度', 'Please enter Update Key': '请输入更新列', 'Please enter Job Name(required)': '请输入任务名称(必填)', 'Please enter Custom Shell(required)': '请输入自定义脚本', Direct: '流向', Type: '类型', ModelType: '模式', ColumnType: '列类型', Database: '数据库', Column: '列', 'Map Column Hive': 'Hive类型映射', 'Map Column Java': 'Java类型映射', 'Export Dir': '数据源路径', 'Hive partition Keys': 'Hive 分区键', 'Hive partition Values': 'Hive 分区值', FieldsTerminated: '列分隔符', LinesTerminated: '行分隔符', IsUpdate: '是否更新', UpdateKey: '更新列', UpdateMode: '更新类型', 'Target Dir': '目标路径', DeleteTargetDir: '是否删除目录', FileType: '保存格式', CompressionCodec: '压缩类型', CreateHiveTable: '是否创建新表', DropDelimiter: '是否删除分隔符', OverWriteSrc: '是否覆盖数据源', ReplaceDelimiter: '替换分隔符', Concurrency: '并发度', Form: '表单', OnlyUpdate: '只更新', 
AllowInsert: '无更新便插入', 'Data Source': '数据来源', 'Data Target': '数据目的', 'All Columns': '全表导入', 'Some Columns': '选择列', 'Branch flow': '分支流转', 'Custom Job': '自定义任务', 'Custom Script': '自定义脚本', 'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点', 'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填', 'No resources exist': '不存在资源', 'Please delete all non-existing resources': '请删除所有不存在资源', 'Unauthorized or deleted resources': '未授权或已删除资源', 'Please delete all non-existent resources': '请删除所有未授权或已删除资源', Kinship: '工作流关系', Reset: '重置', KinshipStateActive: '当前选择', KinshipState1: '已上线', KinshipState0: '工作流未上线', KinshipState10: '调度未上线', 'Dag label display control': 'Dag节点名称显隐', Enable: '启用', Disable: '停用', 'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!', 'Please confirm whether the workflow has been saved before downloading': '下载前请确定工作流是否已保存', 'User name length is between 3 and 39': '用户名长度在3~39之间', 'Timeout Settings': '超时设置', 'Connect Timeout': '连接超时', 'Socket Timeout': 'Socket超时', 'Connect timeout be a positive integer': '连接超时必须为数字', 'Socket Timeout be a positive integer': 'Socket超时必须为数字', ms: '毫秒', 'Please Enter Url': '请直接填写地址,例如:127.0.0.1:7077', Master: 'Master', 'Please select the waterdrop resources': '请选择waterdrop配置文件', zkDirectory: 'zk注册目录', 'Directory detail': '查看目录详情', 'Connection name': '连线名', 'Current connection settings': '当前连线设置', 'Please save the DAG before formatting': '格式化前请先保存DAG', 'Batch copy': '批量复制', 'Related items': '关联项目', 'Project name is required': '项目名称必填', 'Batch move': '批量移动', Version: '版本', 'Pre tasks': '前置任务', 'Running Memory': '运行内存', 'Max Memory': '最大内存', 'Min Memory': '最小内存', 'The workflow canvas is abnormal and cannot be saved, please recreate': '该工作流画布异常,无法保存,请重新创建', Info: '提示', 'Datasource userName': '所属用户', 'Resource userName': '所属用户' }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,093
[Feature][server] Support for stored procedures and stored function calls and data source supports DB2
**Describe the feature** 1. Support calls to stored procedures and stored functions. 2. The data source parameter on the task node page should support selecting from a variety of data source types, instead of being limited to MySQL, Oracle, and a few others. 3. The call script is filled in on the page, and the call scripts for the various data sources differ in subtle ways. **Is your feature request related to a problem? Please describe.** 1. Fixes issue #3990: the data source supports DB2. **Describe the solution you'd like** 1. Support calls to stored procedures and stored functions: ``` {call <procedure-name>[(<arg1>,<arg2>, ...)]} {?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ``` 2. The task node page should support selecting multiple types of data sources, instead of only the few (MySQL, Oracle, etc.) configured today. 3. The call script is filled in on the page, and the number of parameters in the script is validated.
https://github.com/apache/dolphinscheduler/issues/4093
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-11-20T11:10:34Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.utils.EnumUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.datax.DataxTask; import org.apache.dolphinscheduler.server.worker.task.flink.FlinkTask; import org.apache.dolphinscheduler.server.worker.task.http.HttpTask; import org.apache.dolphinscheduler.server.worker.task.mr.MapReduceTask; import org.apache.dolphinscheduler.server.worker.task.processdure.ProcedureTask; import org.apache.dolphinscheduler.server.worker.task.python.PythonTask; import org.apache.dolphinscheduler.server.worker.task.shell.ShellTask; import org.apache.dolphinscheduler.server.worker.task.spark.SparkTask; import org.apache.dolphinscheduler.server.worker.task.sql.SqlTask; import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopTask; import org.apache.dolphinscheduler.service.alert.AlertClientService; import org.slf4j.Logger; /** * task manager */ public class TaskManager { /** * create new task * @param taskExecutionContext taskExecutionContext * @param logger logger * @return AbstractTask * @throws IllegalArgumentException illegal argument exception */ public static AbstractTask newTask(TaskExecutionContext taskExecutionContext, Logger logger, AlertClientService alertClientService) throws IllegalArgumentException { TaskType anEnum = EnumUtils.getEnum(TaskType.class, taskExecutionContext.getTaskType()); if (anEnum == null) { logger.error("not support task type: {}", taskExecutionContext.getTaskType()); throw new IllegalArgumentException("not support task type"); } switch (anEnum) { case SHELL: case WATERDROP: return new ShellTask(taskExecutionContext, logger); case PROCEDURE: return new ProcedureTask(taskExecutionContext, logger); case SQL: return new SqlTask(taskExecutionContext, logger, alertClientService); case MR: return new MapReduceTask(taskExecutionContext, logger); case SPARK: return new SparkTask(taskExecutionContext, logger); case FLINK: return new FlinkTask(taskExecutionContext, logger); case PYTHON: return new PythonTask(taskExecutionContext, logger); case HTTP: return new HttpTask(taskExecutionContext, logger); case DATAX: return new DataxTask(taskExecutionContext, logger); case SQOOP: return new SqoopTask(taskExecutionContext, logger); default: logger.error("not support task type: {}", taskExecutionContext.getTaskType()); throw new IllegalArgumentException("not support task type"); } } }
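The two templates quoted in the issue body are standard JDBC escape syntax, which is what a stored-procedure task ultimately hands to a CallableStatement. Below is a minimal sketch of both forms, assuming a reachable DB2 database and hypothetical routine names (update_counts, total_amount) that are not from this repository:

```java
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Types;

public class ProcedureCallDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection details; any JDBC-compliant source works.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:db2://localhost:50000/sample", "user", "password")) {

            // {call <procedure-name>[(<arg1>,<arg2>, ...)]}
            // One IN parameter plus an OUT parameter registered by SQL type.
            try (CallableStatement call =
                     conn.prepareCall("{call update_counts(?, ?)}")) {
                call.setInt(1, 42);                           // IN arg
                call.registerOutParameter(2, Types.INTEGER);  // OUT arg
                call.execute();
                System.out.println("rows: " + call.getInt(2));
            }

            // {?= call <procedure-name>[(<arg1>,<arg2>, ...)]}
            // The leading ?= binds the stored function's return value.
            try (CallableStatement fn =
                     conn.prepareCall("{?= call total_amount(?)}")) {
                fn.registerOutParameter(1, Types.DECIMAL);    // return value
                fn.setString(2, "2020-11");                   // IN arg
                fn.execute();
                System.out.println("total: " + fn.getBigDecimal(1));
            }
        }
    }
}
```

Note how the {?= call ...} form differs only in that parameter index 1 is the function's return value, which is why the IN argument shifts to index 2; this is the parameter-count matching the issue asks the page to validate.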
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,093
[Feature][server] Support for stored procedures and stored function calls and data source supports DB2
**Describe the feature** 1. Support stored procedure and stored function calls. 2. The task node page's data source parameter should allow selecting from a variety of data source types, rather than being limited to MySQL, Oracle, and a few others. 3. The call script is filled in on the page, and the call scripts for the various data sources differ slightly. **Is your feature request related to a problem? Please describe.** 1. Fixes issue #3990: data source supports DB2. **Describe the solution you'd like** 1. Support stored procedure and stored function calls: ``` {call <procedure-name>[(<arg1>,<arg2>, ...)]} {?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ``` 2. The task node page should allow selecting multiple data source types, instead of only configuring a few such as MySQL and Oracle. 3. The call script is filled in on the page, and the number of parameters it declares is validated against the supplied values.
https://github.com/apache/dolphinscheduler/issues/4093
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-11-20T11:10:34Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/procedure/ProcedureTask.java
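The file content for `ProcedureTask.java` is absent from this record, so as a stand-in, here is a plain-JDBC sketch of the two call forms quoted in the issue body above. The procedure/function names, connection URL, and credentials are placeholders, not taken from the project.

```java
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Types;

public class CallSyntaxSketch {

    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/test", "user", "pass")) {

            // {call <procedure-name>[(<arg1>,<arg2>, ...)]} -- stored procedure call
            try (CallableStatement cs = conn.prepareCall("{call update_user(?, ?)}")) {
                cs.setInt(1, 42);                           // IN parameter
                cs.registerOutParameter(2, Types.VARCHAR);  // OUT parameter
                cs.execute();
                System.out.println("out = " + cs.getString(2));
            }

            // {?= call <function-name>[(<arg1>,<arg2>, ...)]} -- stored function call
            try (CallableStatement cs = conn.prepareCall("{?= call user_count(?)}")) {
                cs.registerOutParameter(1, Types.INTEGER);  // slot 1 holds the return value
                cs.setString(2, "active");
                cs.execute();
                System.out.println("count = " + cs.getInt(1));
            }
        }
    }
}
```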
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,093
[Feature][server] Support for stored procedures and stored function calls and data source supports DB2
**Describe the feature** 1. Support stored procedure and stored function calls. 2. The task node page's data source parameter should allow selecting from a variety of data source types, rather than being limited to MySQL, Oracle, and a few others. 3. The call script is filled in on the page, and the call scripts for the various data sources differ slightly. **Is your feature request related to a problem? Please describe.** 1. Fixes issue #3990: data source supports DB2. **Describe the solution you'd like** 1. Support stored procedure and stored function calls: ``` {call <procedure-name>[(<arg1>,<arg2>, ...)]} {?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ``` 2. The task node page should allow selecting multiple data source types, instead of only configuring a few such as MySQL and Oracle. 3. The call script is filled in on the page, and the number of parameters it declares is validated against the supplied values.
https://github.com/apache/dolphinscheduler/issues/4093
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-11-20T11:10:34Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/procedure/ProcedureTaskTest.java
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,093
[Feature][server] Support for stored procedures and stored function calls and data source supports DB2
**Describe the feature** 1. Support stored procedure and stored function calls. 2. The task node page's data source parameter should allow selecting from a variety of data source types, rather than being limited to MySQL, Oracle, and a few others. 3. The call script is filled in on the page, and the call scripts for the various data sources differ slightly. **Is your feature request related to a problem? Please describe.** 1. Fixes issue #3990: data source supports DB2. **Describe the solution you'd like** 1. Support stored procedure and stored function calls: ``` {call <procedure-name>[(<arg1>,<arg2>, ...)]} {?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ``` 2. The task node page should allow selecting multiple data source types, instead of only configuring a few such as MySQL and Oracle. 3. The call script is filled in on the page, and the number of parameters it declares is validated against the supplied values.
https://github.com/apache/dolphinscheduler/issues/4093
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-11-20T11:10:34Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/procedure.vue
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
<template>
  <div class="procedure-model">
    <m-list-box>
      <div slot="text">{{$t('Datasource')}}</div>
      <div slot="content">
        <m-datasource
          ref="refDs"
          @on-dsData="_onDsData"
          :supportType="['MYSQL','POSTGRESQL','CLICKHOUSE', 'ORACLE', 'SQLSERVER']"
          :data="{ type:type,datasource:datasource }">
        </m-datasource>
      </div>
    </m-list-box>
    <m-list-box>
      <div slot="text">{{$t('methods')}}</div>
      <div slot="content">
        <el-input
          type="input"
          size="small"
          :disabled="isDetails"
          v-model="method"
          :placeholder="$t('Please enter method(optional)')">
        </el-input>
      </div>
    </m-list-box>
    <m-list-box>
      <div slot="text">{{$t('Custom Parameters')}}</div>
      <div slot="content">
        <m-local-params
          ref="refLocalParams"
          @on-local-params="_onLocalParams"
          :udp-list="localParams">
        </m-local-params>
      </div>
    </m-list-box>
  </div>
</template>
<script>
  import _ from 'lodash'
  import i18n from '@/module/i18n'
  import mListBox from './_source/listBox'
  import mDatasource from './_source/datasource'
  import mLocalParams from './_source/localParams'
  import disabledState from '@/module/mixin/disabledState'

  export default {
    name: 'procedure',
    data () {
      return {
        // method
        method: '',
        // Custom parameter
        localParams: [],
        // Data source type
        type: '',
        // data source
        datasource: '',
        // Return to the selected data source
        rtDatasource: ''
      }
    },
    mixins: [disabledState],
    props: {
      backfillItem: Object
    },
    methods: {
      /**
       * return type or datasource
       */
      _onDsData (o) {
        this.type = o.type
        this.rtDatasource = o.datasource
      },
      /**
       * return udp
       */
      _onLocalParams (a) {
      },
      /**
       * verification
       */
      _verification () {
        // datasource Subcomponent verification
        if (!this.$refs.refDs._verifDatasource()) {
          return false
        }
        // Verification function
        if (!this.method) {
          this.$message.warning(`${i18n.$t('Please enter method')}`)
          return false
        }
        // localParams Subcomponent verification
        if (!this.$refs.refLocalParams._verifProp()) {
          return false
        }
        // storage
        this.$emit('on-params', {
          type: this.type,
          datasource: this.rtDatasource,
          method: this.method,
          localParams: this.localParams
        })
        return true
      }
    },
    watch: {
      // Watch the cacheParams
      cacheParams (val) {
        this.$emit('on-cache-params', val)
      }
    },
    computed: {
      cacheParams () {
        return {
          type: this.type,
          datasource: this.rtDatasource,
          method: this.method,
          localParams: this.localParams
        }
      }
    },
    created () {
      let o = this.backfillItem
      // Non-null objects represent backfill
      if (!_.isEmpty(o)) {
        this.name = o.name
        this.desc = o.desc
        // backfill
        this.type = o.params.type || ''
        this.datasource = o.params.datasource || ''
        this.method = o.params.method || ''
        // backfill localParams
        let localParams = o.params.localParams || []
        if (localParams.length) {
          this.localParams = localParams
        }
      }
    },
    mounted () {
    },
    components: { mListBox, mDatasource, mLocalParams }
  }
</script>
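Point 3 of the issue asks for a parameter-count check between the call script and the values entered on the page, while the component above only verifies that `method` is non-empty. The sketch below shows one plausible form of the requested validation (count the `?` placeholders and compare with the supplied arguments); it is an illustration, not the patch's actual implementation.

```java
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ParamCountCheckSketch {

    private static final Pattern PLACEHOLDER = Pattern.compile("\\?");

    static void verify(String callScript, List<String> params) {
        Matcher m = PLACEHOLDER.matcher(callScript);
        int placeholders = 0;
        while (m.find()) {
            placeholders++;
        }
        // note: in the "{?= call f(...)}" form the leading '?' is the return
        // slot, so a real check would subtract it before comparing
        if (placeholders != params.size()) {
            throw new IllegalArgumentException("call script declares " + placeholders
                    + " parameters but " + params.size() + " were supplied");
        }
    }

    public static void main(String[] args) {
        verify("{call update_user(?, ?)}", Arrays.asList("42", "alice")); // passes
    }
}
```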
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,093
[Feature][server] Support for stored procedures and stored function calls and data source supports DB2
**Describe the feature** 1.Support for stored procedures and stored function calls 2. Task node page data source parameters can support the selection of a variety of data sources, rather than currently only limited to mysql, Oracle and other data sources 3. When the calling script is filled in on the page, there will be subtle differences between the calling scripts corresponding to various data sources **Is your feature request related to a problem? Please describe.** 1. Fixed Issues #3990 , data source supports DB2 **Describe the solution you'd like** 1.Support for stored procedures and stored function calls ``` {call <procedure-name>[(<arg1>,<arg2>, ...)]} {?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ``` 2. Task node page selection can support multiple types of data sources, instead of only configuring several data sources like mysql and Oracle 3. Call the script to fill in the page and verify the matching of the number of parameters --- **描述特征** 1.支持存储过程和存储函数调用 2.任务节点页面数据源参数可以支持选择多种类型数据源,而不是现在只限制mysql,oracle等几个数据源 3.调用脚本在页面填写,各个数据源对应的调用脚本会存在细微区别 **您的特性请求与问题有关吗?请描述** 1.修复issues #3990 ,数据源支持DB2 **描述你想要的解决方案** 1.支持存储过程和存储函数调用 ``` {call <procedure-name>[(<arg1>,<arg2>, ...)]} {?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ``` 2.任务节点页面选择可以支持多种类型数据源,而不是现在只配置mysql,oracle等几个数据源 3.调用脚本在页面填写,做参数个数匹配验证
https://github.com/apache/dolphinscheduler/issues/4093
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-11-20T11:10:34Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ export default { 'User Name': 'User Name', 'Please enter user name': 'Please enter user name', Password: 'Password', 'Please enter your password': 'Please enter your password', 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22', Login: 'Login', Home: 'Home', 'Failed to create node to save': 'Failed to create node to save', 'Global parameters': 'Global parameters', 'Local parameters': 'Local parameters', 'Copy success': 'Copy success', 'The browser does not support automatic copying': 'The browser does not support automatic copying', 'Whether to save the DAG graph': 'Whether to save the DAG graph', 'Current node settings': 'Current node settings', 'View history': 'View history', 'View log': 'View log', 'Force success': 'Force success', 'Enter this child node': 'Enter this child node', 'Node name': 'Node name', 'Please enter name (required)': 'Please enter name (required)', 'Run flag': 'Run flag', Normal: 'Normal', 'Prohibition execution': 'Prohibition execution', 'Please enter description': 'Please enter description', 'Number of failed retries': 'Number of failed retries', Times: 'Times', 'Failed retry interval': 'Failed retry interval', Minute: 'Minute', 'Delay execution time': 'Delay execution time', 'Delay execution': 'Delay execution', 'Forced success': 'Forced success', Cancel: 'Cancel', 'Confirm add': 'Confirm add', 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process', 'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process', 'Name already exists': 'Name already exists', 'Download Log': 'Download Log', 'Refresh Log': 'Refresh Log', 'Enter full screen': 'Enter full screen', 'Cancel full screen': 'Cancel full screen', Close: 'Close', 'Update log success': 'Update log success', 'No more logs': 'No more logs', 'No log': 'No log', 'Loading Log...': 'Loading Log...', 'Set the DAG diagram name': 'Set the DAG diagram name', 'Please enter description(optional)': 'Please enter description(optional)', 'Set global': 'Set global', 'Whether to go online the process definition': 'Whether to go online the process definition', 'Whether to update the process definition': 'Whether to update the process definition', Add: 'Add', 'DAG graph name cannot be empty': 'DAG graph name cannot be empty', 'Create Datasource': 'Create Datasource', 'Project Home': 'Project Home', 'Project Manage': 'Project', 'Create Project': 'Create Project', 'Cron Manage': 'Cron 
Manage', 'Copy Workflow': 'Copy Workflow', 'Tenant Manage': 'Tenant Manage', 'Create Tenant': 'Create Tenant', 'User Manage': 'User Manage', 'Create User': 'Create User', 'User Information': 'User Information', 'Edit Password': 'Edit Password', Success: 'Success', Failed: 'Failed', Delete: 'Delete', 'Please choose': 'Please choose', 'Please enter a positive integer': 'Please enter a positive integer', 'Program Type': 'Program Type', 'Main Class': 'Main Class', 'Main Jar Package': 'Main Jar Package', 'Please enter main jar package': 'Please enter main jar package', 'Please enter main class': 'Please enter main class', 'Main Arguments': 'Main Arguments', 'Please enter main arguments': 'Please enter main arguments', 'Option Parameters': 'Option Parameters', 'Please enter option parameters': 'Please enter option parameters', Resources: 'Resources', 'Custom Parameters': 'Custom Parameters', 'Custom template': 'Custom template', Datasource: 'Datasource', methods: 'methods', 'Please enter method(optional)': 'Please enter method(optional)', Script: 'Script', 'Please enter script(required)': 'Please enter script(required)', 'Deploy Mode': 'Deploy Mode', 'Driver Cores': 'Driver Cores', 'Please enter Driver cores': 'Please enter Driver cores', 'Driver Memory': 'Driver Memory', 'Please enter Driver memory': 'Please enter Driver memory', 'Executor Number': 'Executor Number', 'Please enter Executor number': 'Please enter Executor number', 'The Executor number should be a positive integer': 'The Executor number should be a positive integer', 'Executor Memory': 'Executor Memory', 'Please enter Executor memory': 'Please enter Executor memory', 'Executor Cores': 'Executor Cores', 'Please enter Executor cores': 'Please enter Executor cores', 'Memory should be a positive integer': 'Memory should be a positive integer', 'Core number should be positive integer': 'Core number should be positive integer', 'Flink Version': 'Flink Version', 'JobManager Memory': 'JobManager Memory', 'Please enter JobManager memory': 'Please enter JobManager memory', 'TaskManager Memory': 'TaskManager Memory', 'Please enter TaskManager memory': 'Please enter TaskManager memory', 'Slot Number': 'Slot Number', 'Please enter Slot number': 'Please enter Slot number', Parallelism: 'Parallelism', 'Please enter Parallelism': 'Please enter Parallelism', 'TaskManager Number': 'TaskManager Number', 'Please enter TaskManager number': 'Please enter TaskManager number', 'App Name': 'App Name', 'Please enter app name(optional)': 'Please enter app name(optional)', 'SQL Type': 'SQL Type', 'Send Email': 'Send Email', 'Log display': 'Log display', 'rows of result': 'rows of result', Title: 'Title', 'Please enter the title of email': 'Please enter the title of email', Table: 'Table', TableMode: 'Table', Attachment: 'Attachment', 'SQL Parameter': 'SQL Parameter', 'SQL Statement': 'SQL Statement', 'UDF Function': 'UDF Function', 'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)', 'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)', 'One form or attachment must be selected': 'One form or attachment must be selected', 'Mail subject required': 'Mail subject required', 'Child Node': 'Child Node', 'Please select a sub-Process': 'Please select a sub-Process', Edit: 'Edit', 'Switch To This Version': 'Switch To This Version', 'Datasource Name': 'Datasource Name', 'Please enter datasource name': 'Please enter datasource name', IP: 'IP', 'Please enter IP': 'Please enter IP', Port: 'Port', 'Please 
enter port': 'Please enter port', 'Database Name': 'Database Name', 'Please enter database name': 'Please enter database name', 'Oracle Connect Type': 'ServiceName or SID', 'Oracle Service Name': 'ServiceName', 'Oracle SID': 'SID', 'jdbc connect parameters': 'jdbc connect parameters', 'Test Connect': 'Test Connect', 'Please enter resource name': 'Please enter resource name', 'Please enter resource folder name': 'Please enter resource folder name', 'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement', 'Please enter IP/hostname': 'Please enter IP/hostname', 'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format', '#': '#', 'Datasource Type': 'Datasource Type', 'Datasource Parameter': 'Datasource Parameter', 'Create Time': 'Create Time', 'Update Time': 'Update Time', Operation: 'Operation', 'Current Version': 'Current Version', 'Click to view': 'Click to view', 'Delete?': 'Delete?', 'Switch Version Successfully': 'Switch Version Successfully', 'Confirm Switch To This Version?': 'Confirm Switch To This Version?', Confirm: 'Confirm', 'Task status statistics': 'Task Status Statistics', Number: 'Number', State: 'State', 'Process Status Statistics': 'Process Status Statistics', 'Process Definition Statistics': 'Process Definition Statistics', 'Project Name': 'Project Name', 'Please enter name': 'Please enter name', 'Owned Users': 'Owned Users', 'Process Pid': 'Process Pid', 'Zk registration directory': 'Zk registration directory', cpuUsage: 'cpuUsage', memoryUsage: 'memoryUsage', 'Last heartbeat time': 'Last heartbeat time', 'Edit Tenant': 'Edit Tenant', 'OS Tenant Code': 'OS Tenant Code', 'Tenant Name': 'Tenant Name', Queue: 'Yarn Queue', 'Please select a queue': 'default is tenant association queue', 'Please enter the os tenant code in English': 'Please enter the os tenant code in English', 'Please enter os tenant code in English': 'Please enter os tenant code in English', 'Please enter os tenant code': 'Please enter os tenant code', 'Please enter tenant Name': 'Please enter tenant Name', 'The os tenant code. Only letters or a combination of letters and numbers are allowed': 'The os tenant code. 
Only letters or a combination of letters and numbers are allowed', 'Edit User': 'Edit User', Tenant: 'Tenant', Email: 'Email', Phone: 'Phone', 'User Type': 'User Type', 'Please enter phone number': 'Please enter phone number', 'Please enter email': 'Please enter email', 'Please enter the correct email format': 'Please enter the correct email format', 'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format', Project: 'Project', Authorize: 'Authorize', 'File resources': 'File resources', 'UDF resources': 'UDF resources', 'UDF resources directory': 'UDF resources directory', 'Please select UDF resources directory': 'Please select UDF resources directory', 'Alarm group': 'Alarm group', 'Alarm group required': 'Alarm group required', 'Edit alarm group': 'Edit alarm group', 'Create alarm group': 'Create alarm group', 'Create Alarm Instance': 'Create Alarm Instance', 'Edit Alarm Instance': 'Edit Alarm Instance', 'Group Name': 'Group Name', 'Alarm instance name': 'Alarm instance name', 'Alarm plugin name': 'Alarm plugin name', 'Select plugin': 'Select plugin', 'Please enter group name': 'Please enter group name', 'Instance parameter exception': 'Instance parameter exception', 'Group Type': 'Group Type', 'Alarm plugin instance': 'Alarm plugin instance', Remarks: 'Remarks', SMS: 'SMS', 'Managing Users': 'Managing Users', Permission: 'Permission', Administrator: 'Administrator', 'Confirm Password': 'Confirm Password', 'Please enter confirm password': 'Please enter confirm password', 'Password cannot be in Chinese': 'Password cannot be in Chinese', 'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password', 'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese', 'Please enter a confirmation password (6-22) character password': 'Please enter a confirmation password (6-22) character password', 'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password', 'Please select the datasource': 'Please select the datasource', 'Please select resources': 'Please select resources', Query: 'Query', 'Non Query': 'Non Query', 'prop(required)': 'prop(required)', 'value(optional)': 'value(optional)', 'value(required)': 'value(required)', 'prop is empty': 'prop is empty', 'value is empty': 'value is empty', 'prop is repeat': 'prop is repeat', 'Start Time': 'Start Time', 'End Time': 'End Time', crontab: 'crontab', 'Failure Strategy': 'Failure Strategy', online: 'online', offline: 'offline', 'Task Status': 'Task Status', 'Process Instance': 'Process Instance', 'Task Instance': 'Task Instance', 'Select date range': 'Select date range', startDate: 'startDate', endDate: 'endDate', Date: 'Date', Waiting: 'Waiting', Execution: 'Execution', Finish: 'Finish', 'Create File': 'Create File', 'Create folder': 'Create folder', 'File Name': 'File Name', 'Folder Name': 'Folder Name', 'File Format': 'File Format', 'Folder Format': 'Folder Format', 'File Content': 'File Content', 'Upload File Size': 'Upload File size cannot exceed 1g', Create: 'Create', 'Please enter the resource content': 'Please enter the resource content', 'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines', 'File Details': 'File Details', 'Download Details': 'Download Details', Return: 'Return', Save: 'Save', 'File Manage': 'File Manage', 'Upload Files': 'Upload Files', 'Create UDF Function': 'Create UDF Function', 'Upload UDF Resources': 'Upload UDF 
Resources', 'Service-Master': 'Service-Master', 'Service-Worker': 'Service-Worker', 'Process Name': 'Process Name', Executor: 'Executor', 'Run Type': 'Run Type', 'Scheduling Time': 'Scheduling Time', 'Run Times': 'Run Times', host: 'host', 'fault-tolerant sign': 'fault-tolerant sign', Rerun: 'Rerun', 'Recovery Failed': 'Recovery Failed', Stop: 'Stop', Pause: 'Pause', 'Recovery Suspend': 'Recovery Suspend', Gantt: 'Gantt', 'Node Type': 'Node Type', 'Submit Time': 'Submit Time', Duration: 'Duration', 'Retry Count': 'Retry Count', 'Task Name': 'Task Name', 'Task Date': 'Task Date', 'Source Table': 'Source Table', 'Record Number': 'Record Number', 'Target Table': 'Target Table', 'Online viewing type is not supported': 'Online viewing type is not supported', Size: 'Size', Rename: 'Rename', Download: 'Download', Export: 'Export', 'Version Info': 'Version Info', Submit: 'Submit', 'Edit UDF Function': 'Edit UDF Function', type: 'type', 'UDF Function Name': 'UDF Function Name', FILE: 'FILE', UDF: 'UDF', 'File Subdirectory': 'File Subdirectory', 'Please enter a function name': 'Please enter a function name', 'Package Name': 'Package Name', 'Please enter a Package name': 'Please enter a Package name', Parameter: 'Parameter', 'Please enter a parameter': 'Please enter a parameter', 'UDF Resources': 'UDF Resources', 'Upload Resources': 'Upload Resources', Instructions: 'Instructions', 'Please enter a instructions': 'Please enter a instructions', 'Please enter a UDF function name': 'Please enter a UDF function name', 'Select UDF Resources': 'Select UDF Resources', 'Class Name': 'Class Name', 'Jar Package': 'Jar Package', 'Library Name': 'Library Name', 'UDF Resource Name': 'UDF Resource Name', 'File Size': 'File Size', Description: 'Description', 'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items', 'Select Line Connection': 'Select Line Connection', 'Delete selected lines or nodes': 'Delete selected lines or nodes', 'Full Screen': 'Full Screen', Unpublished: 'Unpublished', 'Start Process': 'Start Process', 'Execute from the current node': 'Execute from the current node', 'Recover tolerance fault process': 'Recover tolerance fault process', 'Resume the suspension process': 'Resume the suspension process', 'Execute from the failed nodes': 'Execute from the failed nodes', 'Complement Data': 'Complement Data', 'Scheduling execution': 'Scheduling execution', 'Recovery waiting thread': 'Recovery waiting thread', 'Submitted successfully': 'Submitted successfully', Executing: 'Executing', 'Ready to pause': 'Ready to pause', 'Ready to stop': 'Ready to stop', 'Need fault tolerance': 'Need fault tolerance', Kill: 'Kill', 'Waiting for thread': 'Waiting for thread', 'Waiting for dependence': 'Waiting for dependence', Start: 'Start', Copy: 'Copy', 'Copy name': 'Copy name', 'Copy path': 'Copy path', 'Please enter keyword': 'Please enter keyword', 'File Upload': 'File Upload', 'Drag the file into the current upload window': 'Drag the file into the current upload window', 'Drag area upload': 'Drag area upload', Upload: 'Upload', 'ReUpload File': 'ReUpload File', 'Please enter file name': 'Please enter file name', 'Please select the file to upload': 'Please select the file to upload', 'Resources manage': 'Resources', Security: 'Security', Logout: 'Logout', 'No data': 'No data', 'Uploading...': 'Uploading...', 'Loading...': 'Loading...', List: 'List', 'Unable to download without proper url': 'Unable to download without proper url', Process: 'Process', 'Process definition': 'Process definition', 'Task record': 
'Task record', 'Warning group manage': 'Warning group manage', 'Warning instance manage': 'Warning instance manage', 'Servers manage': 'Servers manage', 'UDF manage': 'UDF manage', 'Resource manage': 'Resource manage', 'Function manage': 'Function manage', 'Edit password': 'Edit password', 'Ordinary users': 'Ordinary users', 'Create process': 'Create process', 'Import process': 'Import process', 'Timing state': 'Timing state', Timing: 'Timing', TreeView: 'TreeView', 'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and copyers cannot repeat', 'Mailbox input is illegal': 'Mailbox input is illegal', 'Please set the parameters before starting': 'Please set the parameters before starting', Continue: 'Continue', End: 'End', 'Node execution': 'Node execution', 'Backward execution': 'Backward execution', 'Forward execution': 'Forward execution', 'Execute only the current node': 'Execute only the current node', 'Notification strategy': 'Notification strategy', 'Notification group': 'Notification group', 'Please select a notification group': 'Please select a notification group', receivers: 'receivers', receiverCcs: 'receiverCcs', 'Whether it is a complement process?': 'Whether it is a complement process?', 'Schedule date': 'Schedule date', 'Mode of execution': 'Mode of execution', 'Serial execution': 'Serial execution', 'Parallel execution': 'Parallel execution', 'Set parameters before timing': 'Set parameters before timing', 'Start and stop time': 'Start and stop time', 'Please select time': 'Please select time', 'Please enter crontab': 'Please enter crontab', none_1: 'none', success_1: 'success', failure_1: 'failure', All_1: 'All', Toolbar: 'Toolbar', 'View variables': 'View variables', 'Format DAG': 'Format DAG', 'Refresh DAG status': 'Refresh DAG status', Return_1: 'Return', 'Please enter format': 'Please enter format', 'connection parameter': 'connection parameter', 'Process definition details': 'Process definition details', 'Create process definition': 'Create process definition', 'Scheduled task list': 'Scheduled task list', 'Process instance details': 'Process instance details', 'Create Resource': 'Create Resource', 'User Center': 'User Center', 'Please enter method': 'Please enter method', None: 'None', Name: 'Name', 'Process priority': 'Process priority', 'Task priority': 'Task priority', 'Task timeout alarm': 'Task timeout alarm', 'Timeout strategy': 'Timeout strategy', 'Timeout alarm': 'Timeout alarm', 'Timeout failure': 'Timeout failure', 'Timeout period': 'Timeout period', 'Waiting Dependent complete': 'Waiting Dependent complete', 'Waiting Dependent start': 'Waiting Dependent start', 'Check interval': 'Check interval', 'Timeout must be longer than check interval': 'Timeout must be longer than check interval', 'Timeout strategy must be selected': 'Timeout strategy must be selected', 'Timeout must be a positive integer': 'Timeout must be a positive integer', 'Add dependency': 'Add dependency', and: 'and', or: 'or', month: 'month', week: 'week', day: 'day', hour: 'hour', Running: 'Running', 'Waiting for dependency to complete': 'Waiting for dependency to complete', Selected: 'Selected', CurrentHour: 'CurrentHour', Last1Hour: 'Last1Hour', Last2Hours: 'Last2Hours', Last3Hours: 'Last3Hours', Last24Hours: 'Last24Hours', today: 'today', Last1Days: 'Last1Days', Last2Days: 'Last2Days', Last3Days: 'Last3Days', Last7Days: 'Last7Days', ThisWeek: 'ThisWeek', LastWeek: 'LastWeek', LastMonday: 'LastMonday', LastTuesday: 'LastTuesday', LastWednesday: 
'LastWednesday', LastThursday: 'LastThursday', LastFriday: 'LastFriday', LastSaturday: 'LastSaturday', LastSunday: 'LastSunday', ThisMonth: 'ThisMonth', LastMonth: 'LastMonth', LastMonthBegin: 'LastMonthBegin', LastMonthEnd: 'LastMonthEnd', 'Refresh status succeeded': 'Refresh status succeeded', 'Queue manage': 'Yarn Queue manage', 'Create queue': 'Create queue', 'Edit queue': 'Edit queue', 'Datasource manage': 'Datasource', 'History task record': 'History task record', 'Please go online': 'Please go online', 'Queue value': 'Queue value', 'Please enter queue value': 'Please enter queue value', 'Worker group manage': 'Worker group manage', 'Create worker group': 'Create worker group', 'Edit worker group': 'Edit worker group', 'Token manage': 'Token manage', 'Create token': 'Create token', 'Edit token': 'Edit token', Addresses: 'Addresses', 'Worker Addresses': 'Worker Addresses', 'Please select the worker addresses': 'Please select the worker addresses', 'Failure time': 'Failure time', 'Expiration time': 'Expiration time', User: 'User', 'Please enter token': 'Please enter token', 'Generate token': 'Generate token', Monitor: 'Monitor', Group: 'Group', 'Queue statistics': 'Queue statistics', 'Command status statistics': 'Command status statistics', 'Task kill': 'Task Kill', 'Task queue': 'Task queue', 'Error command count': 'Error command count', 'Normal command count': 'Normal command count', Manage: ' Manage', 'Number of connections': 'Number of connections', Sent: 'Sent', Received: 'Received', 'Min latency': 'Min latency', 'Avg latency': 'Avg latency', 'Max latency': 'Max latency', 'Node count': 'Node count', 'Query time': 'Query time', 'Node self-test status': 'Node self-test status', 'Health status': 'Health status', 'Max connections': 'Max connections', 'Threads connections': 'Threads connections', 'Max used connections': 'Max used connections', 'Threads running connections': 'Threads running connections', 'Worker group': 'Worker group', 'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0', 'Pre Statement': 'Pre Statement', 'Post Statement': 'Post Statement', 'Statement cannot be empty': 'Statement cannot be empty', 'Process Define Count': 'Work flow Define Count', 'Process Instance Running Count': 'Process Instance Running Count', 'command number of waiting for running': 'command number of waiting for running', 'failure command number': 'failure command number', 'tasks number of waiting running': 'tasks number of waiting running', 'task number of ready to kill': 'task number of ready to kill', 'Statistics manage': 'Statistics Manage', statistics: 'Statistics', 'select tenant': 'select tenant', 'Please enter Principal': 'Please enter Principal', 'Please enter the kerberos authentication parameter java.security.krb5.conf': 'Please enter the kerberos authentication parameter java.security.krb5.conf', 'Please enter the kerberos authentication parameter login.user.keytab.username': 'Please enter the kerberos authentication parameter login.user.keytab.username', 'Please enter the kerberos authentication parameter login.user.keytab.path': 'Please enter the kerberos authentication parameter login.user.keytab.path', 'The start time must not be the same as the end': 'The start time must not be the same as the end', 'Startup parameter': 'Startup parameter', 'Startup type': 'Startup type', 'warning of timeout': 'warning of timeout', 'Next five execution times': 'Next five execution times', 'Execute time': 'Execute time', 'Complement range': 'Complement 
range', 'Http Url': 'Http Url', 'Http Method': 'Http Method', 'Http Parameters': 'Http Parameters', 'Http Parameters Key': 'Http Parameters Key', 'Http Parameters Position': 'Http Parameters Position', 'Http Parameters Value': 'Http Parameters Value', 'Http Check Condition': 'Http Check Condition', 'Http Condition': 'Http Condition', 'Please Enter Http Url': 'Please Enter Http Url(required)', 'Please Enter Http Condition': 'Please Enter Http Condition', 'There is no data for this period of time': 'There is no data for this period of time', 'Worker addresses cannot be empty': 'Worker addresses cannot be empty', 'Please generate token': 'Please generate token', 'Spark Version': 'Spark Version', TargetDataBase: 'target database', TargetTable: 'target table', 'Please enter the table of target': 'Please enter the table of target', 'Please enter a Target Table(required)': 'Please enter a Target Table(required)', SpeedByte: 'speed(byte count)', SpeedRecord: 'speed(record count)', '0 means unlimited by byte': '0 means unlimited', '0 means unlimited by count': '0 means unlimited', 'Modify User': 'Modify User', 'Whether directory': 'Whether directory', Yes: 'Yes', No: 'No', 'Hadoop Custom Params': 'Hadoop Params', 'Sqoop Advanced Parameters': 'Sqoop Params', 'Sqoop Job Name': 'Job Name', 'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)', 'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)', 'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)', 'Please enter Target Dir(required)': 'Please enter Target Dir(required)', 'Please enter Export Dir(required)': 'Please enter Export Dir(required)', 'Please enter Hive Database(required)': 'Please enter Hive Databasec(required)', 'Please enter Hive Table(required)': 'Please enter Hive Table(required)', 'Please enter Hive Partition Keys': 'Please enter Hive Partition Key', 'Please enter Hive Partition Values': 'Please enter Partition Value', 'Please enter Replace Delimiter': 'Please enter Replace Delimiter', 'Please enter Fields Terminated': 'Please enter Fields Terminated', 'Please enter Lines Terminated': 'Please enter Lines Terminated', 'Please enter Concurrency': 'Please enter Concurrency', 'Please enter Update Key': 'Please enter Update Key', 'Please enter Job Name(required)': 'Please enter Job Name(required)', 'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)', Direct: 'Direct', Type: 'Type', ModelType: 'ModelType', ColumnType: 'ColumnType', Database: 'Database', Column: 'Column', 'Map Column Hive': 'Map Column Hive', 'Map Column Java': 'Map Column Java', 'Export Dir': 'Export Dir', 'Hive partition Keys': 'Hive partition Keys', 'Hive partition Values': 'Hive partition Values', FieldsTerminated: 'FieldsTerminated', LinesTerminated: 'LinesTerminated', IsUpdate: 'IsUpdate', UpdateKey: 'UpdateKey', UpdateMode: 'UpdateMode', 'Target Dir': 'Target Dir', DeleteTargetDir: 'DeleteTargetDir', FileType: 'FileType', CompressionCodec: 'CompressionCodec', CreateHiveTable: 'CreateHiveTable', DropDelimiter: 'DropDelimiter', OverWriteSrc: 'OverWriteSrc', ReplaceDelimiter: 'ReplaceDelimiter', Concurrency: 'Concurrency', Form: 'Form', OnlyUpdate: 'OnlyUpdate', AllowInsert: 'AllowInsert', 'Data Source': 'Data Source', 'Data Target': 'Data Target', 'All Columns': 'All Columns', 'Some Columns': 'Some Columns', 'Branch flow': 'Branch flow', 'Custom Job': 'Custom Job', 'Custom Script': 'Custom Script', 'Cannot select the same node for successful branch flow and failed 
branch flow': 'Cannot select the same node for successful branch flow and failed branch flow', 'Successful branch flow and failed branch flow are required': 'conditions node Successful and failed branch flow are required', 'No resources exist': 'No resources exist', 'Please delete all non-existing resources': 'Please delete all non-existing resources', 'Unauthorized or deleted resources': 'Unauthorized or deleted resources', 'Please delete all non-existent resources': 'Please delete all non-existent resources', Kinship: 'Workflow relationship', Reset: 'Reset', KinshipStateActive: 'Active', KinshipState1: 'Online', KinshipState0: 'Workflow is not online', KinshipState10: 'Scheduling is not online', 'Dag label display control': 'Dag label display control', Enable: 'Enable', Disable: 'Disable', 'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!', 'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading', 'User name length is between 3 and 39': 'User name length is between 3 and 39', 'Timeout Settings': 'Timeout Settings', 'Connect Timeout': 'Connect Timeout', 'Socket Timeout': 'Socket Timeout', 'Connect timeout be a positive integer': 'Connect timeout be a positive integer', 'Socket Timeout be a positive integer': 'Socket Timeout be a positive integer', ms: 'ms', 'Please Enter Url': 'Please Enter Url eg. 127.0.0.1:7077', Master: 'Master', 'Please select the waterdrop resources': 'Please select the waterdrop resources', zkDirectory: 'zkDirectory', 'Directory detail': 'Directory detail', 'Connection name': 'Connection name', 'Current connection settings': 'Current connection settings', 'Please save the DAG before formatting': 'Please save the DAG before formatting', 'Batch copy': 'Batch copy', 'Related items': 'Related items', 'Project name is required': 'Project name is required', 'Batch move': 'Batch move', Version: 'Version', 'Pre tasks': 'Pre tasks', 'Running Memory': 'Running Memory', 'Max Memory': 'Max Memory', 'Min Memory': 'Min Memory', 'The workflow canvas is abnormal and cannot be saved, please recreate': 'The workflow canvas is abnormal and cannot be saved, please recreate', Info: 'Info', 'Datasource userName': 'owner', 'Resource userName': 'owner' }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,093
[Feature][server] Support for stored procedures and stored function calls and data source supports DB2
**Describe the feature** 1. Support stored procedure and stored function calls. 2. The task node page's data source parameter should allow selecting from a variety of data source types, rather than being limited to MySQL, Oracle, and a few others. 3. The call script is filled in on the page, and the call scripts for the various data sources differ slightly. **Is your feature request related to a problem? Please describe.** 1. Fixes issue #3990: data source supports DB2. **Describe the solution you'd like** 1. Support stored procedure and stored function calls: ``` {call <procedure-name>[(<arg1>,<arg2>, ...)]} {?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ``` 2. The task node page should allow selecting multiple data source types, instead of only configuring a few such as MySQL and Oracle. 3. The call script is filled in on the page, and the number of parameters it declares is validated against the supplied values.
https://github.com/apache/dolphinscheduler/issues/4093
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-11-20T11:10:34Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ export default { 'User Name': '用户名', 'Please enter user name': '请输入用户名', Password: '密码', 'Please enter your password': '请输入密码', 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间', Login: '登录', Home: '首页', 'Failed to create node to save': '未创建节点保存失败', 'Global parameters': '全局参数', 'Local parameters': '局部参数', 'Copy success': '复制成功', 'The browser does not support automatic copying': '该浏览器不支持自动复制', 'Whether to save the DAG graph': '是否保存DAG图', 'Current node settings': '当前节点设置', 'View history': '查看历史', 'View log': '查看日志', 'Force success': '强制成功', 'Enter this child node': '进入该子节点', 'Node name': '节点名称', 'Please enter name (required)': '请输入名称(必填)', 'Run flag': '运行标志', Normal: '正常', 'Prohibition execution': '禁止执行', 'Please enter description': '请输入描述', 'Number of failed retries': '失败重试次数', Times: '次', 'Failed retry interval': '失败重试间隔', Minute: '分', 'Delay execution time': '延时执行时间', 'Delay execution': '延时执行', 'Forced success': '强制成功过', Cancel: '取消', 'Confirm add': '确认添加', 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流', 'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流', 'Name already exists': '名称已存在请重新输入', 'Download Log': '下载日志', 'Refresh Log': '刷新日志', 'Enter full screen': '进入全屏', 'Cancel full screen': '取消全屏', Close: '关闭', 'Update log success': '更新日志成功', 'No more logs': '暂无更多日志', 'No log': '暂无日志', 'Loading Log...': '正在努力请求日志中...', 'Set the DAG diagram name': '设置DAG图名称', 'Please enter description(optional)': '请输入描述(选填)', 'Set global': '设置全局', 'Whether to go online the process definition': '是否上线流程定义', 'Whether to update the process definition': '是否更新流程定义', Add: '添加', 'DAG graph name cannot be empty': 'DAG图名称不能为空', 'Create Datasource': '创建数据源', 'Project Home': '项目首页', 'Project Manage': '项目管理', 'Create Project': '创建项目', 'Cron Manage': '定时管理', 'Copy Workflow': '复制工作流', 'Tenant Manage': '租户管理', 'Create Tenant': '创建租户', 'User Manage': '用户管理', 'Create User': '创建用户', 'User Information': '用户信息', 'Edit Password': '密码修改', Success: '成功', Failed: '失败', Delete: '删除', 'Please choose': '请选择', 'Please enter a positive integer': '请输入正整数', 'Program Type': '程序类型', 'Main Class': '主函数的Class', 'Main Jar Package': '主Jar包', 'Please enter main jar package': '请选择主Jar包', 'Please enter main class': '请填写主函数的Class', 'Main Arguments': '主程序参数', 'Please enter main arguments': '请输入主程序参数', 'Option Parameters': '选项参数', 'Please enter option parameters': '请输入选项参数', Resources: '资源', 'Custom Parameters': '自定义参数', 'Custom template': '自定义模版', Datasource: '数据源', methods: '方法', 'Please enter method(optional)': '请输入方法(选填)', Script: '脚本', 'Please enter script(required)': '请输入脚本(必填)', 
'Deploy Mode': '部署方式', 'Driver Cores': 'Driver核心数', 'Please enter Driver cores': '请输入Driver核心数', 'Driver Memory': 'Driver内存数', 'Please enter Driver memory': '请输入Driver内存数', 'Executor Number': 'Executor数量', 'Please enter Executor number': '请输入Executor数量', 'The Executor number should be a positive integer': 'Executor数量为正整数', 'Executor Memory': 'Executor内存数', 'Please enter Executor memory': '请输入Executor内存数', 'Executor Cores': 'Executor核心数', 'Please enter Executor cores': '请输入Executor核心数', 'Memory should be a positive integer': '内存数为数字', 'Core number should be positive integer': '核心数为正整数', 'Flink Version': 'Flink版本', 'JobManager Memory': 'JobManager内存数', 'Please enter JobManager memory': '请输入JobManager内存数', 'TaskManager Memory': 'TaskManager内存数', 'Please enter TaskManager memory': '请输入TaskManager内存数', 'Slot Number': 'Slot数量', 'Please enter Slot number': '请输入Slot数量', Parallelism: '并行度', 'Please enter Parallelism': '请输入并行度', 'TaskManager Number': 'TaskManager数量', 'Please enter TaskManager number': '请输入TaskManager数量', 'App Name': '任务名称', 'Please enter app name(optional)': '请输入任务名称(选填)', 'SQL Type': 'sql类型', 'Send Email': '发送邮件', 'Log display': '日志显示', 'rows of result': '行查询结果', Title: '主题', 'Please enter the title of email': '请输入邮件主题', Table: '表名', TableMode: '表格', Attachment: '附件', 'SQL Parameter': 'sql参数', 'SQL Statement': 'sql语句', 'UDF Function': 'UDF函数', 'Please enter a SQL Statement(required)': '请输入sql语句(必填)', 'Please enter a JSON Statement(required)': '请输入json语句(必填)', 'One form or attachment must be selected': '表格、附件必须勾选一个', 'Mail subject required': '邮件主题必填', 'Child Node': '子节点', 'Please select a sub-Process': '请选择子工作流', Edit: '编辑', 'Switch To This Version': '切换到该版本', 'Datasource Name': '数据源名称', 'Please enter datasource name': '请输入数据源名称', IP: 'IP主机名', 'Please enter IP': '请输入IP主机名', Port: '端口', 'Please enter port': '请输入端口', 'Database Name': '数据库名', 'Please enter database name': '请输入数据库名', 'Oracle Connect Type': '服务名或SID', 'Oracle Service Name': '服务名', 'Oracle SID': 'SID', 'jdbc connect parameters': 'jdbc连接参数', 'Test Connect': '测试连接', 'Please enter resource name': '请输入数据源名称', 'Please enter resource folder name': '请输入资源文件夹名称', 'Please enter a non-query SQL statement': '请输入非查询sql语句', 'Please enter IP/hostname': '请输入IP/主机名', 'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式', '#': '编号', 'Datasource Type': '数据源类型', 'Datasource Parameter': '数据源参数', 'Create Time': '创建时间', 'Update Time': '更新时间', Operation: '操作', 'Current Version': '当前版本', 'Click to view': '点击查看', 'Delete?': '确定删除吗?', 'Switch Version Successfully': '切换版本成功', 'Confirm Switch To This Version?': '确定切换到该版本吗?', Confirm: '确定', 'Task status statistics': '任务状态统计', Number: '数量', State: '状态', 'Process Status Statistics': '流程状态统计', 'Process Definition Statistics': '流程定义统计', 'Project Name': '项目名称', 'Please enter name': '请输入名称', 'Owned Users': '所属用户', 'Process Pid': '进程Pid', 'Zk registration directory': 'zk注册目录', cpuUsage: 'cpuUsage', memoryUsage: 'memoryUsage', 'Last heartbeat time': '最后心跳时间', 'Edit Tenant': '编辑租户', 'OS Tenant Code': '操作系统租户', 'Tenant Name': '租户名称', Queue: '队列', 'Please select a queue': '默认为租户关联队列', 'Please enter the os tenant code in English': '请输入操作系统租户只允许英文', 'Please enter os tenant code in English': '请输入英文操作系统租户', 'Please enter os tenant code': '请输入操作系统租户', 'Please enter tenant Name': '请输入租户名称', 'The os tenant code. 
Only letters or a combination of letters and numbers are allowed': '操作系统租户只允许字母或字母与数字组合', 'Edit User': '编辑用户', Tenant: '租户', Email: '邮件', Phone: '手机', 'User Type': '用户类型', 'Please enter phone number': '请输入手机', 'Please enter email': '请输入邮箱', 'Please enter the correct email format': '请输入正确的邮箱格式', 'Please enter the correct mobile phone format': '请输入正确的手机格式', Project: '项目', Authorize: '授权', 'File resources': '文件资源', 'UDF resources': 'UDF资源', 'UDF resources directory': 'UDF资源目录', 'Please select UDF resources directory': '请选择UDF资源目录', 'Alarm group': '告警组', 'Alarm group required': '告警组必填', 'Edit alarm group': '编辑告警组', 'Create alarm group': '创建告警组', 'Create Alarm Instance': '创建告警实例', 'Edit Alarm Instance': '编辑告警实例', 'Group Name': '组名称', 'Alarm instance name': '告警实例名称', 'Alarm plugin name': '告警插件名称', 'Select plugin': '选择插件', 'Please enter group name': '请输入组名称', 'Instance parameter exception': '实例参数异常', 'Group Type': '组类型', 'Alarm plugin instance': '告警插件实例', Remarks: '备注', SMS: '短信', 'Managing Users': '管理用户', Permission: '权限', Administrator: '管理员', 'Confirm Password': '确认密码', 'Please enter confirm password': '请输入确认密码', 'Password cannot be in Chinese': '密码不能为中文', 'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码', 'Confirmation password cannot be in Chinese': '确认密码不能为中文', 'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码', 'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认', 'Please select the datasource': '请选择数据源', 'Please select resources': '请选择资源', Query: '查询', 'Non Query': '非查询', 'prop(required)': 'prop(必填)', 'value(optional)': 'value(选填)', 'value(required)': 'value(必填)', 'prop is empty': 'prop不能为空', 'value is empty': 'value不能为空', 'prop is repeat': 'prop中有重复', 'Start Time': '开始时间', 'End Time': '结束时间', crontab: 'crontab', 'Failure Strategy': '失败策略', online: '上线', offline: '下线', 'Task Status': '任务状态', 'Process Instance': '工作流实例', 'Task Instance': '任务实例', 'Select date range': '选择日期区间', startDate: '开始日期', endDate: '结束日期', Date: '日期', Waiting: '等待', Execution: '执行中', Finish: '完成', 'Create File': '创建文件', 'Create folder': '创建文件夹', 'File Name': '文件名称', 'Folder Name': '文件夹名称', 'File Format': '文件格式', 'Folder Format': '文件夹格式', 'File Content': '文件内容', 'Upload File Size': '文件大小不能超过1G', Create: '创建', 'Please enter the resource content': '请输入资源内容', 'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行', 'File Details': '文件详情', 'Download Details': '下载详情', Return: '返回', Save: '保存', 'File Manage': '文件管理', 'Upload Files': '上传文件', 'Create UDF Function': '创建UDF函数', 'Upload UDF Resources': '上传UDF资源', 'Service-Master': '服务管理-Master', 'Service-Worker': '服务管理-Worker', 'Process Name': '工作流名称', Executor: '执行用户', 'Run Type': '运行类型', 'Scheduling Time': '调度时间', 'Run Times': '运行次数', host: 'host', 'fault-tolerant sign': '容错标识', Rerun: '重跑', 'Recovery Failed': '恢复失败', Stop: '停止', Pause: '暂停', 'Recovery Suspend': '恢复运行', Gantt: '甘特图', 'Node Type': '节点类型', 'Submit Time': '提交时间', Duration: '运行时长', 'Retry Count': '重试次数', 'Task Name': '任务名称', 'Task Date': '任务日期', 'Source Table': '源表', 'Record Number': '记录数', 'Target Table': '目标表', 'Online viewing type is not supported': '不支持在线查看类型', Size: '大小', Rename: '重命名', Download: '下载', Export: '导出', 'Version Info': '版本信息', Submit: '提交', 'Edit UDF Function': '编辑UDF函数', type: '类型', 'UDF Function Name': 'UDF函数名称', FILE: '文件', UDF: 'UDF', 'File Subdirectory': '文件子目录', 'Please enter a function name': '请输入函数名', 'Package Name': '包名类名', 'Please enter a Package name': '请输入包名类名', Parameter: '参数', 'Please enter a 
parameter': '请输入参数', 'UDF Resources': 'UDF资源', 'Upload Resources': '上传资源', Instructions: '使用说明', 'Please enter a instructions': '请输入使用说明', 'Please enter a UDF function name': '请输入UDF函数名称', 'Select UDF Resources': '请选择UDF资源', 'Class Name': '类名', 'Jar Package': 'jar包', 'Library Name': '库名', 'UDF Resource Name': 'UDF资源名称', 'File Size': '文件大小', Description: '描述', 'Drag Nodes and Selected Items': '拖动节点和选中项', 'Select Line Connection': '选择线条连接', 'Delete selected lines or nodes': '删除选中的线或节点', 'Full Screen': '全屏', Unpublished: '未发布', 'Start Process': '启动工作流', 'Execute from the current node': '从当前节点开始执行', 'Recover tolerance fault process': '恢复被容错的工作流', 'Resume the suspension process': '恢复运行流程', 'Execute from the failed nodes': '从失败节点开始执行', 'Complement Data': '补数', 'Scheduling execution': '调度执行', 'Recovery waiting thread': '恢复等待线程', 'Submitted successfully': '提交成功', Executing: '正在执行', 'Ready to pause': '准备暂停', 'Ready to stop': '准备停止', 'Need fault tolerance': '需要容错', Kill: 'Kill', 'Waiting for thread': '等待线程', 'Waiting for dependence': '等待依赖', Start: '运行', Copy: '复制节点', 'Copy name': '复制名称', 'Copy path': '复制路径', 'Please enter keyword': '请输入关键词', 'File Upload': '文件上传', 'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!', 'Drag area upload': '拖动区域上传', Upload: '上传', 'ReUpload File': '重新上传文件', 'Please enter file name': '请输入文件名', 'Please select the file to upload': '请选择要上传的文件', 'Resources manage': '资源中心', Security: '安全中心', Logout: '退出', 'No data': '查询无数据', 'Uploading...': '文件上传中', 'Loading...': '正在努力加载中...', List: '列表', 'Unable to download without proper url': '无下载url无法下载', Process: '工作流', 'Process definition': '工作流定义', 'Task record': '任务记录', 'Warning group manage': '告警组管理', 'Warning instance manage': '告警实例管理', 'Servers manage': '服务管理', 'UDF manage': 'UDF管理', 'Resource manage': '资源管理', 'Function manage': '函数管理', 'Edit password': '修改密码', 'Ordinary users': '普通用户', 'Create process': '创建工作流', 'Import process': '导入工作流', 'Timing state': '定时状态', Timing: '定时', TreeView: '树形图', 'Mailbox already exists! 
Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复', 'Mailbox input is illegal': '邮箱输入不合法', 'Please set the parameters before starting': '启动前请先设置参数', Continue: '继续', End: '结束', 'Node execution': '节点执行', 'Backward execution': '向后执行', 'Forward execution': '向前执行', 'Execute only the current node': '仅执行当前节点', 'Notification strategy': '通知策略', 'Notification group': '通知组', 'Please select a notification group': '请选择通知组', receivers: '收件人', receiverCcs: '抄送人', 'Whether it is a complement process?': '是否补数', 'Schedule date': '调度日期', 'Mode of execution': '执行方式', 'Serial execution': '串行执行', 'Parallel execution': '并行执行', 'Set parameters before timing': '定时前请先设置参数', 'Start and stop time': '起止时间', 'Please select time': '请选择时间', 'Please enter crontab': '请输入crontab', none_1: '都不发', success_1: '成功发', failure_1: '失败发', All_1: '成功或失败都发', Toolbar: '工具栏', 'View variables': '查看变量', 'Format DAG': '格式化DAG', 'Refresh DAG status': '刷新DAG状态', Return_1: '返回上一节点', 'Please enter format': '请输入格式为', 'connection parameter': '连接参数', 'Process definition details': '流程定义详情', 'Create process definition': '创建流程定义', 'Scheduled task list': '定时任务列表', 'Process instance details': '流程实例详情', 'Create Resource': '创建资源', 'User Center': '用户中心', 'Please enter method': '请输入方法', None: '无', Name: '名称', 'Process priority': '流程优先级', 'Task priority': '任务优先级', 'Task timeout alarm': '任务超时告警', 'Timeout strategy': '超时策略', 'Timeout alarm': '超时告警', 'Timeout failure': '超时失败', 'Timeout period': '超时时长', 'Waiting Dependent complete': '等待依赖完成', 'Waiting Dependent start': '等待依赖启动', 'Check interval': '检查间隔', 'Timeout must be longer than check interval': '超时时间必须比检查间隔长', 'Timeout strategy must be selected': '超时策略必须选一个', 'Timeout must be a positive integer': '超时时长必须为正整数', 'Add dependency': '添加依赖', and: '且', or: '或', month: '月', week: '周', day: '日', hour: '时', Running: '正在运行', 'Waiting for dependency to complete': '等待依赖完成', Selected: '已选', CurrentHour: '当前小时', Last1Hour: '前1小时', Last2Hours: '前2小时', Last3Hours: '前3小时', Last24Hours: '前24小时', today: '今天', Last1Days: '昨天', Last2Days: '前两天', Last3Days: '前三天', Last7Days: '前七天', ThisWeek: '本周', LastWeek: '上周', LastMonday: '上周一', LastTuesday: '上周二', LastWednesday: '上周三', LastThursday: '上周四', LastFriday: '上周五', LastSaturday: '上周六', LastSunday: '上周日', ThisMonth: '本月', LastMonth: '上月', LastMonthBegin: '上月初', LastMonthEnd: '上月末', 'Refresh status succeeded': '刷新状态成功', 'Queue manage': 'Yarn 队列管理', 'Create queue': '创建队列', 'Edit queue': '编辑队列', 'Datasource manage': '数据源中心', 'History task record': '历史任务记录', 'Please go online': '不要忘记上线', 'Queue value': '队列值', 'Please enter queue value': '请输入队列值', 'Worker group manage': 'Worker分组管理', 'Create worker group': '创建Worker分组', 'Edit worker group': '编辑Worker分组', 'Token manage': '令牌管理', 'Create token': '创建令牌', 'Edit token': '编辑令牌', Addresses: '地址', 'Worker Addresses': 'Worker地址', 'Please select the worker addresses': '请选择Worker地址', 'Failure time': '失效时间', 'Expiration time': '失效时间', User: '用户', 'Please enter token': '请输入令牌', 'Generate token': '生成令牌', Monitor: '监控中心', Group: '分组', 'Queue statistics': '队列统计', 'Command status statistics': '命令状态统计', 'Task kill': '等待kill任务', 'Task queue': '等待执行任务', 'Error command count': '错误指令数', 'Normal command count': '正确指令数', Manage: '管理', 'Number of connections': '连接数', Sent: '发送量', Received: '接收量', 'Min latency': '最低延时', 'Avg latency': '平均延时', 'Max latency': '最大延时', 'Node count': '节点数', 'Query time': '当前查询时间', 'Node self-test status': '节点自检状态', 'Health status': '健康状态', 'Max connections': '最大连接数', 'Threads connections': '当前连接数', 'Max used connections': 
'同时使用连接最大数', 'Threads running connections': '数据库当前活跃连接数', 'Worker group': 'Worker分组', 'Please enter a positive integer greater than 0': '请输入大于 0 的正整数', 'Pre Statement': '前置sql', 'Post Statement': '后置sql', 'Statement cannot be empty': '语句不能为空', 'Process Define Count': '工作流定义数', 'Process Instance Running Count': '正在运行的流程数', 'command number of waiting for running': '待执行的命令数', 'failure command number': '执行失败的命令数', 'tasks number of waiting running': '待运行任务数', 'task number of ready to kill': '待杀死任务数', 'Statistics manage': '统计管理', statistics: '统计', 'select tenant': '选择租户', 'Please enter Principal': '请输入Principal', 'Please enter the kerberos authentication parameter java.security.krb5.conf': '请输入kerberos认证参数 java.security.krb5.conf', 'Please enter the kerberos authentication parameter login.user.keytab.username': '请输入kerberos认证参数 login.user.keytab.username', 'Please enter the kerberos authentication parameter login.user.keytab.path': '请输入kerberos认证参数 login.user.keytab.path', 'The start time must not be the same as the end': '开始时间和结束时间不能相同', 'Startup parameter': '启动参数', 'Startup type': '启动类型', 'warning of timeout': '超时告警', 'Next five execution times': '接下来五次执行时间', 'Execute time': '执行时间', 'Complement range': '补数范围', 'Http Url': '请求地址', 'Http Method': '请求类型', 'Http Parameters': '请求参数', 'Http Parameters Key': '参数名', 'Http Parameters Position': '参数位置', 'Http Parameters Value': '参数值', 'Http Check Condition': '校验条件', 'Http Condition': '校验内容', 'Please Enter Http Url': '请填写请求地址(必填)', 'Please Enter Http Condition': '请填写校验内容', 'There is no data for this period of time': '该时间段无数据', 'Worker addresses cannot be empty': 'Worker地址不能为空', 'Please generate token': '请生成Token', 'Spark Version': 'Spark版本', TargetDataBase: '目标库', TargetTable: '目标表', 'Please enter the table of target': '请输入目标表名', 'Please enter a Target Table(required)': '请输入目标表(必填)', SpeedByte: '限流(字节数)', SpeedRecord: '限流(记录数)', '0 means unlimited by byte': 'KB,0代表不限制', '0 means unlimited by count': '0代表不限制', 'Modify User': '修改用户', 'Whether directory': '是否文件夹', Yes: '是', No: '否', 'Hadoop Custom Params': 'Hadoop参数', 'Sqoop Advanced Parameters': 'Sqoop参数', 'Sqoop Job Name': '任务名称', 'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)', 'Please enter Mysql Table(required)': '请输入Mysql表名(必填)', 'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开', 'Please enter Target Dir(required)': '请输入目标路径(必填)', 'Please enter Export Dir(required)': '请输入数据源路径(必填)', 'Please enter Hive Database(required)': '请输入Hive数据库(必填)', 'Please enter Hive Table(required)': '请输入Hive表名(必填)', 'Please enter Hive Partition Keys': '请输入分区键', 'Please enter Hive Partition Values': '请输入分区值', 'Please enter Replace Delimiter': '请输入替换分隔符', 'Please enter Fields Terminated': '请输入列分隔符', 'Please enter Lines Terminated': '请输入行分隔符', 'Please enter Concurrency': '请输入并发度', 'Please enter Update Key': '请输入更新列', 'Please enter Job Name(required)': '请输入任务名称(必填)', 'Please enter Custom Shell(required)': '请输入自定义脚本', Direct: '流向', Type: '类型', ModelType: '模式', ColumnType: '列类型', Database: '数据库', Column: '列', 'Map Column Hive': 'Hive类型映射', 'Map Column Java': 'Java类型映射', 'Export Dir': '数据源路径', 'Hive partition Keys': 'Hive 分区键', 'Hive partition Values': 'Hive 分区值', FieldsTerminated: '列分隔符', LinesTerminated: '行分隔符', IsUpdate: '是否更新', UpdateKey: '更新列', UpdateMode: '更新类型', 'Target Dir': '目标路径', DeleteTargetDir: '是否删除目录', FileType: '保存格式', CompressionCodec: '压缩类型', CreateHiveTable: '是否创建新表', DropDelimiter: '是否删除分隔符', OverWriteSrc: '是否覆盖数据源', ReplaceDelimiter: '替换分隔符', Concurrency: '并发度', Form: '表单', OnlyUpdate: '只更新', 
AllowInsert: '无更新便插入', 'Data Source': '数据来源', 'Data Target': '数据目的', 'All Columns': '全表导入', 'Some Columns': '选择列', 'Branch flow': '分支流转', 'Custom Job': '自定义任务', 'Custom Script': '自定义脚本', 'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点', 'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填', 'No resources exist': '不存在资源', 'Please delete all non-existing resources': '请删除所有不存在资源', 'Unauthorized or deleted resources': '未授权或已删除资源', 'Please delete all non-existent resources': '请删除所有未授权或已删除资源', Kinship: '工作流关系', Reset: '重置', KinshipStateActive: '当前选择', KinshipState1: '已上线', KinshipState0: '工作流未上线', KinshipState10: '调度未上线', 'Dag label display control': 'Dag节点名称显隐', Enable: '启用', Disable: '停用', 'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!', 'Please confirm whether the workflow has been saved before downloading': '下载前请确定工作流是否已保存', 'User name length is between 3 and 39': '用户名长度在3~39之间', 'Timeout Settings': '超时设置', 'Connect Timeout': '连接超时', 'Socket Timeout': 'Socket超时', 'Connect timeout be a positive integer': '连接超时必须为数字', 'Socket Timeout be a positive integer': 'Socket超时必须为数字', ms: '毫秒', 'Please Enter Url': '请直接填写地址,例如:127.0.0.1:7077', Master: 'Master', 'Please select the waterdrop resources': '请选择waterdrop配置文件', zkDirectory: 'zk注册目录', 'Directory detail': '查看目录详情', 'Connection name': '连线名', 'Current connection settings': '当前连线设置', 'Please save the DAG before formatting': '格式化前请先保存DAG', 'Batch copy': '批量复制', 'Related items': '关联项目', 'Project name is required': '项目名称必填', 'Batch move': '批量移动', Version: '版本', 'Pre tasks': '前置任务', 'Running Memory': '运行内存', 'Max Memory': '最大内存', 'Min Memory': '最小内存', 'The workflow canvas is abnormal and cannot be saved, please recreate': '该工作流画布异常,无法保存,请重新创建', Info: '提示', 'Datasource userName': '所属用户', 'Resource userName': '所属用户' }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,093
[Feature][server] Support for stored procedures and stored function calls and data source supports DB2
**Describe the feature** 1. Support for stored procedure and stored function calls. 2. The data source parameter on the task node page can offer a variety of data source types, rather than being limited to MySQL, Oracle, and a few others. 3. The call script is filled in on the page; the scripts for the various data sources differ in subtle ways. **Is your feature request related to a problem? Please describe.** 1. Fixes issue #3990: the data source supports DB2. **Describe the solution you'd like** 1. Support for stored procedure and stored function calls: ``` {call <procedure-name>[(<arg1>,<arg2>, ...)]} {?= call <procedure-name>[(<arg1>,<arg2>, ...)]} ``` 2. The task node page can select from multiple data source types, instead of only the few currently configured (MySQL, Oracle, etc.). 3. The call script is filled in on the page, and the number of parameters is validated against the script.
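Both call forms map directly onto JDBC's `CallableStatement` escape syntax, which the drivers for all of the targeted data sources (MySQL, Oracle, DB2, and so on) understand. Below is a minimal sketch of executing each form against DB2; the connection URL, credentials, and the procedure/function names `SYNC_ORDERS` and `ORDER_COUNT` are illustrative assumptions, not names from the DolphinScheduler code base.

```java
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Types;

public class ProcedureCallExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical DB2 connection; URL, credentials and routine names
        // are illustrative only.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:db2://localhost:50000/SAMPLE", "db2inst1", "password")) {

            // {call <procedure-name>(<arg1>, <arg2>, ...)} - plain procedure call
            try (CallableStatement cs = conn.prepareCall("{call SYNC_ORDERS(?, ?)}")) {
                cs.setString(1, "2020-11-20");             // IN parameter
                cs.registerOutParameter(2, Types.INTEGER); // OUT parameter
                cs.execute();
                System.out.println("rows synced: " + cs.getInt(2));
            }

            // {?= call <procedure-name>(<arg1>, ...)} - stored function with a return value
            try (CallableStatement cs = conn.prepareCall("{?= call ORDER_COUNT(?)}")) {
                cs.registerOutParameter(1, Types.INTEGER); // function return value
                cs.setString(2, "2020-11-20");
                cs.execute();
                System.out.println("count: " + cs.getInt(1));
            }
        }
    }
}
```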
https://github.com/apache/dolphinscheduler/issues/4093
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-11-20T11:10:34Z"
java
"2021-04-29T10:29:44Z"
pom.xml
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>${revision}</version> <packaging>pom</packaging> <name>${project.artifactId}</name> <url>http://dolphinscheduler.apache.org</url> <description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. </description> <licenses> <license> <name>Apache License 2.0</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> <distribution>repo</distribution> </license> </licenses> <scm> <connection>scm:git:https://github.com/apache/dolphinscheduler.git</connection> <developerConnection>scm:git:https://github.com/apache/dolphinscheduler.git</developerConnection> <url>https://github.com/apache/dolphinscheduler</url> <tag>HEAD</tag> </scm> <mailingLists> <mailingList> <name>DolphinScheduler Developer List</name> <post>dev@dolphinscheduler.apache.org</post> <subscribe>dev-subscribe@dolphinscheduler.apache.org</subscribe> <unsubscribe>dev-unsubscribe@dolphinscheduler.apache.org</unsubscribe> </mailingList> </mailingLists> <parent> <groupId>org.apache</groupId> <artifactId>apache</artifactId> <version>21</version> </parent> <properties> <revision>1.3.6-SNAPSHOT</revision> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <curator.version>4.3.0</curator.version> <spring.version>5.1.19.RELEASE</spring.version> <spring.boot.version>2.1.18.RELEASE</spring.boot.version> <java.version>1.8</java.version> <logback.version>1.2.3</logback.version> <hadoop.version>2.7.3</hadoop.version> <quartz.version>2.3.0</quartz.version> <jackson.version>2.10.5</jackson.version> <mybatis-plus.version>3.2.0</mybatis-plus.version> <mybatis.spring.version>2.0.1</mybatis.spring.version> <cron.utils.version>5.0.5</cron.utils.version> <druid.version>1.1.22</druid.version> <h2.version>1.4.200</h2.version> <commons.codec.version>1.11</commons.codec.version> <commons.logging.version>1.1.1</commons.logging.version> <httpclient.version>4.4.1</httpclient.version> <httpcore.version>4.4.1</httpcore.version> <junit.version>4.12</junit.version> <mysql.connector.version>5.1.34</mysql.connector.version> <slf4j.api.version>1.7.5</slf4j.api.version> <slf4j.log4j12.version>1.7.5</slf4j.log4j12.version> 
<commons.collections.version>3.2.2</commons.collections.version> <commons.httpclient>3.0.1</commons.httpclient> <commons.beanutils.version>1.9.4</commons.beanutils.version> <commons.configuration.version>1.10</commons.configuration.version> <commons.email.version>1.5</commons.email.version> <poi.version>3.17</poi.version> <javax.servlet.api.version>3.1.0</javax.servlet.api.version> <commons.collections4.version>4.1</commons.collections4.version> <guava.version>24.1-jre</guava.version> <postgresql.version>42.2.5</postgresql.version> <hive.jdbc.version>2.1.0</hive.jdbc.version> <commons.io.version>2.4</commons.io.version> <oshi.core.version>3.9.1</oshi.core.version> <clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version> <mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version> <presto.jdbc.version>0.238.1</presto.jdbc.version> <spotbugs.version>3.1.12</spotbugs.version> <checkstyle.version>3.0.0</checkstyle.version> <zookeeper.version>3.4.14</zookeeper.version> <frontend-maven-plugin.version>1.6</frontend-maven-plugin.version> <maven-compiler-plugin.version>3.3</maven-compiler-plugin.version> <maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version> <maven-release-plugin.version>2.5.3</maven-release-plugin.version> <maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version> <maven-source-plugin.version>2.4</maven-source-plugin.version> <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version> <maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version> <rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version> <jacoco.version>0.8.4</jacoco.version> <jcip.version>1.0</jcip.version> <maven.deploy.skip>false</maven.deploy.skip> <cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version> <mockito.version>2.21.0</mockito.version> <powermock.version>2.0.2</powermock.version> <servlet-api.version>2.5</servlet-api.version> <swagger.version>1.9.3</swagger.version> <springfox.version>2.9.2</springfox.version> <swagger-models.version>1.5.24</swagger-models.version> <guava-retry.version>2.0.0</guava-retry.version> <dep.airlift.version>0.184</dep.airlift.version> <dep.packaging.version>${dep.airlift.version}</dep.packaging.version> <protostuff.version>1.7.2</protostuff.version> <reflections.version>0.9.12</reflections.version> <byte-buddy.version>1.9.16</byte-buddy.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <!-- quartz--> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz-jobs</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> <version>${cron.utils.version}</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>${druid.version}</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-parent</artifactId> <version>${spring.boot.version}</version> <type>pom</type> <scope>import</scope> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> 
<version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-tx</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-jdbc</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <version>${spring.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-server</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert-plugin</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-dao</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-api</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-remote</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-service</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-spi</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-framework</artifactId> <version>${curator.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-recipes</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </exclusion> <exclusion> <artifactId>netty</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-annotations</artifactId> </exclusion> </exclusions> <version>${zookeeper.version}</version> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec.version}</version> </dependency> <dependency> <groupId>commons-logging</groupId> <artifactId>commons-logging</artifactId> <version>${commons.logging.version}</version> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>${httpclient.version}</version> </dependency> <dependency> 
<groupId>org.apache.httpcomponents</groupId> <artifactId>httpcore</artifactId> <version>${httpcore.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-core</artifactId> <version>${jackson.version}</version> </dependency> <!--protostuff--> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-core --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-core</artifactId> <version>${protostuff.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-runtime --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-runtime</artifactId> <version>${protostuff.version}</version> </dependency> <dependency> <groupId>net.bytebuddy</groupId> <artifactId>byte-buddy</artifactId> <version>${byte-buddy.version}</version> </dependency> <dependency> <groupId>org.reflections</groupId> <artifactId>reflections</artifactId> <version>${reflections.version}</version> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockito.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-module-junit4</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-api-mockito2</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> <exclusions> <exclusion> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>${mysql.connector.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> <version>${h2.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${slf4j.api.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>${slf4j.log4j12.version}</version> </dependency> <dependency> <groupId>commons-collections</groupId> <artifactId>commons-collections</artifactId> <version>${commons.collections.version}</version> </dependency> <dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> <version>${commons.httpclient}</version> </dependency> <dependency> <groupId>commons-beanutils</groupId> <artifactId>commons-beanutils</artifactId> <version>${commons.beanutils.version}</version> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> <version>${commons.configuration.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> 
<version>${logback.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-email</artifactId> <version>${commons.email.version}</version> </dependency> <!--excel poi--> <dependency> <groupId>org.apache.poi</groupId> <artifactId>poi</artifactId> <version>${poi.version}</version> </dependency> <!-- hadoop --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> <exclusions> <exclusion> <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <artifactId>com.sun.jersey</artifactId> <groupId>jersey-json</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-yarn-common</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-aws</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-collections4</artifactId> <version>${commons.collections4.version}</version> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> <version>${postgresql.version}</version> </dependency> <dependency> <groupId>org.apache.hive</groupId> <artifactId>hive-jdbc</artifactId> <version>${hive.jdbc.version}</version> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>${commons.io.version}</version> </dependency> <dependency> <groupId>com.github.oshi</groupId> <artifactId>oshi-core</artifactId> <version>${oshi.core.version}</version> </dependency> <dependency> <groupId>ru.yandex.clickhouse</groupId> <artifactId>clickhouse-jdbc</artifactId> <version>${clickhouse.jdbc.version}</version> </dependency> <dependency> <groupId>com.microsoft.sqlserver</groupId> <artifactId>mssql-jdbc</artifactId> <version>${mssql.jdbc.version}</version> </dependency> <dependency> <groupId>com.facebook.presto</groupId> <artifactId>presto-jdbc</artifactId> <version>${presto.jdbc.version}</version> </dependency> <dependency> <groupId>net.jcip</groupId> <artifactId>jcip-annotations</artifactId> <version>${jcip.version}</version> <optional>true</optional> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>servlet-api</artifactId> <version>${servlet-api.version}</version> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>javax.servlet-api</artifactId> <version>${javax.servlet.api.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger2</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger-ui</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.swagger</groupId> <artifactId>swagger-models</artifactId> <version>${swagger-models.version}</version> </dependency> <dependency> <groupId>com.github.xiaoymin</groupId> <artifactId>swagger-bootstrap-ui</artifactId> 
<version>${swagger.version}</version> </dependency> <dependency> <groupId>com.github.rholder</groupId> <artifactId>guava-retrying</artifactId> <version>${guava-retry.version}</version> </dependency> <dependency> <groupId>org.sonatype.aether</groupId> <artifactId>aether-api</artifactId> <version>1.13.1</version> </dependency> <dependency> <groupId>io.airlift.resolver</groupId> <artifactId>resolver</artifactId> <version>1.5</version> </dependency> <dependency> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> <version>6.2.1</version> </dependency> <dependency> <groupId>javax.activation</groupId> <artifactId>activation</artifactId> <version>1.1</version> </dependency> <dependency> <groupId>com.sun.mail</groupId> <artifactId>javax.mail</artifactId> <version>1.6.2</version> </dependency> </dependencies> </dependencyManagement> <build> <finalName>apache-dolphinscheduler-${project.version}</finalName> <pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <version>1.0.0</version> <extensions>true</extensions> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <version>1.0.4</version> <extensions>true</extensions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>rpm-maven-plugin</artifactId> <version>${rpm-maven-plugion.version}</version> <inherited>false</inherited> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>${java.version}</source> <target>${java.version}</target> <testSource>${java.version}</testSource> <testTarget>${java.version}</testTarget> </configuration> <version>${maven-compiler-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <tagNameFormat>@{project.version}</tagNameFormat> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-assembly-plugin</artifactId> <version>${maven-assembly-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <configuration> <source>8</source> <failOnError>false</failOnError> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>${maven-source-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> <version>${maven-dependency-plugin.version}</version> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <extensions>true</extensions> <!--<configuration>--> <!--<allowedProvidedDependencies>--> <!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>--> <!--</allowedProvidedDependencies>--> <!--</configuration>--> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <extensions>true</extensions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> 
<executions> <execution> <id>attach-sources</id> <phase>verify</phase> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <executions> <execution> <id>attach-javadocs</id> <goals> <goal>jar</goal> </goals> </execution> </executions> <configuration> <aggregate>true</aggregate> <charset>${project.build.sourceEncoding}</charset> <encoding>${project.build.sourceEncoding}</encoding> <docencoding>${project.build.sourceEncoding}</docencoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <autoVersionSubmodules>true</autoVersionSubmodules> <tagNameFormat>@{project.version}</tagNameFormat> <tagBase>${project.version}</tagBase> <!--<goals>-f pom.xml deploy</goals>--> </configuration> <dependencies> <dependency> <groupId>org.apache.maven.scm</groupId> <artifactId>maven-scm-provider-jgit</artifactId> <version>1.9.5</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>${maven-compiler-plugin.version}</version> <configuration> <source>${java.version}</source> <target>${java.version}</target> <encoding>${project.build.sourceEncoding}</encoding> <skip>false</skip><!--not skip compile test classes--> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>${maven-surefire-plugin.version}</version> <configuration> <includes> <include>**/api/controller/ProjectControllerTest.java</include> <include>**/api/controller/QueueControllerTest.java</include> <include>**/api/configuration/TrafficConfigurationTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TenantControllerTest.java</include> <include>**/api/dto/resources/filter/ResourceFilterTest.java</include> <include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include> <includeDataxTaskTest>**/api/enums/testGetEnum.java</includeDataxTaskTest> <include>**/api/enums/StatusTest.java</include> <include>**/api/exceptions/ApiExceptionHandlerTest.java</include> <include>**/api/exceptions/ServiceExceptionTest.java</include> <include>**/api/interceptor/LocaleChangeInterceptorTest.java</include> <include>**/api/interceptor/LoginHandlerInterceptorTest.java</include> <include>**/api/interceptor/RateLimitInterceptorTest.java</include> <include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include> <include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include> <include>**/api/security/SecurityConfigLDAPTest.java</include> <include>**/api/security/SecurityConfigPasswordTest.java</include> <include>**/api/service/AccessTokenServiceTest.java</include> <include>**/api/service/AlertGroupServiceTest.java</include> <include>**/api/service/BaseDAGServiceTest.java</include> <include>**/api/service/BaseServiceTest.java</include> <include>**/api/service/DataAnalysisServiceTest.java</include> <include>**/api/service/AlertPluginInstanceServiceTest.java</include> <include>**/api/service/DataSourceServiceTest.java</include> <include>**/api/service/ExecutorService2Test.java</include> <include>**/api/service/ExecutorServiceTest.java</include> 
<include>**/api/service/LoggerServiceTest.java</include> <include>**/api/service/MonitorServiceTest.java</include> <include>**/api/service/ProcessDefinitionServiceTest.java</include> <include>**/api/service/ProcessDefinitionVersionServiceTest.java</include> <include>**/api/service/ProcessInstanceServiceTest.java</include> <include>**/api/service/ProjectServiceTest.java</include> <include>**/api/service/QueueServiceTest.java</include> <include>**/api/service/ResourcesServiceTest.java</include> <include>**/api/service/SchedulerServiceTest.java</include> <include>**/api/service/SessionServiceTest.java</include> <include>**/api/service/TaskInstanceServiceTest.java</include> <include>**/api/service/TenantServiceTest.java</include> <include>**/api/service/UdfFuncServiceTest.java</include> <include>**/api/service/UiPluginServiceTest.java</include> <include>**/api/service/UserAlertGroupServiceTest.java</include> <include>**/api/service/UsersServiceTest.java</include> <include>**/api/service/WorkerGroupServiceTest.java</include> <include>**/api/service/WorkFlowLineageServiceTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TaskInstanceControllerTest.java</include> <include>**/api/controller/WorkFlowLineageControllerTest.java</include> <include>**/api/utils/exportprocess/DataSourceParamTest.java</include> <include>**/api/utils/exportprocess/DependentParamTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/FileUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/ResultTest.java</include> <include>**/common/graph/DAGTest.java</include> <include>**/common/os/OshiTest.java</include> <include>**/common/os/OSUtilsTest.java</include> <include>**/common/shell/ShellExecutorTest.java</include> <include>**/common/task/DataxParametersTest.java</include> <include>**/common/task/EntityTestUtils.java</include> <include>**/common/task/FlinkParametersTest.java</include> <include>**/common/task/HttpParametersTest.java</include> <include>**/common/task/SqlParametersTest.java</include> <include>**/common/task/SqoopParameterEntityTest.java</include> <include>**/common/threadutils/ThreadPoolExecutorsTest.java</include> <include>**/common/threadutils/ThreadUtilsTest.java</include> <include>**/common/utils/CollectionUtilsTest.java</include> <include>**/common/utils/CommonUtilsTest.java</include> <include>**/common/utils/DateUtilsTest.java</include> <include>**/common/utils/DependentUtilsTest.java</include> <include>**/common/utils/EncryptionUtilsTest.java</include> <include>**/common/utils/FileUtilsTest.java</include> <include>**/common/utils/JSONUtilsTest.java</include> <include>**/common/utils/LoggerUtilsTest.java</include> <include>**/common/utils/NetUtilsTest.java</include> <include>**/common/utils/OSUtilsTest.java</include> <include>**/common/utils/ParameterUtilsTest.java</include> <include>**/common/utils/TimePlaceholderUtilsTest.java</include> <include>**/common/utils/PreconditionsTest.java</include> <include>**/common/utils/PropertyUtilsTest.java</include> <include>**/common/utils/SchemaUtilsTest.java</include> <include>**/common/utils/ScriptRunnerTest.java</include> <include>**/common/utils/SensitiveLogUtilsTest.java</include> <include>**/common/utils/StringTest.java</include> <include>**/common/utils/StringUtilsTest.java</include> <include>**/common/utils/TaskParametersUtilsTest.java</include> 
<include>**/common/utils/VarPoolUtilsTest.java</include> <include>**/common/utils/HadoopUtilsTest.java</include> <include>**/common/utils/HttpUtilsTest.java</include> <include>**/common/utils/KerberosHttpClientTest.java</include> <include>**/common/utils/HiveConfUtilsTest.java</include> <include>**/common/ConstantsTest.java</include> <include>**/common/utils/HadoopUtils.java</include> <include>**/common/utils/RetryerUtilsTest.java</include> <include>**/common/plugin/DolphinSchedulerPluginLoaderTest.java</include> <include>**/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java</include> <include>**/common/datasource/db2/Db2DatasourceProcessorTest.java</include> <include>**/common/datasource/hive/HiveDatasourceProcessorTest.java</include> <include>**/common/datasource/mysql/MysqlDatasourceProcessorTest.java</include> <include>**/common/datasource/oracle/OracleDatasourceProcessorTest.java</include> <include>**/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java</include> <include>**/common/datasource/presto/PrestoDatasourceProcessorTest.java</include> <include>**/common/datasource/spark/SparkDatasourceProcessorTest.java</include> <include>**/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java</include> <include>**/common/datasource/DatasourceUtilTest.java</include> <include>**/common/enums/ExecutionStatusTest</include> <include>**/dao/mapper/AccessTokenMapperTest.java</include> <include>**/dao/mapper/AlertGroupMapperTest.java</include> <include>**/dao/mapper/CommandMapperTest.java</include> <include>**/dao/mapper/ConnectionFactoryTest.java</include> <include>**/dao/mapper/DataSourceMapperTest.java</include> <include>**/dao/datasource/MySQLDataSourceTest.java</include> <include>**/dao/entity/TaskInstanceTest.java</include> <include>**/dao/entity/UdfFuncTest.java</include> <include>**/remote/command/alert/AlertSendRequestCommandTest.java</include> <include>**/remote/command/alert/AlertSendResponseCommandTest.java</include> <include>**/remote/command/future/ResponseFutureTest.java</include> <include>**/remote/command/log/RemoveTaskLogRequestCommandTest.java</include> <include>**/remote/command/log/RemoveTaskLogResponseCommandTest.java</include> <include>**/remote/utils/HostTest.java</include> <include>**/remote/utils/NettyUtilTest.java</include> <include>**/remote/NettyRemotingClientTest.java</include> <include>**/rpc/RpcTest.java</include> <include>**/server/log/LoggerServerTest.java</include> <include>**/server/entity/SQLTaskExecutionContextTest.java</include> <include>**/server/log/MasterLogFilterTest.java</include> <include>**/server/log/SensitiveDataConverterTest.java</include> <include>**/server/log/LoggerRequestProcessorTest.java</include> <!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>--> <include>**/server/log/TaskLogFilterTest.java</include> <include>**/server/log/WorkerLogFilterTest.java</include> <include>**/server/master/config/MasterConfigTest.java</include> <include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include> <include>**/server/master/runner/MasterTaskExecThreadTest.java</include> <!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>--> <include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include> <include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include> 
<include>**/server/master/dispatch/host/assign/HostWorkerTest.java</include> <include>**/server/master/register/MasterRegistryTest.java</include> <include>**/server/master/registry/ServerNodeManagerTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include> <include>**/server/master/AlertManagerTest.java</include> <include>**/server/master/MasterCommandTest.java</include> <include>**/server/master/DependentTaskTest.java</include> <include>**/server/master/ConditionsTaskTest.java</include> <include>**/server/master/MasterExecThreadTest.java</include> <include>**/server/master/ParamsTest.java</include> <include>**/server/master/SubProcessTaskTest.java</include> <include>**/server/master/processor/TaskAckProcessorTest.java</include> <include>**/server/master/processor/TaskKillResponseProcessorTest.java</include> <include>**/server/master/processor/queue/TaskResponseServiceTest.java</include> <include>**/server/master/zk/ZKMasterClientTest.java</include> <include>**/server/register/ZookeeperRegistryCenterTest.java</include> <include>**/server/utils/DataxUtilsTest.java</include> <include>**/server/utils/ExecutionContextTestUtils.java</include> <include>**/server/utils/FlinkArgsUtilsTest.java</include> <include>**/server/utils/LogUtilsTest.java</include> <include>**/server/utils/MapReduceArgsUtilsTest.java</include> <include>**/server/utils/ParamUtilsTest.java</include> <include>**/server/utils/ProcessUtilsTest.java</include> <include>**/server/utils/SparkArgsUtilsTest.java</include> <include>**/server/worker/processor/TaskCallbackServiceTest.java</include> <include>**/server/worker/processor/TaskExecuteProcessorTest.java</include> <include>**/server/worker/registry/WorkerRegistryTest.java</include> <include>**/server/worker/shell/ShellCommandExecutorTest.java</include> <include>**/server/worker/sql/SqlExecutorTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/EnvFileTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/datax/DataxTaskTest.java</include> <!--<include>**/server/worker/task/http/HttpTaskTest.java</include>--> <include>**/server/worker/task/sqoop/SqoopTaskTest.java</include> <include>**/server/worker/task/shell/ShellTaskTest.java</include> <include>**/server/worker/task/TaskManagerTest.java</include> <include>**/server/worker/task/AbstractCommandExecutorTest.java</include> <include>**/server/worker/task/ShellTaskReturnTest.java</include> <include>**/server/worker/task/sql/SqlTaskTest.java</include> <include>**/server/worker/EnvFileTest.java</include> <include>**/server/worker/runner/TaskExecuteThreadTest.java</include> <include>**/server/worker/runner/WorkerManagerThreadTest.java</include> <include>**/service/quartz/cron/CronUtilsTest.java</include> <include>**/service/process/ProcessServiceTest.java</include> <include>**/service/zk/DefaultEnsembleProviderTest.java</include> <include>**/service/zk/ZKServerTest.java</include> <include>**/service/zk/CuratorZookeeperClientTest.java</include> <include>**/service/zk/RegisterOperatorTest.java</include> <include>**/service/queue/TaskUpdateQueueTest.java</include> <include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include> <include>**/service/log/LogClientServiceTest.java</include> <include>**/service/alert/AlertClientServiceTest.java</include> <include>**/dao/mapper/DataSourceUserMapperTest.java</include> 
<!--<iTaskUpdateQueueConsumerThreadnclude>**/dao/mapper/ErrorCommandMapperTest.java</iTaskUpdateQueueConsumerThreadnclude>--> <include>**/dao/mapper/ProcessDefinitionMapperTest.java</include> <include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapperTest.java</include> <include>**/dao/mapper/ProjectMapperTest.java</include> <include>**/dao/mapper/ProjectUserMapperTest.java</include> <include>**/dao/mapper/QueueMapperTest.java</include> <include>**/dao/mapper/ResourceUserMapperTest.java</include> <include>**/dao/mapper/ScheduleMapperTest.java</include> <include>**/dao/mapper/SessionMapperTest.java</include> <include>**/dao/mapper/TaskInstanceMapperTest.java</include> <include>**/dao/mapper/TenantMapperTest.java</include> <include>**/dao/mapper/UdfFuncMapperTest.java</include> <include>**/dao/mapper/UDFUserMapperTest.java</include> <include>**/dao/mapper/UserMapperTest.java</include> <include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include> <include>**/dao/mapper/PluginDefineTest.java</include> <include>**/dao/utils/DagHelperTest.java</include> <include>**/dao/AlertDaoTest.java</include> <include>**/dao/datasource/OracleDataSourceTest.java</include> <include>**/dao/datasource/HiveDataSourceTest.java</include> <include>**/dao/datasource/BaseDataSourceTest.java</include> <include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include> <include>**/dao/upgrade/WokrerGrouopDaoTest.java</include> <include>**/dao/upgrade/UpgradeDaoTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelTest.java</include> <include>**/plugin/alert/email/ExcelUtilsTest.java</include> <include>**/plugin/alert/email/MailUtilsTest.java</include> <include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include> <include>**/plugin/alert/wechat/WeChatSenderTest.java</include> <include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ProcessUtilsTest.java</include> <include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ScriptSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelTest.java</include> <include>**/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java</include> <include>**/plugin/alert/feishu/FeiShuSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertPluginTest.java</include> <include>**/plugin/alert/http/HttpSenderTest.java</include> <include>**/spi/params/PluginParamsTransferTest.java</include> <include>**/alert/plugin/EmailAlertPluginTest.java</include> <include>**/alert/plugin/AlertPluginManagerTest.java</include> <include>**/alert/plugin/DolphinPluginLoaderTest.java</include> <include>**/alert/utils/DingTalkUtilsTest.java</include> <include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include> <include>**/alert/utils/FuncUtilsTest.java</include> <include>**/alert/processor/AlertRequestProcessorTest.java</include> <include>**/alert/runner/AlertSenderTest.java</include> <include>**/alert/AlertServerTest.java</include> </includes> <!-- <skip>true</skip> --> </configuration> </plugin> <!-- jenkins plugin jacoco report--> 
<plugin> <groupId>org.jacoco</groupId> <artifactId>jacoco-maven-plugin</artifactId> <version>${jacoco.version}</version> <configuration> <destFile>target/jacoco.exec</destFile> <dataFile>target/jacoco.exec</dataFile> </configuration> <executions> <execution> <id>jacoco-initialize</id> <goals> <goal>prepare-agent</goal> </goals> </execution> <execution> <id>jacoco-site</id> <phase>test</phase> <goals> <goal>report</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-maven-plugin</artifactId> <version>${spotbugs.version}</version> <configuration> <xmlOutput>true</xmlOutput> <threshold>medium</threshold> <effort>default</effort> <excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile> <failOnError>true</failOnError> </configuration> <dependencies> <dependency> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs</artifactId> <version>4.0.0-beta4</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-checkstyle-plugin</artifactId> <version>${checkstyle.version}</version> <dependencies> <dependency> <groupId>com.puppycrawl.tools</groupId> <artifactId>checkstyle</artifactId> <version>8.18</version> </dependency> </dependencies> <configuration> <consoleOutput>true</consoleOutput> <encoding>UTF-8</encoding> <configLocation>style/checkstyle.xml</configLocation> <suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation> <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression> <failOnViolation>true</failOnViolation> <violationSeverity>warning</violationSeverity> <includeTestSourceDirectory>true</includeTestSourceDirectory> <sourceDirectories> <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory> </sourceDirectories> <excludes>**\/generated-sources\/</excludes> <skip>true</skip> </configuration> <executions> <execution> <phase>compile</phase> <goals> <goal>check</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${cobertura-maven-plugin.version}</version> <configuration> <check> </check> <aggregate>true</aggregate> <outputDirectory>./target/cobertura</outputDirectory> <encoding>${project.build.sourceEncoding}</encoding> <quiet>true</quiet> <format>xml</format> <instrumentation> <ignoreTrivial>true</ignoreTrivial> </instrumentation> </configuration> </plugin> </plugins> </build> <modules> <module>dolphinscheduler-alert-plugin</module> <module>dolphinscheduler-ui</module> <module>dolphinscheduler-server</module> <module>dolphinscheduler-common</module> <module>dolphinscheduler-api</module> <module>dolphinscheduler-dao</module> <module>dolphinscheduler-alert</module> <module>dolphinscheduler-dist</module> <module>dolphinscheduler-remote</module> <module>dolphinscheduler-service</module> <module>dolphinscheduler-spi</module> <module>dolphinscheduler-microbench</module> </modules> </project>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,990
[Feature][Procedure] Procedure task supports db2 procedure
**Describe the feature** The procedure task needs to support DB2 stored procedures.
https://github.com/apache/dolphinscheduler/issues/3990
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-10-26T01:14:30Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.utils.EnumUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.datax.DataxTask; import org.apache.dolphinscheduler.server.worker.task.flink.FlinkTask; import org.apache.dolphinscheduler.server.worker.task.http.HttpTask; import org.apache.dolphinscheduler.server.worker.task.mr.MapReduceTask; import org.apache.dolphinscheduler.server.worker.task.processdure.ProcedureTask; import org.apache.dolphinscheduler.server.worker.task.python.PythonTask; import org.apache.dolphinscheduler.server.worker.task.shell.ShellTask; import org.apache.dolphinscheduler.server.worker.task.spark.SparkTask; import org.apache.dolphinscheduler.server.worker.task.sql.SqlTask; import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopTask; import org.apache.dolphinscheduler.service.alert.AlertClientService; import org.slf4j.Logger; /** * task manager */ public class TaskManager { /** * create new task * @param taskExecutionContext taskExecutionContext * @param logger logger * @return AbstractTask * @throws IllegalArgumentException illegal argument exception */ public static AbstractTask newTask(TaskExecutionContext taskExecutionContext, Logger logger, AlertClientService alertClientService) throws IllegalArgumentException { TaskType anEnum = EnumUtils.getEnum(TaskType.class, taskExecutionContext.getTaskType()); if (anEnum == null) { logger.error("not support task type: {}", taskExecutionContext.getTaskType()); throw new IllegalArgumentException("not support task type"); } switch (anEnum) { case SHELL: case WATERDROP: return new ShellTask(taskExecutionContext, logger); case PROCEDURE: return new ProcedureTask(taskExecutionContext, logger); case SQL: return new SqlTask(taskExecutionContext, logger, alertClientService); case MR: return new MapReduceTask(taskExecutionContext, logger); case SPARK: return new SparkTask(taskExecutionContext, logger); case FLINK: return new FlinkTask(taskExecutionContext, logger); case PYTHON: return new PythonTask(taskExecutionContext, logger); case HTTP: return new HttpTask(taskExecutionContext, logger); case DATAX: return new DataxTask(taskExecutionContext, logger); case SQOOP: return new SqoopTask(taskExecutionContext, logger); default: logger.error("not support task type: {}", taskExecutionContext.getTaskType()); throw new IllegalArgumentException("not support task type"); } } }
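For reference, this factory is invoked with the task type carried on the `TaskExecutionContext`; the `PROCEDURE` branch is the one this change touches. A minimal usage sketch follows, assuming `TaskExecutionContext` exposes a plain `setTaskType` setter, and noting that `alertClientService` is only consumed by the `SQL` branch, so `null` is acceptable for a procedure task.

```java
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.task.AbstractTask;
import org.apache.dolphinscheduler.server.worker.task.TaskManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TaskManagerUsage {
    private static final Logger logger = LoggerFactory.getLogger(TaskManagerUsage.class);

    public static void main(String[] args) {
        // Assumes TaskExecutionContext is a plain POJO with a setter for the task type.
        TaskExecutionContext context = new TaskExecutionContext();
        context.setTaskType("PROCEDURE");

        // Resolves to a ProcedureTask via the PROCEDURE case in TaskManager.newTask;
        // alertClientService is null because only the SQL branch uses it.
        AbstractTask task = TaskManager.newTask(context, logger, null);
        System.out.println(task.getClass().getSimpleName());
    }
}
```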
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,990
[Feature][Procedure] Procedure task supports db2 procedure
**Describe the feature** The procedure task needs to support DB2 stored procedures.
https://github.com/apache/dolphinscheduler/issues/3990
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-10-26T01:14:30Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/procedure/ProcedureTask.java
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,990
[Feature][Procedure] Procedure task supports db2 procedure
**Describe the feature** The procedure task needs to support DB2 stored procedures.
https://github.com/apache/dolphinscheduler/issues/3990
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-10-26T01:14:30Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/procedure/ProcedureTaskTest.java
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,990
[Feature][Procedure] Procedure task supports db2 procedure
**Describe the feature** The procedure task needs to support DB2 stored procedures.
https://github.com/apache/dolphinscheduler/issues/3990
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-10-26T01:14:30Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/procedure.vue
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
<template>
  <div class="procedure-model">
    <m-list-box>
      <div slot="text">{{$t('Datasource')}}</div>
      <div slot="content">
        <m-datasource
          ref="refDs"
          @on-dsData="_onDsData"
          :supportType="['MYSQL','POSTGRESQL','CLICKHOUSE', 'ORACLE', 'SQLSERVER']"
          :data="{ type:type,datasource:datasource }">
        </m-datasource>
      </div>
    </m-list-box>
    <m-list-box>
      <div slot="text">{{$t('methods')}}</div>
      <div slot="content">
        <el-input
          type="input"
          size="small"
          :disabled="isDetails"
          v-model="method"
          :placeholder="$t('Please enter method(optional)')">
        </el-input>
      </div>
    </m-list-box>
    <m-list-box>
      <div slot="text">{{$t('Custom Parameters')}}</div>
      <div slot="content">
        <m-local-params
          ref="refLocalParams"
          @on-local-params="_onLocalParams"
          :udp-list="localParams">
        </m-local-params>
      </div>
    </m-list-box>
  </div>
</template>
<script>
  import _ from 'lodash'
  import i18n from '@/module/i18n'
  import mListBox from './_source/listBox'
  import mDatasource from './_source/datasource'
  import mLocalParams from './_source/localParams'
  import disabledState from '@/module/mixin/disabledState'

  export default {
    name: 'procedure',
    data () {
      return {
        // method
        method: '',
        // Custom parameter
        localParams: [],
        // Data source type
        type: '',
        // data source
        datasource: '',
        // Return to the selected data source
        rtDatasource: ''
      }
    },
    mixins: [disabledState],
    props: {
      backfillItem: Object
    },
    methods: {
      /**
       * return type or datasource
       */
      _onDsData (o) {
        this.type = o.type
        this.rtDatasource = o.datasource
      },
      /**
       * return udp
       */
      _onLocalParams (a) {
      },
      /**
       * verification
       */
      _verification () {
        // datasource Subcomponent verification
        if (!this.$refs.refDs._verifDatasource()) {
          return false
        }
        // Verification function
        if (!this.method) {
          this.$message.warning(`${i18n.$t('Please enter method')}`)
          return false
        }
        // localParams Subcomponent verification
        if (!this.$refs.refLocalParams._verifProp()) {
          return false
        }
        // storage
        this.$emit('on-params', {
          type: this.type,
          datasource: this.rtDatasource,
          method: this.method,
          localParams: this.localParams
        })
        return true
      }
    },
    watch: {
      // Watch the cacheParams
      cacheParams (val) {
        this.$emit('on-cache-params', val)
      }
    },
    computed: {
      cacheParams () {
        return {
          type: this.type,
          datasource: this.rtDatasource,
          method: this.method,
          localParams: this.localParams
        }
      }
    },
    created () {
      let o = this.backfillItem
      // Non-null objects represent backfill
      if (!_.isEmpty(o)) {
        this.name = o.name
        this.desc = o.desc
        // backfill
        this.type = o.params.type || ''
        this.datasource = o.params.datasource || ''
        this.method = o.params.method || ''
        // backfill localParams
        let localParams = o.params.localParams || []
        if (localParams.length) {
          this.localParams = localParams
        }
      }
    },
    mounted () {
    },
    components: { mListBox, mDatasource, mLocalParams }
  }
</script>
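Note that the supportType array in the snapshot above still lists only MYSQL, POSTGRESQL, CLICKHOUSE, ORACLE and SQLSERVER; presumably the linked pull request adds 'DB2' to that list so the datasource picker can offer DB2 connections to the Procedure task.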
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,990
[Feature][Procedure] Procedure task supports db2 procedure
**Describe the feature** The Procedure task needs to support DB2 stored procedures.
https://github.com/apache/dolphinscheduler/issues/3990
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-10-26T01:14:30Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ export default { 'User Name': 'User Name', 'Please enter user name': 'Please enter user name', Password: 'Password', 'Please enter your password': 'Please enter your password', 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22', Login: 'Login', Home: 'Home', 'Failed to create node to save': 'Failed to create node to save', 'Global parameters': 'Global parameters', 'Local parameters': 'Local parameters', 'Copy success': 'Copy success', 'The browser does not support automatic copying': 'The browser does not support automatic copying', 'Whether to save the DAG graph': 'Whether to save the DAG graph', 'Current node settings': 'Current node settings', 'View history': 'View history', 'View log': 'View log', 'Force success': 'Force success', 'Enter this child node': 'Enter this child node', 'Node name': 'Node name', 'Please enter name (required)': 'Please enter name (required)', 'Run flag': 'Run flag', Normal: 'Normal', 'Prohibition execution': 'Prohibition execution', 'Please enter description': 'Please enter description', 'Number of failed retries': 'Number of failed retries', Times: 'Times', 'Failed retry interval': 'Failed retry interval', Minute: 'Minute', 'Delay execution time': 'Delay execution time', 'Delay execution': 'Delay execution', 'Forced success': 'Forced success', Cancel: 'Cancel', 'Confirm add': 'Confirm add', 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process', 'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process', 'Name already exists': 'Name already exists', 'Download Log': 'Download Log', 'Refresh Log': 'Refresh Log', 'Enter full screen': 'Enter full screen', 'Cancel full screen': 'Cancel full screen', Close: 'Close', 'Update log success': 'Update log success', 'No more logs': 'No more logs', 'No log': 'No log', 'Loading Log...': 'Loading Log...', 'Set the DAG diagram name': 'Set the DAG diagram name', 'Please enter description(optional)': 'Please enter description(optional)', 'Set global': 'Set global', 'Whether to go online the process definition': 'Whether to go online the process definition', 'Whether to update the process definition': 'Whether to update the process definition', Add: 'Add', 'DAG graph name cannot be empty': 'DAG graph name cannot be empty', 'Create Datasource': 'Create Datasource', 'Project Home': 'Project Home', 'Project Manage': 'Project', 'Create Project': 'Create Project', 'Cron Manage': 'Cron 
Manage', 'Copy Workflow': 'Copy Workflow', 'Tenant Manage': 'Tenant Manage', 'Create Tenant': 'Create Tenant', 'User Manage': 'User Manage', 'Create User': 'Create User', 'User Information': 'User Information', 'Edit Password': 'Edit Password', Success: 'Success', Failed: 'Failed', Delete: 'Delete', 'Please choose': 'Please choose', 'Please enter a positive integer': 'Please enter a positive integer', 'Program Type': 'Program Type', 'Main Class': 'Main Class', 'Main Jar Package': 'Main Jar Package', 'Please enter main jar package': 'Please enter main jar package', 'Please enter main class': 'Please enter main class', 'Main Arguments': 'Main Arguments', 'Please enter main arguments': 'Please enter main arguments', 'Option Parameters': 'Option Parameters', 'Please enter option parameters': 'Please enter option parameters', Resources: 'Resources', 'Custom Parameters': 'Custom Parameters', 'Custom template': 'Custom template', Datasource: 'Datasource', methods: 'methods', 'Please enter method(optional)': 'Please enter method(optional)', Script: 'Script', 'Please enter script(required)': 'Please enter script(required)', 'Deploy Mode': 'Deploy Mode', 'Driver Cores': 'Driver Cores', 'Please enter Driver cores': 'Please enter Driver cores', 'Driver Memory': 'Driver Memory', 'Please enter Driver memory': 'Please enter Driver memory', 'Executor Number': 'Executor Number', 'Please enter Executor number': 'Please enter Executor number', 'The Executor number should be a positive integer': 'The Executor number should be a positive integer', 'Executor Memory': 'Executor Memory', 'Please enter Executor memory': 'Please enter Executor memory', 'Executor Cores': 'Executor Cores', 'Please enter Executor cores': 'Please enter Executor cores', 'Memory should be a positive integer': 'Memory should be a positive integer', 'Core number should be positive integer': 'Core number should be positive integer', 'Flink Version': 'Flink Version', 'JobManager Memory': 'JobManager Memory', 'Please enter JobManager memory': 'Please enter JobManager memory', 'TaskManager Memory': 'TaskManager Memory', 'Please enter TaskManager memory': 'Please enter TaskManager memory', 'Slot Number': 'Slot Number', 'Please enter Slot number': 'Please enter Slot number', Parallelism: 'Parallelism', 'Please enter Parallelism': 'Please enter Parallelism', 'TaskManager Number': 'TaskManager Number', 'Please enter TaskManager number': 'Please enter TaskManager number', 'App Name': 'App Name', 'Please enter app name(optional)': 'Please enter app name(optional)', 'SQL Type': 'SQL Type', 'Send Email': 'Send Email', 'Log display': 'Log display', 'rows of result': 'rows of result', Title: 'Title', 'Please enter the title of email': 'Please enter the title of email', Table: 'Table', TableMode: 'Table', Attachment: 'Attachment', 'SQL Parameter': 'SQL Parameter', 'SQL Statement': 'SQL Statement', 'UDF Function': 'UDF Function', 'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)', 'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)', 'One form or attachment must be selected': 'One form or attachment must be selected', 'Mail subject required': 'Mail subject required', 'Child Node': 'Child Node', 'Please select a sub-Process': 'Please select a sub-Process', Edit: 'Edit', 'Switch To This Version': 'Switch To This Version', 'Datasource Name': 'Datasource Name', 'Please enter datasource name': 'Please enter datasource name', IP: 'IP', 'Please enter IP': 'Please enter IP', Port: 'Port', 'Please 
enter port': 'Please enter port', 'Database Name': 'Database Name', 'Please enter database name': 'Please enter database name', 'Oracle Connect Type': 'ServiceName or SID', 'Oracle Service Name': 'ServiceName', 'Oracle SID': 'SID', 'jdbc connect parameters': 'jdbc connect parameters', 'Test Connect': 'Test Connect', 'Please enter resource name': 'Please enter resource name', 'Please enter resource folder name': 'Please enter resource folder name', 'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement', 'Please enter IP/hostname': 'Please enter IP/hostname', 'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format', '#': '#', 'Datasource Type': 'Datasource Type', 'Datasource Parameter': 'Datasource Parameter', 'Create Time': 'Create Time', 'Update Time': 'Update Time', Operation: 'Operation', 'Current Version': 'Current Version', 'Click to view': 'Click to view', 'Delete?': 'Delete?', 'Switch Version Successfully': 'Switch Version Successfully', 'Confirm Switch To This Version?': 'Confirm Switch To This Version?', Confirm: 'Confirm', 'Task status statistics': 'Task Status Statistics', Number: 'Number', State: 'State', 'Process Status Statistics': 'Process Status Statistics', 'Process Definition Statistics': 'Process Definition Statistics', 'Project Name': 'Project Name', 'Please enter name': 'Please enter name', 'Owned Users': 'Owned Users', 'Process Pid': 'Process Pid', 'Zk registration directory': 'Zk registration directory', cpuUsage: 'cpuUsage', memoryUsage: 'memoryUsage', 'Last heartbeat time': 'Last heartbeat time', 'Edit Tenant': 'Edit Tenant', 'OS Tenant Code': 'OS Tenant Code', 'Tenant Name': 'Tenant Name', Queue: 'Yarn Queue', 'Please select a queue': 'default is tenant association queue', 'Please enter the os tenant code in English': 'Please enter the os tenant code in English', 'Please enter os tenant code in English': 'Please enter os tenant code in English', 'Please enter os tenant code': 'Please enter os tenant code', 'Please enter tenant Name': 'Please enter tenant Name', 'The os tenant code. Only letters or a combination of letters and numbers are allowed': 'The os tenant code. 
Only letters or a combination of letters and numbers are allowed', 'Edit User': 'Edit User', Tenant: 'Tenant', Email: 'Email', Phone: 'Phone', 'User Type': 'User Type', 'Please enter phone number': 'Please enter phone number', 'Please enter email': 'Please enter email', 'Please enter the correct email format': 'Please enter the correct email format', 'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format', Project: 'Project', Authorize: 'Authorize', 'File resources': 'File resources', 'UDF resources': 'UDF resources', 'UDF resources directory': 'UDF resources directory', 'Please select UDF resources directory': 'Please select UDF resources directory', 'Alarm group': 'Alarm group', 'Alarm group required': 'Alarm group required', 'Edit alarm group': 'Edit alarm group', 'Create alarm group': 'Create alarm group', 'Create Alarm Instance': 'Create Alarm Instance', 'Edit Alarm Instance': 'Edit Alarm Instance', 'Group Name': 'Group Name', 'Alarm instance name': 'Alarm instance name', 'Alarm plugin name': 'Alarm plugin name', 'Select plugin': 'Select plugin', 'Please enter group name': 'Please enter group name', 'Instance parameter exception': 'Instance parameter exception', 'Group Type': 'Group Type', 'Alarm plugin instance': 'Alarm plugin instance', Remarks: 'Remarks', SMS: 'SMS', 'Managing Users': 'Managing Users', Permission: 'Permission', Administrator: 'Administrator', 'Confirm Password': 'Confirm Password', 'Please enter confirm password': 'Please enter confirm password', 'Password cannot be in Chinese': 'Password cannot be in Chinese', 'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password', 'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese', 'Please enter a confirmation password (6-22) character password': 'Please enter a confirmation password (6-22) character password', 'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password', 'Please select the datasource': 'Please select the datasource', 'Please select resources': 'Please select resources', Query: 'Query', 'Non Query': 'Non Query', 'prop(required)': 'prop(required)', 'value(optional)': 'value(optional)', 'value(required)': 'value(required)', 'prop is empty': 'prop is empty', 'value is empty': 'value is empty', 'prop is repeat': 'prop is repeat', 'Start Time': 'Start Time', 'End Time': 'End Time', crontab: 'crontab', 'Failure Strategy': 'Failure Strategy', online: 'online', offline: 'offline', 'Task Status': 'Task Status', 'Process Instance': 'Process Instance', 'Task Instance': 'Task Instance', 'Select date range': 'Select date range', startDate: 'startDate', endDate: 'endDate', Date: 'Date', Waiting: 'Waiting', Execution: 'Execution', Finish: 'Finish', 'Create File': 'Create File', 'Create folder': 'Create folder', 'File Name': 'File Name', 'Folder Name': 'Folder Name', 'File Format': 'File Format', 'Folder Format': 'Folder Format', 'File Content': 'File Content', 'Upload File Size': 'Upload File size cannot exceed 1g', Create: 'Create', 'Please enter the resource content': 'Please enter the resource content', 'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines', 'File Details': 'File Details', 'Download Details': 'Download Details', Return: 'Return', Save: 'Save', 'File Manage': 'File Manage', 'Upload Files': 'Upload Files', 'Create UDF Function': 'Create UDF Function', 'Upload UDF Resources': 'Upload UDF 
Resources', 'Service-Master': 'Service-Master', 'Service-Worker': 'Service-Worker', 'Process Name': 'Process Name', Executor: 'Executor', 'Run Type': 'Run Type', 'Scheduling Time': 'Scheduling Time', 'Run Times': 'Run Times', host: 'host', 'fault-tolerant sign': 'fault-tolerant sign', Rerun: 'Rerun', 'Recovery Failed': 'Recovery Failed', Stop: 'Stop', Pause: 'Pause', 'Recovery Suspend': 'Recovery Suspend', Gantt: 'Gantt', 'Node Type': 'Node Type', 'Submit Time': 'Submit Time', Duration: 'Duration', 'Retry Count': 'Retry Count', 'Task Name': 'Task Name', 'Task Date': 'Task Date', 'Source Table': 'Source Table', 'Record Number': 'Record Number', 'Target Table': 'Target Table', 'Online viewing type is not supported': 'Online viewing type is not supported', Size: 'Size', Rename: 'Rename', Download: 'Download', Export: 'Export', 'Version Info': 'Version Info', Submit: 'Submit', 'Edit UDF Function': 'Edit UDF Function', type: 'type', 'UDF Function Name': 'UDF Function Name', FILE: 'FILE', UDF: 'UDF', 'File Subdirectory': 'File Subdirectory', 'Please enter a function name': 'Please enter a function name', 'Package Name': 'Package Name', 'Please enter a Package name': 'Please enter a Package name', Parameter: 'Parameter', 'Please enter a parameter': 'Please enter a parameter', 'UDF Resources': 'UDF Resources', 'Upload Resources': 'Upload Resources', Instructions: 'Instructions', 'Please enter a instructions': 'Please enter a instructions', 'Please enter a UDF function name': 'Please enter a UDF function name', 'Select UDF Resources': 'Select UDF Resources', 'Class Name': 'Class Name', 'Jar Package': 'Jar Package', 'Library Name': 'Library Name', 'UDF Resource Name': 'UDF Resource Name', 'File Size': 'File Size', Description: 'Description', 'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items', 'Select Line Connection': 'Select Line Connection', 'Delete selected lines or nodes': 'Delete selected lines or nodes', 'Full Screen': 'Full Screen', Unpublished: 'Unpublished', 'Start Process': 'Start Process', 'Execute from the current node': 'Execute from the current node', 'Recover tolerance fault process': 'Recover tolerance fault process', 'Resume the suspension process': 'Resume the suspension process', 'Execute from the failed nodes': 'Execute from the failed nodes', 'Complement Data': 'Complement Data', 'Scheduling execution': 'Scheduling execution', 'Recovery waiting thread': 'Recovery waiting thread', 'Submitted successfully': 'Submitted successfully', Executing: 'Executing', 'Ready to pause': 'Ready to pause', 'Ready to stop': 'Ready to stop', 'Need fault tolerance': 'Need fault tolerance', Kill: 'Kill', 'Waiting for thread': 'Waiting for thread', 'Waiting for dependence': 'Waiting for dependence', Start: 'Start', Copy: 'Copy', 'Copy name': 'Copy name', 'Copy path': 'Copy path', 'Please enter keyword': 'Please enter keyword', 'File Upload': 'File Upload', 'Drag the file into the current upload window': 'Drag the file into the current upload window', 'Drag area upload': 'Drag area upload', Upload: 'Upload', 'ReUpload File': 'ReUpload File', 'Please enter file name': 'Please enter file name', 'Please select the file to upload': 'Please select the file to upload', 'Resources manage': 'Resources', Security: 'Security', Logout: 'Logout', 'No data': 'No data', 'Uploading...': 'Uploading...', 'Loading...': 'Loading...', List: 'List', 'Unable to download without proper url': 'Unable to download without proper url', Process: 'Process', 'Process definition': 'Process definition', 'Task record': 
'Task record', 'Warning group manage': 'Warning group manage', 'Warning instance manage': 'Warning instance manage', 'Servers manage': 'Servers manage', 'UDF manage': 'UDF manage', 'Resource manage': 'Resource manage', 'Function manage': 'Function manage', 'Edit password': 'Edit password', 'Ordinary users': 'Ordinary users', 'Create process': 'Create process', 'Import process': 'Import process', 'Timing state': 'Timing state', Timing: 'Timing', TreeView: 'TreeView', 'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and copyers cannot repeat', 'Mailbox input is illegal': 'Mailbox input is illegal', 'Please set the parameters before starting': 'Please set the parameters before starting', Continue: 'Continue', End: 'End', 'Node execution': 'Node execution', 'Backward execution': 'Backward execution', 'Forward execution': 'Forward execution', 'Execute only the current node': 'Execute only the current node', 'Notification strategy': 'Notification strategy', 'Notification group': 'Notification group', 'Please select a notification group': 'Please select a notification group', receivers: 'receivers', receiverCcs: 'receiverCcs', 'Whether it is a complement process?': 'Whether it is a complement process?', 'Schedule date': 'Schedule date', 'Mode of execution': 'Mode of execution', 'Serial execution': 'Serial execution', 'Parallel execution': 'Parallel execution', 'Set parameters before timing': 'Set parameters before timing', 'Start and stop time': 'Start and stop time', 'Please select time': 'Please select time', 'Please enter crontab': 'Please enter crontab', none_1: 'none', success_1: 'success', failure_1: 'failure', All_1: 'All', Toolbar: 'Toolbar', 'View variables': 'View variables', 'Format DAG': 'Format DAG', 'Refresh DAG status': 'Refresh DAG status', Return_1: 'Return', 'Please enter format': 'Please enter format', 'connection parameter': 'connection parameter', 'Process definition details': 'Process definition details', 'Create process definition': 'Create process definition', 'Scheduled task list': 'Scheduled task list', 'Process instance details': 'Process instance details', 'Create Resource': 'Create Resource', 'User Center': 'User Center', 'Please enter method': 'Please enter method', None: 'None', Name: 'Name', 'Process priority': 'Process priority', 'Task priority': 'Task priority', 'Task timeout alarm': 'Task timeout alarm', 'Timeout strategy': 'Timeout strategy', 'Timeout alarm': 'Timeout alarm', 'Timeout failure': 'Timeout failure', 'Timeout period': 'Timeout period', 'Waiting Dependent complete': 'Waiting Dependent complete', 'Waiting Dependent start': 'Waiting Dependent start', 'Check interval': 'Check interval', 'Timeout must be longer than check interval': 'Timeout must be longer than check interval', 'Timeout strategy must be selected': 'Timeout strategy must be selected', 'Timeout must be a positive integer': 'Timeout must be a positive integer', 'Add dependency': 'Add dependency', and: 'and', or: 'or', month: 'month', week: 'week', day: 'day', hour: 'hour', Running: 'Running', 'Waiting for dependency to complete': 'Waiting for dependency to complete', Selected: 'Selected', CurrentHour: 'CurrentHour', Last1Hour: 'Last1Hour', Last2Hours: 'Last2Hours', Last3Hours: 'Last3Hours', Last24Hours: 'Last24Hours', today: 'today', Last1Days: 'Last1Days', Last2Days: 'Last2Days', Last3Days: 'Last3Days', Last7Days: 'Last7Days', ThisWeek: 'ThisWeek', LastWeek: 'LastWeek', LastMonday: 'LastMonday', LastTuesday: 'LastTuesday', LastWednesday: 
'LastWednesday', LastThursday: 'LastThursday', LastFriday: 'LastFriday', LastSaturday: 'LastSaturday', LastSunday: 'LastSunday', ThisMonth: 'ThisMonth', LastMonth: 'LastMonth', LastMonthBegin: 'LastMonthBegin', LastMonthEnd: 'LastMonthEnd', 'Refresh status succeeded': 'Refresh status succeeded', 'Queue manage': 'Yarn Queue manage', 'Create queue': 'Create queue', 'Edit queue': 'Edit queue', 'Datasource manage': 'Datasource', 'History task record': 'History task record', 'Please go online': 'Please go online', 'Queue value': 'Queue value', 'Please enter queue value': 'Please enter queue value', 'Worker group manage': 'Worker group manage', 'Create worker group': 'Create worker group', 'Edit worker group': 'Edit worker group', 'Token manage': 'Token manage', 'Create token': 'Create token', 'Edit token': 'Edit token', Addresses: 'Addresses', 'Worker Addresses': 'Worker Addresses', 'Please select the worker addresses': 'Please select the worker addresses', 'Failure time': 'Failure time', 'Expiration time': 'Expiration time', User: 'User', 'Please enter token': 'Please enter token', 'Generate token': 'Generate token', Monitor: 'Monitor', Group: 'Group', 'Queue statistics': 'Queue statistics', 'Command status statistics': 'Command status statistics', 'Task kill': 'Task Kill', 'Task queue': 'Task queue', 'Error command count': 'Error command count', 'Normal command count': 'Normal command count', Manage: ' Manage', 'Number of connections': 'Number of connections', Sent: 'Sent', Received: 'Received', 'Min latency': 'Min latency', 'Avg latency': 'Avg latency', 'Max latency': 'Max latency', 'Node count': 'Node count', 'Query time': 'Query time', 'Node self-test status': 'Node self-test status', 'Health status': 'Health status', 'Max connections': 'Max connections', 'Threads connections': 'Threads connections', 'Max used connections': 'Max used connections', 'Threads running connections': 'Threads running connections', 'Worker group': 'Worker group', 'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0', 'Pre Statement': 'Pre Statement', 'Post Statement': 'Post Statement', 'Statement cannot be empty': 'Statement cannot be empty', 'Process Define Count': 'Work flow Define Count', 'Process Instance Running Count': 'Process Instance Running Count', 'command number of waiting for running': 'command number of waiting for running', 'failure command number': 'failure command number', 'tasks number of waiting running': 'tasks number of waiting running', 'task number of ready to kill': 'task number of ready to kill', 'Statistics manage': 'Statistics Manage', statistics: 'Statistics', 'select tenant': 'select tenant', 'Please enter Principal': 'Please enter Principal', 'Please enter the kerberos authentication parameter java.security.krb5.conf': 'Please enter the kerberos authentication parameter java.security.krb5.conf', 'Please enter the kerberos authentication parameter login.user.keytab.username': 'Please enter the kerberos authentication parameter login.user.keytab.username', 'Please enter the kerberos authentication parameter login.user.keytab.path': 'Please enter the kerberos authentication parameter login.user.keytab.path', 'The start time must not be the same as the end': 'The start time must not be the same as the end', 'Startup parameter': 'Startup parameter', 'Startup type': 'Startup type', 'warning of timeout': 'warning of timeout', 'Next five execution times': 'Next five execution times', 'Execute time': 'Execute time', 'Complement range': 'Complement 
range', 'Http Url': 'Http Url', 'Http Method': 'Http Method', 'Http Parameters': 'Http Parameters', 'Http Parameters Key': 'Http Parameters Key', 'Http Parameters Position': 'Http Parameters Position', 'Http Parameters Value': 'Http Parameters Value', 'Http Check Condition': 'Http Check Condition', 'Http Condition': 'Http Condition', 'Please Enter Http Url': 'Please Enter Http Url(required)', 'Please Enter Http Condition': 'Please Enter Http Condition', 'There is no data for this period of time': 'There is no data for this period of time', 'Worker addresses cannot be empty': 'Worker addresses cannot be empty', 'Please generate token': 'Please generate token', 'Spark Version': 'Spark Version', TargetDataBase: 'target database', TargetTable: 'target table', 'Please enter the table of target': 'Please enter the table of target', 'Please enter a Target Table(required)': 'Please enter a Target Table(required)', SpeedByte: 'speed(byte count)', SpeedRecord: 'speed(record count)', '0 means unlimited by byte': '0 means unlimited', '0 means unlimited by count': '0 means unlimited', 'Modify User': 'Modify User', 'Whether directory': 'Whether directory', Yes: 'Yes', No: 'No', 'Hadoop Custom Params': 'Hadoop Params', 'Sqoop Advanced Parameters': 'Sqoop Params', 'Sqoop Job Name': 'Job Name', 'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)', 'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)', 'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)', 'Please enter Target Dir(required)': 'Please enter Target Dir(required)', 'Please enter Export Dir(required)': 'Please enter Export Dir(required)', 'Please enter Hive Database(required)': 'Please enter Hive Databasec(required)', 'Please enter Hive Table(required)': 'Please enter Hive Table(required)', 'Please enter Hive Partition Keys': 'Please enter Hive Partition Key', 'Please enter Hive Partition Values': 'Please enter Partition Value', 'Please enter Replace Delimiter': 'Please enter Replace Delimiter', 'Please enter Fields Terminated': 'Please enter Fields Terminated', 'Please enter Lines Terminated': 'Please enter Lines Terminated', 'Please enter Concurrency': 'Please enter Concurrency', 'Please enter Update Key': 'Please enter Update Key', 'Please enter Job Name(required)': 'Please enter Job Name(required)', 'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)', Direct: 'Direct', Type: 'Type', ModelType: 'ModelType', ColumnType: 'ColumnType', Database: 'Database', Column: 'Column', 'Map Column Hive': 'Map Column Hive', 'Map Column Java': 'Map Column Java', 'Export Dir': 'Export Dir', 'Hive partition Keys': 'Hive partition Keys', 'Hive partition Values': 'Hive partition Values', FieldsTerminated: 'FieldsTerminated', LinesTerminated: 'LinesTerminated', IsUpdate: 'IsUpdate', UpdateKey: 'UpdateKey', UpdateMode: 'UpdateMode', 'Target Dir': 'Target Dir', DeleteTargetDir: 'DeleteTargetDir', FileType: 'FileType', CompressionCodec: 'CompressionCodec', CreateHiveTable: 'CreateHiveTable', DropDelimiter: 'DropDelimiter', OverWriteSrc: 'OverWriteSrc', ReplaceDelimiter: 'ReplaceDelimiter', Concurrency: 'Concurrency', Form: 'Form', OnlyUpdate: 'OnlyUpdate', AllowInsert: 'AllowInsert', 'Data Source': 'Data Source', 'Data Target': 'Data Target', 'All Columns': 'All Columns', 'Some Columns': 'Some Columns', 'Branch flow': 'Branch flow', 'Custom Job': 'Custom Job', 'Custom Script': 'Custom Script', 'Cannot select the same node for successful branch flow and failed 
branch flow': 'Cannot select the same node for successful branch flow and failed branch flow', 'Successful branch flow and failed branch flow are required': 'conditions node Successful and failed branch flow are required', 'No resources exist': 'No resources exist', 'Please delete all non-existing resources': 'Please delete all non-existing resources', 'Unauthorized or deleted resources': 'Unauthorized or deleted resources', 'Please delete all non-existent resources': 'Please delete all non-existent resources', Kinship: 'Workflow relationship', Reset: 'Reset', KinshipStateActive: 'Active', KinshipState1: 'Online', KinshipState0: 'Workflow is not online', KinshipState10: 'Scheduling is not online', 'Dag label display control': 'Dag label display control', Enable: 'Enable', Disable: 'Disable', 'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!', 'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading', 'User name length is between 3 and 39': 'User name length is between 3 and 39', 'Timeout Settings': 'Timeout Settings', 'Connect Timeout': 'Connect Timeout', 'Socket Timeout': 'Socket Timeout', 'Connect timeout be a positive integer': 'Connect timeout be a positive integer', 'Socket Timeout be a positive integer': 'Socket Timeout be a positive integer', ms: 'ms', 'Please Enter Url': 'Please Enter Url eg. 127.0.0.1:7077', Master: 'Master', 'Please select the waterdrop resources': 'Please select the waterdrop resources', zkDirectory: 'zkDirectory', 'Directory detail': 'Directory detail', 'Connection name': 'Connection name', 'Current connection settings': 'Current connection settings', 'Please save the DAG before formatting': 'Please save the DAG before formatting', 'Batch copy': 'Batch copy', 'Related items': 'Related items', 'Project name is required': 'Project name is required', 'Batch move': 'Batch move', Version: 'Version', 'Pre tasks': 'Pre tasks', 'Running Memory': 'Running Memory', 'Max Memory': 'Max Memory', 'Min Memory': 'Min Memory', 'The workflow canvas is abnormal and cannot be saved, please recreate': 'The workflow canvas is abnormal and cannot be saved, please recreate', Info: 'Info', 'Datasource userName': 'owner', 'Resource userName': 'owner' }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,990
[Feature][Procedure] Procedure task supports db2 procedure
**Describe the feature** The Procedure task needs to support DB2 stored procedures.
https://github.com/apache/dolphinscheduler/issues/3990
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-10-26T01:14:30Z"
java
"2021-04-29T10:29:44Z"
dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ export default { 'User Name': '用户名', 'Please enter user name': '请输入用户名', Password: '密码', 'Please enter your password': '请输入密码', 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间', Login: '登录', Home: '首页', 'Failed to create node to save': '未创建节点保存失败', 'Global parameters': '全局参数', 'Local parameters': '局部参数', 'Copy success': '复制成功', 'The browser does not support automatic copying': '该浏览器不支持自动复制', 'Whether to save the DAG graph': '是否保存DAG图', 'Current node settings': '当前节点设置', 'View history': '查看历史', 'View log': '查看日志', 'Force success': '强制成功', 'Enter this child node': '进入该子节点', 'Node name': '节点名称', 'Please enter name (required)': '请输入名称(必填)', 'Run flag': '运行标志', Normal: '正常', 'Prohibition execution': '禁止执行', 'Please enter description': '请输入描述', 'Number of failed retries': '失败重试次数', Times: '次', 'Failed retry interval': '失败重试间隔', Minute: '分', 'Delay execution time': '延时执行时间', 'Delay execution': '延时执行', 'Forced success': '强制成功过', Cancel: '取消', 'Confirm add': '确认添加', 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流', 'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流', 'Name already exists': '名称已存在请重新输入', 'Download Log': '下载日志', 'Refresh Log': '刷新日志', 'Enter full screen': '进入全屏', 'Cancel full screen': '取消全屏', Close: '关闭', 'Update log success': '更新日志成功', 'No more logs': '暂无更多日志', 'No log': '暂无日志', 'Loading Log...': '正在努力请求日志中...', 'Set the DAG diagram name': '设置DAG图名称', 'Please enter description(optional)': '请输入描述(选填)', 'Set global': '设置全局', 'Whether to go online the process definition': '是否上线流程定义', 'Whether to update the process definition': '是否更新流程定义', Add: '添加', 'DAG graph name cannot be empty': 'DAG图名称不能为空', 'Create Datasource': '创建数据源', 'Project Home': '项目首页', 'Project Manage': '项目管理', 'Create Project': '创建项目', 'Cron Manage': '定时管理', 'Copy Workflow': '复制工作流', 'Tenant Manage': '租户管理', 'Create Tenant': '创建租户', 'User Manage': '用户管理', 'Create User': '创建用户', 'User Information': '用户信息', 'Edit Password': '密码修改', Success: '成功', Failed: '失败', Delete: '删除', 'Please choose': '请选择', 'Please enter a positive integer': '请输入正整数', 'Program Type': '程序类型', 'Main Class': '主函数的Class', 'Main Jar Package': '主Jar包', 'Please enter main jar package': '请选择主Jar包', 'Please enter main class': '请填写主函数的Class', 'Main Arguments': '主程序参数', 'Please enter main arguments': '请输入主程序参数', 'Option Parameters': '选项参数', 'Please enter option parameters': '请输入选项参数', Resources: '资源', 'Custom Parameters': '自定义参数', 'Custom template': '自定义模版', Datasource: '数据源', methods: '方法', 'Please enter method(optional)': '请输入方法(选填)', Script: '脚本', 'Please enter script(required)': '请输入脚本(必填)', 
'Deploy Mode': '部署方式', 'Driver Cores': 'Driver核心数', 'Please enter Driver cores': '请输入Driver核心数', 'Driver Memory': 'Driver内存数', 'Please enter Driver memory': '请输入Driver内存数', 'Executor Number': 'Executor数量', 'Please enter Executor number': '请输入Executor数量', 'The Executor number should be a positive integer': 'Executor数量为正整数', 'Executor Memory': 'Executor内存数', 'Please enter Executor memory': '请输入Executor内存数', 'Executor Cores': 'Executor核心数', 'Please enter Executor cores': '请输入Executor核心数', 'Memory should be a positive integer': '内存数为数字', 'Core number should be positive integer': '核心数为正整数', 'Flink Version': 'Flink版本', 'JobManager Memory': 'JobManager内存数', 'Please enter JobManager memory': '请输入JobManager内存数', 'TaskManager Memory': 'TaskManager内存数', 'Please enter TaskManager memory': '请输入TaskManager内存数', 'Slot Number': 'Slot数量', 'Please enter Slot number': '请输入Slot数量', Parallelism: '并行度', 'Please enter Parallelism': '请输入并行度', 'TaskManager Number': 'TaskManager数量', 'Please enter TaskManager number': '请输入TaskManager数量', 'App Name': '任务名称', 'Please enter app name(optional)': '请输入任务名称(选填)', 'SQL Type': 'sql类型', 'Send Email': '发送邮件', 'Log display': '日志显示', 'rows of result': '行查询结果', Title: '主题', 'Please enter the title of email': '请输入邮件主题', Table: '表名', TableMode: '表格', Attachment: '附件', 'SQL Parameter': 'sql参数', 'SQL Statement': 'sql语句', 'UDF Function': 'UDF函数', 'Please enter a SQL Statement(required)': '请输入sql语句(必填)', 'Please enter a JSON Statement(required)': '请输入json语句(必填)', 'One form or attachment must be selected': '表格、附件必须勾选一个', 'Mail subject required': '邮件主题必填', 'Child Node': '子节点', 'Please select a sub-Process': '请选择子工作流', Edit: '编辑', 'Switch To This Version': '切换到该版本', 'Datasource Name': '数据源名称', 'Please enter datasource name': '请输入数据源名称', IP: 'IP主机名', 'Please enter IP': '请输入IP主机名', Port: '端口', 'Please enter port': '请输入端口', 'Database Name': '数据库名', 'Please enter database name': '请输入数据库名', 'Oracle Connect Type': '服务名或SID', 'Oracle Service Name': '服务名', 'Oracle SID': 'SID', 'jdbc connect parameters': 'jdbc连接参数', 'Test Connect': '测试连接', 'Please enter resource name': '请输入数据源名称', 'Please enter resource folder name': '请输入资源文件夹名称', 'Please enter a non-query SQL statement': '请输入非查询sql语句', 'Please enter IP/hostname': '请输入IP/主机名', 'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式', '#': '编号', 'Datasource Type': '数据源类型', 'Datasource Parameter': '数据源参数', 'Create Time': '创建时间', 'Update Time': '更新时间', Operation: '操作', 'Current Version': '当前版本', 'Click to view': '点击查看', 'Delete?': '确定删除吗?', 'Switch Version Successfully': '切换版本成功', 'Confirm Switch To This Version?': '确定切换到该版本吗?', Confirm: '确定', 'Task status statistics': '任务状态统计', Number: '数量', State: '状态', 'Process Status Statistics': '流程状态统计', 'Process Definition Statistics': '流程定义统计', 'Project Name': '项目名称', 'Please enter name': '请输入名称', 'Owned Users': '所属用户', 'Process Pid': '进程Pid', 'Zk registration directory': 'zk注册目录', cpuUsage: 'cpuUsage', memoryUsage: 'memoryUsage', 'Last heartbeat time': '最后心跳时间', 'Edit Tenant': '编辑租户', 'OS Tenant Code': '操作系统租户', 'Tenant Name': '租户名称', Queue: '队列', 'Please select a queue': '默认为租户关联队列', 'Please enter the os tenant code in English': '请输入操作系统租户只允许英文', 'Please enter os tenant code in English': '请输入英文操作系统租户', 'Please enter os tenant code': '请输入操作系统租户', 'Please enter tenant Name': '请输入租户名称', 'The os tenant code. 
Only letters or a combination of letters and numbers are allowed': '操作系统租户只允许字母或字母与数字组合', 'Edit User': '编辑用户', Tenant: '租户', Email: '邮件', Phone: '手机', 'User Type': '用户类型', 'Please enter phone number': '请输入手机', 'Please enter email': '请输入邮箱', 'Please enter the correct email format': '请输入正确的邮箱格式', 'Please enter the correct mobile phone format': '请输入正确的手机格式', Project: '项目', Authorize: '授权', 'File resources': '文件资源', 'UDF resources': 'UDF资源', 'UDF resources directory': 'UDF资源目录', 'Please select UDF resources directory': '请选择UDF资源目录', 'Alarm group': '告警组', 'Alarm group required': '告警组必填', 'Edit alarm group': '编辑告警组', 'Create alarm group': '创建告警组', 'Create Alarm Instance': '创建告警实例', 'Edit Alarm Instance': '编辑告警实例', 'Group Name': '组名称', 'Alarm instance name': '告警实例名称', 'Alarm plugin name': '告警插件名称', 'Select plugin': '选择插件', 'Please enter group name': '请输入组名称', 'Instance parameter exception': '实例参数异常', 'Group Type': '组类型', 'Alarm plugin instance': '告警插件实例', Remarks: '备注', SMS: '短信', 'Managing Users': '管理用户', Permission: '权限', Administrator: '管理员', 'Confirm Password': '确认密码', 'Please enter confirm password': '请输入确认密码', 'Password cannot be in Chinese': '密码不能为中文', 'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码', 'Confirmation password cannot be in Chinese': '确认密码不能为中文', 'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码', 'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认', 'Please select the datasource': '请选择数据源', 'Please select resources': '请选择资源', Query: '查询', 'Non Query': '非查询', 'prop(required)': 'prop(必填)', 'value(optional)': 'value(选填)', 'value(required)': 'value(必填)', 'prop is empty': 'prop不能为空', 'value is empty': 'value不能为空', 'prop is repeat': 'prop中有重复', 'Start Time': '开始时间', 'End Time': '结束时间', crontab: 'crontab', 'Failure Strategy': '失败策略', online: '上线', offline: '下线', 'Task Status': '任务状态', 'Process Instance': '工作流实例', 'Task Instance': '任务实例', 'Select date range': '选择日期区间', startDate: '开始日期', endDate: '结束日期', Date: '日期', Waiting: '等待', Execution: '执行中', Finish: '完成', 'Create File': '创建文件', 'Create folder': '创建文件夹', 'File Name': '文件名称', 'Folder Name': '文件夹名称', 'File Format': '文件格式', 'Folder Format': '文件夹格式', 'File Content': '文件内容', 'Upload File Size': '文件大小不能超过1G', Create: '创建', 'Please enter the resource content': '请输入资源内容', 'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行', 'File Details': '文件详情', 'Download Details': '下载详情', Return: '返回', Save: '保存', 'File Manage': '文件管理', 'Upload Files': '上传文件', 'Create UDF Function': '创建UDF函数', 'Upload UDF Resources': '上传UDF资源', 'Service-Master': '服务管理-Master', 'Service-Worker': '服务管理-Worker', 'Process Name': '工作流名称', Executor: '执行用户', 'Run Type': '运行类型', 'Scheduling Time': '调度时间', 'Run Times': '运行次数', host: 'host', 'fault-tolerant sign': '容错标识', Rerun: '重跑', 'Recovery Failed': '恢复失败', Stop: '停止', Pause: '暂停', 'Recovery Suspend': '恢复运行', Gantt: '甘特图', 'Node Type': '节点类型', 'Submit Time': '提交时间', Duration: '运行时长', 'Retry Count': '重试次数', 'Task Name': '任务名称', 'Task Date': '任务日期', 'Source Table': '源表', 'Record Number': '记录数', 'Target Table': '目标表', 'Online viewing type is not supported': '不支持在线查看类型', Size: '大小', Rename: '重命名', Download: '下载', Export: '导出', 'Version Info': '版本信息', Submit: '提交', 'Edit UDF Function': '编辑UDF函数', type: '类型', 'UDF Function Name': 'UDF函数名称', FILE: '文件', UDF: 'UDF', 'File Subdirectory': '文件子目录', 'Please enter a function name': '请输入函数名', 'Package Name': '包名类名', 'Please enter a Package name': '请输入包名类名', Parameter: '参数', 'Please enter a 
parameter': '请输入参数', 'UDF Resources': 'UDF资源', 'Upload Resources': '上传资源', Instructions: '使用说明', 'Please enter a instructions': '请输入使用说明', 'Please enter a UDF function name': '请输入UDF函数名称', 'Select UDF Resources': '请选择UDF资源', 'Class Name': '类名', 'Jar Package': 'jar包', 'Library Name': '库名', 'UDF Resource Name': 'UDF资源名称', 'File Size': '文件大小', Description: '描述', 'Drag Nodes and Selected Items': '拖动节点和选中项', 'Select Line Connection': '选择线条连接', 'Delete selected lines or nodes': '删除选中的线或节点', 'Full Screen': '全屏', Unpublished: '未发布', 'Start Process': '启动工作流', 'Execute from the current node': '从当前节点开始执行', 'Recover tolerance fault process': '恢复被容错的工作流', 'Resume the suspension process': '恢复运行流程', 'Execute from the failed nodes': '从失败节点开始执行', 'Complement Data': '补数', 'Scheduling execution': '调度执行', 'Recovery waiting thread': '恢复等待线程', 'Submitted successfully': '提交成功', Executing: '正在执行', 'Ready to pause': '准备暂停', 'Ready to stop': '准备停止', 'Need fault tolerance': '需要容错', Kill: 'Kill', 'Waiting for thread': '等待线程', 'Waiting for dependence': '等待依赖', Start: '运行', Copy: '复制节点', 'Copy name': '复制名称', 'Copy path': '复制路径', 'Please enter keyword': '请输入关键词', 'File Upload': '文件上传', 'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!', 'Drag area upload': '拖动区域上传', Upload: '上传', 'ReUpload File': '重新上传文件', 'Please enter file name': '请输入文件名', 'Please select the file to upload': '请选择要上传的文件', 'Resources manage': '资源中心', Security: '安全中心', Logout: '退出', 'No data': '查询无数据', 'Uploading...': '文件上传中', 'Loading...': '正在努力加载中...', List: '列表', 'Unable to download without proper url': '无下载url无法下载', Process: '工作流', 'Process definition': '工作流定义', 'Task record': '任务记录', 'Warning group manage': '告警组管理', 'Warning instance manage': '告警实例管理', 'Servers manage': '服务管理', 'UDF manage': 'UDF管理', 'Resource manage': '资源管理', 'Function manage': '函数管理', 'Edit password': '修改密码', 'Ordinary users': '普通用户', 'Create process': '创建工作流', 'Import process': '导入工作流', 'Timing state': '定时状态', Timing: '定时', TreeView: '树形图', 'Mailbox already exists! 
Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复', 'Mailbox input is illegal': '邮箱输入不合法', 'Please set the parameters before starting': '启动前请先设置参数', Continue: '继续', End: '结束', 'Node execution': '节点执行', 'Backward execution': '向后执行', 'Forward execution': '向前执行', 'Execute only the current node': '仅执行当前节点', 'Notification strategy': '通知策略', 'Notification group': '通知组', 'Please select a notification group': '请选择通知组', receivers: '收件人', receiverCcs: '抄送人', 'Whether it is a complement process?': '是否补数', 'Schedule date': '调度日期', 'Mode of execution': '执行方式', 'Serial execution': '串行执行', 'Parallel execution': '并行执行', 'Set parameters before timing': '定时前请先设置参数', 'Start and stop time': '起止时间', 'Please select time': '请选择时间', 'Please enter crontab': '请输入crontab', none_1: '都不发', success_1: '成功发', failure_1: '失败发', All_1: '成功或失败都发', Toolbar: '工具栏', 'View variables': '查看变量', 'Format DAG': '格式化DAG', 'Refresh DAG status': '刷新DAG状态', Return_1: '返回上一节点', 'Please enter format': '请输入格式为', 'connection parameter': '连接参数', 'Process definition details': '流程定义详情', 'Create process definition': '创建流程定义', 'Scheduled task list': '定时任务列表', 'Process instance details': '流程实例详情', 'Create Resource': '创建资源', 'User Center': '用户中心', 'Please enter method': '请输入方法', None: '无', Name: '名称', 'Process priority': '流程优先级', 'Task priority': '任务优先级', 'Task timeout alarm': '任务超时告警', 'Timeout strategy': '超时策略', 'Timeout alarm': '超时告警', 'Timeout failure': '超时失败', 'Timeout period': '超时时长', 'Waiting Dependent complete': '等待依赖完成', 'Waiting Dependent start': '等待依赖启动', 'Check interval': '检查间隔', 'Timeout must be longer than check interval': '超时时间必须比检查间隔长', 'Timeout strategy must be selected': '超时策略必须选一个', 'Timeout must be a positive integer': '超时时长必须为正整数', 'Add dependency': '添加依赖', and: '且', or: '或', month: '月', week: '周', day: '日', hour: '时', Running: '正在运行', 'Waiting for dependency to complete': '等待依赖完成', Selected: '已选', CurrentHour: '当前小时', Last1Hour: '前1小时', Last2Hours: '前2小时', Last3Hours: '前3小时', Last24Hours: '前24小时', today: '今天', Last1Days: '昨天', Last2Days: '前两天', Last3Days: '前三天', Last7Days: '前七天', ThisWeek: '本周', LastWeek: '上周', LastMonday: '上周一', LastTuesday: '上周二', LastWednesday: '上周三', LastThursday: '上周四', LastFriday: '上周五', LastSaturday: '上周六', LastSunday: '上周日', ThisMonth: '本月', LastMonth: '上月', LastMonthBegin: '上月初', LastMonthEnd: '上月末', 'Refresh status succeeded': '刷新状态成功', 'Queue manage': 'Yarn 队列管理', 'Create queue': '创建队列', 'Edit queue': '编辑队列', 'Datasource manage': '数据源中心', 'History task record': '历史任务记录', 'Please go online': '不要忘记上线', 'Queue value': '队列值', 'Please enter queue value': '请输入队列值', 'Worker group manage': 'Worker分组管理', 'Create worker group': '创建Worker分组', 'Edit worker group': '编辑Worker分组', 'Token manage': '令牌管理', 'Create token': '创建令牌', 'Edit token': '编辑令牌', Addresses: '地址', 'Worker Addresses': 'Worker地址', 'Please select the worker addresses': '请选择Worker地址', 'Failure time': '失效时间', 'Expiration time': '失效时间', User: '用户', 'Please enter token': '请输入令牌', 'Generate token': '生成令牌', Monitor: '监控中心', Group: '分组', 'Queue statistics': '队列统计', 'Command status statistics': '命令状态统计', 'Task kill': '等待kill任务', 'Task queue': '等待执行任务', 'Error command count': '错误指令数', 'Normal command count': '正确指令数', Manage: '管理', 'Number of connections': '连接数', Sent: '发送量', Received: '接收量', 'Min latency': '最低延时', 'Avg latency': '平均延时', 'Max latency': '最大延时', 'Node count': '节点数', 'Query time': '当前查询时间', 'Node self-test status': '节点自检状态', 'Health status': '健康状态', 'Max connections': '最大连接数', 'Threads connections': '当前连接数', 'Max used connections': 
'同时使用连接最大数', 'Threads running connections': '数据库当前活跃连接数', 'Worker group': 'Worker分组', 'Please enter a positive integer greater than 0': '请输入大于 0 的正整数', 'Pre Statement': '前置sql', 'Post Statement': '后置sql', 'Statement cannot be empty': '语句不能为空', 'Process Define Count': '工作流定义数', 'Process Instance Running Count': '正在运行的流程数', 'command number of waiting for running': '待执行的命令数', 'failure command number': '执行失败的命令数', 'tasks number of waiting running': '待运行任务数', 'task number of ready to kill': '待杀死任务数', 'Statistics manage': '统计管理', statistics: '统计', 'select tenant': '选择租户', 'Please enter Principal': '请输入Principal', 'Please enter the kerberos authentication parameter java.security.krb5.conf': '请输入kerberos认证参数 java.security.krb5.conf', 'Please enter the kerberos authentication parameter login.user.keytab.username': '请输入kerberos认证参数 login.user.keytab.username', 'Please enter the kerberos authentication parameter login.user.keytab.path': '请输入kerberos认证参数 login.user.keytab.path', 'The start time must not be the same as the end': '开始时间和结束时间不能相同', 'Startup parameter': '启动参数', 'Startup type': '启动类型', 'warning of timeout': '超时告警', 'Next five execution times': '接下来五次执行时间', 'Execute time': '执行时间', 'Complement range': '补数范围', 'Http Url': '请求地址', 'Http Method': '请求类型', 'Http Parameters': '请求参数', 'Http Parameters Key': '参数名', 'Http Parameters Position': '参数位置', 'Http Parameters Value': '参数值', 'Http Check Condition': '校验条件', 'Http Condition': '校验内容', 'Please Enter Http Url': '请填写请求地址(必填)', 'Please Enter Http Condition': '请填写校验内容', 'There is no data for this period of time': '该时间段无数据', 'Worker addresses cannot be empty': 'Worker地址不能为空', 'Please generate token': '请生成Token', 'Spark Version': 'Spark版本', TargetDataBase: '目标库', TargetTable: '目标表', 'Please enter the table of target': '请输入目标表名', 'Please enter a Target Table(required)': '请输入目标表(必填)', SpeedByte: '限流(字节数)', SpeedRecord: '限流(记录数)', '0 means unlimited by byte': 'KB,0代表不限制', '0 means unlimited by count': '0代表不限制', 'Modify User': '修改用户', 'Whether directory': '是否文件夹', Yes: '是', No: '否', 'Hadoop Custom Params': 'Hadoop参数', 'Sqoop Advanced Parameters': 'Sqoop参数', 'Sqoop Job Name': '任务名称', 'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)', 'Please enter Mysql Table(required)': '请输入Mysql表名(必填)', 'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开', 'Please enter Target Dir(required)': '请输入目标路径(必填)', 'Please enter Export Dir(required)': '请输入数据源路径(必填)', 'Please enter Hive Database(required)': '请输入Hive数据库(必填)', 'Please enter Hive Table(required)': '请输入Hive表名(必填)', 'Please enter Hive Partition Keys': '请输入分区键', 'Please enter Hive Partition Values': '请输入分区值', 'Please enter Replace Delimiter': '请输入替换分隔符', 'Please enter Fields Terminated': '请输入列分隔符', 'Please enter Lines Terminated': '请输入行分隔符', 'Please enter Concurrency': '请输入并发度', 'Please enter Update Key': '请输入更新列', 'Please enter Job Name(required)': '请输入任务名称(必填)', 'Please enter Custom Shell(required)': '请输入自定义脚本', Direct: '流向', Type: '类型', ModelType: '模式', ColumnType: '列类型', Database: '数据库', Column: '列', 'Map Column Hive': 'Hive类型映射', 'Map Column Java': 'Java类型映射', 'Export Dir': '数据源路径', 'Hive partition Keys': 'Hive 分区键', 'Hive partition Values': 'Hive 分区值', FieldsTerminated: '列分隔符', LinesTerminated: '行分隔符', IsUpdate: '是否更新', UpdateKey: '更新列', UpdateMode: '更新类型', 'Target Dir': '目标路径', DeleteTargetDir: '是否删除目录', FileType: '保存格式', CompressionCodec: '压缩类型', CreateHiveTable: '是否创建新表', DropDelimiter: '是否删除分隔符', OverWriteSrc: '是否覆盖数据源', ReplaceDelimiter: '替换分隔符', Concurrency: '并发度', Form: '表单', OnlyUpdate: '只更新', 
AllowInsert: '无更新便插入', 'Data Source': '数据来源', 'Data Target': '数据目的', 'All Columns': '全表导入', 'Some Columns': '选择列', 'Branch flow': '分支流转', 'Custom Job': '自定义任务', 'Custom Script': '自定义脚本', 'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点', 'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填', 'No resources exist': '不存在资源', 'Please delete all non-existing resources': '请删除所有不存在资源', 'Unauthorized or deleted resources': '未授权或已删除资源', 'Please delete all non-existent resources': '请删除所有未授权或已删除资源', Kinship: '工作流关系', Reset: '重置', KinshipStateActive: '当前选择', KinshipState1: '已上线', KinshipState0: '工作流未上线', KinshipState10: '调度未上线', 'Dag label display control': 'Dag节点名称显隐', Enable: '启用', Disable: '停用', 'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!', 'Please confirm whether the workflow has been saved before downloading': '下载前请确定工作流是否已保存', 'User name length is between 3 and 39': '用户名长度在3~39之间', 'Timeout Settings': '超时设置', 'Connect Timeout': '连接超时', 'Socket Timeout': 'Socket超时', 'Connect timeout be a positive integer': '连接超时必须为数字', 'Socket Timeout be a positive integer': 'Socket超时必须为数字', ms: '毫秒', 'Please Enter Url': '请直接填写地址,例如:127.0.0.1:7077', Master: 'Master', 'Please select the waterdrop resources': '请选择waterdrop配置文件', zkDirectory: 'zk注册目录', 'Directory detail': '查看目录详情', 'Connection name': '连线名', 'Current connection settings': '当前连线设置', 'Please save the DAG before formatting': '格式化前请先保存DAG', 'Batch copy': '批量复制', 'Related items': '关联项目', 'Project name is required': '项目名称必填', 'Batch move': '批量移动', Version: '版本', 'Pre tasks': '前置任务', 'Running Memory': '运行内存', 'Max Memory': '最大内存', 'Min Memory': '最小内存', 'The workflow canvas is abnormal and cannot be saved, please recreate': '该工作流画布异常,无法保存,请重新创建', Info: '提示', 'Datasource userName': '所属用户', 'Resource userName': '所属用户' }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,990
[Feature][Procedure] Procedure task supports db2 procedure
**Describe the feature** The Procedure task needs to support DB2 stored procedures.
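For orientation, a DB2 stored procedure is invoked through plain JDBC with a `CallableStatement`, the same mechanism the Procedure task already relies on for the databases it supports, so DB2 support is plausibly a matter of wiring a DB2 data source into that path. Below is a minimal sketch; the JDBC URL, credentials, and the procedure name `test_proc` are illustrative assumptions, not taken from this issue or the linked PR, and it assumes the IBM DB2 JDBC driver (db2jcc4) is on the classpath.

```java
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Types;

// Minimal sketch: calling a DB2 stored procedure over JDBC.
// Everything below (URL, credentials, procedure name and signature) is hypothetical.
public class Db2ProcedureSketch {

    public static void main(String[] args) throws SQLException {
        String url = "jdbc:db2://localhost:50000/sample"; // 50000 is the DB2 default port
        try (Connection conn = DriverManager.getConnection(url, "db2user", "db2pass");
             // Standard JDBC escape syntax for stored procedure calls
             CallableStatement stmt = conn.prepareCall("{call test_proc(?, ?)}")) {
            stmt.setInt(1, 42);                          // IN parameter
            stmt.registerOutParameter(2, Types.VARCHAR); // OUT parameter
            stmt.execute();
            System.out.println("OUT value: " + stmt.getString(2));
        }
    }
}
```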
https://github.com/apache/dolphinscheduler/issues/3990
https://github.com/apache/dolphinscheduler/pull/4094
2a59ed092ca1eea733a28f3ceb7f03776582ecb6
88a07f7b55d164b6a022d2f9df75ecbb258c280f
"2020-10-26T01:14:30Z"
java
"2021-04-29T10:29:44Z"
pom.xml
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>${revision}</version> <packaging>pom</packaging> <name>${project.artifactId}</name> <url>http://dolphinscheduler.apache.org</url> <description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. </description> <licenses> <license> <name>Apache License 2.0</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> <distribution>repo</distribution> </license> </licenses> <scm> <connection>scm:git:https://github.com/apache/dolphinscheduler.git</connection> <developerConnection>scm:git:https://github.com/apache/dolphinscheduler.git</developerConnection> <url>https://github.com/apache/dolphinscheduler</url> <tag>HEAD</tag> </scm> <mailingLists> <mailingList> <name>DolphinScheduler Developer List</name> <post>dev@dolphinscheduler.apache.org</post> <subscribe>dev-subscribe@dolphinscheduler.apache.org</subscribe> <unsubscribe>dev-unsubscribe@dolphinscheduler.apache.org</unsubscribe> </mailingList> </mailingLists> <parent> <groupId>org.apache</groupId> <artifactId>apache</artifactId> <version>21</version> </parent> <properties> <revision>1.3.6-SNAPSHOT</revision> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <curator.version>4.3.0</curator.version> <spring.version>5.1.19.RELEASE</spring.version> <spring.boot.version>2.1.18.RELEASE</spring.boot.version> <java.version>1.8</java.version> <logback.version>1.2.3</logback.version> <hadoop.version>2.7.3</hadoop.version> <quartz.version>2.3.0</quartz.version> <jackson.version>2.10.5</jackson.version> <mybatis-plus.version>3.2.0</mybatis-plus.version> <mybatis.spring.version>2.0.1</mybatis.spring.version> <cron.utils.version>5.0.5</cron.utils.version> <druid.version>1.1.22</druid.version> <h2.version>1.4.200</h2.version> <commons.codec.version>1.11</commons.codec.version> <commons.logging.version>1.1.1</commons.logging.version> <httpclient.version>4.4.1</httpclient.version> <httpcore.version>4.4.1</httpcore.version> <junit.version>4.12</junit.version> <mysql.connector.version>5.1.34</mysql.connector.version> <slf4j.api.version>1.7.5</slf4j.api.version> <slf4j.log4j12.version>1.7.5</slf4j.log4j12.version> 
<commons.collections.version>3.2.2</commons.collections.version> <commons.httpclient>3.0.1</commons.httpclient> <commons.beanutils.version>1.9.4</commons.beanutils.version> <commons.configuration.version>1.10</commons.configuration.version> <commons.email.version>1.5</commons.email.version> <poi.version>3.17</poi.version> <javax.servlet.api.version>3.1.0</javax.servlet.api.version> <commons.collections4.version>4.1</commons.collections4.version> <guava.version>24.1-jre</guava.version> <postgresql.version>42.2.5</postgresql.version> <hive.jdbc.version>2.1.0</hive.jdbc.version> <commons.io.version>2.4</commons.io.version> <oshi.core.version>3.9.1</oshi.core.version> <clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version> <mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version> <presto.jdbc.version>0.238.1</presto.jdbc.version> <spotbugs.version>3.1.12</spotbugs.version> <checkstyle.version>3.0.0</checkstyle.version> <zookeeper.version>3.4.14</zookeeper.version> <frontend-maven-plugin.version>1.6</frontend-maven-plugin.version> <maven-compiler-plugin.version>3.3</maven-compiler-plugin.version> <maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version> <maven-release-plugin.version>2.5.3</maven-release-plugin.version> <maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version> <maven-source-plugin.version>2.4</maven-source-plugin.version> <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version> <maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version> <rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version> <jacoco.version>0.8.4</jacoco.version> <jcip.version>1.0</jcip.version> <maven.deploy.skip>false</maven.deploy.skip> <cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version> <mockito.version>2.21.0</mockito.version> <powermock.version>2.0.2</powermock.version> <servlet-api.version>2.5</servlet-api.version> <swagger.version>1.9.3</swagger.version> <springfox.version>2.9.2</springfox.version> <swagger-models.version>1.5.24</swagger-models.version> <guava-retry.version>2.0.0</guava-retry.version> <dep.airlift.version>0.184</dep.airlift.version> <dep.packaging.version>${dep.airlift.version}</dep.packaging.version> <protostuff.version>1.7.2</protostuff.version> <reflections.version>0.9.12</reflections.version> <byte-buddy.version>1.9.16</byte-buddy.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <!-- quartz--> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz-jobs</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> <version>${cron.utils.version}</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>${druid.version}</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-parent</artifactId> <version>${spring.boot.version}</version> <type>pom</type> <scope>import</scope> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> 
<version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-tx</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-jdbc</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <version>${spring.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-server</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert-plugin</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-dao</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-api</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-remote</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-service</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-spi</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-framework</artifactId> <version>${curator.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-recipes</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </exclusion> <exclusion> <artifactId>netty</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-annotations</artifactId> </exclusion> </exclusions> <version>${zookeeper.version}</version> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec.version}</version> </dependency> <dependency> <groupId>commons-logging</groupId> <artifactId>commons-logging</artifactId> <version>${commons.logging.version}</version> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>${httpclient.version}</version> </dependency> <dependency> 
<groupId>org.apache.httpcomponents</groupId> <artifactId>httpcore</artifactId> <version>${httpcore.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-core</artifactId> <version>${jackson.version}</version> </dependency> <!--protostuff--> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-core --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-core</artifactId> <version>${protostuff.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-runtime --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-runtime</artifactId> <version>${protostuff.version}</version> </dependency> <dependency> <groupId>net.bytebuddy</groupId> <artifactId>byte-buddy</artifactId> <version>${byte-buddy.version}</version> </dependency> <dependency> <groupId>org.reflections</groupId> <artifactId>reflections</artifactId> <version>${reflections.version}</version> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockito.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-module-junit4</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-api-mockito2</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> <exclusions> <exclusion> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>${mysql.connector.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> <version>${h2.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${slf4j.api.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>${slf4j.log4j12.version}</version> </dependency> <dependency> <groupId>commons-collections</groupId> <artifactId>commons-collections</artifactId> <version>${commons.collections.version}</version> </dependency> <dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> <version>${commons.httpclient}</version> </dependency> <dependency> <groupId>commons-beanutils</groupId> <artifactId>commons-beanutils</artifactId> <version>${commons.beanutils.version}</version> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> <version>${commons.configuration.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> 
<version>${logback.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-email</artifactId> <version>${commons.email.version}</version> </dependency> <!--excel poi--> <dependency> <groupId>org.apache.poi</groupId> <artifactId>poi</artifactId> <version>${poi.version}</version> </dependency> <!-- hadoop --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> <exclusions> <exclusion> <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <artifactId>com.sun.jersey</artifactId> <groupId>jersey-json</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-yarn-common</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-aws</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-collections4</artifactId> <version>${commons.collections4.version}</version> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> <version>${postgresql.version}</version> </dependency> <dependency> <groupId>org.apache.hive</groupId> <artifactId>hive-jdbc</artifactId> <version>${hive.jdbc.version}</version> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>${commons.io.version}</version> </dependency> <dependency> <groupId>com.github.oshi</groupId> <artifactId>oshi-core</artifactId> <version>${oshi.core.version}</version> </dependency> <dependency> <groupId>ru.yandex.clickhouse</groupId> <artifactId>clickhouse-jdbc</artifactId> <version>${clickhouse.jdbc.version}</version> </dependency> <dependency> <groupId>com.microsoft.sqlserver</groupId> <artifactId>mssql-jdbc</artifactId> <version>${mssql.jdbc.version}</version> </dependency> <dependency> <groupId>com.facebook.presto</groupId> <artifactId>presto-jdbc</artifactId> <version>${presto.jdbc.version}</version> </dependency> <dependency> <groupId>net.jcip</groupId> <artifactId>jcip-annotations</artifactId> <version>${jcip.version}</version> <optional>true</optional> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>servlet-api</artifactId> <version>${servlet-api.version}</version> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>javax.servlet-api</artifactId> <version>${javax.servlet.api.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger2</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger-ui</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.swagger</groupId> <artifactId>swagger-models</artifactId> <version>${swagger-models.version}</version> </dependency> <dependency> <groupId>com.github.xiaoymin</groupId> <artifactId>swagger-bootstrap-ui</artifactId> 
<version>${swagger.version}</version> </dependency> <dependency> <groupId>com.github.rholder</groupId> <artifactId>guava-retrying</artifactId> <version>${guava-retry.version}</version> </dependency> <dependency> <groupId>org.sonatype.aether</groupId> <artifactId>aether-api</artifactId> <version>1.13.1</version> </dependency> <dependency> <groupId>io.airlift.resolver</groupId> <artifactId>resolver</artifactId> <version>1.5</version> </dependency> <dependency> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> <version>6.2.1</version> </dependency> <dependency> <groupId>javax.activation</groupId> <artifactId>activation</artifactId> <version>1.1</version> </dependency> <dependency> <groupId>com.sun.mail</groupId> <artifactId>javax.mail</artifactId> <version>1.6.2</version> </dependency> </dependencies> </dependencyManagement> <build> <finalName>apache-dolphinscheduler-${project.version}</finalName> <pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <version>1.0.0</version> <extensions>true</extensions> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <version>1.0.4</version> <extensions>true</extensions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>rpm-maven-plugin</artifactId> <version>${rpm-maven-plugion.version}</version> <inherited>false</inherited> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>${java.version}</source> <target>${java.version}</target> <testSource>${java.version}</testSource> <testTarget>${java.version}</testTarget> </configuration> <version>${maven-compiler-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <tagNameFormat>@{project.version}</tagNameFormat> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-assembly-plugin</artifactId> <version>${maven-assembly-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <configuration> <source>8</source> <failOnError>false</failOnError> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>${maven-source-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> <version>${maven-dependency-plugin.version}</version> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <extensions>true</extensions> <!--<configuration>--> <!--<allowedProvidedDependencies>--> <!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>--> <!--</allowedProvidedDependencies>--> <!--</configuration>--> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <extensions>true</extensions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> 
<executions> <execution> <id>attach-sources</id> <phase>verify</phase> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <executions> <execution> <id>attach-javadocs</id> <goals> <goal>jar</goal> </goals> </execution> </executions> <configuration> <aggregate>true</aggregate> <charset>${project.build.sourceEncoding}</charset> <encoding>${project.build.sourceEncoding}</encoding> <docencoding>${project.build.sourceEncoding}</docencoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <autoVersionSubmodules>true</autoVersionSubmodules> <tagNameFormat>@{project.version}</tagNameFormat> <tagBase>${project.version}</tagBase> <!--<goals>-f pom.xml deploy</goals>--> </configuration> <dependencies> <dependency> <groupId>org.apache.maven.scm</groupId> <artifactId>maven-scm-provider-jgit</artifactId> <version>1.9.5</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>${maven-compiler-plugin.version}</version> <configuration> <source>${java.version}</source> <target>${java.version}</target> <encoding>${project.build.sourceEncoding}</encoding> <skip>false</skip><!--not skip compile test classes--> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>${maven-surefire-plugin.version}</version> <configuration> <includes> <include>**/api/controller/ProjectControllerTest.java</include> <include>**/api/controller/QueueControllerTest.java</include> <include>**/api/configuration/TrafficConfigurationTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TenantControllerTest.java</include> <include>**/api/dto/resources/filter/ResourceFilterTest.java</include> <include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include> <include>**/api/enums/testGetEnum.java</include> <include>**/api/enums/StatusTest.java</include> <include>**/api/exceptions/ApiExceptionHandlerTest.java</include> <include>**/api/exceptions/ServiceExceptionTest.java</include> <include>**/api/interceptor/LocaleChangeInterceptorTest.java</include> <include>**/api/interceptor/LoginHandlerInterceptorTest.java</include> <include>**/api/interceptor/RateLimitInterceptorTest.java</include> <include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include> <include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include> <include>**/api/security/SecurityConfigLDAPTest.java</include> <include>**/api/security/SecurityConfigPasswordTest.java</include> <include>**/api/service/AccessTokenServiceTest.java</include> <include>**/api/service/AlertGroupServiceTest.java</include> <include>**/api/service/BaseDAGServiceTest.java</include> <include>**/api/service/BaseServiceTest.java</include> <include>**/api/service/DataAnalysisServiceTest.java</include> <include>**/api/service/AlertPluginInstanceServiceTest.java</include> <include>**/api/service/DataSourceServiceTest.java</include> <include>**/api/service/ExecutorService2Test.java</include> <include>**/api/service/ExecutorServiceTest.java</include> 
<include>**/api/service/LoggerServiceTest.java</include> <include>**/api/service/MonitorServiceTest.java</include> <include>**/api/service/ProcessDefinitionServiceTest.java</include> <include>**/api/service/ProcessDefinitionVersionServiceTest.java</include> <include>**/api/service/ProcessInstanceServiceTest.java</include> <include>**/api/service/ProjectServiceTest.java</include> <include>**/api/service/QueueServiceTest.java</include> <include>**/api/service/ResourcesServiceTest.java</include> <include>**/api/service/SchedulerServiceTest.java</include> <include>**/api/service/SessionServiceTest.java</include> <include>**/api/service/TaskInstanceServiceTest.java</include> <include>**/api/service/TenantServiceTest.java</include> <include>**/api/service/UdfFuncServiceTest.java</include> <include>**/api/service/UiPluginServiceTest.java</include> <include>**/api/service/UserAlertGroupServiceTest.java</include> <include>**/api/service/UsersServiceTest.java</include> <include>**/api/service/WorkerGroupServiceTest.java</include> <include>**/api/service/WorkFlowLineageServiceTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TaskInstanceControllerTest.java</include> <include>**/api/controller/WorkFlowLineageControllerTest.java</include> <include>**/api/utils/exportprocess/DataSourceParamTest.java</include> <include>**/api/utils/exportprocess/DependentParamTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/FileUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/ResultTest.java</include> <include>**/common/graph/DAGTest.java</include> <include>**/common/os/OshiTest.java</include> <include>**/common/os/OSUtilsTest.java</include> <include>**/common/shell/ShellExecutorTest.java</include> <include>**/common/task/DataxParametersTest.java</include> <include>**/common/task/EntityTestUtils.java</include> <include>**/common/task/FlinkParametersTest.java</include> <include>**/common/task/HttpParametersTest.java</include> <include>**/common/task/SqlParametersTest.java</include> <include>**/common/task/SqoopParameterEntityTest.java</include> <include>**/common/threadutils/ThreadPoolExecutorsTest.java</include> <include>**/common/threadutils/ThreadUtilsTest.java</include> <include>**/common/utils/CollectionUtilsTest.java</include> <include>**/common/utils/CommonUtilsTest.java</include> <include>**/common/utils/DateUtilsTest.java</include> <include>**/common/utils/DependentUtilsTest.java</include> <include>**/common/utils/EncryptionUtilsTest.java</include> <include>**/common/utils/FileUtilsTest.java</include> <include>**/common/utils/JSONUtilsTest.java</include> <include>**/common/utils/LoggerUtilsTest.java</include> <include>**/common/utils/NetUtilsTest.java</include> <include>**/common/utils/OSUtilsTest.java</include> <include>**/common/utils/ParameterUtilsTest.java</include> <include>**/common/utils/TimePlaceholderUtilsTest.java</include> <include>**/common/utils/PreconditionsTest.java</include> <include>**/common/utils/PropertyUtilsTest.java</include> <include>**/common/utils/SchemaUtilsTest.java</include> <include>**/common/utils/ScriptRunnerTest.java</include> <include>**/common/utils/SensitiveLogUtilsTest.java</include> <include>**/common/utils/StringTest.java</include> <include>**/common/utils/StringUtilsTest.java</include> <include>**/common/utils/TaskParametersUtilsTest.java</include> 
<include>**/common/utils/VarPoolUtilsTest.java</include> <include>**/common/utils/HadoopUtilsTest.java</include> <include>**/common/utils/HttpUtilsTest.java</include> <include>**/common/utils/KerberosHttpClientTest.java</include> <include>**/common/utils/HiveConfUtilsTest.java</include> <include>**/common/ConstantsTest.java</include> <include>**/common/utils/HadoopUtils.java</include> <include>**/common/utils/RetryerUtilsTest.java</include> <include>**/common/plugin/DolphinSchedulerPluginLoaderTest.java</include> <include>**/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java</include> <include>**/common/datasource/db2/Db2DatasourceProcessorTest.java</include> <include>**/common/datasource/hive/HiveDatasourceProcessorTest.java</include> <include>**/common/datasource/mysql/MysqlDatasourceProcessorTest.java</include> <include>**/common/datasource/oracle/OracleDatasourceProcessorTest.java</include> <include>**/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java</include> <include>**/common/datasource/presto/PrestoDatasourceProcessorTest.java</include> <include>**/common/datasource/spark/SparkDatasourceProcessorTest.java</include> <include>**/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java</include> <include>**/common/datasource/DatasourceUtilTest.java</include> <include>**/common/enums/ExecutionStatusTest</include> <include>**/dao/mapper/AccessTokenMapperTest.java</include> <include>**/dao/mapper/AlertGroupMapperTest.java</include> <include>**/dao/mapper/CommandMapperTest.java</include> <include>**/dao/mapper/ConnectionFactoryTest.java</include> <include>**/dao/mapper/DataSourceMapperTest.java</include> <include>**/dao/datasource/MySQLDataSourceTest.java</include> <include>**/dao/entity/TaskInstanceTest.java</include> <include>**/dao/entity/UdfFuncTest.java</include> <include>**/remote/command/alert/AlertSendRequestCommandTest.java</include> <include>**/remote/command/alert/AlertSendResponseCommandTest.java</include> <include>**/remote/command/future/ResponseFutureTest.java</include> <include>**/remote/command/log/RemoveTaskLogRequestCommandTest.java</include> <include>**/remote/command/log/RemoveTaskLogResponseCommandTest.java</include> <include>**/remote/utils/HostTest.java</include> <include>**/remote/utils/NettyUtilTest.java</include> <include>**/remote/NettyRemotingClientTest.java</include> <include>**/rpc/RpcTest.java</include> <include>**/server/log/LoggerServerTest.java</include> <include>**/server/entity/SQLTaskExecutionContextTest.java</include> <include>**/server/log/MasterLogFilterTest.java</include> <include>**/server/log/SensitiveDataConverterTest.java</include> <include>**/server/log/LoggerRequestProcessorTest.java</include> <!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>--> <include>**/server/log/TaskLogFilterTest.java</include> <include>**/server/log/WorkerLogFilterTest.java</include> <include>**/server/master/config/MasterConfigTest.java</include> <include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include> <include>**/server/master/runner/MasterTaskExecThreadTest.java</include> <!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>--> <include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include> <include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include> 
<include>**/server/master/dispatch/host/assign/HostWorkerTest.java</include> <include>**/server/master/register/MasterRegistryTest.java</include> <include>**/server/master/registry/ServerNodeManagerTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include> <include>**/server/master/AlertManagerTest.java</include> <include>**/server/master/MasterCommandTest.java</include> <include>**/server/master/DependentTaskTest.java</include> <include>**/server/master/ConditionsTaskTest.java</include> <include>**/server/master/MasterExecThreadTest.java</include> <include>**/server/master/ParamsTest.java</include> <include>**/server/master/SubProcessTaskTest.java</include> <include>**/server/master/processor/TaskAckProcessorTest.java</include> <include>**/server/master/processor/TaskKillResponseProcessorTest.java</include> <include>**/server/master/processor/queue/TaskResponseServiceTest.java</include> <include>**/server/master/zk/ZKMasterClientTest.java</include> <include>**/server/register/ZookeeperRegistryCenterTest.java</include> <include>**/server/utils/DataxUtilsTest.java</include> <include>**/server/utils/ExecutionContextTestUtils.java</include> <include>**/server/utils/FlinkArgsUtilsTest.java</include> <include>**/server/utils/LogUtilsTest.java</include> <include>**/server/utils/MapReduceArgsUtilsTest.java</include> <include>**/server/utils/ParamUtilsTest.java</include> <include>**/server/utils/ProcessUtilsTest.java</include> <include>**/server/utils/SparkArgsUtilsTest.java</include> <include>**/server/worker/processor/TaskCallbackServiceTest.java</include> <include>**/server/worker/processor/TaskExecuteProcessorTest.java</include> <include>**/server/worker/registry/WorkerRegistryTest.java</include> <include>**/server/worker/shell/ShellCommandExecutorTest.java</include> <include>**/server/worker/sql/SqlExecutorTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/EnvFileTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/datax/DataxTaskTest.java</include> <!--<include>**/server/worker/task/http/HttpTaskTest.java</include>--> <include>**/server/worker/task/sqoop/SqoopTaskTest.java</include> <include>**/server/worker/task/shell/ShellTaskTest.java</include> <include>**/server/worker/task/TaskManagerTest.java</include> <include>**/server/worker/task/AbstractCommandExecutorTest.java</include> <include>**/server/worker/task/ShellTaskReturnTest.java</include> <include>**/server/worker/task/sql/SqlTaskTest.java</include> <include>**/server/worker/EnvFileTest.java</include> <include>**/server/worker/runner/TaskExecuteThreadTest.java</include> <include>**/server/worker/runner/WorkerManagerThreadTest.java</include> <include>**/service/quartz/cron/CronUtilsTest.java</include> <include>**/service/process/ProcessServiceTest.java</include> <include>**/service/zk/DefaultEnsembleProviderTest.java</include> <include>**/service/zk/ZKServerTest.java</include> <include>**/service/zk/CuratorZookeeperClientTest.java</include> <include>**/service/zk/RegisterOperatorTest.java</include> <include>**/service/queue/TaskUpdateQueueTest.java</include> <include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include> <include>**/service/log/LogClientServiceTest.java</include> <include>**/service/alert/AlertClientServiceTest.java</include> <include>**/dao/mapper/DataSourceUserMapperTest.java</include> 
<!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>--> <include>**/dao/mapper/ProcessDefinitionMapperTest.java</include> <include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapperTest.java</include> <include>**/dao/mapper/ProjectMapperTest.java</include> <include>**/dao/mapper/ProjectUserMapperTest.java</include> <include>**/dao/mapper/QueueMapperTest.java</include> <include>**/dao/mapper/ResourceUserMapperTest.java</include> <include>**/dao/mapper/ScheduleMapperTest.java</include> <include>**/dao/mapper/SessionMapperTest.java</include> <include>**/dao/mapper/TaskInstanceMapperTest.java</include> <include>**/dao/mapper/TenantMapperTest.java</include> <include>**/dao/mapper/UdfFuncMapperTest.java</include> <include>**/dao/mapper/UDFUserMapperTest.java</include> <include>**/dao/mapper/UserMapperTest.java</include> <include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include> <include>**/dao/mapper/PluginDefineTest.java</include> <include>**/dao/utils/DagHelperTest.java</include> <include>**/dao/AlertDaoTest.java</include> <include>**/dao/datasource/OracleDataSourceTest.java</include> <include>**/dao/datasource/HiveDataSourceTest.java</include> <include>**/dao/datasource/BaseDataSourceTest.java</include> <include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include> <include>**/dao/upgrade/WokrerGrouopDaoTest.java</include> <include>**/dao/upgrade/UpgradeDaoTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelTest.java</include> <include>**/plugin/alert/email/ExcelUtilsTest.java</include> <include>**/plugin/alert/email/MailUtilsTest.java</include> <include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include> <include>**/plugin/alert/wechat/WeChatSenderTest.java</include> <include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ProcessUtilsTest.java</include> <include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ScriptSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelTest.java</include> <include>**/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java</include> <include>**/plugin/alert/feishu/FeiShuSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertPluginTest.java</include> <include>**/plugin/alert/http/HttpSenderTest.java</include> <include>**/spi/params/PluginParamsTransferTest.java</include> <include>**/alert/plugin/EmailAlertPluginTest.java</include> <include>**/alert/plugin/AlertPluginManagerTest.java</include> <include>**/alert/plugin/DolphinPluginLoaderTest.java</include> <include>**/alert/utils/DingTalkUtilsTest.java</include> <include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include> <include>**/alert/utils/FuncUtilsTest.java</include> <include>**/alert/processor/AlertRequestProcessorTest.java</include> <include>**/alert/runner/AlertSenderTest.java</include> <include>**/alert/AlertServerTest.java</include> </includes> <!-- <skip>true</skip> --> </configuration> </plugin> <!-- jenkins plugin jacoco report--> 
<plugin> <groupId>org.jacoco</groupId> <artifactId>jacoco-maven-plugin</artifactId> <version>${jacoco.version}</version> <configuration> <destFile>target/jacoco.exec</destFile> <dataFile>target/jacoco.exec</dataFile> </configuration> <executions> <execution> <id>jacoco-initialize</id> <goals> <goal>prepare-agent</goal> </goals> </execution> <execution> <id>jacoco-site</id> <phase>test</phase> <goals> <goal>report</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-maven-plugin</artifactId> <version>${spotbugs.version}</version> <configuration> <xmlOutput>true</xmlOutput> <threshold>medium</threshold> <effort>default</effort> <excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile> <failOnError>true</failOnError> </configuration> <dependencies> <dependency> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs</artifactId> <version>4.0.0-beta4</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-checkstyle-plugin</artifactId> <version>${checkstyle.version}</version> <dependencies> <dependency> <groupId>com.puppycrawl.tools</groupId> <artifactId>checkstyle</artifactId> <version>8.18</version> </dependency> </dependencies> <configuration> <consoleOutput>true</consoleOutput> <encoding>UTF-8</encoding> <configLocation>style/checkstyle.xml</configLocation> <suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation> <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression> <failOnViolation>true</failOnViolation> <violationSeverity>warning</violationSeverity> <includeTestSourceDirectory>true</includeTestSourceDirectory> <sourceDirectories> <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory> </sourceDirectories> <excludes>**\/generated-sources\/</excludes> <skip>true</skip> </configuration> <executions> <execution> <phase>compile</phase> <goals> <goal>check</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${cobertura-maven-plugin.version}</version> <configuration> <check> </check> <aggregate>true</aggregate> <outputDirectory>./target/cobertura</outputDirectory> <encoding>${project.build.sourceEncoding}</encoding> <quiet>true</quiet> <format>xml</format> <instrumentation> <ignoreTrivial>true</ignoreTrivial> </instrumentation> </configuration> </plugin> </plugins> </build> <modules> <module>dolphinscheduler-alert-plugin</module> <module>dolphinscheduler-ui</module> <module>dolphinscheduler-server</module> <module>dolphinscheduler-common</module> <module>dolphinscheduler-api</module> <module>dolphinscheduler-dao</module> <module>dolphinscheduler-alert</module> <module>dolphinscheduler-dist</module> <module>dolphinscheduler-remote</module> <module>dolphinscheduler-service</module> <module>dolphinscheduler-spi</module> <module>dolphinscheduler-microbench</module> </modules> </project>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,408
[Improvement][Dao] Database add connection check
**Describe the question** When the database connection fails, the application should fail fast and must not start successfully. At present, if we set the wrong database parameters, the application still starts successfully, and we have to check the logs to determine whether it is running normally. **Which version of DolphinScheduler:** -[1.3.6-release] -[dev] **Describe alternatives you've considered** Perform the initialization check when the data source is created
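One possible shape of such a check, sketched below, is to eagerly initialize the Druid pool and borrow a single connection at startup, so that a wrong URL, username, or password aborts the boot instead of only surfacing in the logs. This is an illustration under stated assumptions, not necessarily what the linked PR implements; the helper class and method names are hypothetical, while `DruidDataSource.init()` throwing `SQLException` on failure is standard Druid behavior.

```java
import java.sql.Connection;
import java.sql.SQLException;

import com.alibaba.druid.pool.DruidDataSource;

// Hypothetical fail-fast startup check (illustrative sketch only).
public final class DataSourceStartupCheck {

    private DataSourceStartupCheck() {
    }

    // Eagerly initialize the pool and verify one connection can be borrowed,
    // so misconfigured database parameters fail the process at boot time.
    public static void verify(DruidDataSource dataSource) {
        try {
            dataSource.init(); // throws SQLException if the pool cannot be established
            try (Connection ignored = dataSource.getConnection()) {
                // connection borrowed successfully; nothing more to do
            }
        } catch (SQLException e) {
            throw new IllegalStateException("Database is unreachable, refusing to start", e);
        }
    }
}
```

Calling a check like this from the data source factory would turn wrong datasource properties into an immediate startup failure rather than a running-but-broken application.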
https://github.com/apache/dolphinscheduler/issues/5408
https://github.com/apache/dolphinscheduler/pull/5409
0bdf15efb9774ee11afab61f79d5f5547457e402
71f39fa8751252c9b665213e2f3a4a1827da2a27
"2021-04-29T14:45:12Z"
java
"2021-05-06T01:41:44Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.dao.datasource; import com.alibaba.druid.pool.DruidDataSource; import com.baomidou.mybatisplus.core.MybatisConfiguration; import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean; import org.apache.dolphinscheduler.common.Constants; import org.apache.ibatis.mapping.Environment; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.SqlSessionFactory; import org.apache.ibatis.transaction.TransactionFactory; import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory; import org.mybatis.spring.SqlSessionTemplate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.sql.DataSource; /** * not spring manager connection, only use for init db, and alert module for non-spring application * data source connection factory */ public class ConnectionFactory extends SpringConnectionFactory { private static final Logger logger = LoggerFactory.getLogger(ConnectionFactory.class); private static class ConnectionFactoryHolder { private static final ConnectionFactory connectionFactory = new ConnectionFactory(); } public static ConnectionFactory getInstance() { return ConnectionFactoryHolder.connectionFactory; } private ConnectionFactory() { try { dataSource = buildDataSource(); sqlSessionFactory = getSqlSessionFactory(); sqlSessionTemplate = getSqlSessionTemplate(); } catch (Exception e) { logger.error("Initializing ConnectionFactory error", e); throw new RuntimeException(e); } } /** * sql session factory */ private SqlSessionFactory sqlSessionFactory; /** * sql session template */ private SqlSessionTemplate sqlSessionTemplate; private DataSource dataSource; public DataSource getDataSource() { return dataSource; } /** * get the data source * * @return druid dataSource */ private DataSource buildDataSource() { DruidDataSource druidDataSource = dataSource(); return druidDataSource; } /** * * get sql session factory * * @return sqlSessionFactory * @throws Exception sqlSessionFactory exception */ private SqlSessionFactory getSqlSessionFactory() throws Exception { TransactionFactory transactionFactory = new JdbcTransactionFactory(); Environment environment = new Environment("development", transactionFactory, getDataSource()); MybatisConfiguration configuration = new MybatisConfiguration(); configuration.setEnvironment(environment); configuration.setLazyLoadingEnabled(true); configuration.addMappers("org.apache.dolphinscheduler.dao.mapper"); configuration.addInterceptor(new PaginationInterceptor()); MybatisSqlSessionFactoryBean sqlSessionFactoryBean = new MybatisSqlSessionFactoryBean(); sqlSessionFactoryBean.setConfiguration(configuration); 
sqlSessionFactoryBean.setDataSource(getDataSource()); sqlSessionFactoryBean.setTypeEnumsPackage("org.apache.dolphinscheduler.*.enums"); sqlSessionFactory = sqlSessionFactoryBean.getObject(); return sqlSessionFactory; } private SqlSessionTemplate getSqlSessionTemplate() { sqlSessionTemplate = new SqlSessionTemplate(sqlSessionFactory); return sqlSessionTemplate; } /** * get sql session * * @return sqlSession */ public SqlSession getSqlSession() { return sqlSessionTemplate; } /** * get mapper * * @param type target class * @param <T> generic * @return target object */ public <T> T getMapper(Class<T> type) { try { return getSqlSession().getMapper(type); } catch (Exception e) { logger.error(e.getMessage(), e); throw new RuntimeException("get mapper failed"); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,408
[Improvement][Dao] Database add connection check
**Describe the question** When the database connection fails, the application should fail fast and must not start successfully. At present, if we set the wrong database parameters, the application still starts successfully, and we have to check the logs to determine whether it is running normally. **Which version of DolphinScheduler:** -[1.3.6-release] -[dev] **Describe alternatives you've considered** Perform the initialization check when the data source is created
https://github.com/apache/dolphinscheduler/issues/5408
https://github.com/apache/dolphinscheduler/pull/5409
0bdf15efb9774ee11afab61f79d5f5547457e402
71f39fa8751252c9b665213e2f3a4a1827da2a27
"2021-04-29T14:45:12Z"
java
"2021-05-06T01:41:44Z"
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.dao.datasource; import static org.apache.dolphinscheduler.common.Constants.DATASOURCE_PROPERTIES; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.ibatis.mapping.DatabaseIdProvider; import org.apache.ibatis.mapping.VendorDatabaseIdProvider; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.SqlSessionFactory; import org.apache.ibatis.type.JdbcType; import java.util.Properties; import org.mybatis.spring.SqlSessionTemplate; import org.mybatis.spring.annotation.MapperScan; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.core.io.support.ResourcePatternResolver; import org.springframework.jdbc.datasource.DataSourceTransactionManager; import com.alibaba.druid.pool.DruidDataSource; import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.core.MybatisConfiguration; import com.baomidou.mybatisplus.core.config.GlobalConfig; import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean; /** * data source connection factory */ @Configuration @MapperScan("org.apache.dolphinscheduler.*.mapper") public class SpringConnectionFactory { private static final Logger logger = LoggerFactory.getLogger(SpringConnectionFactory.class); static { PropertyUtils.loadPropertyFile(DATASOURCE_PROPERTIES); } /** * pagination interceptor * * @return pagination interceptor */ @Bean public PaginationInterceptor paginationInterceptor() { return new PaginationInterceptor(); } /** * get the data source * * @return druid dataSource */ @Bean(destroyMethod = "") public DruidDataSource dataSource() { DruidDataSource druidDataSource = new DruidDataSource(); druidDataSource.setDriverClassName(PropertyUtils.getString(Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME)); druidDataSource.setUrl(PropertyUtils.getString(Constants.SPRING_DATASOURCE_URL)); druidDataSource.setUsername(PropertyUtils.getString(Constants.SPRING_DATASOURCE_USERNAME)); druidDataSource.setPassword(PropertyUtils.getString(Constants.SPRING_DATASOURCE_PASSWORD)); druidDataSource.setValidationQuery(PropertyUtils.getString(Constants.SPRING_DATASOURCE_VALIDATION_QUERY, "SELECT 1")); druidDataSource.setPoolPreparedStatements(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS, true)); druidDataSource.setTestWhileIdle(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_WHILE_IDLE, true)); 
druidDataSource.setTestOnBorrow(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_BORROW, true)); druidDataSource.setTestOnReturn(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_RETURN, true)); druidDataSource.setKeepAlive(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_KEEP_ALIVE, true)); druidDataSource.setMinIdle(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE, 5)); druidDataSource.setMaxActive(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50)); druidDataSource.setMaxWait(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_WAIT, 60000)); druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE, 20)); druidDataSource.setInitialSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_INITIAL_SIZE, 5)); druidDataSource.setTimeBetweenEvictionRunsMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS, 60000)); druidDataSource.setTimeBetweenConnectErrorMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS, 60000)); druidDataSource.setMinEvictableIdleTimeMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS, 300000)); druidDataSource.setValidationQueryTimeout(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT, 3)); //auto commit druidDataSource.setDefaultAutoCommit(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT, true)); return druidDataSource; } /** * * get transaction manager * * @return DataSourceTransactionManager */ @Bean public DataSourceTransactionManager transactionManager() { return new DataSourceTransactionManager(dataSource()); } /** * * get sql session factory * * @return sqlSessionFactory * @throws Exception sqlSessionFactory exception */ @Bean public SqlSessionFactory sqlSessionFactory() throws Exception { MybatisConfiguration configuration = new MybatisConfiguration(); configuration.setMapUnderscoreToCamelCase(true); configuration.setCacheEnabled(false); configuration.setCallSettersOnNulls(true); configuration.setJdbcTypeForNull(JdbcType.NULL); configuration.addInterceptor(paginationInterceptor()); MybatisSqlSessionFactoryBean sqlSessionFactoryBean = new MybatisSqlSessionFactoryBean(); sqlSessionFactoryBean.setConfiguration(configuration); sqlSessionFactoryBean.setDataSource(dataSource()); GlobalConfig.DbConfig dbConfig = new GlobalConfig.DbConfig(); dbConfig.setIdType(IdType.AUTO); GlobalConfig globalConfig = new GlobalConfig(); globalConfig.setDbConfig(dbConfig); sqlSessionFactoryBean.setGlobalConfig(globalConfig); sqlSessionFactoryBean.setTypeAliasesPackage("org.apache.dolphinscheduler.dao.entity"); ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(); sqlSessionFactoryBean.setMapperLocations(resolver.getResources("org/apache/dolphinscheduler/dao/mapper/*Mapper.xml")); sqlSessionFactoryBean.setTypeEnumsPackage("org.apache.dolphinscheduler.*.enums"); sqlSessionFactoryBean.setDatabaseIdProvider(databaseIdProvider()); return sqlSessionFactoryBean.getObject(); } /** * get sql session * * @return SqlSession */ @Bean public SqlSession sqlSession() throws Exception { return new SqlSessionTemplate(sqlSessionFactory()); } @Bean public DatabaseIdProvider databaseIdProvider() { DatabaseIdProvider databaseIdProvider = new VendorDatabaseIdProvider(); Properties properties = new Properties(); properties.setProperty("MySQL", "mysql"); 
properties.setProperty("PostgreSQL", "pg"); databaseIdProvider.setProperties(properties); return databaseIdProvider; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,429
[Improvement][dist] Cleanup of useless files in the binary package lib directory
**Describe the question** The lib directory of the binary package contains dolphinscheduler-dist-${version}.tar.gz, a useless artifact that should be cleaned up ![image](https://user-images.githubusercontent.com/37063904/117533124-97aa6980-b01d-11eb-9efc-9b898868a39a.png) **Which version of DolphinScheduler:** -[dev]
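For reference, a Maven assembly descriptor can keep an artifact out of the packaged lib directory with an `<excludes>` entry in the relevant `<dependencySet>`. The snippet below only sketches that mechanism; the descriptor location, element placement, and the assumption that the dist tarball arrives through a dependency set are illustrative, not the actual change from the linked PR.

```xml
<dependencySet>
    <outputDirectory>lib</outputDirectory>
    <excludes>
        <!-- illustrative: keep the dist module's own artifact out of lib -->
        <exclude>org.apache.dolphinscheduler:dolphinscheduler-dist</exclude>
    </excludes>
</dependencySet>
```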
https://github.com/apache/dolphinscheduler/issues/5429
https://github.com/apache/dolphinscheduler/pull/5430
f88bbbd637700401715bc3802601e7802b56b9ae
0d8011f9c1daa2b79dd45b0c80731d6b31ab0f07
"2021-05-08T09:05:30Z"
java
"2021-05-10T01:44:17Z"
dolphinscheduler-dist/src/main/assembly/dolphinscheduler-bin.xml
<!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd"> <id>bin</id> <formats> <format>tar.gz</format> </formats> <includeBaseDirectory>true</includeBaseDirectory> <baseDirectory>${project.build.finalName}-bin</baseDirectory> <fileSets> <fileSet> <directory>${basedir}/../dolphinscheduler-alert/src/main/resources</directory> <includes> <include>**/*.properties</include> <include>**/*.xml</include> <include>**/*.json</include> <include>**/*.ftl</include> </includes> <outputDirectory>conf</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../dolphinscheduler-api/src/main/resources</directory> <includes> <include>**/*.properties</include> <include>**/*.xml</include> <include>**/*.json</include> </includes> <excludes> <exclude>application.properties</exclude> </excludes> <outputDirectory>conf</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../dolphinscheduler-common/src/main/resources</directory> <includes> <include>**/*.properties</include> <include>**/*.xml</include> <include>**/*.json</include> </includes> <outputDirectory>conf</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../dolphinscheduler-common/src/main/resources/bin</directory> <includes> <include>*.*</include> </includes> <directoryMode>755</directoryMode> <outputDirectory>bin</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../dolphinscheduler-dao/src/main/resources</directory> <includes> <include>**/*.properties</include> <include>**/*.xml</include> <include>**/*.json</include> </includes> <excludes> <exclude>org/apache/dolphinscheduler/dao/mapper/*.xml</exclude> </excludes> <outputDirectory>conf</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../dolphinscheduler-server/src/main/resources</directory> <includes> <include>**/*.properties</include> <include>**/*.xml</include> <include>**/*.json</include> <include>config/*.*</include> </includes> <outputDirectory>conf</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../dolphinscheduler-service/src/main/resources</directory> <includes> <include>**/*.properties</include> <include>**/*.xml</include> <include>**/*.json</include> <include>**/*.yml</include> </includes> <outputDirectory>conf</outputDirectory> </fileSet> <fileSet> <directory>src/main/resources</directory> <includes> <include>**/*.properties</include> <include>**/*.xml</include> <include>**/*.json</include> </includes> <outputDirectory>conf</outputDirectory> </fileSet> <fileSet> 
<directory>${basedir}/../dolphinscheduler-server/target/dolphinscheduler-server-${project.version}</directory> <includes> <include>**/*.*</include> </includes> <outputDirectory>.</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../dolphinscheduler-api/target/dolphinscheduler-api-${project.version}</directory> <includes> <include>**/*.*</include> </includes> <outputDirectory>.</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../dolphinscheduler-alert/target/dolphinscheduler-alert-${project.version}</directory> <includes> <include>**/*.*</include> </includes> <outputDirectory>.</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../dolphinscheduler-dist/target/dolphinscheduler-dist-${project.version}</directory> <includes> <include>**/*.*</include> </includes> <outputDirectory>.</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../dolphinscheduler-ui/dist</directory> <includes> <include>**/*.*</include> </includes> <outputDirectory>./ui</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../sql</directory> <includes> <include>**/*</include> </includes> <outputDirectory>./sql</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../script</directory> <includes> <include>*.*</include> </includes> <outputDirectory>./script</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../script</directory> <includes> <include>env/*.*</include> </includes> <outputDirectory>./conf</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/../script</directory> <includes> <include>start-all.sh</include> <include>stop-all.sh</include> <include>dolphinscheduler-daemon.sh</include> <include>status-all.sh</include> </includes> <outputDirectory>./bin</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/.././</directory> <includes> <include>*.sh</include> <include>*.py</include> <include>DISCLAIMER</include> </includes> <outputDirectory>.</outputDirectory> </fileSet> <fileSet> <directory>${basedir}/release-docs</directory> <useDefaultExcludes>true</useDefaultExcludes> <includes> <include>**/*</include> </includes> <outputDirectory>.</outputDirectory> </fileSet> </fileSets> <dependencySets> <dependencySet> <outputDirectory>lib</outputDirectory> <useProjectArtifact>true</useProjectArtifact> <excludes> <exclude>javax.servlet:servlet-api</exclude> <exclude>org.eclipse.jetty.aggregate:jetty-all</exclude> <exclude>org.slf4j:slf4j-log4j12</exclude> </excludes> </dependencySet> </dependencySets> </assembly>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,058
[Improvement][Master] The master-server creates the DB data source twice
**Describe the question** The master-server creates the DB data source twice: 1. It loads the data source through Spring. 2. AlertManager builds a second data source through the ConnectionFactory factory class. **Which version of DolphinScheduler:** -[dev] ![image](https://user-images.githubusercontent.com/37063904/112297565-03847d00-8cd1-11eb-878b-75e12060172a.png)
https://github.com/apache/dolphinscheduler/issues/5058
https://github.com/apache/dolphinscheduler/pull/5139
0d8011f9c1daa2b79dd45b0c80731d6b31ab0f07
9c77faa8ace7242ab9d37b9c6e65a699f1e2f829
"2021-03-15T12:24:11Z"
java
"2021-05-10T01:56:17Z"
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/plugin/DolphinSchedulerPluginLoaderTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.common.plugin;

import java.util.Objects;

import org.junit.Test;

import com.google.common.collect.ImmutableList;

public class DolphinSchedulerPluginLoaderTest {

    /**
     * Method: loadPlugins()
     */
    @Test
    public void testLoadPlugins() {
        PluginManagerTest pluginManager = new PluginManagerTest();
        DolphinPluginManagerConfig alertPluginManagerConfig = new DolphinPluginManagerConfig();
        String path = Objects.requireNonNull(DolphinPluginLoader.class.getClassLoader().getResource("")).getPath();
        alertPluginManagerConfig.setPlugins(path + "../../../dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml");
        DolphinPluginLoader alertPluginLoader = new DolphinPluginLoader(alertPluginManagerConfig, ImmutableList.of(pluginManager));
        try {
            alertPluginLoader.loadPlugins();
        } catch (Exception e) {
            throw new RuntimeException("load Alert Plugin Failed !", e);
        }
    }
}
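A hedged sketch of driving the same loader outside JUnit, using only the API surface the test exercises (DolphinPluginManagerConfig.setPlugins plus the two-argument DolphinPluginLoader constructor); the filesystem path is made up:

```java
package org.apache.dolphinscheduler.common.plugin;

import com.google.common.collect.ImmutableList;

// Sketch only: the pom path is illustrative, and PluginManagerTest stands in
// for any plugin-manager implementation the loader can notify.
public class PluginLoaderSketch {
    public static void main(String[] args) throws Exception {
        DolphinPluginManagerConfig config = new DolphinPluginManagerConfig();
        config.setPlugins("/opt/soft/dolphinscheduler/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml");
        DolphinPluginLoader loader = new DolphinPluginLoader(config, ImmutableList.of(new PluginManagerTest()));
        loader.loadPlugins();
    }
}
```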
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,058
[Improvement][Master] The master-server creates the DB data source twice
**Describe the question** The master-server creates the DB data source twice: 1. It loads the data source through Spring. 2. AlertManager builds a second data source through the ConnectionFactory factory class. **Which version of DolphinScheduler:** -[dev] ![image](https://user-images.githubusercontent.com/37063904/112297565-03847d00-8cd1-11eb-878b-75e12060172a.png)
https://github.com/apache/dolphinscheduler/issues/5058
https://github.com/apache/dolphinscheduler/pull/5139
0d8011f9c1daa2b79dd45b0c80731d6b31ab0f07
9c77faa8ace7242ab9d37b9c6e65a699f1e2f829
"2021-03-15T12:24:11Z"
java
"2021-05-10T01:56:17Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.master.runner; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVERY_START_NODE_STRING; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; import static org.apache.dolphinscheduler.common.Constants.SEC_2_MINUTES_TIME_UNIT; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DependResult; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.VarPoolUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.utils.DagHelper; import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.utils.AlertManager; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.apache.dolphinscheduler.service.queue.PeerTaskInstancePriorityQueue; import 
java.text.ParseException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; /** * master exec thread,split dag */ public class MasterExecThread implements Runnable { /** * logger of MasterExecThread */ private static final Logger logger = LoggerFactory.getLogger(MasterExecThread.class); /** * runing TaskNode */ private final Map<MasterBaseTaskExecThread, Future<Boolean>> activeTaskNode = new ConcurrentHashMap<>(); /** * task exec service */ private final ExecutorService taskExecService; /** * process instance */ private ProcessInstance processInstance; /** * submit failure nodes */ private boolean taskFailedSubmit = false; /** * recover node id list */ private List<TaskInstance> recoverNodeIdList = new ArrayList<>(); /** * error task list */ private Map<String, TaskInstance> errorTaskList = new ConcurrentHashMap<>(); /** * complete task list */ private Map<String, TaskInstance> completeTaskList = new ConcurrentHashMap<>(); /** * ready to submit task queue */ private PeerTaskInstancePriorityQueue readyToSubmitTaskQueue = new PeerTaskInstancePriorityQueue(); /** * depend failed task map */ private Map<String, TaskInstance> dependFailedTask = new ConcurrentHashMap<>(); /** * forbidden task map */ private Map<String, TaskNode> forbiddenTaskList = new ConcurrentHashMap<>(); /** * skip task map */ private Map<String, TaskNode> skipTaskNodeList = new ConcurrentHashMap<>(); /** * recover tolerance fault task list */ private List<TaskInstance> recoverToleranceFaultTaskList = new ArrayList<>(); /** * alert manager */ private AlertManager alertManager; /** * the object of DAG */ private DAG<String, TaskNode, TaskNodeRelation> dag; /** * process service */ private ProcessService processService; /** * master config */ private MasterConfig masterConfig; /** * */ private NettyRemotingClient nettyRemotingClient; /** * submit post node * * @param parentNodeName parent node name */ private Map<String, Object> propToValue = new ConcurrentHashMap<String, Object>(); /** * constructor of MasterExecThread * * @param processInstance processInstance * @param processService processService * @param nettyRemotingClient nettyRemotingClient */ public MasterExecThread(ProcessInstance processInstance , ProcessService processService , NettyRemotingClient nettyRemotingClient , AlertManager alertManager , MasterConfig masterConfig) { this.processService = processService; this.processInstance = processInstance; this.masterConfig = masterConfig; int masterTaskExecNum = masterConfig.getMasterExecTaskNum(); this.taskExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Task-Exec-Thread", masterTaskExecNum); this.nettyRemotingClient = nettyRemotingClient; this.alertManager = alertManager; } @Override public void run() { // process instance is null if (processInstance == null) { logger.info("process instance is not exists"); return; } // check to see if it's done if (processInstance.getState().typeIsFinished()) { logger.info("process instance is done : {}", processInstance.getId()); return; } try { if (processInstance.isComplementData() && Flag.NO == processInstance.getIsSubProcess()) { // sub process complement data executeComplementProcess(); } else { 
// execute flow executeProcess(); } } catch (Exception e) { logger.error("master exec thread exception", e); logger.error("process execute failed, process id:{}", processInstance.getId()); processInstance.setState(ExecutionStatus.FAILURE); processInstance.setEndTime(new Date()); processService.updateProcessInstance(processInstance); } finally { taskExecService.shutdown(); } } /** * execute process * * @throws Exception exception */ private void executeProcess() throws Exception { prepareProcess(); runProcess(); endProcess(); } /** * execute complement process * * @throws Exception exception */ private void executeComplementProcess() throws Exception { Map<String, String> cmdParam = JSONUtils.toMap(processInstance.getCommandParam()); Date startDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); Date endDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); processService.saveProcessInstance(processInstance); // get schedules int processDefinitionId = processInstance.getProcessDefinitionId(); List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId); List<Date> listDate = Lists.newLinkedList(); if (!CollectionUtils.isEmpty(schedules)) { for (Schedule schedule : schedules) { listDate.addAll(CronUtils.getSelfFireDateList(startDate, endDate, schedule.getCrontab())); } } // get first fire date Iterator<Date> iterator = null; Date scheduleDate = null; if (!CollectionUtils.isEmpty(listDate)) { iterator = listDate.iterator(); scheduleDate = iterator.next(); processInstance.setScheduleTime(scheduleDate); processService.updateProcessInstance(processInstance); } else { scheduleDate = processInstance.getScheduleTime(); if (scheduleDate == null) { scheduleDate = startDate; } } while (Stopper.isRunning()) { logger.info("process {} start to complement {} data", processInstance.getId(), DateUtils.dateToString(scheduleDate)); // prepare dag and other info prepareProcess(); if (dag == null) { logger.error("process {} dag is null, please check out parameters", processInstance.getId()); processInstance.setState(ExecutionStatus.SUCCESS); processService.updateProcessInstance(processInstance); return; } // execute process ,waiting for end runProcess(); endProcess(); // process instance failure ,no more complements if (!processInstance.getState().typeIsSuccess()) { logger.info("process {} state {}, complement not completely!", processInstance.getId(), processInstance.getState()); break; } // current process instance success ,next execute if (null == iterator) { // loop by day scheduleDate = DateUtils.getSomeDay(scheduleDate, 1); if (scheduleDate.after(endDate)) { // all success logger.info("process {} complement completely!", processInstance.getId()); break; } } else { // loop by schedule date if (!iterator.hasNext()) { // all success logger.info("process {} complement completely!", processInstance.getId()); break; } scheduleDate = iterator.next(); } // flow end // execute next process instance complement data processInstance.setScheduleTime(scheduleDate); if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING)) { cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); } processInstance.setState(ExecutionStatus.RUNNING_EXECUTION); processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( processInstance.getProcessDefinition().getGlobalParamMap(), 
processInstance.getProcessDefinition().getGlobalParamList(), CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); processInstance.setId(0); processInstance.setStartTime(new Date()); processInstance.setEndTime(null); processService.saveProcessInstance(processInstance); } } /** * prepare process parameter * * @throws Exception exception */ private void prepareProcess() throws Exception { // gen process dag buildFlowDag(); // init task queue initTaskQueue(); logger.info("prepare process :{} end", processInstance.getId()); } /** * process end handle */ private void endProcess() { processInstance.setEndTime(new Date()); processService.updateProcessInstance(processInstance); if (processInstance.getState().typeIsWaitingThread()) { processService.createRecoveryWaitingThreadCommand(null, processInstance); } List<TaskInstance> taskInstances = processService.findValidTaskListByProcessId(processInstance.getId()); ProjectUser projectUser = processService.queryProjectWithUserByProcessInstanceId(processInstance.getId()); alertManager.sendAlertProcessInstance(processInstance, taskInstances, projectUser); } /** * generate process dag * * @throws Exception exception */ private void buildFlowDag() throws Exception { recoverNodeIdList = getStartTaskInstanceList(processInstance.getCommandParam()); forbiddenTaskList = DagHelper.getForbiddenTaskNodeMaps(processInstance.getProcessInstanceJson()); // generate process to get DAG info List<String> recoveryNameList = getRecoveryNodeNameList(); List<String> startNodeNameList = parseStartNodeName(processInstance.getCommandParam()); ProcessDag processDag = generateFlowDag(processInstance.getProcessInstanceJson(), startNodeNameList, recoveryNameList, processInstance.getTaskDependType()); if (processDag == null) { logger.error("processDag is null"); return; } // generate process dag dag = DagHelper.buildDagGraph(processDag); } /** * init task queue */ private void initTaskQueue() { taskFailedSubmit = false; activeTaskNode.clear(); dependFailedTask.clear(); completeTaskList.clear(); errorTaskList.clear(); List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); for (TaskInstance task : taskInstanceList) { if (task.isTaskComplete()) { completeTaskList.put(task.getName(), task); } if (task.isConditionsTask() || DagHelper.haveConditionsAfterNode(task.getName(), dag)) { continue; } if (task.getState().typeIsFailure() && !task.taskCanRetry()) { errorTaskList.put(task.getName(), task); } } } /** * submit task to execute * * @param taskInstance task instance * @return TaskInstance */ private TaskInstance submitTaskExec(TaskInstance taskInstance) { MasterBaseTaskExecThread abstractExecThread = null; if (taskInstance.isSubProcess()) { abstractExecThread = new SubProcessTaskExecThread(taskInstance); } else if (taskInstance.isDependTask()) { abstractExecThread = new DependentTaskExecThread(taskInstance); } else if (taskInstance.isConditionsTask()) { abstractExecThread = new ConditionsTaskExecThread(taskInstance); } else { abstractExecThread = new MasterTaskExecThread(taskInstance); } Future<Boolean> future = taskExecService.submit(abstractExecThread); activeTaskNode.putIfAbsent(abstractExecThread, future); return abstractExecThread.getTaskInstance(); } /** * find task instance in db. * in case submit more than one same name task in the same time. 
* * @param taskName task name * @return TaskInstance */ private TaskInstance findTaskIfExists(String taskName) { List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(this.processInstance.getId()); for (TaskInstance taskInstance : taskInstanceList) { if (taskInstance.getName().equals(taskName)) { return taskInstance; } } return null; } /** * encapsulation task * * @param processInstance process instance * @param nodeName node name * @return TaskInstance */ private TaskInstance createTaskInstance(ProcessInstance processInstance, String nodeName, TaskNode taskNode) { //update processInstance for update the globalParams this.processInstance = this.processService.findProcessInstanceById(this.processInstance.getId()); TaskInstance taskInstance = findTaskIfExists(nodeName); if (taskInstance == null) { taskInstance = new TaskInstance(); // task name taskInstance.setName(nodeName); // process instance define id taskInstance.setProcessDefinitionId(processInstance.getProcessDefinitionId()); // task instance state taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); // process instance id taskInstance.setProcessInstanceId(processInstance.getId()); // task instance node json taskInstance.setTaskJson(JSONUtils.toJsonString(taskNode)); // task instance type taskInstance.setTaskType(taskNode.getType()); // task instance whether alert taskInstance.setAlertFlag(Flag.NO); // task instance start time taskInstance.setStartTime(null); // task instance flag taskInstance.setFlag(Flag.YES); // task instance retry times taskInstance.setRetryTimes(0); // max task instance retry times taskInstance.setMaxRetryTimes(taskNode.getMaxRetryTimes()); // retry task instance interval taskInstance.setRetryInterval(taskNode.getRetryInterval()); // task instance priority if (taskNode.getTaskInstancePriority() == null) { taskInstance.setTaskInstancePriority(Priority.MEDIUM); } else { taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority()); } String processWorkerGroup = processInstance.getWorkerGroup(); processWorkerGroup = StringUtils.isBlank(processWorkerGroup) ? DEFAULT_WORKER_GROUP : processWorkerGroup; String taskWorkerGroup = StringUtils.isBlank(taskNode.getWorkerGroup()) ? 
processWorkerGroup : taskNode.getWorkerGroup(); if (!processWorkerGroup.equals(DEFAULT_WORKER_GROUP) && taskWorkerGroup.equals(DEFAULT_WORKER_GROUP)) { taskInstance.setWorkerGroup(processWorkerGroup); } else { taskInstance.setWorkerGroup(taskWorkerGroup); } //get process global setProcessGlobal(taskNode, taskInstance); // delay execution time taskInstance.setDelayTime(taskNode.getDelayTime()); } return taskInstance; } private void setProcessGlobal(TaskNode taskNode, TaskInstance taskInstance) { String globalParams = this.processInstance.getGlobalParams(); if (StringUtils.isNotEmpty(globalParams)) { Map<String, String> globalMap = processService.getGlobalParamMap(globalParams); if (globalMap != null && globalMap.size() != 0) { setGlobalMapToTask(taskNode, taskInstance, globalMap); } } } private void setGlobalMapToTask(TaskNode taskNode, TaskInstance taskInstance, Map<String, String> globalMap) { // the param save in localParams Map<String, Object> result = JSONUtils.toMap(taskNode.getParams(), String.class, Object.class); Object localParams = result.get(LOCAL_PARAMS); if (localParams != null) { List<Property> allParam = JSONUtils.toList(JSONUtils.toJsonString(localParams), Property.class); for (Property info : allParam) { if (info.getDirect().equals(Direct.IN)) { String paramName = info.getProp(); String value = globalMap.get(paramName); if (StringUtils.isNotEmpty(value)) { info.setValue(value); } } } result.put(LOCAL_PARAMS, allParam); taskNode.setParams(JSONUtils.toJsonString(result)); // task instance node json taskInstance.setTaskJson(JSONUtils.toJsonString(taskNode)); } } private void submitPostNode(String parentNodeName) { Set<String> submitTaskNodeList = DagHelper.parsePostNodes(parentNodeName, skipTaskNodeList, dag, completeTaskList); List<TaskInstance> taskInstances = new ArrayList<>(); for (String taskNode : submitTaskNodeList) { try { VarPoolUtils.convertVarPoolToMap(propToValue, processInstance.getVarPool()); } catch (ParseException e) { logger.error("parse {} exception", processInstance.getVarPool(), e); throw new RuntimeException(); } TaskNode taskNodeObject = dag.getNode(taskNode); VarPoolUtils.setTaskNodeLocalParams(taskNodeObject, propToValue); taskInstances.add(createTaskInstance(processInstance, taskNode, taskNodeObject)); } // if previous node success , post node submit for (TaskInstance task : taskInstances) { if (readyToSubmitTaskQueue.contains(task)) { continue; } if (completeTaskList.containsKey(task.getName())) { logger.info("task {} has already run success", task.getName()); continue; } if (task.getState().typeIsPause() || task.getState().typeIsCancel()) { logger.info("task {} stopped, the state is {}", task.getName(), task.getState()); } else { addTaskToStandByList(task); } } } /** * determine whether the dependencies of the task node are complete * * @return DependResult */ private DependResult isTaskDepsComplete(String taskName) { Collection<String> startNodes = dag.getBeginNode(); // if vertex,returns true directly if (startNodes.contains(taskName)) { return DependResult.SUCCESS; } TaskNode taskNode = dag.getNode(taskName); List<String> depNameList = taskNode.getDepList(); for (String depsNode : depNameList) { if (!dag.containsNode(depsNode) || forbiddenTaskList.containsKey(depsNode) || skipTaskNodeList.containsKey(depsNode)) { continue; } // dependencies must be fully completed if (!completeTaskList.containsKey(depsNode)) { return DependResult.WAITING; } ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState(); if (depTaskState.typeIsPause() || 
depTaskState.typeIsCancel()) { return DependResult.NON_EXEC; } // ignore task state if current task is condition if (taskNode.isConditionsTask()) { continue; } if (!dependTaskSuccess(depsNode, taskName)) { return DependResult.FAILED; } } logger.info("taskName: {} completeDependTaskList: {}", taskName, Arrays.toString(completeTaskList.keySet().toArray())); return DependResult.SUCCESS; } /** * depend node is completed, but here need check the condition task branch is the next node */ private boolean dependTaskSuccess(String dependNodeName, String nextNodeName) { if (dag.getNode(dependNodeName).isConditionsTask()) { //condition task need check the branch to run List<String> nextTaskList = DagHelper.parseConditionTask(dependNodeName, skipTaskNodeList, dag, completeTaskList); if (!nextTaskList.contains(nextNodeName)) { return false; } } else { ExecutionStatus depTaskState = completeTaskList.get(dependNodeName).getState(); if (depTaskState.typeIsFailure()) { return false; } } return true; } /** * query task instance by complete state * * @param state state * @return task instance list */ private List<TaskInstance> getCompleteTaskByState(ExecutionStatus state) { List<TaskInstance> resultList = new ArrayList<>(); for (Map.Entry<String, TaskInstance> entry : completeTaskList.entrySet()) { if (entry.getValue().getState() == state) { resultList.add(entry.getValue()); } } return resultList; } /** * where there are ongoing tasks * * @param state state * @return ExecutionStatus */ private ExecutionStatus runningState(ExecutionStatus state) { if (state == ExecutionStatus.READY_STOP || state == ExecutionStatus.READY_PAUSE || state == ExecutionStatus.WAITTING_THREAD || state == ExecutionStatus.DELAY_EXECUTION) { // if the running task is not completed, the state remains unchanged return state; } else { return ExecutionStatus.RUNNING_EXECUTION; } } /** * exists failure task,contains submit failure、dependency failure,execute failure(retry after) * * @return Boolean whether has failed task */ private boolean hasFailedTask() { if (this.taskFailedSubmit) { return true; } if (this.errorTaskList.size() > 0) { return true; } return this.dependFailedTask.size() > 0; } /** * process instance failure * * @return Boolean whether process instance failed */ private boolean processFailed() { if (hasFailedTask()) { if (processInstance.getFailureStrategy() == FailureStrategy.END) { return true; } if (processInstance.getFailureStrategy() == FailureStrategy.CONTINUE) { return readyToSubmitTaskQueue.size() == 0 || activeTaskNode.size() == 0; } } return false; } /** * whether task for waiting thread * * @return Boolean whether has waiting thread task */ private boolean hasWaitingThreadTask() { List<TaskInstance> waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD); return CollectionUtils.isNotEmpty(waitingList); } /** * prepare for pause * 1,failed retry task in the preparation queue , returns to failure directly * 2,exists pause task,complement not completed, pending submission of tasks, return to suspension * 3,success * * @return ExecutionStatus */ private ExecutionStatus processReadyPause() { if (hasRetryTaskInStandBy()) { return ExecutionStatus.FAILURE; } List<TaskInstance> pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE); if (CollectionUtils.isNotEmpty(pauseList) || !isComplementEnd() || readyToSubmitTaskQueue.size() > 0) { return ExecutionStatus.PAUSE; } else { return ExecutionStatus.SUCCESS; } } /** * generate the latest process instance status by the tasks state * * @return process instance 
execution status */ private ExecutionStatus getProcessInstanceState() { ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId()); ExecutionStatus state = instance.getState(); if (activeTaskNode.size() > 0 || hasRetryTaskInStandBy()) { // active task and retry task exists return runningState(state); } // process failure if (processFailed()) { return ExecutionStatus.FAILURE; } // waiting thread if (hasWaitingThreadTask()) { return ExecutionStatus.WAITTING_THREAD; } // pause if (state == ExecutionStatus.READY_PAUSE) { return processReadyPause(); } // stop if (state == ExecutionStatus.READY_STOP) { List<TaskInstance> stopList = getCompleteTaskByState(ExecutionStatus.STOP); List<TaskInstance> killList = getCompleteTaskByState(ExecutionStatus.KILL); if (CollectionUtils.isNotEmpty(stopList) || CollectionUtils.isNotEmpty(killList) || !isComplementEnd()) { return ExecutionStatus.STOP; } else { return ExecutionStatus.SUCCESS; } } // success if (state == ExecutionStatus.RUNNING_EXECUTION) { List<TaskInstance> killTasks = getCompleteTaskByState(ExecutionStatus.KILL); if (readyToSubmitTaskQueue.size() > 0) { //tasks currently pending submission, no retries, indicating that depend is waiting to complete return ExecutionStatus.RUNNING_EXECUTION; } else if (CollectionUtils.isNotEmpty(killTasks)) { // tasks maybe killed manually return ExecutionStatus.FAILURE; } else { // if the waiting queue is empty and the status is in progress, then success return ExecutionStatus.SUCCESS; } } return state; } /** * whether standby task list have retry tasks */ private boolean retryTaskExists() { boolean result = false; for (Iterator<TaskInstance> iter = readyToSubmitTaskQueue.iterator(); iter.hasNext(); ) { TaskInstance task = iter.next(); if (task.getState().typeIsFailure()) { result = true; break; } } return result; } /** * whether complement end * * @return Boolean whether is complement end */ private boolean isComplementEnd() { if (!processInstance.isComplementData()) { return true; } try { Map<String, String> cmdParam = JSONUtils.toMap(processInstance.getCommandParam()); Date endTime = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); return processInstance.getScheduleTime().equals(endTime); } catch (Exception e) { logger.error("complement end failed ", e); return false; } } /** * updateProcessInstance process instance state * after each batch of tasks is executed, the status of the process instance is updated */ private void updateProcessInstanceState() { ExecutionStatus state = getProcessInstanceState(); if (processInstance.getState() != state) { logger.info( "work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}", processInstance.getId(), processInstance.getName(), processInstance.getState(), state, processInstance.getCommandType()); ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId()); instance.setState(state); instance.setProcessDefinition(processInstance.getProcessDefinition()); processService.updateProcessInstance(instance); processInstance = instance; } } /** * get task dependency result * * @param taskInstance task instance * @return DependResult */ private DependResult getDependResultForTask(TaskInstance taskInstance) { return isTaskDepsComplete(taskInstance.getName()); } /** * add task to standby list * * @param taskInstance task instance */ private void addTaskToStandByList(TaskInstance taskInstance) { logger.info("add task to stand by list: {}", taskInstance.getName()); try 
{ readyToSubmitTaskQueue.put(taskInstance); } catch (Exception e) { logger.error("add task instance to readyToSubmitTaskQueue error, taskName: {}", taskInstance.getName(), e); } } /** * remove task from stand by list * * @param taskInstance task instance */ private void removeTaskFromStandbyList(TaskInstance taskInstance) { logger.info("remove task from stand by list: {}", taskInstance.getName()); try { readyToSubmitTaskQueue.remove(taskInstance); } catch (Exception e) { logger.error("remove task instance from readyToSubmitTaskQueue error, taskName: {}", taskInstance.getName(), e); } } /** * has retry task in standby * * @return Boolean whether has retry task in standby */ private boolean hasRetryTaskInStandBy() { for (Iterator<TaskInstance> iter = readyToSubmitTaskQueue.iterator(); iter.hasNext(); ) { if (iter.next().getState().typeIsFailure()) { return true; } } return false; } /** * submit and watch the tasks, until the work flow stop */ private void runProcess() { // submit start node submitPostNode(null); boolean sendTimeWarning = false; while (!processInstance.isProcessInstanceStop() && Stopper.isRunning()) { // send warning email if process time out. if (!sendTimeWarning && checkProcessTimeOut(processInstance)) { alertManager.sendProcessTimeoutAlert(processInstance, processService.findProcessDefineById(processInstance.getProcessDefinitionId())); sendTimeWarning = true; } for (Map.Entry<MasterBaseTaskExecThread, Future<Boolean>> entry : activeTaskNode.entrySet()) { Future<Boolean> future = entry.getValue(); TaskInstance task = entry.getKey().getTaskInstance(); if (!future.isDone()) { continue; } // node monitor thread complete task = this.processService.findTaskInstanceById(task.getId()); if (task == null) { this.taskFailedSubmit = true; activeTaskNode.remove(entry.getKey()); continue; } // node monitor thread complete if (task.getState().typeIsFinished()) { activeTaskNode.remove(entry.getKey()); } logger.info("task :{}, id:{} complete, state is {} ", task.getName(), task.getId(), task.getState()); // node success , post node submit if (task.getState() == ExecutionStatus.SUCCESS) { processInstance = processService.findProcessInstanceById(processInstance.getId()); processInstance.setVarPool(task.getVarPool()); processService.updateProcessInstance(processInstance); completeTaskList.put(task.getName(), task); submitPostNode(task.getName()); continue; } // node fails, retry first, and then execute the failure process if (task.getState().typeIsFailure()) { if (task.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE) { this.recoverToleranceFaultTaskList.add(task); } if (task.taskCanRetry()) { addTaskToStandByList(task); } else { completeTaskList.put(task.getName(), task); if (task.isConditionsTask() || DagHelper.haveConditionsAfterNode(task.getName(), dag)) { submitPostNode(task.getName()); } else { errorTaskList.put(task.getName(), task); if (processInstance.getFailureStrategy() == FailureStrategy.END) { killTheOtherTasks(); } } } continue; } // other status stop/pause completeTaskList.put(task.getName(), task); } // send alert if (CollectionUtils.isNotEmpty(this.recoverToleranceFaultTaskList)) { alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList); this.recoverToleranceFaultTaskList.clear(); } // updateProcessInstance completed task status // failure priority is higher than pause // if a task fails, other suspended tasks need to be reset kill // check if there exists forced success nodes in errorTaskList if (errorTaskList.size() > 0) { for 
(Map.Entry<String, TaskInstance> entry : completeTaskList.entrySet()) { TaskInstance completeTask = entry.getValue(); if (completeTask.getState() == ExecutionStatus.PAUSE) { completeTask.setState(ExecutionStatus.KILL); completeTaskList.put(entry.getKey(), completeTask); processService.updateTaskInstance(completeTask); } } for (Map.Entry<String, TaskInstance> entry : errorTaskList.entrySet()) { TaskInstance errorTask = entry.getValue(); TaskInstance currentTask = processService.findTaskInstanceById(errorTask.getId()); if (currentTask == null) { continue; } // for nodes that have been forced success if (errorTask.getState().typeIsFailure() && currentTask.getState().equals(ExecutionStatus.FORCED_SUCCESS)) { // update state in this thread and remove from errorTaskList errorTask.setState(currentTask.getState()); logger.info("task: {} has been forced success, remove it from error task list", errorTask.getName()); errorTaskList.remove(errorTask.getName()); // submit post nodes submitPostNode(errorTask.getName()); } } } if (canSubmitTaskToQueue()) { submitStandByTask(); } try { Thread.sleep(Constants.SLEEP_TIME_MILLIS); } catch (InterruptedException e) { logger.error(e.getMessage(), e); Thread.currentThread().interrupt(); } updateProcessInstanceState(); } logger.info("process:{} end, state :{}", processInstance.getId(), processInstance.getState()); } /** * whether check process time out * * @param processInstance task instance * @return true if time out of process instance > running time of process instance */ private boolean checkProcessTimeOut(ProcessInstance processInstance) { if (processInstance.getTimeout() == 0) { return false; } Date now = new Date(); long runningTime = DateUtils.diffMin(now, processInstance.getStartTime()); return runningTime > processInstance.getTimeout(); } /** * whether can submit task to queue * * @return boolean */ private boolean canSubmitTaskToQueue() { return OSUtils.checkResource(masterConfig.getMasterMaxCpuloadAvg(), masterConfig.getMasterReservedMemory()); } /** * close the on going tasks */ private void killTheOtherTasks() { logger.info("kill called on process instance id: {}, num: {}", processInstance.getId(), activeTaskNode.size()); for (Map.Entry<MasterBaseTaskExecThread, Future<Boolean>> entry : activeTaskNode.entrySet()) { MasterBaseTaskExecThread taskExecThread = entry.getKey(); Future<Boolean> future = entry.getValue(); TaskInstance taskInstance = taskExecThread.getTaskInstance(); taskInstance = processService.findTaskInstanceById(taskInstance.getId()); if (taskInstance != null && taskInstance.getState().typeIsFinished()) { continue; } if (!future.isDone()) { // record kill info logger.info("kill process instance, id: {}, task: {}", processInstance.getId(), taskExecThread.getTaskInstance().getId()); // kill node taskExecThread.kill(); } } } /** * whether the retry interval is timed out * * @param taskInstance task instance * @return Boolean */ private boolean retryTaskIntervalOverTime(TaskInstance taskInstance) { if (taskInstance.getState() != ExecutionStatus.FAILURE) { return true; } if (taskInstance.getId() == 0 || taskInstance.getMaxRetryTimes() == 0 || taskInstance.getRetryInterval() == 0) { return true; } Date now = new Date(); long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime()); // task retry does not over time, return false return taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT < failedTimeInterval; } /** * handling the list of tasks to be submitted */ private void submitStandByTask() { try { int length = 
readyToSubmitTaskQueue.size(); for (int i = 0; i < length; i++) { TaskInstance task = readyToSubmitTaskQueue.peek(); // stop tasks which is retrying if forced success happens if (task.taskCanRetry()) { TaskInstance retryTask = processService.findTaskInstanceById(task.getId()); if (retryTask != null && retryTask.getState().equals(ExecutionStatus.FORCED_SUCCESS)) { task.setState(retryTask.getState()); logger.info("task: {} has been forced success, put it into complete task list and stop retrying", task.getName()); removeTaskFromStandbyList(task); completeTaskList.put(task.getName(), task); submitPostNode(task.getName()); continue; } } DependResult dependResult = getDependResultForTask(task); if (DependResult.SUCCESS == dependResult) { if (retryTaskIntervalOverTime(task)) { submitTaskExec(task); removeTaskFromStandbyList(task); } } else if (DependResult.FAILED == dependResult) { // if the dependency fails, the current node is not submitted and the state changes to failure. dependFailedTask.put(task.getName(), task); removeTaskFromStandbyList(task); logger.info("task {},id:{} depend result : {}", task.getName(), task.getId(), dependResult); } else if (DependResult.NON_EXEC == dependResult) { // for some reasons(depend task pause/stop) this task would not be submit removeTaskFromStandbyList(task); logger.info("remove task {},id:{} , because depend result : {}", task.getName(), task.getId(), dependResult); } } } catch (Exception e) { logger.error("submit standby task error", e); } } /** * get recovery task instance * * @param taskId task id * @return recovery task instance */ private TaskInstance getRecoveryTaskInstance(String taskId) { if (!StringUtils.isNotEmpty(taskId)) { return null; } try { Integer intId = Integer.valueOf(taskId); TaskInstance task = processService.findTaskInstanceById(intId); if (task == null) { logger.error("start node id cannot be found: {}", taskId); } else { return task; } } catch (Exception e) { logger.error("get recovery task instance failed ", e); } return null; } /** * get start task instance list * * @param cmdParam command param * @return task instance list */ private List<TaskInstance> getStartTaskInstanceList(String cmdParam) { List<TaskInstance> instanceList = new ArrayList<>(); Map<String, String> paramMap = JSONUtils.toMap(cmdParam); if (paramMap != null && paramMap.containsKey(CMD_PARAM_RECOVERY_START_NODE_STRING)) { String[] idList = paramMap.get(CMD_PARAM_RECOVERY_START_NODE_STRING).split(Constants.COMMA); for (String nodeId : idList) { TaskInstance task = getRecoveryTaskInstance(nodeId); if (task != null) { instanceList.add(task); } } } return instanceList; } /** * parse "StartNodeNameList" from cmd param * * @param cmdParam command param * @return start node name list */ private List<String> parseStartNodeName(String cmdParam) { List<String> startNodeNameList = new ArrayList<>(); Map<String, String> paramMap = JSONUtils.toMap(cmdParam); if (paramMap == null) { return startNodeNameList; } if (paramMap.containsKey(CMD_PARAM_START_NODE_NAMES)) { startNodeNameList = Arrays.asList(paramMap.get(CMD_PARAM_START_NODE_NAMES).split(Constants.COMMA)); } return startNodeNameList; } /** * generate start node name list from parsing command param; * if "StartNodeIdList" exists in command param, return StartNodeIdList * * @return recovery node name list */ private List<String> getRecoveryNodeNameList() { List<String> recoveryNodeNameList = new ArrayList<>(); if (CollectionUtils.isNotEmpty(recoverNodeIdList)) { for (TaskInstance task : recoverNodeIdList) { 
recoveryNodeNameList.add(task.getName()); } } return recoveryNodeNameList; } /** * generate flow dag * * @param processDefinitionJson process definition json * @param startNodeNameList start node name list * @param recoveryNodeNameList recovery node name list * @param depNodeType depend node type * @return ProcessDag process dag * @throws Exception exception */ public ProcessDag generateFlowDag(String processDefinitionJson, List<String> startNodeNameList, List<String> recoveryNodeNameList, TaskDependType depNodeType) throws Exception { return DagHelper.generateFlowDag(processDefinitionJson, startNodeNameList, recoveryNodeNameList, depNodeType); } }
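One detail in the class above that is easy to misread is retryTaskIntervalOverTime: the configured retry interval is multiplied by SEC_2_MINUTES_TIME_UNIT and compared against the seconds elapsed since the task's end time. A worked example, under the assumption that SEC_2_MINUTES_TIME_UNIT is 60 (i.e. retryInterval is configured in minutes):

```java
// Worked example of the retry gate, assuming Constants.SEC_2_MINUTES_TIME_UNIT == 60.
public class RetryGateSketch {
    public static void main(String[] args) {
        int retryIntervalMinutes = 2; // taskInstance.getRetryInterval()
        long elapsedSeconds = 90;     // DateUtils.differSec(now, taskInstance.getEndTime())

        // true means the interval has passed and the task may be resubmitted
        boolean dueForRetry = retryIntervalMinutes * 60L < elapsedSeconds;
        System.out.println(dueForRetry); // false: 120 seconds required, only 90 elapsed
    }
}
```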
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,058
[Improvement][Master] The master-server creates the DB data source twice
**Describe the question** The master-server creates the DB data source twice: 1. It loads the data source through Spring. 2. AlertManager builds a second data source through the ConnectionFactory factory class. **Which version of DolphinScheduler:** -[dev] ![image](https://user-images.githubusercontent.com/37063904/112297565-03847d00-8cd1-11eb-878b-75e12060172a.png)
https://github.com/apache/dolphinscheduler/issues/5058
https://github.com/apache/dolphinscheduler/pull/5139
0d8011f9c1daa2b79dd45b0c80731d6b31ab0f07
9c77faa8ace7242ab9d37b9c6e65a699f1e2f829
"2021-03-15T12:24:11Z"
java
"2021-05-10T01:56:17Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.server.master.runner;

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.remote.NettyRemotingClient;
import org.apache.dolphinscheduler.remote.config.NettyClientConfig;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.zk.ZKMasterClient;
import org.apache.dolphinscheduler.server.utils.AlertManager;
import org.apache.dolphinscheduler.service.process.ProcessService;

import org.apache.curator.framework.imps.CuratorFrameworkState;
import org.apache.curator.framework.recipes.locks.InterProcessMutex;

import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import javax.annotation.PostConstruct;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * master scheduler thread
 */
@Service
public class MasterSchedulerService extends Thread {

    /**
     * logger of MasterSchedulerService
     */
    private static final Logger logger = LoggerFactory.getLogger(MasterSchedulerService.class);

    /**
     * dolphinscheduler database interface
     */
    @Autowired
    private ProcessService processService;

    /**
     * zookeeper master client
     */
    @Autowired
    private ZKMasterClient zkMasterClient;

    /**
     * master config
     */
    @Autowired
    private MasterConfig masterConfig;

    /**
     * alert manager
     */
    private AlertManager alertManager = new AlertManager();

    /**
     * netty remoting client
     */
    private NettyRemotingClient nettyRemotingClient;

    /**
     * master exec service
     */
    private ThreadPoolExecutor masterExecService;

    /**
     * constructor of MasterSchedulerService
     */
    @PostConstruct
    public void init() {
        this.masterExecService = (ThreadPoolExecutor) ThreadUtils.newDaemonFixedThreadExecutor("Master-Exec-Thread", masterConfig.getMasterExecThreads());
        NettyClientConfig clientConfig = new NettyClientConfig();
        this.nettyRemotingClient = new NettyRemotingClient(clientConfig);
    }

    @Override
    public synchronized void start() {
        super.setName("MasterSchedulerService");
        super.start();
    }

    public void close() {
        masterExecService.shutdown();
        boolean terminated = false;
        try {
            terminated = masterExecService.awaitTermination(5, TimeUnit.SECONDS);
        } catch (InterruptedException ignore) {
            Thread.currentThread().interrupt();
        }
        if (!terminated) {
            logger.warn("masterExecService shutdown without terminated, increase await time");
        }
        nettyRemotingClient.close();
        logger.info("master schedule service stopped...");
    }

    /**
     * run of MasterSchedulerService
     */
    @Override
    public void run() {
        logger.info("master scheduler started");
        while (Stopper.isRunning()) {
            try {
                boolean runCheckFlag = OSUtils.checkResource(masterConfig.getMasterMaxCpuloadAvg(), masterConfig.getMasterReservedMemory());
                if (!runCheckFlag) {
                    Thread.sleep(Constants.SLEEP_TIME_MILLIS);
                    continue;
                }
                if (zkMasterClient.getZkClient().getState() == CuratorFrameworkState.STARTED) {
                    scheduleProcess();
                }
            } catch (Exception e) {
                logger.error("master scheduler thread error", e);
            }
        }
    }

    private void scheduleProcess() throws Exception {
        InterProcessMutex mutex = null;
        try {
            mutex = zkMasterClient.blockAcquireMutex();

            int activeCount = masterExecService.getActiveCount();
            // make sure to scan and delete command table in one transaction
            Command command = processService.findOneCommand();
            if (command != null) {
                logger.info("find one command: id: {}, type: {}", command.getId(), command.getCommandType());
                try {
                    ProcessInstance processInstance = processService.handleCommand(logger,
                            getLocalAddress(),
                            this.masterConfig.getMasterExecThreads() - activeCount, command);
                    if (processInstance != null) {
                        logger.info("start master exec thread , split DAG ...");
                        masterExecService.execute(
                                new MasterExecThread(
                                        processInstance
                                        , processService
                                        , nettyRemotingClient
                                        , alertManager
                                        , masterConfig));
                    }
                } catch (Exception e) {
                    logger.error("scan command error ", e);
                    processService.moveToErrorCommand(command, e.toString());
                }
            } else {
                // indicate that no command, sleep for 1s
                Thread.sleep(Constants.SLEEP_TIME_MILLIS);
            }
        } finally {
            zkMasterClient.releaseMutex(mutex);
        }
    }

    private String getLocalAddress() {
        return NetUtils.getAddr(masterConfig.getListenPort());
    }
}
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,058
[Improvement][Master] The master-server creates the DB data source twice
**Describe the question** The master-server creates the DB data source twice: 1. It loads the data source through Spring. 2. AlertManager builds a second data source through the ConnectionFactory factory class. **Which version of DolphinScheduler:** -[dev] ![image](https://user-images.githubusercontent.com/37063904/112297565-03847d00-8cd1-11eb-878b-75e12060172a.png)
https://github.com/apache/dolphinscheduler/issues/5058
https://github.com/apache/dolphinscheduler/pull/5139
0d8011f9c1daa2b79dd45b0c80731d6b31ab0f07
9c77faa8ace7242ab9d37b9c6e65a699f1e2f829
"2021-03-15T12:24:11Z"
java
"2021-05-10T01:56:17Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DependResult; import org.apache.dolphinscheduler.common.enums.DependentRelation; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.model.DateInterval; import org.apache.dolphinscheduler.common.model.DependentItem; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DependentUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.utils.DagHelper; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; /** * dependent item execute */ public class DependentExecute { /** * process service */ private final ProcessService processService = SpringApplicationContext.getBean(ProcessService.class); /** * depend item list */ private List<DependentItem> dependItemList; /** * dependent relation */ private DependentRelation relation; /** * depend result */ private DependResult modelDependResult = DependResult.WAITING; /** * depend result map */ private Map<String, DependResult> dependResultMap = new HashMap<>(); /** * logger */ private Logger logger = LoggerFactory.getLogger(DependentExecute.class); /** * constructor * @param itemList item list * @param relation relation */ public DependentExecute(List<DependentItem> itemList, DependentRelation relation){ this.dependItemList = itemList; this.relation = relation; } /** * get dependent item for one dependent item * @param dependentItem dependent item * @param currentTime current time * @return DependResult */ private DependResult getDependentResultForItem(DependentItem dependentItem, Date currentTime){ List<DateInterval> dateIntervals = DependentUtils.getDateIntervalList(currentTime, dependentItem.getDateValue()); return calculateResultForTasks(dependentItem, dateIntervals ); } /** * calculate dependent result for one dependent item. 
* @param dependentItem dependent item * @param dateIntervals date intervals * @return dateIntervals */ private DependResult calculateResultForTasks(DependentItem dependentItem, List<DateInterval> dateIntervals) { DependResult result = DependResult.FAILED; for(DateInterval dateInterval : dateIntervals){ ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionId(), dateInterval); if(processInstance == null){ return DependResult.WAITING; } // need to check workflow for updates, so get all task and check the task state if(dependentItem.getDepTasks().equals(Constants.DEPENDENT_ALL)){ result = dependResultByProcessInstance(processInstance); }else{ result = getDependTaskResult(dependentItem.getDepTasks(),processInstance); } if(result != DependResult.SUCCESS){ break; } } return result; } /** * depend type = depend_all * @return */ private DependResult dependResultByProcessInstance(ProcessInstance processInstance){ if(!processInstance.getState().typeIsFinished()){ return DependResult.WAITING; } if(processInstance.getState().typeIsSuccess()){ return DependResult.SUCCESS; } return DependResult.FAILED; } /** * get depend task result * @param taskName * @param processInstance * @return */ private DependResult getDependTaskResult(String taskName, ProcessInstance processInstance) { DependResult result; TaskInstance taskInstance = null; List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); for(TaskInstance task : taskInstanceList){ if(task.getName().equals(taskName)){ taskInstance = task; break; } } if(taskInstance == null){ // cannot find task in the process instance // maybe because process instance is running or failed. if(processInstance.getState().typeIsFinished()){ result = DependResult.FAILED; }else{ return DependResult.WAITING; } }else{ result = getDependResultByState(taskInstance.getState()); } return result; } /** * find the last one process instance that : * 1. manual run and finish between the interval * 2. schedule run and schedule time between the interval * @param definitionId definition id * @param dateInterval date interval * @return ProcessInstance */ private ProcessInstance findLastProcessInterval(int definitionId, DateInterval dateInterval) { ProcessInstance runningProcess = processService.findLastRunningProcess(definitionId, dateInterval.getStartTime(), dateInterval.getEndTime()); if(runningProcess != null){ return runningProcess; } ProcessInstance lastSchedulerProcess = processService.findLastSchedulerProcessInterval( definitionId, dateInterval ); ProcessInstance lastManualProcess = processService.findLastManualProcessInterval( definitionId, dateInterval ); if(lastManualProcess ==null){ return lastSchedulerProcess; } if(lastSchedulerProcess == null){ return lastManualProcess; } return (lastManualProcess.getEndTime().after(lastSchedulerProcess.getEndTime()))? 
lastManualProcess : lastSchedulerProcess; } /** * get dependent result by task/process instance state * @param state state * @return DependResult */ private DependResult getDependResultByState(ExecutionStatus state) { if(!state.typeIsFinished()){ return DependResult.WAITING; }else if(state.typeIsSuccess()){ return DependResult.SUCCESS; }else{ return DependResult.FAILED; } } /** * get dependent result by task instance state when task instance is null * @param state state * @return DependResult */ private DependResult getDependResultByProcessStateWhenTaskNull(ExecutionStatus state) { if(state.typeIsRunning() || state == ExecutionStatus.SUBMITTED_SUCCESS || state == ExecutionStatus.WAITTING_THREAD){ return DependResult.WAITING; }else{ return DependResult.FAILED; } } /** * judge depend item finished * @param currentTime current time * @return boolean */ public boolean finish(Date currentTime){ if(modelDependResult == DependResult.WAITING){ modelDependResult = getModelDependResult(currentTime); return false; } return true; } /** * get model depend result * @param currentTime current time * @return DependResult */ public DependResult getModelDependResult(Date currentTime){ List<DependResult> dependResultList = new ArrayList<>(); for(DependentItem dependentItem : dependItemList){ DependResult dependResult = getDependResultForItem(dependentItem, currentTime); if(dependResult != DependResult.WAITING){ dependResultMap.put(dependentItem.getKey(), dependResult); } dependResultList.add(dependResult); } modelDependResult = DependentUtils.getDependResultForRelation( this.relation, dependResultList ); return modelDependResult; } /** * get dependent item result * @param item item * @param currentTime current time * @return DependResult */ private DependResult getDependResultForItem(DependentItem item, Date currentTime){ String key = item.getKey(); if(dependResultMap.containsKey(key)){ return dependResultMap.get(key); } return getDependentResultForItem(item, currentTime); } public Map<String, DependResult> getDependResultMap(){ return dependResultMap; } }
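The per-item results collected above are folded into one model result by `DependentUtils.getDependResultForRelation`. Below is a self-contained sketch of that AND/OR folding, using a simplified `Result` enum and an assumed precedence (FAILED before WAITING under AND, WAITING before FAILED under OR) that may differ from the real implementation:

```java
import java.util.Arrays;
import java.util.List;

/** Simplified stand-in for DependResult, to keep the sketch self-contained. */
enum Result { SUCCESS, WAITING, FAILED }

public class DependRelationSketch {

    /** AND: every item must succeed; OR: one success is enough. */
    static Result combine(boolean isAnd, List<Result> items) {
        if (isAnd) {
            if (items.contains(Result.FAILED)) {
                return Result.FAILED;
            }
            return items.contains(Result.WAITING) ? Result.WAITING : Result.SUCCESS;
        }
        if (items.contains(Result.SUCCESS)) {
            return Result.SUCCESS;
        }
        return items.contains(Result.WAITING) ? Result.WAITING : Result.FAILED;
    }

    public static void main(String[] args) {
        // AND with a pending item stays WAITING; OR succeeds on the first success
        System.out.println(combine(true, Arrays.asList(Result.SUCCESS, Result.WAITING)));  // WAITING
        System.out.println(combine(false, Arrays.asList(Result.FAILED, Result.SUCCESS)));  // SUCCESS
    }
}
```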
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5058
[Improvement][Master] The master-server creates the DB data source twice
**Describe the question** The master-server creates the DB data source twice: 1. Spring loads the data source; 2. AlertManager builds a second data source with the ConnectionFactory factory class. **Which version of DolphinScheduler:** -[dev] ![image](https://user-images.githubusercontent.com/37063904/112297565-03847d00-8cd1-11eb-878b-75e12060172a.png)
https://github.com/apache/dolphinscheduler/issues/5058
https://github.com/apache/dolphinscheduler/pull/5139
0d8011f9c1daa2b79dd45b0c80731d6b31ab0f07
9c77faa8ace7242ab9d37b9c6e65a699f1e2f829
"2021-03-15T12:24:11Z"
java
"2021-05-10T01:56:17Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.Map; /** * param utils */ public class ParamUtils { /** * parameter conversion * @param globalParams global params * @param globalParamsMap global params map * @param localParams local params * @param commandType command type * @param scheduleTime schedule time * @return global params */ public static Map<String,Property> convert(Map<String,Property> globalParams, Map<String,String> globalParamsMap, Map<String,Property> localParams, CommandType commandType, Date scheduleTime){ if (globalParams == null && localParams == null){ return null; } // if it is a complement, // you need to pass in the task instance id to locate the time // of the process instance complement Map<String,String> timeParams = BusinessTimeUtils .getBusinessTime(commandType, scheduleTime); if (globalParamsMap != null){ timeParams.putAll(globalParamsMap); } if (globalParams != null && localParams != null){ globalParams.putAll(localParams); }else if (globalParams == null && localParams != null){ globalParams = localParams; } Iterator<Map.Entry<String, Property>> iter = globalParams.entrySet().iterator(); while (iter.hasNext()){ Map.Entry<String, Property> en = iter.next(); Property property = en.getValue(); if (StringUtils.isNotEmpty(property.getValue()) && property.getValue().startsWith("$")){ /** * local parameter refers to global parameter with the same name * note: the global parameters of the process instance here are solidified parameters, * and there are no variables in them. 
*/ String val = property.getValue(); val = ParameterUtils.convertParameterPlaceholders(val, timeParams); property.setValue(val); } } return globalParams; } /** * format convert * @param paramsMap params map * @return Map of converted */ public static Map<String,String> convert(Map<String,Property> paramsMap){ if(paramsMap == null){ return null; } Map<String,String> map = new HashMap<>(); Iterator<Map.Entry<String, Property>> iter = paramsMap.entrySet().iterator(); while (iter.hasNext()){ Map.Entry<String, Property> en = iter.next(); map.put(en.getKey(),en.getValue().getValue()); } return map; } /** * get parameters map * @param definedParams definedParams * @return parameters map */ public static Map<String,Property> getUserDefParamsMap(Map<String,String> definedParams) { if (definedParams != null) { Map<String,Property> userDefParamsMaps = new HashMap<>(); Iterator<Map.Entry<String, String>> iter = definedParams.entrySet().iterator(); while (iter.hasNext()){ Map.Entry<String, String> en = iter.next(); Property property = new Property(en.getKey(), Direct.IN, DataType.VARCHAR , en.getValue()); userDefParamsMaps.put(property.getProp(),property); } return userDefParamsMaps; } return null; } }
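The `convert` methods above substitute time-based values into `$`-prefixed parameters via `ParameterUtils.convertParameterPlaceholders`. A deliberately naive sketch of that substitution step, using a made-up `${name}` syntax for illustration (the real utility also understands `$[yyyyMMdd]`-style date expressions):

```java
import java.util.HashMap;
import java.util.Map;

public class PlaceholderSketch {

    /** Naive substitution for a made-up ${name} syntax; the real logic lives in
     *  ParameterUtils.convertParameterPlaceholders. */
    static String resolve(String value, Map<String, String> timeParams) {
        String result = value;
        for (Map.Entry<String, String> e : timeParams.entrySet()) {
            result = result.replace("${" + e.getKey() + "}", e.getValue());
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, String> timeParams = new HashMap<>();
        timeParams.put("system.biz.date", "20210315");
        // prints: dt=20210315
        System.out.println(resolve("dt=${system.biz.date}", timeParams));
    }
}
```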
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5058
[Improvement][Master] The master-server creates the DB data source twice
**Describe the question** The master-server creates the DB data source twice: 1. Spring loads the data source; 2. AlertManager builds a second data source with the ConnectionFactory factory class. **Which version of DolphinScheduler:** -[dev] ![image](https://user-images.githubusercontent.com/37063904/112297565-03847d00-8cd1-11eb-878b-75e12060172a.png)
https://github.com/apache/dolphinscheduler/issues/5058
https://github.com/apache/dolphinscheduler/pull/5139
0d8011f9c1daa2b79dd45b0c80731d6b31ab0f07
9c77faa8ace7242ab9d37b9c6e65a699f1e2f829
"2021-03-15T12:24:11Z"
java
"2021-05-10T01:56:17Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/AlertManagerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.master; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.server.utils.AlertManager; import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; import java.util.List; /** * alert manager test */ @Ignore public class AlertManagerTest { private static final Logger logger = LoggerFactory.getLogger(AlertManagerTest.class); @Autowired ProcessDefinitionMapper processDefinitionMapper; @Autowired ProcessInstanceMapper processInstanceMapper; @Autowired TaskInstanceMapper taskInstanceMapper; @Autowired ProjectMapper projectMapper; AlertManager alertManager; /** * send worker alert fault tolerance */ @Test public void sendWarnningWorkerleranceFaultTest(){ // process instance ProcessInstance processInstance = processInstanceMapper.queryDetailById(13028); // set process definition ProcessDefinition processDefinition = processDefinitionMapper.selectById(47); processInstance.setProcessDefinition(processDefinition); // fault task instance TaskInstance toleranceTask1 = taskInstanceMapper.selectById(5038); TaskInstance toleranceTask2 = taskInstanceMapper.selectById(5039); List<TaskInstance> toleranceTaskList = new ArrayList<>(2); toleranceTaskList.add(toleranceTask1); toleranceTaskList.add(toleranceTask2); alertManager.sendAlertWorkerToleranceFault(processInstance, toleranceTaskList); } /** * send worker alert fault tolerance */ @Test public void sendWarnningOfProcessInstanceTest(){ // process instance ProcessInstance processInstance = processInstanceMapper.queryDetailById(13028); // set process definition ProcessDefinition processDefinition = processDefinitionMapper.selectById(47); processInstance.setProcessDefinition(processDefinition); // fault task instance TaskInstance toleranceTask1 = taskInstanceMapper.selectById(5038); toleranceTask1.setState(ExecutionStatus.FAILURE); TaskInstance toleranceTask2 = taskInstanceMapper.selectById(5039); toleranceTask2.setState(ExecutionStatus.FAILURE); List<TaskInstance> toleranceTaskList = new ArrayList<>(2); toleranceTaskList.add(toleranceTask1); 
toleranceTaskList.add(toleranceTask2); ProjectUser projectUser = projectMapper.queryProjectWithUserByProcessInstanceId(processInstance.getId()); alertManager.sendAlertProcessInstance(processInstance, toleranceTaskList, projectUser); } }
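Note that the test snapshot above is a plain (and `@Ignore`'d) JUnit class, so its `@Autowired` mappers would stay null if run directly. A minimal sketch of the Spring-backed setup such a test would need; it assumes a Spring Boot test context is available on the classpath and is not part of the patch itself:

```java
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest
public class SpringContextSmokeTest {

    // SpringRunner performs the injection before any test method runs
    @Autowired
    private ApplicationContext context;

    @Test
    public void contextLoads() {
        Assert.assertNotNull(context);
    }
}
```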
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5058
[Improvement][Master] The master-server creates the DB data source twice
**Describe the question** The master-server creates the DB data source twice: 1. Spring loads the data source; 2. AlertManager builds a second data source with the ConnectionFactory factory class. **Which version of DolphinScheduler:** -[dev] ![image](https://user-images.githubusercontent.com/37063904/112297565-03847d00-8cd1-11eb-878b-75e12060172a.png)
https://github.com/apache/dolphinscheduler/issues/5058
https://github.com/apache/dolphinscheduler/pull/5139
0d8011f9c1daa2b79dd45b0c80731d6b31ab0f07
9c77faa8ace7242ab9d37b9c6e65a699f1e2f829
"2021-03-15T12:24:11Z"
java
"2021-05-10T01:56:17Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5058
[Improvement][Master] The master-server creates the DB data source twice
**Describe the question** The master-server creates the DB data source twice: 1. Spring loads the data source; 2. AlertManager builds a second data source with the ConnectionFactory factory class. **Which version of DolphinScheduler:** -[dev] ![image](https://user-images.githubusercontent.com/37063904/112297565-03847d00-8cd1-11eb-878b-75e12060172a.png)
https://github.com/apache/dolphinscheduler/issues/5058
https://github.com/apache/dolphinscheduler/pull/5139
0d8011f9c1daa2b79dd45b0c80731d6b31ab0f07
9c77faa8ace7242ab9d37b9c6e65a699f1e2f829
"2021-03-15T12:24:11Z"
java
"2021-05-10T01:56:17Z"
dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManagerTest.java
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5058
[Improvement][Master] The master-server creates the DB data source twice
**Describe the question** The master-server creates the DB data source twice: 1. Spring loads the data source; 2. AlertManager builds a second data source with the ConnectionFactory factory class. **Which version of DolphinScheduler:** -[dev] ![image](https://user-images.githubusercontent.com/37063904/112297565-03847d00-8cd1-11eb-878b-75e12060172a.png)
https://github.com/apache/dolphinscheduler/issues/5058
https://github.com/apache/dolphinscheduler/pull/5139
0d8011f9c1daa2b79dd45b0c80731d6b31ab0f07
9c77faa8ace7242ab9d37b9c6e65a699f1e2f829
"2021-03-15T12:24:11Z"
java
"2021-05-10T01:56:17Z"
pom.xml
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler</artifactId> <version>${revision}</version> <packaging>pom</packaging> <name>${project.artifactId}</name> <url>http://dolphinscheduler.apache.org</url> <description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. </description> <licenses> <license> <name>Apache License 2.0</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> <distribution>repo</distribution> </license> </licenses> <scm> <connection>scm:git:https://github.com/apache/dolphinscheduler.git</connection> <developerConnection>scm:git:https://github.com/apache/dolphinscheduler.git</developerConnection> <url>https://github.com/apache/dolphinscheduler</url> <tag>HEAD</tag> </scm> <mailingLists> <mailingList> <name>DolphinScheduler Developer List</name> <post>dev@dolphinscheduler.apache.org</post> <subscribe>dev-subscribe@dolphinscheduler.apache.org</subscribe> <unsubscribe>dev-unsubscribe@dolphinscheduler.apache.org</unsubscribe> </mailingList> </mailingLists> <parent> <groupId>org.apache</groupId> <artifactId>apache</artifactId> <version>21</version> </parent> <properties> <revision>1.3.6-SNAPSHOT</revision> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <curator.version>4.3.0</curator.version> <spring.version>5.1.19.RELEASE</spring.version> <spring.boot.version>2.1.18.RELEASE</spring.boot.version> <java.version>1.8</java.version> <logback.version>1.2.3</logback.version> <hadoop.version>2.7.3</hadoop.version> <quartz.version>2.3.0</quartz.version> <jackson.version>2.10.5</jackson.version> <mybatis-plus.version>3.2.0</mybatis-plus.version> <mybatis.spring.version>2.0.1</mybatis.spring.version> <cron.utils.version>5.0.5</cron.utils.version> <druid.version>1.1.22</druid.version> <h2.version>1.4.200</h2.version> <commons.codec.version>1.11</commons.codec.version> <commons.logging.version>1.1.1</commons.logging.version> <httpclient.version>4.4.1</httpclient.version> <httpcore.version>4.4.1</httpcore.version> <junit.version>4.12</junit.version> <mysql.connector.version>5.1.34</mysql.connector.version> <slf4j.api.version>1.7.5</slf4j.api.version> <slf4j.log4j12.version>1.7.5</slf4j.log4j12.version> 
<commons.collections.version>3.2.2</commons.collections.version> <commons.httpclient>3.0.1</commons.httpclient> <commons.beanutils.version>1.9.4</commons.beanutils.version> <commons.configuration.version>1.10</commons.configuration.version> <commons.email.version>1.5</commons.email.version> <poi.version>3.17</poi.version> <javax.servlet.api.version>3.1.0</javax.servlet.api.version> <commons.collections4.version>4.1</commons.collections4.version> <guava.version>24.1-jre</guava.version> <postgresql.version>42.2.5</postgresql.version> <hive.jdbc.version>2.1.0</hive.jdbc.version> <commons.io.version>2.4</commons.io.version> <oshi.core.version>3.9.1</oshi.core.version> <clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version> <mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version> <presto.jdbc.version>0.238.1</presto.jdbc.version> <spotbugs.version>3.1.12</spotbugs.version> <checkstyle.version>3.0.0</checkstyle.version> <zookeeper.version>3.4.14</zookeeper.version> <frontend-maven-plugin.version>1.6</frontend-maven-plugin.version> <maven-compiler-plugin.version>3.3</maven-compiler-plugin.version> <maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version> <maven-release-plugin.version>2.5.3</maven-release-plugin.version> <maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version> <maven-source-plugin.version>2.4</maven-source-plugin.version> <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version> <maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version> <rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version> <jacoco.version>0.8.4</jacoco.version> <jcip.version>1.0</jcip.version> <maven.deploy.skip>false</maven.deploy.skip> <cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version> <mockito.version>2.21.0</mockito.version> <powermock.version>2.0.2</powermock.version> <servlet-api.version>2.5</servlet-api.version> <swagger.version>1.9.3</swagger.version> <springfox.version>2.9.2</springfox.version> <swagger-models.version>1.5.24</swagger-models.version> <guava-retry.version>2.0.0</guava-retry.version> <dep.airlift.version>0.184</dep.airlift.version> <dep.packaging.version>${dep.airlift.version}</dep.packaging.version> <protostuff.version>1.7.2</protostuff.version> <reflections.version>0.9.12</reflections.version> <byte-buddy.version>1.9.16</byte-buddy.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus-boot-starter</artifactId> <version>${mybatis-plus.version}</version> </dependency> <dependency> <groupId>com.baomidou</groupId> <artifactId>mybatis-plus</artifactId> <version>${mybatis-plus.version}</version> </dependency> <!-- quartz--> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>org.quartz-scheduler</groupId> <artifactId>quartz-jobs</artifactId> <version>${quartz.version}</version> </dependency> <dependency> <groupId>com.cronutils</groupId> <artifactId>cron-utils</artifactId> <version>${cron.utils.version}</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>${druid.version}</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-parent</artifactId> <version>${spring.boot.version}</version> <type>pom</type> <scope>import</scope> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> 
<version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-tx</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-jdbc</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-test</artifactId> <version>${spring.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-server</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-common</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert-plugin</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-dao</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-api</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-remote</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-service</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-alert</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-spi</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-framework</artifactId> <version>${curator.version}</version> </dependency> <dependency> <groupId>org.apache.curator</groupId> <artifactId>curator-recipes</artifactId> <version>${curator.version}</version> <exclusions> <exclusion> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> <exclusions> <exclusion> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </exclusion> <exclusion> <artifactId>netty</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-annotations</artifactId> </exclusion> </exclusions> <version>${zookeeper.version}</version> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec.version}</version> </dependency> <dependency> <groupId>commons-logging</groupId> <artifactId>commons-logging</artifactId> <version>${commons.logging.version}</version> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>${httpclient.version}</version> </dependency> <dependency> 
<groupId>org.apache.httpcomponents</groupId> <artifactId>httpcore</artifactId> <version>${httpcore.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-core</artifactId> <version>${jackson.version}</version> </dependency> <!--protostuff--> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-core --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-core</artifactId> <version>${protostuff.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/io.protostuff/protostuff-runtime --> <dependency> <groupId>io.protostuff</groupId> <artifactId>protostuff-runtime</artifactId> <version>${protostuff.version}</version> </dependency> <dependency> <groupId>net.bytebuddy</groupId> <artifactId>byte-buddy</artifactId> <version>${byte-buddy.version}</version> </dependency> <dependency> <groupId>org.reflections</groupId> <artifactId>reflections</artifactId> <version>${reflections.version}</version> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockito.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-module-junit4</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.powermock</groupId> <artifactId>powermock-api-mockito2</artifactId> <version>${powermock.version}</version> <type>jar</type> <scope>test</scope> <exclusions> <exclusion> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> </exclusion> </exclusions> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>${mysql.connector.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>com.h2database</groupId> <artifactId>h2</artifactId> <version>${h2.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${slf4j.api.version}</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>${slf4j.log4j12.version}</version> </dependency> <dependency> <groupId>commons-collections</groupId> <artifactId>commons-collections</artifactId> <version>${commons.collections.version}</version> </dependency> <dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> <version>${commons.httpclient}</version> </dependency> <dependency> <groupId>commons-beanutils</groupId> <artifactId>commons-beanutils</artifactId> <version>${commons.beanutils.version}</version> </dependency> <dependency> <groupId>commons-configuration</groupId> <artifactId>commons-configuration</artifactId> <version>${commons.configuration.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> 
<version>${logback.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-email</artifactId> <version>${commons.email.version}</version> </dependency> <!--excel poi--> <dependency> <groupId>org.apache.poi</groupId> <artifactId>poi</artifactId> <version>${poi.version}</version> </dependency> <!-- hadoop --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> <exclusions> <exclusion> <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <artifactId>com.sun.jersey</artifactId> <groupId>jersey-json</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-yarn-common</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-aws</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-collections4</artifactId> <version>${commons.collections4.version}</version> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> </dependency> <dependency> <groupId>org.postgresql</groupId> <artifactId>postgresql</artifactId> <version>${postgresql.version}</version> </dependency> <dependency> <groupId>org.apache.hive</groupId> <artifactId>hive-jdbc</artifactId> <version>${hive.jdbc.version}</version> </dependency> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>${commons.io.version}</version> </dependency> <dependency> <groupId>com.github.oshi</groupId> <artifactId>oshi-core</artifactId> <version>${oshi.core.version}</version> </dependency> <dependency> <groupId>ru.yandex.clickhouse</groupId> <artifactId>clickhouse-jdbc</artifactId> <version>${clickhouse.jdbc.version}</version> </dependency> <dependency> <groupId>com.microsoft.sqlserver</groupId> <artifactId>mssql-jdbc</artifactId> <version>${mssql.jdbc.version}</version> </dependency> <dependency> <groupId>com.facebook.presto</groupId> <artifactId>presto-jdbc</artifactId> <version>${presto.jdbc.version}</version> </dependency> <dependency> <groupId>net.jcip</groupId> <artifactId>jcip-annotations</artifactId> <version>${jcip.version}</version> <optional>true</optional> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>servlet-api</artifactId> <version>${servlet-api.version}</version> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>javax.servlet-api</artifactId> <version>${javax.servlet.api.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger2</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.springfox</groupId> <artifactId>springfox-swagger-ui</artifactId> <version>${springfox.version}</version> </dependency> <dependency> <groupId>io.swagger</groupId> <artifactId>swagger-models</artifactId> <version>${swagger-models.version}</version> </dependency> <dependency> <groupId>com.github.xiaoymin</groupId> <artifactId>swagger-bootstrap-ui</artifactId> 
<version>${swagger.version}</version> </dependency> <dependency> <groupId>com.github.rholder</groupId> <artifactId>guava-retrying</artifactId> <version>${guava-retry.version}</version> </dependency> <dependency> <groupId>org.sonatype.aether</groupId> <artifactId>aether-api</artifactId> <version>1.13.1</version> </dependency> <dependency> <groupId>io.airlift.resolver</groupId> <artifactId>resolver</artifactId> <version>1.5</version> </dependency> <dependency> <groupId>org.ow2.asm</groupId> <artifactId>asm</artifactId> <version>6.2.1</version> </dependency> <dependency> <groupId>javax.activation</groupId> <artifactId>activation</artifactId> <version>1.1</version> </dependency> <dependency> <groupId>com.sun.mail</groupId> <artifactId>javax.mail</artifactId> <version>1.6.2</version> </dependency> </dependencies> </dependencyManagement> <build> <finalName>apache-dolphinscheduler-${project.version}</finalName> <pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <version>1.0.0</version> <extensions>true</extensions> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <version>1.0.4</version> <extensions>true</extensions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>rpm-maven-plugin</artifactId> <version>${rpm-maven-plugion.version}</version> <inherited>false</inherited> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>${java.version}</source> <target>${java.version}</target> <testSource>${java.version}</testSource> <testTarget>${java.version}</testTarget> </configuration> <version>${maven-compiler-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <tagNameFormat>@{project.version}</tagNameFormat> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-assembly-plugin</artifactId> <version>${maven-assembly-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <configuration> <source>8</source> <failOnError>false</failOnError> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>${maven-source-plugin.version}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> <version>${maven-dependency-plugin.version}</version> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>org.apache.dolphinscheduler</groupId> <artifactId>dolphinscheduler-maven-plugin</artifactId> <extensions>true</extensions> <!--<configuration>--> <!--<allowedProvidedDependencies>--> <!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>--> <!--</allowedProvidedDependencies>--> <!--</configuration>--> </plugin> <plugin> <groupId>ca.vanzyl.maven.plugins</groupId> <artifactId>provisio-maven-plugin</artifactId> <extensions>true</extensions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> 
<executions> <execution> <id>attach-sources</id> <phase>verify</phase> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <executions> <execution> <id>attach-javadocs</id> <goals> <goal>jar</goal> </goals> </execution> </executions> <configuration> <aggregate>true</aggregate> <charset>${project.build.sourceEncoding}</charset> <encoding>${project.build.sourceEncoding}</encoding> <docencoding>${project.build.sourceEncoding}</docencoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-release-plugin</artifactId> <version>${maven-release-plugin.version}</version> <configuration> <autoVersionSubmodules>true</autoVersionSubmodules> <tagNameFormat>@{project.version}</tagNameFormat> <tagBase>${project.version}</tagBase> <!--<goals>-f pom.xml deploy</goals>--> </configuration> <dependencies> <dependency> <groupId>org.apache.maven.scm</groupId> <artifactId>maven-scm-provider-jgit</artifactId> <version>1.9.5</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>${maven-compiler-plugin.version}</version> <configuration> <source>${java.version}</source> <target>${java.version}</target> <encoding>${project.build.sourceEncoding}</encoding> <skip>false</skip><!--not skip compile test classes--> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>${maven-surefire-plugin.version}</version> <configuration> <includes> <include>**/api/controller/ProjectControllerTest.java</include> <include>**/api/controller/QueueControllerTest.java</include> <include>**/api/configuration/TrafficConfigurationTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TenantControllerTest.java</include> <include>**/api/dto/resources/filter/ResourceFilterTest.java</include> <include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include> <includeDataxTaskTest>**/api/enums/testGetEnum.java</includeDataxTaskTest> <include>**/api/enums/StatusTest.java</include> <include>**/api/exceptions/ApiExceptionHandlerTest.java</include> <include>**/api/exceptions/ServiceExceptionTest.java</include> <include>**/api/interceptor/LocaleChangeInterceptorTest.java</include> <include>**/api/interceptor/LoginHandlerInterceptorTest.java</include> <include>**/api/interceptor/RateLimitInterceptorTest.java</include> <include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include> <include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include> <include>**/api/security/SecurityConfigLDAPTest.java</include> <include>**/api/security/SecurityConfigPasswordTest.java</include> <include>**/api/service/AccessTokenServiceTest.java</include> <include>**/api/service/AlertGroupServiceTest.java</include> <include>**/api/service/BaseDAGServiceTest.java</include> <include>**/api/service/BaseServiceTest.java</include> <include>**/api/service/DataAnalysisServiceTest.java</include> <include>**/api/service/AlertPluginInstanceServiceTest.java</include> <include>**/api/service/DataSourceServiceTest.java</include> <include>**/api/service/ExecutorService2Test.java</include> <include>**/api/service/ExecutorServiceTest.java</include> 
<include>**/api/service/LoggerServiceTest.java</include> <include>**/api/service/MonitorServiceTest.java</include> <include>**/api/service/ProcessDefinitionServiceTest.java</include> <include>**/api/service/ProcessDefinitionVersionServiceTest.java</include> <include>**/api/service/ProcessInstanceServiceTest.java</include> <include>**/api/service/ProjectServiceTest.java</include> <include>**/api/service/QueueServiceTest.java</include> <include>**/api/service/ResourcesServiceTest.java</include> <include>**/api/service/SchedulerServiceTest.java</include> <include>**/api/service/SessionServiceTest.java</include> <include>**/api/service/TaskInstanceServiceTest.java</include> <include>**/api/service/TenantServiceTest.java</include> <include>**/api/service/UdfFuncServiceTest.java</include> <include>**/api/service/UiPluginServiceTest.java</include> <include>**/api/service/UserAlertGroupServiceTest.java</include> <include>**/api/service/UsersServiceTest.java</include> <include>**/api/service/WorkerGroupServiceTest.java</include> <include>**/api/service/WorkFlowLineageServiceTest.java</include> <include>**/api/controller/ProcessDefinitionControllerTest.java</include> <include>**/api/controller/TaskInstanceControllerTest.java</include> <include>**/api/controller/WorkFlowLineageControllerTest.java</include> <include>**/api/utils/exportprocess/DataSourceParamTest.java</include> <include>**/api/utils/exportprocess/DependentParamTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/FileUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/CheckUtilsTest.java</include> <include>**/api/utils/ResultTest.java</include> <include>**/common/graph/DAGTest.java</include> <include>**/common/os/OshiTest.java</include> <include>**/common/os/OSUtilsTest.java</include> <include>**/common/shell/ShellExecutorTest.java</include> <include>**/common/task/DataxParametersTest.java</include> <include>**/common/task/EntityTestUtils.java</include> <include>**/common/task/FlinkParametersTest.java</include> <include>**/common/task/HttpParametersTest.java</include> <include>**/common/task/SqlParametersTest.java</include> <include>**/common/task/SqoopParameterEntityTest.java</include> <include>**/common/threadutils/ThreadPoolExecutorsTest.java</include> <include>**/common/threadutils/ThreadUtilsTest.java</include> <include>**/common/utils/CollectionUtilsTest.java</include> <include>**/common/utils/CommonUtilsTest.java</include> <include>**/common/utils/DateUtilsTest.java</include> <include>**/common/utils/DependentUtilsTest.java</include> <include>**/common/utils/EncryptionUtilsTest.java</include> <include>**/common/utils/FileUtilsTest.java</include> <include>**/common/utils/JSONUtilsTest.java</include> <include>**/common/utils/LoggerUtilsTest.java</include> <include>**/common/utils/NetUtilsTest.java</include> <include>**/common/utils/OSUtilsTest.java</include> <include>**/common/utils/ParameterUtilsTest.java</include> <include>**/common/utils/TimePlaceholderUtilsTest.java</include> <include>**/common/utils/PreconditionsTest.java</include> <include>**/common/utils/PropertyUtilsTest.java</include> <include>**/common/utils/SchemaUtilsTest.java</include> <include>**/common/utils/ScriptRunnerTest.java</include> <include>**/common/utils/SensitiveLogUtilsTest.java</include> <include>**/common/utils/StringTest.java</include> <include>**/common/utils/StringUtilsTest.java</include> <include>**/common/utils/TaskParametersUtilsTest.java</include> 
<include>**/common/utils/VarPoolUtilsTest.java</include> <include>**/common/utils/HadoopUtilsTest.java</include> <include>**/common/utils/HttpUtilsTest.java</include> <include>**/common/utils/KerberosHttpClientTest.java</include> <include>**/common/utils/HiveConfUtilsTest.java</include> <include>**/common/ConstantsTest.java</include> <include>**/common/utils/HadoopUtils.java</include> <include>**/common/utils/RetryerUtilsTest.java</include> <include>**/common/plugin/DolphinSchedulerPluginLoaderTest.java</include> <include>**/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java</include> <include>**/common/datasource/db2/Db2DatasourceProcessorTest.java</include> <include>**/common/datasource/hive/HiveDatasourceProcessorTest.java</include> <include>**/common/datasource/mysql/MysqlDatasourceProcessorTest.java</include> <include>**/common/datasource/oracle/OracleDatasourceProcessorTest.java</include> <include>**/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java</include> <include>**/common/datasource/presto/PrestoDatasourceProcessorTest.java</include> <include>**/common/datasource/spark/SparkDatasourceProcessorTest.java</include> <include>**/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java</include> <include>**/common/datasource/DatasourceUtilTest.java</include> <include>**/common/enums/ExecutionStatusTest</include> <include>**/dao/mapper/AccessTokenMapperTest.java</include> <include>**/dao/mapper/AlertGroupMapperTest.java</include> <include>**/dao/mapper/CommandMapperTest.java</include> <include>**/dao/mapper/ConnectionFactoryTest.java</include> <include>**/dao/mapper/DataSourceMapperTest.java</include> <include>**/dao/datasource/MySQLDataSourceTest.java</include> <include>**/dao/entity/TaskInstanceTest.java</include> <include>**/dao/entity/UdfFuncTest.java</include> <include>**/remote/command/alert/AlertSendRequestCommandTest.java</include> <include>**/remote/command/alert/AlertSendResponseCommandTest.java</include> <include>**/remote/command/future/ResponseFutureTest.java</include> <include>**/remote/command/log/RemoveTaskLogRequestCommandTest.java</include> <include>**/remote/command/log/RemoveTaskLogResponseCommandTest.java</include> <include>**/remote/command/log/GetLogBytesRequestCommandTest.java</include> <include>**/remote/command/log/GetLogBytesResponseCommandTest.java</include> <include>**/remote/command/log/ViewLogRequestCommandTest.java</include> <include>**/remote/utils/HostTest.java</include> <include>**/remote/utils/NettyUtilTest.java</include> <include>**/remote/NettyRemotingClientTest.java</include> <include>**/rpc/RpcTest.java</include> <include>**/server/log/LoggerServerTest.java</include> <include>**/server/entity/SQLTaskExecutionContextTest.java</include> <include>**/server/log/MasterLogFilterTest.java</include> <include>**/server/log/SensitiveDataConverterTest.java</include> <include>**/server/log/LoggerRequestProcessorTest.java</include> <!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>--> <include>**/server/log/TaskLogFilterTest.java</include> <include>**/server/log/WorkerLogFilterTest.java</include> <include>**/server/master/config/MasterConfigTest.java</include> <include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include> <include>**/server/master/runner/MasterTaskExecThreadTest.java</include> <!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>--> <include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include> 
<include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include> <include>**/server/master/dispatch/host/assign/HostWorkerTest.java</include> <include>**/server/master/register/MasterRegistryTest.java</include> <include>**/server/master/registry/ServerNodeManagerTest.java</include> <include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include> <include>**/server/master/AlertManagerTest.java</include> <include>**/server/master/MasterCommandTest.java</include> <include>**/server/master/DependentTaskTest.java</include> <include>**/server/master/ConditionsTaskTest.java</include> <include>**/server/master/MasterExecThreadTest.java</include> <include>**/server/master/ParamsTest.java</include> <include>**/server/master/SubProcessTaskTest.java</include> <include>**/server/master/processor/TaskAckProcessorTest.java</include> <include>**/server/master/processor/TaskKillResponseProcessorTest.java</include> <include>**/server/master/processor/queue/TaskResponseServiceTest.java</include> <include>**/server/master/zk/ZKMasterClientTest.java</include> <include>**/server/register/ZookeeperRegistryCenterTest.java</include> <include>**/server/utils/DataxUtilsTest.java</include> <include>**/server/utils/ExecutionContextTestUtils.java</include> <include>**/server/utils/FlinkArgsUtilsTest.java</include> <include>**/server/utils/LogUtilsTest.java</include> <include>**/server/utils/MapReduceArgsUtilsTest.java</include> <include>**/server/utils/ParamUtilsTest.java</include> <include>**/server/utils/ProcessUtilsTest.java</include> <include>**/server/utils/SparkArgsUtilsTest.java</include> <include>**/server/worker/processor/TaskCallbackServiceTest.java</include> <include>**/server/worker/processor/TaskExecuteProcessorTest.java</include> <include>**/server/worker/registry/WorkerRegistryTest.java</include> <include>**/server/worker/shell/ShellCommandExecutorTest.java</include> <include>**/server/worker/sql/SqlExecutorTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/EnvFileTest.java</include> <include>**/server/worker/task/spark/SparkTaskTest.java</include> <include>**/server/worker/task/datax/DataxTaskTest.java</include> <!--<include>**/server/worker/task/http/HttpTaskTest.java</include>--> <include>**/server/worker/task/sqoop/SqoopTaskTest.java</include> <include>**/server/worker/task/processdure/ProcedureTaskTest.java</include> <include>**/server/worker/task/shell/ShellTaskTest.java</include> <include>**/server/worker/task/TaskManagerTest.java</include> <include>**/server/worker/task/AbstractCommandExecutorTest.java</include> <include>**/server/worker/task/ShellTaskReturnTest.java</include> <include>**/server/worker/task/sql/SqlTaskTest.java</include> <include>**/server/worker/EnvFileTest.java</include> <include>**/server/worker/runner/TaskExecuteThreadTest.java</include> <include>**/server/worker/runner/WorkerManagerThreadTest.java</include> <include>**/service/quartz/cron/CronUtilsTest.java</include> <include>**/service/process/ProcessServiceTest.java</include> <include>**/service/zk/DefaultEnsembleProviderTest.java</include> <include>**/service/zk/ZKServerTest.java</include> <include>**/service/zk/CuratorZookeeperClientTest.java</include> <include>**/service/zk/RegisterOperatorTest.java</include> <include>**/service/queue/TaskUpdateQueueTest.java</include> 
<include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include> <include>**/service/log/LogClientServiceTest.java</include> <include>**/service/alert/AlertClientServiceTest.java</include> <include>**/dao/mapper/DataSourceUserMapperTest.java</include> <!--<iTaskUpdateQueueConsumerThreadnclude>**/dao/mapper/ErrorCommandMapperTest.java</iTaskUpdateQueueConsumerThreadnclude>--> <include>**/dao/mapper/ProcessDefinitionMapperTest.java</include> <include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include> <include>**/dao/mapper/ProcessInstanceMapperTest.java</include> <include>**/dao/mapper/ProjectMapperTest.java</include> <include>**/dao/mapper/ProjectUserMapperTest.java</include> <include>**/dao/mapper/QueueMapperTest.java</include> <include>**/dao/mapper/ResourceUserMapperTest.java</include> <include>**/dao/mapper/ScheduleMapperTest.java</include> <include>**/dao/mapper/SessionMapperTest.java</include> <include>**/dao/mapper/TaskInstanceMapperTest.java</include> <include>**/dao/mapper/TenantMapperTest.java</include> <include>**/dao/mapper/UdfFuncMapperTest.java</include> <include>**/dao/mapper/UDFUserMapperTest.java</include> <include>**/dao/mapper/UserMapperTest.java</include> <include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include> <include>**/dao/mapper/PluginDefineTest.java</include> <include>**/dao/utils/DagHelperTest.java</include> <include>**/dao/AlertDaoTest.java</include> <include>**/dao/datasource/OracleDataSourceTest.java</include> <include>**/dao/datasource/HiveDataSourceTest.java</include> <include>**/dao/datasource/BaseDataSourceTest.java</include> <include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include> <include>**/dao/upgrade/WokrerGrouopDaoTest.java</include> <include>**/dao/upgrade/UpgradeDaoTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include> <include>**/plugin/alert/email/EmailAlertChannelTest.java</include> <include>**/plugin/alert/email/ExcelUtilsTest.java</include> <include>**/plugin/alert/email/MailUtilsTest.java</include> <include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include> <include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include> <include>**/plugin/alert/wechat/WeChatSenderTest.java</include> <include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ProcessUtilsTest.java</include> <include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include> <include>**/plugin/alert/script/ScriptSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include> <include>**/plugin/alert/http/HttpAlertChannelTest.java</include> <include>**/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java</include> <include>**/plugin/alert/feishu/FeiShuSenderTest.java</include> <include>**/plugin/alert/http/HttpAlertPluginTest.java</include> <include>**/plugin/alert/http/HttpSenderTest.java</include> <include>**/plugin/alert/slack/SlackAlertChannelFactoryTest.java</include> <include>**/plugin/alert/slack/SlackAlertPluginTest.java</include> <include>**/plugin/alert/slack/SlackSenderTest.java</include> <include>**/spi/params/PluginParamsTransferTest.java</include> <include>**/alert/plugin/EmailAlertPluginTest.java</include> <include>**/alert/plugin/AlertPluginManagerTest.java</include> 
<include>**/alert/plugin/DolphinPluginLoaderTest.java</include> <include>**/alert/utils/FuncUtilsTest.java</include> <include>**/alert/processor/AlertRequestProcessorTest.java</include> <include>**/alert/runner/AlertSenderTest.java</include> <include>**/alert/AlertServerTest.java</include> </includes> <!-- <skip>true</skip> --> </configuration> </plugin> <!-- jenkins plugin jacoco report--> <plugin> <groupId>org.jacoco</groupId> <artifactId>jacoco-maven-plugin</artifactId> <version>${jacoco.version}</version> <configuration> <destFile>target/jacoco.exec</destFile> <dataFile>target/jacoco.exec</dataFile> </configuration> <executions> <execution> <id>jacoco-initialize</id> <goals> <goal>prepare-agent</goal> </goals> </execution> <execution> <id>jacoco-site</id> <phase>test</phase> <goals> <goal>report</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-maven-plugin</artifactId> <version>${spotbugs.version}</version> <configuration> <xmlOutput>true</xmlOutput> <threshold>medium</threshold> <effort>default</effort> <excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile> <failOnError>true</failOnError> </configuration> <dependencies> <dependency> <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs</artifactId> <version>4.0.0-beta4</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-checkstyle-plugin</artifactId> <version>${checkstyle.version}</version> <dependencies> <dependency> <groupId>com.puppycrawl.tools</groupId> <artifactId>checkstyle</artifactId> <version>8.18</version> </dependency> </dependencies> <configuration> <consoleOutput>true</consoleOutput> <encoding>UTF-8</encoding> <configLocation>style/checkstyle.xml</configLocation> <suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation> <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression> <failOnViolation>true</failOnViolation> <violationSeverity>warning</violationSeverity> <includeTestSourceDirectory>true</includeTestSourceDirectory> <sourceDirectories> <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory> </sourceDirectories> <excludes>**\/generated-sources\/</excludes> <skip>true</skip> </configuration> <executions> <execution> <phase>compile</phase> <goals> <goal>check</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>cobertura-maven-plugin</artifactId> <version>${cobertura-maven-plugin.version}</version> <configuration> <check> </check> <aggregate>true</aggregate> <outputDirectory>./target/cobertura</outputDirectory> <encoding>${project.build.sourceEncoding}</encoding> <quiet>true</quiet> <format>xml</format> <instrumentation> <ignoreTrivial>true</ignoreTrivial> </instrumentation> </configuration> </plugin> </plugins> </build> <modules> <module>dolphinscheduler-alert-plugin</module> <module>dolphinscheduler-ui</module> <module>dolphinscheduler-server</module> <module>dolphinscheduler-common</module> <module>dolphinscheduler-api</module> <module>dolphinscheduler-dao</module> <module>dolphinscheduler-alert</module> <module>dolphinscheduler-dist</module> <module>dolphinscheduler-remote</module> <module>dolphinscheduler-service</module> <module>dolphinscheduler-spi</module> <module>dolphinscheduler-microbench</module> </modules> </project>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,424
[Feature][Plugin-Email] Numeric data type support in email attachment
Status Quo: DolphinScheduler currently supports executing SQL and sending the result as an email table/attachment; however, the .xls attachment forces all columns to the "text" type, which creates difficulty in data manipulation down the line (not sure whether this is a bug or expected behavior). Suggestion: Hoping to see support for automatic data types in .xls, and/or additional support for other attachment formats such as csv, txt, or xlsx
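A type-aware cell writer along these lines would address the request. This is a minimal sketch against the Apache POI Cell API, not the project's implementation; the class and method names are invented for illustration.

import org.apache.poi.ss.usermodel.Cell;

/**
 * Hypothetical helper: writes a SQL result value into a cell using the
 * overload that matches its runtime type, so numeric columns stay numeric
 * instead of being forced through String.valueOf(...).
 */
public final class TypedCellWriter {

    private TypedCellWriter() {
    }

    public static void setTypedCellValue(Cell cell, Object value) {
        if (value == null) {
            cell.setCellValue("");                              // keep NULLs empty
        } else if (value instanceof Number) {
            cell.setCellValue(((Number) value).doubleValue());  // numeric cell
        } else if (value instanceof Boolean) {
            cell.setCellValue((Boolean) value);                 // boolean cell
        } else {
            cell.setCellValue(String.valueOf(value));           // fall back to text
        }
    }
}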
https://github.com/apache/dolphinscheduler/issues/5424
https://github.com/apache/dolphinscheduler/pull/5426
fbef290cc5e611bfee7a430472b2b2774dd6d921
5556e47a85b40b28688cd41e27a96f4b1bc4e225
"2021-05-06T13:13:24Z"
java
"2021-05-11T12:53:27Z"
dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.alert.email;

import org.apache.dolphinscheduler.plugin.alert.email.exception.AlertEmailException;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.HorizontalAlignment;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;

import java.io.File;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * excel utils
 */
public class ExcelUtils {

    private ExcelUtils() {
        throw new IllegalStateException("Utility class");
    }

    private static final Logger logger = LoggerFactory.getLogger(ExcelUtils.class);

    private static final int XLSX_WINDOW_ROW = 10000;

    /**
     * generate excel file
     *
     * @param content the content
     * @param title the title
     * @param xlsFilePath the xls path
     */
    public static void genExcelFile(String content, String title, String xlsFilePath) {
        File file = new File(xlsFilePath);
        if (!file.exists() && !file.mkdirs()) {
            logger.error("Create xlsx directory error, path:{}", xlsFilePath);
            throw new AlertEmailException("Create xlsx directory error");
        }

        List<LinkedHashMap> itemsList = JSONUtils.toList(content, LinkedHashMap.class);

        if (CollectionUtils.isEmpty(itemsList)) {
            logger.error("itemsList is null");
            throw new AlertEmailException("itemsList is null");
        }

        LinkedHashMap<String, Object> headerMap = itemsList.get(0);

        List<String> headerList = new ArrayList<>();
        for (Map.Entry<String, Object> en : headerMap.entrySet()) {
            headerList.add(en.getKey());
        }

        try (SXSSFWorkbook wb = new SXSSFWorkbook(XLSX_WINDOW_ROW);
             FileOutputStream fos = new FileOutputStream(String.format("%s/%s.xlsx", xlsFilePath, title))) {
            // declare a workbook and generate a table
            Sheet sheet = wb.createSheet();
            Row row = sheet.createRow(0);
            // set the height of the first line
            row.setHeight((short) 500);

            // set Horizontal right
            CellStyle cellStyle = wb.createCellStyle();
            cellStyle.setAlignment(HorizontalAlignment.RIGHT);

            // setting excel headers
            for (int i = 0; i < headerList.size(); i++) {
                Cell cell = row.createCell(i);
                cell.setCellStyle(cellStyle);
                cell.setCellValue(headerList.get(i));
            }

            // setting excel body
            int rowIndex = 1;
            for (LinkedHashMap<String, Object> itemsMap : itemsList) {
                Object[] values = itemsMap.values().toArray();
                row = sheet.createRow(rowIndex);
                // setting excel body height
                row.setHeight((short) 500);
                rowIndex++;
                for (int j = 0; j < values.length; j++) {
                    Cell cell1 = row.createCell(j);
                    cell1.setCellStyle(cellStyle);
                    cell1.setCellValue(String.valueOf(values[j]));
                }
            }

            for (int i = 0; i < headerList.size(); i++) {
                sheet.setColumnWidth(i, headerList.get(i).length() * 800);
            }

            // setting file output
            wb.write(fos);
            wb.dispose();
        } catch (Exception e) {
            throw new AlertEmailException("generate excel error", e);
        }
    }
}
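For reference, a minimal usage sketch of the utility above; the JSON content, title, and target directory are invented for illustration.

// Each array element becomes one row; the keys of the first object become the header.
String content = "[{\"name\":\"ds\",\"value\":\"123\"},{\"name\":\"scheduler\",\"value\":\"456\"}]";
ExcelUtils.genExcelFile(content, "sql-result", "/tmp/xls");
// Writes /tmp/xls/sql-result.xlsx. Every body cell passes through String.valueOf(...),
// which is exactly the "all columns become text" behavior reported in the issue above.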
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,475
[Improvement][Api] Upload resource to remote failed, the local tmp file needs to be cleared
**Describe the question** When we upload a resource file, DolphinScheduler does the following: 1. create a local tmp file; 2. copy the local tmp file to remote storage and delete the local file. https://github.com/apache/dolphinscheduler/blob/d04f4b60535cd86905e56b0a732f2ec038680eb7/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java#L595-L605 But when the second step fails, the local tmp file is not cleaned up. **Which version of DolphinScheduler:** -[1.3.6] -[dev] **Describe alternatives you've considered** When the upload to remote storage throws an exception, clean up the local tmp file
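A minimal sketch of the suggested cleanup, assuming a commons-io dependency; the RemoteCopier interface below is an invented stand-in for the real HDFS client call, not DolphinScheduler's API.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public final class UploadWithCleanup {

    private UploadWithCleanup() {
    }

    /** Stand-in for the copy-to-remote step (e.g. an HDFS put). */
    public interface RemoteCopier {
        void copy(String localPath, String remotePath) throws IOException;
    }

    /**
     * Copies a local tmp file to remote storage; if the copy fails, the tmp
     * file is deleted quietly so it does not accumulate on local disk.
     */
    public static void uploadAndClean(RemoteCopier copier, String localPath, String remotePath) throws IOException {
        try {
            copier.copy(localPath, remotePath);
        } catch (IOException e) {
            FileUtils.deleteQuietly(new File(localPath)); // the improvement suggested in this issue
            throw e;
        }
    }
}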
https://github.com/apache/dolphinscheduler/issues/5475
https://github.com/apache/dolphinscheduler/pull/5476
d04f4b60535cd86905e56b0a732f2ec038680eb7
68301db6b914ff4002bfbc531c6810864d8e47c2
"2021-05-15T03:21:13Z"
java
"2021-05-17T03:13:14Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service.impl; import static org.apache.dolphinscheduler.common.Constants.ALIAS; import static org.apache.dolphinscheduler.common.Constants.CONTENT; import static org.apache.dolphinscheduler.common.Constants.JAR; import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; import org.apache.dolphinscheduler.api.dto.resources.filter.ResourceFilter; import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor; import org.apache.dolphinscheduler.api.dto.resources.visitor.Visitor; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.service.ResourcesService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.utils.BooleanUtils; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.ResourcesUser; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; import org.apache.commons.beanutils.BeanMap; import java.io.IOException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.regex.Matcher; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import 
org.springframework.dao.DuplicateKeyException; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.multipart.MultipartFile; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.fasterxml.jackson.databind.SerializationFeature; /** * resources service impl */ @Service public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesService { private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceImpl.class); @Autowired private ResourceMapper resourcesMapper; @Autowired private UdfFuncMapper udfFunctionMapper; @Autowired private TenantMapper tenantMapper; @Autowired private UserMapper userMapper; @Autowired private ResourceUserMapper resourceUserMapper; @Autowired private ProcessDefinitionMapper processDefinitionMapper; /** * create directory * * @param loginUser login user * @param name alias * @param description description * @param type type * @param pid parent id * @param currentDir current directory * @return create directory result */ @Override @Transactional(rollbackFor = Exception.class) public Result<Object> createDirectory(User loginUser, String name, String description, ResourceType type, int pid, String currentDir) { Result<Object> result = checkResourceUploadStartupState(); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name); result = verifyResource(loginUser, type, fullName, pid); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } if (checkResourceExists(fullName, 0, type.ordinal())) { logger.error("resource directory {} has exist, can't recreate", fullName); putMsg(result, Status.RESOURCE_EXIST); return result; } Date now = new Date(); Resource resource = new Resource(pid,name,fullName,true,description,name,loginUser.getId(),type,0,now,now); try { resourcesMapper.insert(resource); putMsg(result, Status.SUCCESS); Map<Object, Object> dataMap = new BeanMap(resource); Map<String, Object> resultMap = new HashMap<>(); for (Map.Entry<Object, Object> entry: dataMap.entrySet()) { if (!"class".equalsIgnoreCase(entry.getKey().toString())) { resultMap.put(entry.getKey().toString(), entry.getValue()); } } result.setData(resultMap); } catch (DuplicateKeyException e) { logger.error("resource directory {} has exist, can't recreate", fullName); putMsg(result, Status.RESOURCE_EXIST); return result; } catch (Exception e) { logger.error("resource already exists, can't recreate ", e); throw new ServiceException("resource already exists, can't recreate"); } //create directory in hdfs createDirectory(loginUser,fullName,type,result); return result; } /** * create resource * * @param loginUser login user * @param name alias * @param desc description * @param file file * @param type type * @param pid parent id * @param currentDir current directory * @return create result code */ @Override @Transactional(rollbackFor = Exception.class) public Result<Object> createResource(User loginUser, String name, String desc, ResourceType type, MultipartFile file, int pid, String currentDir) { Result<Object> result = checkResourceUploadStartupState(); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } result = verifyPid(loginUser, pid); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } result = 
verifyFile(name, type, file); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } // check resource name exists String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name); if (checkResourceExists(fullName, 0, type.ordinal())) { logger.error("resource {} has exist, can't recreate", RegexUtils.escapeNRT(name)); putMsg(result, Status.RESOURCE_EXIST); return result; } Date now = new Date(); Resource resource = new Resource(pid,name,fullName,false,desc,file.getOriginalFilename(),loginUser.getId(),type,file.getSize(),now,now); try { resourcesMapper.insert(resource); putMsg(result, Status.SUCCESS); Map<Object, Object> dataMap = new BeanMap(resource); Map<String, Object> resultMap = new HashMap<>(); for (Map.Entry<Object, Object> entry: dataMap.entrySet()) { if (!"class".equalsIgnoreCase(entry.getKey().toString())) { resultMap.put(entry.getKey().toString(), entry.getValue()); } } result.setData(resultMap); } catch (Exception e) { logger.error("resource already exists, can't recreate ", e); throw new ServiceException("resource already exists, can't recreate"); } // fail upload if (!upload(loginUser, fullName, file, type)) { logger.error("upload resource: {} file: {} failed.", RegexUtils.escapeNRT(name), RegexUtils.escapeNRT(file.getOriginalFilename())); putMsg(result, Status.HDFS_OPERATION_ERROR); throw new ServiceException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); } return result; } /** * check resource is exists * * @param fullName fullName * @param userId user id * @param type type * @return true if resource exists */ private boolean checkResourceExists(String fullName, int userId, int type) { Boolean existResource = resourcesMapper.existResource(fullName, userId, type); return BooleanUtils.isTrue(existResource); } /** * update resource * @param loginUser login user * @param resourceId resource id * @param name name * @param desc description * @param type resource type * @param file resource file * @return update result code */ @Override @Transactional(rollbackFor = Exception.class) public Result<Object> updateResource(User loginUser, int resourceId, String name, String desc, ResourceType type, MultipartFile file) { Result<Object> result = checkResourceUploadStartupState(); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } Resource resource = resourcesMapper.selectById(resourceId); if (resource == null) { putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } if (!hasPerm(loginUser, resource.getUserId())) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } if (file == null && name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) { putMsg(result, Status.SUCCESS); return result; } //check resource already exists String originFullName = resource.getFullName(); String originResourceName = resource.getAlias(); String fullName = String.format("%s%s",originFullName.substring(0,originFullName.lastIndexOf("/") + 1),name); if (!originResourceName.equals(name) && checkResourceExists(fullName, 0, type.ordinal())) { logger.error("resource {} already exists, can't recreate", name); putMsg(result, Status.RESOURCE_EXIST); return result; } result = verifyFile(name, type, file); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } // query tenant by user id String tenantCode = getTenantCode(resource.getUserId(),result); if (StringUtils.isEmpty(tenantCode)) { return result; } // verify whether 
the resource exists in storage // get the path of origin file in storage String originHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,originFullName); try { if (!HadoopUtils.getInstance().exists(originHdfsFileName)) { logger.error("{} not exist", originHdfsFileName); putMsg(result,Status.RESOURCE_NOT_EXIST); return result; } } catch (IOException e) { logger.error(e.getMessage(),e); throw new ServiceException(Status.HDFS_OPERATION_ERROR); } if (!resource.isDirectory()) { //get the origin file suffix String originSuffix = FileUtils.suffix(originFullName); String suffix = FileUtils.suffix(fullName); boolean suffixIsChanged = false; if (StringUtils.isBlank(suffix) && StringUtils.isNotBlank(originSuffix)) { suffixIsChanged = true; } if (StringUtils.isNotBlank(suffix) && !suffix.equals(originSuffix)) { suffixIsChanged = true; } //verify whether suffix is changed if (suffixIsChanged) { //need verify whether this resource is authorized to other users Map<String, Object> columnMap = new HashMap<>(); columnMap.put("resources_id", resourceId); List<ResourcesUser> resourcesUsers = resourceUserMapper.selectByMap(columnMap); if (CollectionUtils.isNotEmpty(resourcesUsers)) { List<Integer> userIds = resourcesUsers.stream().map(ResourcesUser::getUserId).collect(Collectors.toList()); List<User> users = userMapper.selectBatchIds(userIds); String userNames = users.stream().map(User::getUserName).collect(Collectors.toList()).toString(); logger.error("resource is authorized to user {},suffix not allowed to be modified", userNames); putMsg(result,Status.RESOURCE_IS_AUTHORIZED,userNames); return result; } } } // updateResource data Date now = new Date(); resource.setAlias(name); resource.setFileName(name); resource.setFullName(fullName); resource.setDescription(desc); resource.setUpdateTime(now); if (file != null) { resource.setSize(file.getSize()); } try { resourcesMapper.updateById(resource); if (resource.isDirectory()) { List<Integer> childrenResource = listAllChildren(resource,false); if (CollectionUtils.isNotEmpty(childrenResource)) { String matcherFullName = Matcher.quoteReplacement(fullName); List<Resource> childResourceList; Integer[] childResIdArray = childrenResource.toArray(new Integer[childrenResource.size()]); List<Resource> resourceList = resourcesMapper.listResourceByIds(childResIdArray); childResourceList = resourceList.stream().map(t -> { t.setFullName(t.getFullName().replaceFirst(originFullName, matcherFullName)); t.setUpdateTime(now); return t; }).collect(Collectors.toList()); resourcesMapper.batchUpdateResource(childResourceList); if (ResourceType.UDF.equals(resource.getType())) { List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(childResIdArray); if (CollectionUtils.isNotEmpty(udfFuncs)) { udfFuncs = udfFuncs.stream().map(t -> { t.setResourceName(t.getResourceName().replaceFirst(originFullName, matcherFullName)); t.setUpdateTime(now); return t; }).collect(Collectors.toList()); udfFunctionMapper.batchUpdateUdfFunc(udfFuncs); } } } } else if (ResourceType.UDF.equals(resource.getType())) { List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(new Integer[]{resourceId}); if (CollectionUtils.isNotEmpty(udfFuncs)) { udfFuncs = udfFuncs.stream().map(t -> { t.setResourceName(fullName); t.setUpdateTime(now); return t; }).collect(Collectors.toList()); udfFunctionMapper.batchUpdateUdfFunc(udfFuncs); } } putMsg(result, Status.SUCCESS); Map<Object, Object> dataMap = new BeanMap(resource); Map<String, Object> resultMap = new HashMap<>(); for (Map.Entry<Object, 
Object> entry: dataMap.entrySet()) { if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) { resultMap.put(entry.getKey().toString(), entry.getValue()); } } result.setData(resultMap); } catch (Exception e) { logger.error(Status.UPDATE_RESOURCE_ERROR.getMsg(), e); throw new ServiceException(Status.UPDATE_RESOURCE_ERROR); } // if name unchanged, return directly without moving on HDFS if (originResourceName.equals(name) && file == null) { return result; } if (file != null) { // fail upload if (!upload(loginUser, fullName, file, type)) { logger.error("upload resource: {} file: {} failed.", name, RegexUtils.escapeNRT(file.getOriginalFilename())); putMsg(result, Status.HDFS_OPERATION_ERROR); throw new ServiceException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); } if (!fullName.equals(originFullName)) { try { HadoopUtils.getInstance().delete(originHdfsFileName,false); } catch (IOException e) { logger.error(e.getMessage(),e); throw new ServiceException(String.format("delete resource: %s failed.", originFullName)); } } return result; } // get the path of dest file in hdfs String destHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,fullName); try { logger.info("start hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName); HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true); } catch (Exception e) { logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e); putMsg(result,Status.HDFS_COPY_FAIL); throw new ServiceException(Status.HDFS_COPY_FAIL); } return result; } private Result<Object> verifyFile(String name, ResourceType type, MultipartFile file) { Result<Object> result = new Result<>(); putMsg(result, Status.SUCCESS); if (file != null) { // file is empty if (file.isEmpty()) { logger.error("file is empty: {}", RegexUtils.escapeNRT(file.getOriginalFilename())); putMsg(result, Status.RESOURCE_FILE_IS_EMPTY); return result; } // file suffix String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); String nameSuffix = FileUtils.suffix(name); // determine file suffix if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) { // rename file suffix and original suffix must be consistent logger.error("rename file suffix and original suffix must be consistent: {}", RegexUtils.escapeNRT(file.getOriginalFilename())); putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE); return result; } //If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar if (Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(fileSuffix)) { logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg()); putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR); return result; } if (file.getSize() > Constants.MAX_FILE_SIZE) { logger.error("file size is too large: {}", RegexUtils.escapeNRT(file.getOriginalFilename())); putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT); return result; } } return result; } /** * query resources list paging * * @param loginUser login user * @param type resource type * @param searchVal search value * @param pageNo page number * @param pageSize page size * @return resource list page */ @Override public Map<String, Object> queryResourceListPaging(User loginUser, int directoryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) { HashMap<String, Object> result = new HashMap<>(); Page<Resource> page = new Page<>(pageNo, pageSize); int userId = 
loginUser.getId(); if (isAdmin(loginUser)) { userId = 0; } if (directoryId != -1) { Resource directory = resourcesMapper.selectById(directoryId); if (directory == null) { putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } } List<Integer> resourcesIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, 0); IPage<Resource> resourceIPage = resourcesMapper.queryResourcePaging(page, userId, directoryId, type.ordinal(), searchVal,resourcesIds); PageInfo<Resource> pageInfo = new PageInfo<>(pageNo, pageSize); pageInfo.setTotalCount((int)resourceIPage.getTotal()); pageInfo.setLists(resourceIPage.getRecords()); result.put(Constants.DATA_LIST, pageInfo); putMsg(result,Status.SUCCESS); return result; } /** * create directory * @param loginUser login user * @param fullName full name * @param type resource type * @param result Result */ private void createDirectory(User loginUser,String fullName,ResourceType type,Result<Object> result) { String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); String directoryName = HadoopUtils.getHdfsFileName(type,tenantCode,fullName); String resourceRootPath = HadoopUtils.getHdfsDir(type,tenantCode); try { if (!HadoopUtils.getInstance().exists(resourceRootPath)) { createTenantDirIfNotExists(tenantCode); } if (!HadoopUtils.getInstance().mkdir(directoryName)) { logger.error("create resource directory {} of hdfs failed",directoryName); putMsg(result,Status.HDFS_OPERATION_ERROR); throw new ServiceException(String.format("create resource directory: %s failed.", directoryName)); } } catch (Exception e) { logger.error("create resource directory {} of hdfs failed",directoryName); putMsg(result,Status.HDFS_OPERATION_ERROR); throw new ServiceException(String.format("create resource directory: %s failed.", directoryName)); } } /** * upload file to hdfs * * @param loginUser login user * @param fullName full name * @param file file */ private boolean upload(User loginUser, String fullName, MultipartFile file, ResourceType type) { // save to local String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); String nameSuffix = FileUtils.suffix(fullName); // determine file suffix if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) { return false; } // query tenant String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); // random file name String localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); // save file to hdfs, and delete original file String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName); String resourcePath = HadoopUtils.getHdfsDir(type,tenantCode); try { // if tenant dir not exists if (!HadoopUtils.getInstance().exists(resourcePath)) { createTenantDirIfNotExists(tenantCode); } org.apache.dolphinscheduler.api.utils.FileUtils.copyFile(file, localFilename); HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true); } catch (Exception e) { logger.error(e.getMessage(), e); return false; } return true; } /** * query resource list * * @param loginUser login user * @param type resource type * @return resource list */ @Override public Map<String, Object> queryResourceList(User loginUser, ResourceType type) { Map<String, Object> result = new HashMap<>(); List<Resource> allResourceList = queryAuthoredResourceList(loginUser, type); Visitor resourceTreeVisitor = new ResourceTreeVisitor(allResourceList); result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren()); 
putMsg(result, Status.SUCCESS); return result; } /** * query resource list by program type * * @param loginUser login user * @param type resource type * @return resource list */ @Override public Map<String, Object> queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType) { Map<String, Object> result = new HashMap<>(); List<Resource> allResourceList = queryAuthoredResourceList(loginUser, type); String suffix = ".jar"; if (programType != null) { switch (programType) { case JAVA: case SCALA: break; case PYTHON: suffix = ".py"; break; default: } } List<Resource> resources = new ResourceFilter(suffix, new ArrayList<>(allResourceList)).filter(); Visitor resourceTreeVisitor = new ResourceTreeVisitor(resources); result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren()); putMsg(result, Status.SUCCESS); return result; } /** * delete resource * * @param loginUser login user * @param resourceId resource id * @return delete result code * @throws IOException exception */ @Override @Transactional(rollbackFor = Exception.class) public Result<Object> delete(User loginUser, int resourceId) throws IOException { Result<Object> result = checkResourceUploadStartupState(); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } // get resource by id Resource resource = resourcesMapper.selectById(resourceId); if (resource == null) { putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } if (!hasPerm(loginUser, resource.getUserId())) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } String tenantCode = getTenantCode(resource.getUserId(),result); if (StringUtils.isEmpty(tenantCode)) { return result; } // get all resource id of process definitions those is released List<Map<String, Object>> list = processDefinitionMapper.listResources(); Map<Integer, Set<Long>> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list); Set<Integer> resourceIdSet = resourceProcessMap.keySet(); // get all children of the resource List<Integer> allChildren = listAllChildren(resource,true); Integer[] needDeleteResourceIdArray = allChildren.toArray(new Integer[allChildren.size()]); //if resource type is UDF,need check whether it is bound by UDF function if (resource.getType() == (ResourceType.UDF)) { List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceId(needDeleteResourceIdArray); if (CollectionUtils.isNotEmpty(udfFuncs)) { logger.error("can't be deleted,because it is bound by UDF functions:{}", udfFuncs); putMsg(result,Status.UDF_RESOURCE_IS_BOUND,udfFuncs.get(0).getFuncName()); return result; } } if (resourceIdSet.contains(resource.getPid())) { logger.error("can't be deleted,because it is used of process definition"); putMsg(result, Status.RESOURCE_IS_USED); return result; } resourceIdSet.retainAll(allChildren); if (CollectionUtils.isNotEmpty(resourceIdSet)) { logger.error("can't be deleted,because it is used of process definition"); for (Integer resId : resourceIdSet) { logger.error("resource id:{} is used of process definition {}",resId,resourceProcessMap.get(resId)); } putMsg(result, Status.RESOURCE_IS_USED); return result; } // get hdfs file by type String hdfsFilename = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName()); //delete data in database resourcesMapper.deleteIds(needDeleteResourceIdArray); resourceUserMapper.deleteResourceUserArray(0, needDeleteResourceIdArray); //delete file on hdfs HadoopUtils.getInstance().delete(hdfsFilename, true); putMsg(result, 
Status.SUCCESS); return result; } /** * verify resource by name and type * @param loginUser login user * @param fullName resource full name * @param type resource type * @return true if the resource name not exists, otherwise return false */ @Override public Result<Object> verifyResourceName(String fullName, ResourceType type, User loginUser) { Result<Object> result = new Result<>(); putMsg(result, Status.SUCCESS); if (checkResourceExists(fullName, 0, type.ordinal())) { logger.error("resource type:{} name:{} has exist, can't create again.", type, RegexUtils.escapeNRT(fullName)); putMsg(result, Status.RESOURCE_EXIST); } else { // query tenant Tenant tenant = tenantMapper.queryById(loginUser.getTenantId()); if (tenant != null) { String tenantCode = tenant.getTenantCode(); try { String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName); if (HadoopUtils.getInstance().exists(hdfsFilename)) { logger.error("resource type:{} name:{} has exist in hdfs {}, can't create again.", type, RegexUtils.escapeNRT(fullName), hdfsFilename); putMsg(result, Status.RESOURCE_FILE_EXIST,hdfsFilename); } } catch (Exception e) { logger.error(e.getMessage(),e); putMsg(result,Status.HDFS_OPERATION_ERROR); } } else { putMsg(result,Status.TENANT_NOT_EXIST); } } return result; } /** * verify resource by full name or pid and type * @param fullName resource full name * @param id resource id * @param type resource type * @return true if the resource full name or pid not exists, otherwise return false */ @Override public Result<Object> queryResource(String fullName, Integer id, ResourceType type) { Result<Object> result = new Result<>(); if (StringUtils.isBlank(fullName) && id == null) { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR); return result; } if (StringUtils.isNotBlank(fullName)) { List<Resource> resourceList = resourcesMapper.queryResource(fullName,type.ordinal()); if (CollectionUtils.isEmpty(resourceList)) { putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } putMsg(result, Status.SUCCESS); result.setData(resourceList.get(0)); } else { Resource resource = resourcesMapper.selectById(id); if (resource == null) { putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } Resource parentResource = resourcesMapper.selectById(resource.getPid()); if (parentResource == null) { putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } putMsg(result, Status.SUCCESS); result.setData(parentResource); } return result; } /** * view resource file online * * @param resourceId resource id * @param skipLineNum skip line number * @param limit limit * @return resource content */ @Override public Result<Object> readResource(int resourceId, int skipLineNum, int limit) { Result<Object> result = checkResourceUploadStartupState(); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } // get resource by id Resource resource = resourcesMapper.selectById(resourceId); if (resource == null) { putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } //check preview or not by file suffix String nameSuffix = FileUtils.suffix(resource.getAlias()); String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); if (StringUtils.isNotEmpty(resourceViewSuffixs)) { List<String> strList = Arrays.asList(resourceViewSuffixs.split(",")); if (!strList.contains(nameSuffix)) { logger.error("resource suffix {} not support view, resource id {}", nameSuffix, resourceId); putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); return result; } } String tenantCode = 
getTenantCode(resource.getUserId(),result); if (StringUtils.isEmpty(tenantCode)) { return result; } // hdfs path String hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resource.getFullName()); logger.info("resource hdfs path is {}", hdfsFileName); try { if (HadoopUtils.getInstance().exists(hdfsFileName)) { List<String> content = HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit); putMsg(result, Status.SUCCESS); Map<String, Object> map = new HashMap<>(); map.put(ALIAS, resource.getAlias()); map.put(CONTENT, String.join("\n", content)); result.setData(map); } else { logger.error("read file {} not exist in hdfs", hdfsFileName); putMsg(result, Status.RESOURCE_FILE_NOT_EXIST,hdfsFileName); } } catch (Exception e) { logger.error("Resource {} read failed", hdfsFileName, e); putMsg(result, Status.HDFS_OPERATION_ERROR); } return result; } /** * create resource file online * * @param loginUser login user * @param type resource type * @param fileName file name * @param fileSuffix file suffix * @param desc description * @param content content * @param pid pid * @param currentDir current directory * @return create result code */ @Override @Transactional(rollbackFor = Exception.class) public Result<Object> onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content,int pid,String currentDir) { Result<Object> result = checkResourceUploadStartupState(); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } //check file suffix String nameSuffix = fileSuffix.trim(); String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); if (StringUtils.isNotEmpty(resourceViewSuffixs)) { List<String> strList = Arrays.asList(resourceViewSuffixs.split(",")); if (!strList.contains(nameSuffix)) { logger.error("resource suffix {} not support create", nameSuffix); putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); return result; } } String name = fileName.trim() + "." + nameSuffix; String fullName = currentDir.equals("/") ? 
String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name); result = verifyResource(loginUser, type, fullName, pid); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } // save data Date now = new Date(); Resource resource = new Resource(pid,name,fullName,false,desc,name,loginUser.getId(),type,content.getBytes().length,now,now); resourcesMapper.insert(resource); putMsg(result, Status.SUCCESS); Map<Object, Object> dataMap = new BeanMap(resource); Map<String, Object> resultMap = new HashMap<>(); for (Map.Entry<Object, Object> entry: dataMap.entrySet()) { if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) { resultMap.put(entry.getKey().toString(), entry.getValue()); } } result.setData(resultMap); String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); result = uploadContentToHdfs(fullName, tenantCode, content); if (!result.getCode().equals(Status.SUCCESS.getCode())) { throw new ServiceException(result.getMsg()); } return result; } private Result<Object> checkResourceUploadStartupState() { Result<Object> result = new Result<>(); putMsg(result, Status.SUCCESS); // if resource upload startup if (!PropertyUtils.getResUploadStartupState()) { logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); putMsg(result, Status.HDFS_NOT_STARTUP); return result; } return result; } private Result<Object> verifyResource(User loginUser, ResourceType type, String fullName, int pid) { Result<Object> result = verifyResourceName(fullName, type, loginUser); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } return verifyPid(loginUser, pid); } private Result<Object> verifyPid(User loginUser, int pid) { Result<Object> result = new Result<>(); putMsg(result, Status.SUCCESS); if (pid != -1) { Resource parentResource = resourcesMapper.selectById(pid); if (parentResource == null) { putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST); return result; } if (!hasPerm(loginUser, parentResource.getUserId())) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } } return result; } /** * updateProcessInstance resource * * @param resourceId resource id * @param content content * @return update result cod */ @Override @Transactional(rollbackFor = Exception.class) public Result<Object> updateResourceContent(int resourceId, String content) { Result<Object> result = checkResourceUploadStartupState(); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } Resource resource = resourcesMapper.selectById(resourceId); if (resource == null) { logger.error("read file not exist, resource id {}", resourceId); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } //check can edit by file suffix String nameSuffix = FileUtils.suffix(resource.getAlias()); String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); if (StringUtils.isNotEmpty(resourceViewSuffixs)) { List<String> strList = Arrays.asList(resourceViewSuffixs.split(",")); if (!strList.contains(nameSuffix)) { logger.error("resource suffix {} not support updateProcessInstance, resource id {}", nameSuffix, resourceId); putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); return result; } } String tenantCode = getTenantCode(resource.getUserId(),result); if (StringUtils.isEmpty(tenantCode)) { return result; } resource.setSize(content.getBytes().length); resource.setUpdateTime(new Date()); resourcesMapper.updateById(resource); result = uploadContentToHdfs(resource.getFullName(), tenantCode, content); if 
(!result.getCode().equals(Status.SUCCESS.getCode())) { throw new ServiceException(result.getMsg()); } return result; } /** * @param resourceName resource name * @param tenantCode tenant code * @param content content * @return result */ private Result<Object> uploadContentToHdfs(String resourceName, String tenantCode, String content) { Result<Object> result = new Result<>(); String localFilename = ""; String hdfsFileName = ""; try { localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); if (!FileUtils.writeContent2File(content, localFilename)) { // write file fail logger.error("file {} fail, content is {}", localFilename, RegexUtils.escapeNRT(content)); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } // get resource file hdfs path hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resourceName); String resourcePath = HadoopUtils.getHdfsResDir(tenantCode); logger.info("resource hdfs path is {}, resource dir is {}", hdfsFileName, resourcePath); HadoopUtils hadoopUtils = HadoopUtils.getInstance(); if (!hadoopUtils.exists(resourcePath)) { // create if tenant dir not exists createTenantDirIfNotExists(tenantCode); } if (hadoopUtils.exists(hdfsFileName)) { hadoopUtils.delete(hdfsFileName, false); } hadoopUtils.copyLocalToHdfs(localFilename, hdfsFileName, true, true); } catch (Exception e) { logger.error(e.getMessage(), e); result.setCode(Status.HDFS_OPERATION_ERROR.getCode()); result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, hdfsFileName)); return result; } putMsg(result, Status.SUCCESS); return result; } /** * download file * * @param resourceId resource id * @return resource content * @throws IOException exception */ @Override public org.springframework.core.io.Resource downloadResource(int resourceId) throws IOException { // if resource upload startup if (!PropertyUtils.getResUploadStartupState()) { logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); throw new ServiceException("hdfs not startup"); } Resource resource = resourcesMapper.selectById(resourceId); if (resource == null) { logger.error("download file not exist, resource id {}", resourceId); return null; } if (resource.isDirectory()) { logger.error("resource id {} is directory,can't download it", resourceId); throw new ServiceException("can't download directory"); } int userId = resource.getUserId(); User user = userMapper.selectById(userId); if (user == null) { logger.error("user id {} not exists", userId); throw new ServiceException(String.format("resource owner id %d not exist",userId)); } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { logger.error("tenant id {} not exists", user.getTenantId()); throw new ServiceException(String.format("The tenant id %d of resource owner not exist",user.getTenantId())); } String tenantCode = tenant.getTenantCode(); String hdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName()); String localFileName = FileUtils.getDownloadFilename(resource.getAlias()); logger.info("resource hdfs path is {}, download local filename is {}", hdfsFileName, localFileName); HadoopUtils.getInstance().copyHdfsToLocal(hdfsFileName, localFileName, false, true); return org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName); } /** * list all file * * @param loginUser login user * @param userId user id * @return unauthorized result code */ @Override public Map<String, Object> authorizeResourceTree(User loginUser, 
Integer userId) { Map<String, Object> result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { return result; } List<Resource> resourceList = resourcesMapper.queryResourceExceptUserId(userId); List<ResourceComponent> list; if (CollectionUtils.isNotEmpty(resourceList)) { Visitor visitor = new ResourceTreeVisitor(resourceList); list = visitor.visit().getChildren(); } else { list = new ArrayList<>(0); } result.put(Constants.DATA_LIST, list); putMsg(result, Status.SUCCESS); return result; } /** * unauthorized file * * @param loginUser login user * @param userId user id * @return unauthorized result code */ @Override public Map<String, Object> unauthorizedFile(User loginUser, Integer userId) { Map<String, Object> result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { return result; } List<Resource> resourceList = resourcesMapper.queryResourceExceptUserId(userId); List<Resource> list; if (resourceList != null && !resourceList.isEmpty()) { Set<Resource> resourceSet = new HashSet<>(resourceList); List<Resource> authedResourceList = queryResourceList(userId, Constants.AUTHORIZE_WRITABLE_PERM); getAuthorizedResourceList(resourceSet, authedResourceList); list = new ArrayList<>(resourceSet); } else { list = new ArrayList<>(0); } Visitor visitor = new ResourceTreeVisitor(list); result.put(Constants.DATA_LIST, visitor.visit().getChildren()); putMsg(result, Status.SUCCESS); return result; } /** * unauthorized udf function * * @param loginUser login user * @param userId user id * @return unauthorized result code */ @Override public Map<String, Object> unauthorizedUDFFunction(User loginUser, Integer userId) { Map<String, Object> result = new HashMap<>(); //only admin can operate if (isNotAdmin(loginUser, result)) { return result; } List<UdfFunc> udfFuncList = udfFunctionMapper.queryUdfFuncExceptUserId(userId); List<UdfFunc> resultList = new ArrayList<>(); Set<UdfFunc> udfFuncSet; if (CollectionUtils.isNotEmpty(udfFuncList)) { udfFuncSet = new HashSet<>(udfFuncList); List<UdfFunc> authedUDFFuncList = udfFunctionMapper.queryAuthedUdfFunc(userId); getAuthorizedResourceList(udfFuncSet, authedUDFFuncList); resultList = new ArrayList<>(udfFuncSet); } result.put(Constants.DATA_LIST, resultList); putMsg(result, Status.SUCCESS); return result; } /** * authorized udf function * * @param loginUser login user * @param userId user id * @return authorized result code */ @Override public Map<String, Object> authorizedUDFFunction(User loginUser, Integer userId) { Map<String, Object> result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { return result; } List<UdfFunc> udfFuncs = udfFunctionMapper.queryAuthedUdfFunc(userId); result.put(Constants.DATA_LIST, udfFuncs); putMsg(result, Status.SUCCESS); return result; } /** * authorized file * * @param loginUser login user * @param userId user id * @return authorized result */ @Override public Map<String, Object> authorizedFile(User loginUser, Integer userId) { Map<String, Object> result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { return result; } List<Resource> authedResources = queryResourceList(userId, Constants.AUTHORIZE_WRITABLE_PERM); Visitor visitor = new ResourceTreeVisitor(authedResources); String visit = JSONUtils.toJsonString(visitor.visit(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); logger.info(visit); String jsonTreeStr = JSONUtils.toJsonString(visitor.visit().getChildren(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); logger.info(jsonTreeStr); result.put(Constants.DATA_LIST, visitor.visit().getChildren()); 
putMsg(result,Status.SUCCESS); return result; } /** * get authorized resource list * * @param resourceSet resource set * @param authedResourceList authorized resource list */ private void getAuthorizedResourceList(Set<?> resourceSet, List<?> authedResourceList) { Set<?> authedResourceSet; if (CollectionUtils.isNotEmpty(authedResourceList)) { authedResourceSet = new HashSet<>(authedResourceList); resourceSet.removeAll(authedResourceSet); } } /** * get tenantCode by UserId * * @param userId user id * @param result return result * @return tenant code */ private String getTenantCode(int userId,Result<Object> result) { User user = userMapper.selectById(userId); if (user == null) { logger.error("user {} not exists", userId); putMsg(result, Status.USER_NOT_EXIST,userId); return null; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { logger.error("tenant not exists"); putMsg(result, Status.TENANT_NOT_EXIST); return null; } return tenant.getTenantCode(); } /** * list all children id * @param resource resource * @param containSelf whether add self to children list * @return all children id */ List<Integer> listAllChildren(Resource resource,boolean containSelf) { List<Integer> childList = new ArrayList<>(); if (resource.getId() != -1 && containSelf) { childList.add(resource.getId()); } if (resource.isDirectory()) { listAllChildren(resource.getId(),childList); } return childList; } /** * list all children id * @param resourceId resource id * @param childList child list */ void listAllChildren(int resourceId,List<Integer> childList) { List<Integer> children = resourcesMapper.listChildren(resourceId); for (int childId : children) { childList.add(childId); listAllChildren(childId, childList); } } /** * query authored resource list (own and authorized) * @param loginUser login user * @param type ResourceType * @return all authored resource list */ private List<Resource> queryAuthoredResourceList(User loginUser, ResourceType type) { List<Resource> relationResources; int userId = loginUser.getId(); if (isAdmin(loginUser)) { userId = 0; relationResources = new ArrayList<>(); } else { // query resource relation relationResources = queryResourceList(userId, 0); } List<Resource> ownResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal()); ownResourceList.addAll(relationResources); return ownResourceList; } /** * query resource list by userId and perm * @param userId userId * @param perm perm * @return resource list */ private List<Resource> queryResourceList(Integer userId, int perm) { List<Integer> resIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, perm); return CollectionUtils.isEmpty(resIds) ? new ArrayList<>() : resourcesMapper.queryResourceListById(resIds); } }
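For orientation, the relevant spot in the file above is the catch block of the private upload(...) method, which currently only logs and returns false, leaving localFilename on disk. The fragment below sketches a hardened version reusing the method's own identifiers; it is an illustration, not necessarily the exact change merged in the linked PR.

        // Fragment of upload(...): same try body as above, with tmp-file cleanup added.
        try {
            // if tenant dir not exists
            if (!HadoopUtils.getInstance().exists(resourcePath)) {
                createTenantDirIfNotExists(tenantCode);
            }
            org.apache.dolphinscheduler.api.utils.FileUtils.copyFile(file, localFilename);
            HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            // added: remove the local tmp file so a failed remote copy leaves nothing behind
            org.apache.commons.io.FileUtils.deleteQuietly(new java.io.File(localFilename));
            return false;
        }
        return true;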
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,475
[Improvement][Api] Upload resource to remote failed, the local tmp file needs to be cleared
**Describe the question** When we upload a resource file, DolphinScheduler does the following: 1. create a local tmp file; 2. copy the local tmp file to remote storage and delete the local file. https://github.com/apache/dolphinscheduler/blob/d04f4b60535cd86905e56b0a732f2ec038680eb7/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java#L595-L605 But when the second step fails, the local tmp file is not cleaned up. **Which version of DolphinScheduler:** -[1.3.6] -[dev] **Describe alternatives you've considered** When the upload to remote storage throws an exception, clean up the local tmp file
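The second file touched by this fix is the common FileUtils shown below. A plausible shape for a deletion helper it could gain is sketched here; the name deleteFile and its signature are assumptions, not read from the diff, and the method is meant to sit inside that FileUtils class.

    /**
     * Sketch (assumed name/signature) of a cleanup helper that delegates to
     * commons-io so deletion never throws.
     *
     * @param filename path of the local tmp file to delete
     * @return true if the file was deleted, false otherwise
     */
    public static boolean deleteFile(String filename) {
        return org.apache.commons.io.FileUtils.deleteQuietly(new java.io.File(filename));
    }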
https://github.com/apache/dolphinscheduler/issues/5475
https://github.com/apache/dolphinscheduler/pull/5476
d04f4b60535cd86905e56b0a732f2ec038680eb7
68301db6b914ff4002bfbc531c6810864d8e47c2
"2021-05-15T03:21:13Z"
java
"2021-05-17T03:13:14Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import static org.apache.dolphinscheduler.common.Constants.DATA_BASEDIR_PATH; import static org.apache.dolphinscheduler.common.Constants.RESOURCE_VIEW_SUFFIXS; import static org.apache.dolphinscheduler.common.Constants.RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE; import static org.apache.dolphinscheduler.common.Constants.YYYYMMDDHHMMSS; import org.apache.commons.io.Charsets; import org.apache.commons.io.IOUtils; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.StringReader; import java.io.UnsupportedEncodingException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.nio.charset.UnsupportedCharsetException; import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * file utils */ public class FileUtils { public static final Logger logger = LoggerFactory.getLogger(FileUtils.class); public static final String DATA_BASEDIR = PropertyUtils.getString(DATA_BASEDIR_PATH, "/tmp/dolphinscheduler"); public static final ThreadLocal<Logger> taskLoggerThreadLocal = new ThreadLocal<>(); private FileUtils() { throw new UnsupportedOperationException("Construct FileUtils"); } /** * get file suffix * * @param filename file name * @return file suffix */ public static String suffix(String filename) { String fileSuffix = ""; if (StringUtils.isNotEmpty(filename)) { int lastIndex = filename.lastIndexOf('.'); if (lastIndex > 0) { fileSuffix = filename.substring(lastIndex + 1); } } return fileSuffix; } /** * get download file absolute path and name * * @param filename file name * @return download file name */ public static String getDownloadFilename(String filename) { String fileName = String.format("%s/download/%s/%s", DATA_BASEDIR, DateUtils.getCurrentTime(YYYYMMDDHHMMSS), filename); File file = new File(fileName); if (!file.getParentFile().exists()) { file.getParentFile().mkdirs(); } return fileName; } /** * get upload file absolute path and name * * @param tenantCode tenant code * @param filename file name * @return local file path */ public static String getUploadFilename(String tenantCode, String filename) { String fileName = String.format("%s/%s/resources/%s", DATA_BASEDIR, tenantCode, filename); File file = new File(fileName); if (!file.getParentFile().exists()) { file.getParentFile().mkdirs(); } return fileName; } /** * directory of process execution * * @param projectCode project code * @param processDefineCode process definition Code * @param processDefineVersion process definition version * 
@param processInstanceId process instance id * @param taskInstanceId task instance id * @return directory of process execution */ public static String getProcessExecDir(long projectCode, long processDefineCode, int processDefineVersion, int processInstanceId, int taskInstanceId) { String fileName = String.format("%s/exec/process/%d/%s/%d/%d", DATA_BASEDIR, projectCode, processDefineCode + "_" + processDefineVersion, processInstanceId, taskInstanceId); File file = new File(fileName); if (!file.getParentFile().exists()) { file.getParentFile().mkdirs(); } return fileName; } /** * @return get suffixes for resource files that support online viewing */ public static String getResourceViewSuffixs() { return PropertyUtils.getString(RESOURCE_VIEW_SUFFIXS, RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE); } /** * create directory if absent * * @param execLocalPath execute local path * @throws IOException errors */ public static void createWorkDirIfAbsent(String execLocalPath) throws IOException { //if work dir exists, first delete File execLocalPathFile = new File(execLocalPath); if (execLocalPathFile.exists()) { org.apache.commons.io.FileUtils.forceDelete(execLocalPathFile); } //create work dir org.apache.commons.io.FileUtils.forceMkdir(execLocalPathFile); String mkdirLog = "create dir success " + execLocalPath; LoggerUtils.logInfo(Optional.ofNullable(logger), mkdirLog); LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), mkdirLog); } /** * write content to file ,if parent path not exists, it will do one's utmost to mkdir * * @param content content * @param filePath target file path * @return true if write success */ public static boolean writeContent2File(String content, String filePath) { boolean flag = true; BufferedReader bufferedReader = null; BufferedWriter bufferedWriter = null; try { File distFile = new File(filePath); if (!distFile.getParentFile().exists() && !distFile.getParentFile().mkdirs()) { FileUtils.logger.error("mkdir parent failed"); return false; } bufferedReader = new BufferedReader(new StringReader(content)); bufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(distFile), StandardCharsets.UTF_8)); char[] buf = new char[1024]; int len; while ((len = bufferedReader.read(buf)) != -1) { bufferedWriter.write(buf, 0, len); } bufferedWriter.flush(); bufferedReader.close(); bufferedWriter.close(); } catch (IOException e) { FileUtils.logger.error(e.getMessage(), e); flag = false; return flag; } finally { IOUtils.closeQuietly(bufferedWriter); IOUtils.closeQuietly(bufferedReader); } return flag; } /** * Writes a String to a file creating the file if it does not exist. * <p> * NOTE: As from v1.3, the parent directories of the file will be created * if they do not exist. * * @param file the file to write * @param data the content to write to the file * @param encoding the encoding to use, {@code null} means platform default * @throws IOException in case of an I/O error * @throws java.io.UnsupportedEncodingException if the encoding is not supported by the VM * @since 2.4 */ public static void writeStringToFile(File file, String data, Charset encoding) throws IOException { writeStringToFile(file, data, encoding, false); } /** * Writes a String to a file creating the file if it does not exist. * <p> * NOTE: As from v1.3, the parent directories of the file will be created * if they do not exist. 
* * @param file the file to write * @param data the content to write to the file * @param encoding the encoding to use, {@code null} means platform default * @throws IOException in case of an I/O error * @throws java.io.UnsupportedEncodingException if the encoding is not supported by the VM */ public static void writeStringToFile(File file, String data, String encoding) throws IOException { writeStringToFile(file, data, encoding, false); } /** * Writes a String to a file creating the file if it does not exist. * * @param file the file to write * @param data the content to write to the file * @param encoding the encoding to use, {@code null} means platform default * @param append if {@code true}, then the String will be added to the * end of the file rather than overwriting * @throws IOException in case of an I/O error * @since 2.3 */ public static void writeStringToFile(File file, String data, Charset encoding, boolean append) throws IOException { OutputStream out = null; try { out = openOutputStream(file, append); IOUtils.write(data, out, encoding); out.close(); // don't swallow close Exception if copy completes normally } finally { IOUtils.closeQuietly(out); } } /** * Writes a String to a file creating the file if it does not exist. * * @param file the file to write * @param data the content to write to the file * @param encoding the encoding to use, {@code null} means platform default * @param append if {@code true}, then the String will be added to the * end of the file rather than overwriting * @throws IOException in case of an I/O error * @throws UnsupportedCharsetException thrown instead of {@link UnsupportedEncodingException} in version 2.2 if the encoding is not * supported by the VM * @since 2.1 */ public static void writeStringToFile(File file, String data, String encoding, boolean append) throws IOException { writeStringToFile(file, data, Charsets.toCharset(encoding), append); } /** * Writes a String to a file creating the file if it does not exist using the default encoding for the VM. * * @param file the file to write * @param data the content to write to the file * @throws IOException in case of an I/O error */ public static void writeStringToFile(File file, String data) throws IOException { writeStringToFile(file, data, Charset.defaultCharset(), false); } /** * Writes a String to a file creating the file if it does not exist using the default encoding for the VM. * * @param file the file to write * @param data the content to write to the file * @param append if {@code true}, then the String will be added to the * end of the file rather than overwriting * @throws IOException in case of an I/O error * @since 2.1 */ public static void writeStringToFile(File file, String data, boolean append) throws IOException { writeStringToFile(file, data, Charset.defaultCharset(), append); } /** * Opens a {@link FileOutputStream} for the specified file, checking and * creating the parent directory if it does not exist. * <p> * At the end of the method either the stream will be successfully opened, * or an exception will have been thrown. * <p> * The parent directory will be created if it does not exist. * The file will be created if it does not exist. * An exception is thrown if the file object exists but is a directory. * An exception is thrown if the file exists but cannot be written to. * An exception is thrown if the parent directory cannot be created. 
* * @param file the file to open for output, must not be {@code null} * @return a new {@link FileOutputStream} for the specified file * @throws IOException if the file object is a directory * @throws IOException if the file cannot be written to * @throws IOException if a parent directory needs creating but that fails * @since 1.3 */ public static FileOutputStream openOutputStream(File file) throws IOException { return openOutputStream(file, false); } /** * Opens a {@link FileOutputStream} for the specified file, checking and * creating the parent directory if it does not exist. * <p> * At the end of the method either the stream will be successfully opened, * or an exception will have been thrown. * <p> * The parent directory will be created if it does not exist. * The file will be created if it does not exist. * An exception is thrown if the file object exists but is a directory. * An exception is thrown if the file exists but cannot be written to. * An exception is thrown if the parent directory cannot be created. * * @param file the file to open for output, must not be {@code null} * @param append if {@code true}, then bytes will be added to the * end of the file rather than overwriting * @return a new {@link FileOutputStream} for the specified file * @throws IOException if the file object is a directory * @throws IOException if the file cannot be written to * @throws IOException if a parent directory needs creating but that fails * @since 2.1 */ public static FileOutputStream openOutputStream(File file, boolean append) throws IOException { if (file.exists()) { if (file.isDirectory()) { throw new IOException("File '" + file + "' exists but is a directory"); } if (!file.canWrite()) { throw new IOException("File '" + file + "' cannot be written to"); } } else { File parent = file.getParentFile(); if (parent != null && !parent.mkdirs() && !parent.isDirectory()) { throw new IOException("Directory '" + parent + "' could not be created"); } } return new FileOutputStream(file, append); } /** * deletes a directory recursively * * @param dir directory * @throws IOException in case deletion is unsuccessful */ public static void deleteDir(String dir) throws IOException { org.apache.commons.io.FileUtils.deleteDirectory(new File(dir)); } /** * Deletes a file. If file is a directory, delete it and all sub-directories. * <p> * The difference between File.delete() and this method are: * <ul> * <li>A directory to be deleted does not have to be empty.</li> * <li>You get exceptions when a file or directory cannot be deleted. 
* (java.io.File methods returns a boolean)</li> * </ul> * * @param filename file name * @throws IOException in case deletion is unsuccessful */ public static void deleteFile(String filename) throws IOException { org.apache.commons.io.FileUtils.forceDelete(new File(filename)); } /** * Gets all the parent subdirectories of the parentDir directory * * @param parentDir parent dir * @return all dirs */ public static File[] getAllDir(String parentDir) { if (parentDir == null || "".equals(parentDir)) { throw new RuntimeException("parentDir can not be empty"); } File file = new File(parentDir); if (!file.exists() || !file.isDirectory()) { throw new RuntimeException("parentDir not exist, or is not a directory:" + parentDir); } return file.listFiles(File::isDirectory); } /** * Get Content * * @param inputStream input stream * @return string of input stream */ public static String readFile2Str(InputStream inputStream) { try { ByteArrayOutputStream output = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int length; while ((length = inputStream.read(buffer)) != -1) { output.write(buffer, 0, length); } return output.toString(); } catch (Exception e) { logger.error(e.getMessage(), e); throw new RuntimeException(e); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,474
[Bug][SQL] Fix symbol error in sql script
**Describe the bug** **To Reproduce** Steps to reproduce the behavior, for example: 1. execute dolphinscheduler_mysql.sql 2. see error info: [42000][1064] You have an error in your SQL syntax; **Which version of Dolphin Scheduler:** -[dev]
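For reference, the [42000][1064] error in step 2 stems from MySQL's quoting rules: backtick-quoted names are identifiers, while single-quoted values are string literals, which are invalid in an index column list. A minimal illustration using the `t_ds_datasource` unique key from the script in this record, shown in failing and corrected form (only the quoting differs):

-- Fails with [42000][1064]: 'name' and 'type' are parsed as string literals
UNIQUE KEY `t_ds_datasource_name_UN` ('name', 'type')
-- Corrected: column names must be backtick-quoted identifiers
UNIQUE KEY `t_ds_datasource_name_UN` (`name`, `type`)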
https://github.com/apache/dolphinscheduler/issues/5474
https://github.com/apache/dolphinscheduler/pull/5462
a925f645719edec4afdb994ef57029c45c55d27e
018f5c89f6ee1dbb8259a6036c4beb1874cd3f5c
"2021-05-15T02:54:44Z"
java
"2021-05-18T02:20:18Z"
sql/dolphinscheduler_mysql.sql
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ SET FOREIGN_KEY_CHECKS=0; -- ---------------------------- -- Table structure for QRTZ_BLOB_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_BLOB_TRIGGERS`; CREATE TABLE `QRTZ_BLOB_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `BLOB_DATA` blob, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `SCHED_NAME` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_BLOB_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_BLOB_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_CALENDARS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_CALENDARS`; CREATE TABLE `QRTZ_CALENDARS` ( `SCHED_NAME` varchar(120) NOT NULL, `CALENDAR_NAME` varchar(200) NOT NULL, `CALENDAR` blob NOT NULL, PRIMARY KEY (`SCHED_NAME`,`CALENDAR_NAME`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_CALENDARS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_CRON_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_CRON_TRIGGERS`; CREATE TABLE `QRTZ_CRON_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `CRON_EXPRESSION` varchar(120) NOT NULL, `TIME_ZONE_ID` varchar(80) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_CRON_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_CRON_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_FIRED_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_FIRED_TRIGGERS`; CREATE TABLE `QRTZ_FIRED_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `ENTRY_ID` varchar(200) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `INSTANCE_NAME` varchar(200) NOT NULL, `FIRED_TIME` bigint(13) NOT NULL, `SCHED_TIME` bigint(13) NOT NULL, `PRIORITY` int(11) NOT NULL, `STATE` varchar(16) NOT NULL, `JOB_NAME` varchar(200) DEFAULT NULL, `JOB_GROUP` varchar(200) DEFAULT NULL, `IS_NONCONCURRENT` varchar(1) DEFAULT NULL, `REQUESTS_RECOVERY` varchar(1) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`ENTRY_ID`), KEY `IDX_QRTZ_FT_TRIG_INST_NAME` (`SCHED_NAME`,`INSTANCE_NAME`), KEY 
`IDX_QRTZ_FT_INST_JOB_REQ_RCVRY` (`SCHED_NAME`,`INSTANCE_NAME`,`REQUESTS_RECOVERY`), KEY `IDX_QRTZ_FT_J_G` (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_FT_JG` (`SCHED_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_FT_T_G` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `IDX_QRTZ_FT_TG` (`SCHED_NAME`,`TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_FIRED_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_JOB_DETAILS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_JOB_DETAILS`; CREATE TABLE `QRTZ_JOB_DETAILS` ( `SCHED_NAME` varchar(120) NOT NULL, `JOB_NAME` varchar(200) NOT NULL, `JOB_GROUP` varchar(200) NOT NULL, `DESCRIPTION` varchar(250) DEFAULT NULL, `JOB_CLASS_NAME` varchar(250) NOT NULL, `IS_DURABLE` varchar(1) NOT NULL, `IS_NONCONCURRENT` varchar(1) NOT NULL, `IS_UPDATE_DATA` varchar(1) NOT NULL, `REQUESTS_RECOVERY` varchar(1) NOT NULL, `JOB_DATA` blob, PRIMARY KEY (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_J_REQ_RECOVERY` (`SCHED_NAME`,`REQUESTS_RECOVERY`), KEY `IDX_QRTZ_J_GRP` (`SCHED_NAME`,`JOB_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_JOB_DETAILS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_LOCKS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_LOCKS`; CREATE TABLE `QRTZ_LOCKS` ( `SCHED_NAME` varchar(120) NOT NULL, `LOCK_NAME` varchar(40) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`LOCK_NAME`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_LOCKS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_PAUSED_TRIGGER_GRPS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_PAUSED_TRIGGER_GRPS`; CREATE TABLE `QRTZ_PAUSED_TRIGGER_GRPS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_PAUSED_TRIGGER_GRPS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_SCHEDULER_STATE -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_SCHEDULER_STATE`; CREATE TABLE `QRTZ_SCHEDULER_STATE` ( `SCHED_NAME` varchar(120) NOT NULL, `INSTANCE_NAME` varchar(200) NOT NULL, `LAST_CHECKIN_TIME` bigint(13) NOT NULL, `CHECKIN_INTERVAL` bigint(13) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`INSTANCE_NAME`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SCHEDULER_STATE -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_SIMPLE_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_SIMPLE_TRIGGERS`; CREATE TABLE `QRTZ_SIMPLE_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `REPEAT_COUNT` bigint(7) NOT NULL, `REPEAT_INTERVAL` bigint(12) NOT NULL, `TIMES_TRIGGERED` bigint(10) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_SIMPLE_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SIMPLE_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_SIMPROP_TRIGGERS -- 
---------------------------- DROP TABLE IF EXISTS `QRTZ_SIMPROP_TRIGGERS`; CREATE TABLE `QRTZ_SIMPROP_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `STR_PROP_1` varchar(512) DEFAULT NULL, `STR_PROP_2` varchar(512) DEFAULT NULL, `STR_PROP_3` varchar(512) DEFAULT NULL, `INT_PROP_1` int(11) DEFAULT NULL, `INT_PROP_2` int(11) DEFAULT NULL, `LONG_PROP_1` bigint(20) DEFAULT NULL, `LONG_PROP_2` bigint(20) DEFAULT NULL, `DEC_PROP_1` decimal(13,4) DEFAULT NULL, `DEC_PROP_2` decimal(13,4) DEFAULT NULL, `BOOL_PROP_1` varchar(1) DEFAULT NULL, `BOOL_PROP_2` varchar(1) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_SIMPROP_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SIMPROP_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_TRIGGERS -- ---------------------------- DROP TABLE IF EXISTS `QRTZ_TRIGGERS`; CREATE TABLE `QRTZ_TRIGGERS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_NAME` varchar(200) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, `JOB_NAME` varchar(200) NOT NULL, `JOB_GROUP` varchar(200) NOT NULL, `DESCRIPTION` varchar(250) DEFAULT NULL, `NEXT_FIRE_TIME` bigint(13) DEFAULT NULL, `PREV_FIRE_TIME` bigint(13) DEFAULT NULL, `PRIORITY` int(11) DEFAULT NULL, `TRIGGER_STATE` varchar(16) NOT NULL, `TRIGGER_TYPE` varchar(8) NOT NULL, `START_TIME` bigint(13) NOT NULL, `END_TIME` bigint(13) DEFAULT NULL, `CALENDAR_NAME` varchar(200) DEFAULT NULL, `MISFIRE_INSTR` smallint(2) DEFAULT NULL, `JOB_DATA` blob, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `IDX_QRTZ_T_J` (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_T_JG` (`SCHED_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_T_C` (`SCHED_NAME`,`CALENDAR_NAME`), KEY `IDX_QRTZ_T_G` (`SCHED_NAME`,`TRIGGER_GROUP`), KEY `IDX_QRTZ_T_STATE` (`SCHED_NAME`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_N_STATE` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_N_G_STATE` (`SCHED_NAME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_NEXT_FIRE_TIME` (`SCHED_NAME`,`NEXT_FIRE_TIME`), KEY `IDX_QRTZ_T_NFT_ST` (`SCHED_NAME`,`TRIGGER_STATE`,`NEXT_FIRE_TIME`), KEY `IDX_QRTZ_T_NFT_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`), KEY `IDX_QRTZ_T_NFT_ST_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_NFT_ST_MISFIRE_GRP` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), CONSTRAINT `QRTZ_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) REFERENCES `QRTZ_JOB_DETAILS` (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_TRIGGERS -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_access_token -- ---------------------------- DROP TABLE IF EXISTS `t_ds_access_token`; CREATE TABLE `t_ds_access_token` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) DEFAULT NULL COMMENT 'user id', `token` varchar(64) DEFAULT NULL COMMENT 'token', `expire_time` datetime DEFAULT NULL COMMENT 'end time of token ', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB 
AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_access_token -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_alert -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alert`; CREATE TABLE `t_ds_alert` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `title` varchar(64) DEFAULT NULL COMMENT 'title', `content` text COMMENT 'Message content (can be email, can be SMS. Mail is stored in JSON map, and SMS is string)', `alert_status` tinyint(4) DEFAULT '0' COMMENT '0:wait running,1:success,2:failed', `log` text COMMENT 'log', `alertgroup_id` int(11) DEFAULT NULL COMMENT 'alert group id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_alert -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_alertgroup -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alertgroup`; CREATE TABLE `t_ds_alertgroup`( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `alert_instance_ids` varchar (255) DEFAULT NULL COMMENT 'alert instance ids', `create_user_id` int(11) DEFAULT NULL COMMENT 'create user id', `group_name` varchar(255) DEFAULT NULL COMMENT 'group name', `description` varchar(255) DEFAULT NULL, `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), UNIQUE KEY `t_ds_alertgroup_name_UN` (`group_name`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_alertgroup -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_command -- ---------------------------- DROP TABLE IF EXISTS `t_ds_command`; CREATE TABLE `t_ds_command` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `command_type` tinyint(4) DEFAULT NULL COMMENT 'Command type: 0 start workflow, 1 start execution from current node, 2 resume fault-tolerant workflow, 3 resume pause process, 4 start execution from failed node, 5 complement, 6 schedule, 7 rerun, 8 pause, 9 stop, 10 resume waiting thread', `process_definition_id` int(11) DEFAULT NULL COMMENT 'process definition id', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'Node dependency type: 0 current node, 1 forward, 2 backward', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'Failed policy: 0 end, 1 continue', `warning_type` tinyint(4) DEFAULT '0' COMMENT 'Alarm type: 0 is not sent, 1 process is sent successfully, 2 process is sent failed, 3 process is sent successfully and all failures are sent', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group', `schedule_time` datetime DEFAULT NULL COMMENT 'schedule time', `start_time` datetime DEFAULT NULL COMMENT 'start time', `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', `update_time` datetime DEFAULT NULL COMMENT 'update time', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority: 0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) COMMENT 'worker group', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_command -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_datasource -- 
---------------------------- DROP TABLE IF EXISTS `t_ds_datasource`; CREATE TABLE `t_ds_datasource` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(64) NOT NULL COMMENT 'data source name', `note` varchar(256) DEFAULT NULL COMMENT 'description', `type` tinyint(4) NOT NULL COMMENT 'data source type: 0:mysql,1:postgresql,2:hive,3:spark', `user_id` int(11) NOT NULL COMMENT 'the creator id', `connection_params` text NOT NULL COMMENT 'json connection params', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), UNIQUE KEY `t_ds_datasource_name_UN` (`name`, `type`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_datasource -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_error_command -- ---------------------------- DROP TABLE IF EXISTS `t_ds_error_command`; CREATE TABLE `t_ds_error_command` ( `id` int(11) NOT NULL COMMENT 'key', `command_type` tinyint(4) DEFAULT NULL COMMENT 'command type', `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', `process_definition_id` int(11) DEFAULT NULL COMMENT 'process definition id', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'task depend type', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'failure strategy', `warning_type` tinyint(4) DEFAULT '0' COMMENT 'warning type', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group id', `schedule_time` datetime DEFAULT NULL COMMENT 'scheduler time', `start_time` datetime DEFAULT NULL COMMENT 'start time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority, 0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) COMMENT 'worker group', `message` text COMMENT 'message', PRIMARY KEY (`id`) USING BTREE ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC; -- ---------------------------- -- Records of t_ds_error_command -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_process_definition -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_definition`; CREATE TABLE `t_ds_process_definition` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(255) DEFAULT NULL COMMENT 'process definition name', `version` int(11) DEFAULT NULL COMMENT 'process definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', `user_id` int(11) DEFAULT NULL COMMENT 'process definition creator id', `global_params` text COMMENT 'global parameters', `flag` tinyint(4) DEFAULT NULL COMMENT '0 not available, 1 available', `locations` text COMMENT 'Node location information', `connects` text COMMENT 'Node connection information', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `timeout` int(11) DEFAULT '0' COMMENT 'time out, unit: minute', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`,`code`), UNIQUE KEY `process_unique` (`name`,`project_code`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; --
---------------------------- -- Records of t_ds_process_definition -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_process_definition_log -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_definition_log`; CREATE TABLE `t_ds_process_definition_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'process definition name', `version` int(11) DEFAULT NULL COMMENT 'process definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', `user_id` int(11) DEFAULT NULL COMMENT 'process definition creator id', `global_params` text COMMENT 'global parameters', `flag` tinyint(4) DEFAULT NULL COMMENT '0 not available, 1 available', `locations` text COMMENT 'Node location information', `connects` text COMMENT 'Node connection information', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `timeout` int(11) DEFAULT '0' COMMENT 'time out,unit: minute', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_task_definition -- ---------------------------- DROP TABLE IF EXISTS `t_ds_task_definition`; CREATE TABLE `t_ds_task_definition` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'task definition name', `version` int(11) DEFAULT NULL COMMENT 'task definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL COMMENT 'task definition creator id', `task_type` varchar(50) NOT NULL COMMENT 'task type', `task_params` longtext COMMENT 'job custom parameters', `flag` tinyint(2) DEFAULT NULL COMMENT '0 not available, 1 available', `task_priority` tinyint(4) DEFAULT NULL COMMENT 'job priority', `worker_group` varchar(200) DEFAULT NULL COMMENT 'worker grouping', `fail_retry_times` int(11) DEFAULT NULL COMMENT 'number of failed retries', `fail_retry_interval` int(11) DEFAULT NULL COMMENT 'failed retry interval', `timeout_flag` tinyint(2) DEFAULT '0' COMMENT 'timeout flag:0 close, 1 open', `timeout_notify_strategy` tinyint(4) DEFAULT NULL COMMENT 'timeout notification policy: 0 warning, 1 fail', `timeout` int(11) DEFAULT '0' COMMENT 'timeout length,unit: minute', `delay_time` int(11) DEFAULT '0' COMMENT 'delay execution time,unit: minute', `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource id, separated by comma', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`,`code`), UNIQUE KEY `task_unique` (`name`,`project_code`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_task_definition_log -- ---------------------------- DROP TABLE IF EXISTS `t_ds_task_definition_log`; CREATE TABLE `t_ds_task_definition_log` ( `id` int(11) NOT NULL AUTO_INCREMENT 
COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'task definition name', `version` int(11) DEFAULT NULL COMMENT 'task definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL COMMENT 'task definition creator id', `task_type` varchar(50) NOT NULL COMMENT 'task type', `task_params` text COMMENT 'job custom parameters', `flag` tinyint(2) DEFAULT NULL COMMENT '0 not available, 1 available', `task_priority` tinyint(4) DEFAULT NULL COMMENT 'job priority', `worker_group` varchar(200) DEFAULT NULL COMMENT 'worker grouping', `fail_retry_times` int(11) DEFAULT NULL COMMENT 'number of failed retries', `fail_retry_interval` int(11) DEFAULT NULL COMMENT 'failed retry interval', `timeout_flag` tinyint(2) DEFAULT '0' COMMENT 'timeout flag:0 close, 1 open', `timeout_notify_strategy` tinyint(4) DEFAULT NULL COMMENT 'timeout notification policy: 0 warning, 1 fail', `timeout` int(11) DEFAULT '0' COMMENT 'timeout length,unit: minute', `delay_time` int(11) DEFAULT '0' COMMENT 'delay execution time,unit: minute', `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource id, separated by comma', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_process_task_relation -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_task_relation`; CREATE TABLE `t_ds_process_task_relation` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `name` varchar(200) DEFAULT NULL COMMENT 'relation name', `process_definition_version` int(11) DEFAULT NULL COMMENT 'process version', `project_code` bigint(20) NOT NULL COMMENT 'project code', `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', `post_task_version` int(11) NOT NULL COMMENT 'post task version', `condition_type` tinyint(2) DEFAULT NULL COMMENT 'condition type : 0 none, 1 judge 2 delay', `condition_params` text COMMENT 'condition params(json)', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_process_task_relation_log -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_task_relation_log`; CREATE TABLE `t_ds_process_task_relation_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `name` varchar(200) DEFAULT NULL COMMENT 'relation name', `process_definition_version` int(11) DEFAULT NULL COMMENT 'process version', `project_code` bigint(20) NOT NULL COMMENT 'project code', `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', `post_task_version` int(11) NOT NULL COMMENT 'post task version', `condition_type` tinyint(2) DEFAULT NULL COMMENT 
'condition type : 0 none, 1 judge 2 delay', `condition_params` text COMMENT 'condition params(json)', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_process_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_process_instance`; CREATE TABLE `t_ds_process_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(255) DEFAULT NULL COMMENT 'process instance name', `process_definition_version` int(11) DEFAULT NULL COMMENT 'process definition version', `process_definition_code` bigint(20) not NULL COMMENT 'process definition code', `state` tinyint(4) DEFAULT NULL COMMENT 'process instance Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete', `recovery` tinyint(4) DEFAULT NULL COMMENT 'process instance failover flag:0:normal,1:failover instance', `start_time` datetime DEFAULT NULL COMMENT 'process instance start time', `end_time` datetime DEFAULT NULL COMMENT 'process instance end time', `run_times` int(11) DEFAULT NULL COMMENT 'process instance run times', `host` varchar(135) DEFAULT NULL COMMENT 'process instance host', `command_type` tinyint(4) DEFAULT NULL COMMENT 'command type', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'task depend type. 0: only current node,1:before the node,2:later nodes', `max_try_times` tinyint(4) DEFAULT '0' COMMENT 'max try times', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'failure strategy. 0:end the process when node failed,1:continue running the other nodes when node failed', `warning_type` tinyint(4) DEFAULT '0' COMMENT 'warning type. 0:no warning,1:warning if process success,2:warning if process failed,3:warning if success', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group id', `schedule_time` datetime DEFAULT NULL COMMENT 'schedule time', `command_start_time` datetime DEFAULT NULL COMMENT 'command start time', `global_params` text COMMENT 'global parameters', `flag` tinyint(4) DEFAULT '1' COMMENT 'flag', `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `is_sub_process` int(11) DEFAULT '0' COMMENT 'flag, whether the process is sub process', `executor_id` int(11) NOT NULL COMMENT 'executor id', `history_cmd` text COMMENT 'history commands of process instance operation', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority. 
0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) DEFAULT NULL COMMENT 'worker group id', `timeout` int(11) DEFAULT '0' COMMENT 'time out', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', `var_pool` longtext COMMENT 'var_pool', PRIMARY KEY (`id`), KEY `process_instance_index` (`process_definition_code`,`id`) USING BTREE, KEY `start_time_index` (`start_time`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_process_instance -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_project -- ---------------------------- DROP TABLE IF EXISTS `t_ds_project`; CREATE TABLE `t_ds_project` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(100) DEFAULT NULL COMMENT 'project name', `code` bigint(20) NOT NULL COMMENT 'encoding', `description` varchar(200) DEFAULT NULL, `user_id` int(11) DEFAULT NULL COMMENT 'creator id', `flag` tinyint(4) DEFAULT '1' COMMENT '0 not available, 1 available', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), KEY `user_id_index` (`user_id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_project -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_queue -- ---------------------------- DROP TABLE IF EXISTS `t_ds_queue`; CREATE TABLE `t_ds_queue` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `queue_name` varchar(64) DEFAULT NULL COMMENT 'queue name', `queue` varchar(64) DEFAULT NULL COMMENT 'yarn queue name', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_queue -- ---------------------------- INSERT INTO `t_ds_queue` VALUES ('1', 'default', 'default', null, null); -- ---------------------------- -- Table structure for t_ds_relation_datasource_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_datasource_user`; CREATE TABLE `t_ds_relation_datasource_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'user id', `datasource_id` int(11) DEFAULT NULL COMMENT 'data source id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_datasource_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_process_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_process_instance`; CREATE TABLE `t_ds_relation_process_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `parent_process_instance_id` int(11) DEFAULT NULL COMMENT 'parent process instance id', `parent_task_instance_id` int(11) DEFAULT NULL COMMENT 'parent process instance id', `process_instance_id` int(11) DEFAULT NULL COMMENT 'child process instance id', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_process_instance -- ---------------------------- -- ---------------------------- -- Table 
structure for t_ds_relation_project_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_project_user`; CREATE TABLE `t_ds_relation_project_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'user id', `project_id` int(11) DEFAULT NULL COMMENT 'project id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), KEY `user_id_index` (`user_id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_project_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_resources_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_resources_user`; CREATE TABLE `t_ds_relation_resources_user` ( `id` int(11) NOT NULL AUTO_INCREMENT, `user_id` int(11) NOT NULL COMMENT 'user id', `resources_id` int(11) DEFAULT NULL COMMENT 'resource id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_relation_resources_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_relation_udfs_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_relation_udfs_user`; CREATE TABLE `t_ds_relation_udfs_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'userid', `udf_id` int(11) DEFAULT NULL COMMENT 'udf id', `perm` int(11) DEFAULT '1' COMMENT 'limits of authority', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_resources -- ---------------------------- DROP TABLE IF EXISTS `t_ds_resources`; CREATE TABLE `t_ds_resources` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `alias` varchar(64) DEFAULT NULL COMMENT 'alias', `file_name` varchar(64) DEFAULT NULL COMMENT 'file name', `description` varchar(256) DEFAULT NULL, `user_id` int(11) DEFAULT NULL COMMENT 'user id', `type` tinyint(4) DEFAULT NULL COMMENT 'resource type,0:FILE,1:UDF', `size` bigint(20) DEFAULT NULL COMMENT 'resource size', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `pid` int(11) DEFAULT NULL, `full_name` varchar(64) DEFAULT NULL, `is_directory` tinyint(4) DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE KEY `t_ds_resources_un` (`full_name`,`type`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_resources -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_schedules -- ---------------------------- DROP TABLE IF EXISTS `t_ds_schedules`; CREATE TABLE `t_ds_schedules` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `process_definition_id` int(11) NOT NULL COMMENT 'process definition id', `start_time` datetime NOT NULL COMMENT 'start time', `end_time` datetime NOT NULL COMMENT 'end time', `timezone_id` varchar(40) DEFAULT NULL COMMENT 'timezoneId', `crontab` varchar(256) NOT NULL COMMENT 'crontab description', 
`failure_strategy` tinyint(4) NOT NULL COMMENT 'failure strategy. 0:end,1:continue', `user_id` int(11) NOT NULL COMMENT 'user id', `release_state` tinyint(4) NOT NULL COMMENT 'release state. 0:offline,1:online ', `warning_type` tinyint(4) NOT NULL COMMENT 'Alarm type: 0 is not sent, 1 process is sent successfully, 2 process is sent failed, 3 process is sent successfully and all failures are sent', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(256) DEFAULT '' COMMENT 'worker group id', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_schedules -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_session -- ---------------------------- DROP TABLE IF EXISTS `t_ds_session`; CREATE TABLE `t_ds_session` ( `id` varchar(64) NOT NULL COMMENT 'key', `user_id` int(11) DEFAULT NULL COMMENT 'user id', `ip` varchar(45) DEFAULT NULL COMMENT 'ip', `last_login_time` datetime DEFAULT NULL COMMENT 'last login time', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_session -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_task_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_task_instance`; CREATE TABLE `t_ds_task_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(255) DEFAULT NULL COMMENT 'task name', `task_type` varchar(50) NOT NULL COMMENT 'task type', `task_code` bigint(20) NOT NULL COMMENT 'task definition code', `task_definition_version` int(11) DEFAULT NULL COMMENT 'task definition version', `process_instance_id` int(11) DEFAULT NULL COMMENT 'process instance id', `state` tinyint(4) DEFAULT NULL COMMENT 'Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete', `submit_time` datetime DEFAULT NULL COMMENT 'task submit time', `start_time` datetime DEFAULT NULL COMMENT 'task start time', `end_time` datetime DEFAULT NULL COMMENT 'task end time', `host` varchar(135) DEFAULT NULL COMMENT 'host of task running on', `execute_path` varchar(200) DEFAULT NULL COMMENT 'task execute path in the host', `log_path` varchar(200) DEFAULT NULL COMMENT 'task log path', `alert_flag` tinyint(4) DEFAULT NULL COMMENT 'whether alert', `retry_times` int(4) DEFAULT '0' COMMENT 'task retry times', `pid` int(4) DEFAULT NULL COMMENT 'pid of task', `app_link` text COMMENT 'yarn app id', `task_params` text COMMENT 'job custom parameters', `flag` tinyint(4) DEFAULT '1' COMMENT '0 not available, 1 available', `retry_interval` int(4) DEFAULT NULL COMMENT 'retry interval when task failed ', `max_retry_times` int(2) DEFAULT NULL COMMENT 'max retry times', `task_instance_priority` int(11) DEFAULT NULL COMMENT 'task instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(64) DEFAULT NULL COMMENT 'worker group id', `executor_id` int(11) DEFAULT NULL, `first_submit_time` datetime DEFAULT NULL COMMENT 'task first submit time', `delay_time` int(4) DEFAULT '0' COMMENT 'task delay execution time', `var_pool` longtext COMMENT 'var_pool', PRIMARY 
KEY (`id`), KEY `process_instance_id` (`process_instance_id`) USING BTREE, CONSTRAINT `foreign_key_instance_id` FOREIGN KEY (`process_instance_id`) REFERENCES `t_ds_process_instance` (`id`) ON DELETE CASCADE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_task_instance -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_tenant -- ---------------------------- DROP TABLE IF EXISTS `t_ds_tenant`; CREATE TABLE `t_ds_tenant` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `tenant_code` varchar(64) DEFAULT NULL COMMENT 'tenant code', `description` varchar(256) DEFAULT NULL, `queue_id` int(11) DEFAULT NULL COMMENT 'queue id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_tenant -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_udfs -- ---------------------------- DROP TABLE IF EXISTS `t_ds_udfs`; CREATE TABLE `t_ds_udfs` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT NULL COMMENT 'user id', `func_name` varchar(100) NOT NULL COMMENT 'UDF function name', `class_name` varchar(255) NOT NULL COMMENT 'class of udf', `type` tinyint(4) NOT NULL COMMENT 'Udf function type', `arg_types` varchar(255) DEFAULT NULL COMMENT 'arguments types', `database` varchar(255) DEFAULT NULL COMMENT 'data base', `description` varchar(255) DEFAULT NULL, `resource_id` int(11) NOT NULL COMMENT 'resource id', `resource_name` varchar(255) NOT NULL COMMENT 'resource name', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_udfs -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_user -- ---------------------------- DROP TABLE IF EXISTS `t_ds_user`; CREATE TABLE `t_ds_user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'user id', `user_name` varchar(64) DEFAULT NULL COMMENT 'user name', `user_password` varchar(64) DEFAULT NULL COMMENT 'user password', `user_type` tinyint(4) DEFAULT NULL COMMENT 'user type, 0:administrator,1:ordinary user', `email` varchar(64) DEFAULT NULL COMMENT 'email', `phone` varchar(11) DEFAULT NULL COMMENT 'phone', `tenant_id` int(11) DEFAULT NULL COMMENT 'tenant id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `queue` varchar(64) DEFAULT NULL COMMENT 'queue', `state` int(1) DEFAULT 1 COMMENT 'state 0:disable 1:enable', PRIMARY KEY (`id`), UNIQUE KEY `user_name_unique` (`user_name`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_user -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_worker_group -- ---------------------------- DROP TABLE IF EXISTS `t_ds_worker_group`; CREATE TABLE `t_ds_worker_group` ( `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'id', `name` varchar(256) NOT NULL COMMENT 'worker group name', `addr_list` text NULL DEFAULT NULL COMMENT 'worker addr list. 
split by [,]', `create_time` datetime NULL DEFAULT NULL COMMENT 'create time', `update_time` datetime NULL DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), UNIQUE KEY `name_unique` (`name`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of t_ds_worker_group -- ---------------------------- -- ---------------------------- -- Table structure for t_ds_version -- ---------------------------- DROP TABLE IF EXISTS `t_ds_version`; CREATE TABLE `t_ds_version` ( `id` int(11) NOT NULL AUTO_INCREMENT, `version` varchar(200) NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `version_UNIQUE` (`version`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COMMENT='version'; -- ---------------------------- -- Records of t_ds_version -- ---------------------------- INSERT INTO `t_ds_version` VALUES ('1', '1.4.0'); -- ---------------------------- -- Records of t_ds_alertgroup -- ---------------------------- INSERT INTO `t_ds_alertgroup`(alert_instance_ids, create_user_id, group_name, description, create_time, update_time) VALUES ("1,2", 1, 'default admin warning group', 'default admin warning group', '2018-11-29 10:20:39', '2018-11-29 10:20:39'); -- ---------------------------- -- Records of t_ds_user -- ---------------------------- INSERT INTO `t_ds_user` VALUES ('1', 'admin', '7ad2410b2f4c074479a8937a28a22b8f', '0', 'xxx@qq.com', '', '0', '2018-03-27 15:48:50', '2018-10-24 17:40:22', null, 1); -- ---------------------------- -- Table structure for t_ds_plugin_define -- ---------------------------- SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); DROP TABLE IF EXISTS `t_ds_plugin_define`; CREATE TABLE `t_ds_plugin_define` ( `id` int NOT NULL AUTO_INCREMENT, `plugin_name` varchar(100) NOT NULL COMMENT 'the name of plugin eg: email', `plugin_type` varchar(100) NOT NULL COMMENT 'plugin type . alert=alert plugin, job=job plugin', `plugin_params` text COMMENT 'plugin params', `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `t_ds_plugin_define_UN` (`plugin_name`,`plugin_type`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_alert_plugin_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alert_plugin_instance`; CREATE TABLE `t_ds_alert_plugin_instance` ( `id` int NOT NULL AUTO_INCREMENT, `plugin_define_id` int NOT NULL, `plugin_instance_params` text COMMENT 'plugin instance params. Also contain the params value which user input in web ui.', `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `instance_name` varchar(200) DEFAULT NULL COMMENT 'alert instance name', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,474
[Bug][SQL] Fix symbol error in sql script
**Describe the bug** **To Reproduce** Steps to reproduce the behavior, for example: 1. execute dolphinscheduler_mysql.sql 2. see error info: [42000][1064] You have an error in your SQL syntax; **Which version of Dolphin Scheduler:** -[dev]
https://github.com/apache/dolphinscheduler/issues/5474
https://github.com/apache/dolphinscheduler/pull/5462
a925f645719edec4afdb994ef57029c45c55d27e
018f5c89f6ee1dbb8259a6036c4beb1874cd3f5c
"2021-05-15T02:54:44Z"
java
"2021-05-18T02:20:18Z"
sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_ddl.sql
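Each change in the upgrade script below is applied through a short guard procedure so the script can be re-run safely: the procedure consults information_schema before issuing the ALTER, is called once, and is then dropped. A condensed sketch of that pattern (the procedure name add_column_if_absent is illustrative; the table and column mirror the first guard in the script):

drop PROCEDURE if EXISTS add_column_if_absent;
delimiter d//
CREATE PROCEDURE add_column_if_absent()
BEGIN
    -- issue the ALTER only when information_schema reports the column missing
    IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS
        WHERE TABLE_NAME='t_ds_user'
        AND TABLE_SCHEMA=(SELECT DATABASE())
        AND COLUMN_NAME='state')
    THEN
        ALTER TABLE t_ds_user ADD `state` int(1) DEFAULT 1 COMMENT 'state 0:disable 1:enable';
    END IF;
END;
d//
delimiter ;
CALL add_column_if_absent();
DROP PROCEDURE add_column_if_absent;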
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); -- uc_dolphin_T_t_ds_user_A_state drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_user_A_state; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_user_A_state() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_user' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='state') THEN ALTER TABLE t_ds_user ADD `state` int(1) DEFAULT 1 COMMENT 'state 0:disable 1:enable'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_user_A_state; DROP PROCEDURE uc_dolphin_T_t_ds_user_A_state; -- uc_dolphin_T_t_ds_tenant_A_tenant_name drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_tenant_A_tenant_name; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_tenant_A_tenant_name() BEGIN IF EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_tenant' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='tenant_name') THEN ALTER TABLE t_ds_tenant DROP `tenant_name`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_tenant_A_tenant_name; DROP PROCEDURE uc_dolphin_T_t_ds_tenant_A_tenant_name; -- uc_dolphin_T_t_ds_task_instance_A_first_submit_time drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_first_submit_time; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_first_submit_time() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='first_submit_time') THEN ALTER TABLE t_ds_task_instance ADD `first_submit_time` datetime DEFAULT NULL COMMENT 'task first submit time'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_task_instance_A_first_submit_time(); DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_first_submit_time; -- uc_dolphin_T_t_ds_task_instance_A_delay_time drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_delay_time; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_delay_time() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='delay_time') THEN ALTER TABLE t_ds_task_instance ADD `delay_time` int(4) DEFAULT '0' COMMENT 'task delay execution time'; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_task_instance_A_delay_time(); DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_delay_time; -- uc_dolphin_T_t_ds_task_instance_A_var_pool drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_var_pool; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_var_pool() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_task_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='var_pool') THEN ALTER TABLE t_ds_task_instance 
ADD `var_pool` longtext NULL; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_task_instance_A_var_pool(); DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_var_pool; -- uc_dolphin_T_t_ds_process_instance_A_var_pool drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_instance_A_var_pool; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_process_instance_A_var_pool() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_instance' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='var_pool') THEN ALTER TABLE t_ds_process_instance ADD `var_pool` longtext NULL; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_process_instance_A_var_pool(); DROP PROCEDURE uc_dolphin_T_t_ds_process_instance_A_var_pool; -- uc_dolphin_T_t_ds_process_definition_A_modify_by drop PROCEDURE if EXISTS ct_dolphin_T_t_ds_process_definition_version; delimiter d// CREATE PROCEDURE ct_dolphin_T_t_ds_process_definition_version() BEGIN CREATE TABLE IF NOT EXISTS `t_ds_process_definition_version` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `process_definition_id` int(11) NOT NULL COMMENT 'process definition id', `version` int(11) DEFAULT NULL COMMENT 'process definition version', `process_definition_json` longtext COMMENT 'process definition json content', `description` text, `global_params` text COMMENT 'global parameters', `locations` text COMMENT 'Node location information', `connects` text COMMENT 'Node connection information', `receivers` text COMMENT 'receivers', `receivers_cc` text COMMENT 'cc', `create_time` datetime DEFAULT NULL COMMENT 'create time', `timeout` int(11) DEFAULT '0' COMMENT 'time out', `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource ids', PRIMARY KEY (`id`), UNIQUE KEY `process_definition_id_and_version` (`process_definition_id`,`version`) USING BTREE, KEY `process_definition_index` (`id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=84 DEFAULT CHARSET=utf8; END; d// delimiter ; CALL ct_dolphin_T_t_ds_process_definition_version; DROP PROCEDURE ct_dolphin_T_t_ds_process_definition_version; -- ---------------------------- -- Table structure for t_ds_plugin_define -- ---------------------------- DROP TABLE IF EXISTS `t_ds_plugin_define`; CREATE TABLE `t_ds_plugin_define` ( `id` int NOT NULL AUTO_INCREMENT, `plugin_name` varchar(100) NOT NULL COMMENT 'the name of plugin eg: email', `plugin_type` varchar(100) NOT NULL COMMENT 'plugin type . alert=alert plugin, job=job plugin', `plugin_params` text COMMENT 'plugin params', `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `t_ds_plugin_define_UN` (`plugin_name`,`plugin_type`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_alert_plugin_instance -- ---------------------------- DROP TABLE IF EXISTS `t_ds_alert_plugin_instance`; CREATE TABLE `t_ds_alert_plugin_instance` ( `id` int NOT NULL AUTO_INCREMENT, `plugin_define_id` int NOT NULL, `plugin_instance_params` text COMMENT 'plugin instance params. 
Also contain the params value which user input in web ui.', `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `instance_name` varchar(200) DEFAULT NULL COMMENT 'alert instance name', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- uc_dolphin_T_t_ds_process_definition_A_warning_group_id drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_definition_A_warning_group_id; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_process_definition_A_warning_group_id() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_definition' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='warning_group_id') THEN ALTER TABLE t_ds_process_definition ADD COLUMN `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id' AFTER `connects`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_process_definition_A_warning_group_id(); DROP PROCEDURE uc_dolphin_T_t_ds_process_definition_A_warning_group_id; -- uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_process_definition_version' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='warning_group_id') THEN ALTER TABLE t_ds_process_definition_version ADD COLUMN `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id' AFTER `connects`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id(); DROP PROCEDURE uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id; -- uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_alertgroup' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='alert_instance_ids') THEN ALTER TABLE t_ds_alertgroup ADD COLUMN `alert_instance_ids` varchar (255) DEFAULT NULL COMMENT 'alert instance ids' AFTER `id`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids(); DROP PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids; -- uc_dolphin_T_t_ds_alertgroup_A_create_user_id drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_alertgroup_A_create_user_id; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_create_user_id() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_alertgroup' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='create_user_id') THEN ALTER TABLE t_ds_alertgroup ADD COLUMN `create_user_id` int(11) DEFAULT NULL COMMENT 'create user id' AFTER `alert_instance_ids`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_alertgroup_A_create_user_id(); DROP PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_create_user_id; -- uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.STATISTICS WHERE TABLE_NAME='t_ds_alertgroup' AND TABLE_SCHEMA=(SELECT DATABASE()) AND INDEX_NAME ='t_ds_alertgroup_name_UN') THEN ALTER TABLE t_ds_alertgroup ADD UNIQUE 
KEY `t_ds_alertgroup_name_UN` (`group_name`); END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName(); DROP PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName; -- uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.STATISTICS WHERE TABLE_NAME='t_ds_datasource' AND TABLE_SCHEMA=(SELECT DATABASE()) AND INDEX_NAME ='t_ds_datasource_name_UN') THEN ALTER TABLE t_ds_datasource ADD UNIQUE KEY `t_ds_datasource_name_UN` (`name`, `type`); END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName(); DROP PROCEDURE uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName; -- uc_dolphin_T_t_ds_schedules_A_add_timezone drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_schedules_A_add_timezone; delimiter d// CREATE PROCEDURE uc_dolphin_T_t_ds_schedules_A_add_timezone() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_NAME='t_ds_schedules' AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='timezone_id') THEN ALTER TABLE t_ds_schedules ADD COLUMN `timezone_id` varchar(40) default NULL COMMENT 'schedule timezone id' AFTER `end_time`; END IF; END; d// delimiter ; CALL uc_dolphin_T_t_ds_schedules_A_add_timezone(); DROP PROCEDURE uc_dolphin_T_t_ds_schedules_A_add_timezone; -- ---------------------------- -- These columns will not be used in the new version; if you determine that the historical data is useless, you can delete it using the sql below -- ---------------------------- -- ALTER TABLE t_ds_alert DROP `show_type`, DROP `alert_type`, DROP `receivers`, DROP `receivers_cc`; -- ALTER TABLE t_ds_alertgroup DROP `group_type`; -- ALTER TABLE t_ds_process_definition DROP `receivers`, DROP `receivers_cc`; -- ALTER TABLE t_ds_process_definition_version DROP `receivers`, DROP `receivers_cc`; -- DROP TABLE IF EXISTS t_ds_relation_user_alertgroup; -- ALTER TABLE t_ds_command DROP `dependence`; -- ALTER TABLE t_ds_error_command DROP `dependence`;
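Each upgrade block in this script follows the same idempotence pattern: probe information_schema first, then ALTER only when the column or index is absent. A minimal JDBC sketch of that pattern in Java follows; the connection URL, credentials, and a MySQL driver on the classpath are all assumptions for illustration, not values from this script.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

public class IdempotentUpgradeSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details, not taken from the upgrade script.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/dolphinscheduler", "root", "root")) {
            // Probe information_schema for the column, exactly as the
            // stored procedures above do before altering the table.
            String probe = "SELECT 1 FROM information_schema.COLUMNS "
                    + "WHERE TABLE_NAME = 't_ds_user' "
                    + "AND TABLE_SCHEMA = (SELECT DATABASE()) "
                    + "AND COLUMN_NAME = 'state'";
            try (PreparedStatement ps = conn.prepareStatement(probe);
                 ResultSet rs = ps.executeQuery()) {
                if (!rs.next()) { // column absent: safe to add it
                    try (Statement st = conn.createStatement()) {
                        st.executeUpdate("ALTER TABLE t_ds_user ADD `state` int(1) "
                                + "DEFAULT 1 COMMENT 'state 0:disable 1:enable'");
                    }
                }
            }
        }
    }
}

Checking before altering is what makes the script safe to re-run: a second execution finds the column already present and skips the ALTER instead of failing.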
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
5,487
[Improvement][Task] Remove TaskRecordDao and simplify the after() in the AbstractTask class
DolphinScheduler has already removed the data quality check, and the settings for the data-quality-related database have likewise been removed from the configuration files. The code, however, still contains TaskRecordDao's data quality query, and the eamp_hive_log_hd table referenced in SELECT * FROM eamp_hive_log_hd WHERE PROC_NAME='%s' and PROC_DATE like '%s' clearly no longer exists in the default configured database. Yet the core abstract class AbstractTask still decides task status through TaskRecordDao's data quality check; that logic should be removed to keep this important abstract class clean. public void after() { if (getExitStatusCode() == Constants.EXIT_CODE_SUCCESS) { // task record flag: if true, query the task record (data quality) result if (TaskRecordDao.getTaskRecordFlag() && TaskType.typeIsNormalTask(taskExecutionContext.getTaskType())) { AbstractParameters params = TaskParametersUtils.getParameters(taskExecutionContext.getTaskType(), taskExecutionContext.getTaskParams()); // replace placeholder Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), taskExecutionContext.getDefinedParams(), params.getLocalParametersMap(), CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), taskExecutionContext.getScheduleTime()); if (paramsMap != null && !paramsMap.isEmpty() && paramsMap.containsKey("v_proc_date")) { String vProcDate = paramsMap.get("v_proc_date").getValue(); if (!StringUtils.isEmpty(vProcDate)) { TaskRecordStatus taskRecordState = TaskRecordDao.getTaskRecordState(taskExecutionContext.getTaskName(), vProcDate); logger.info("task record status : {}", taskRecordState); if (taskRecordState == TaskRecordStatus.FAILURE) { setExitStatusCode(Constants.EXIT_CODE_FAILURE); } } } } } else if (getExitStatusCode() == Constants.EXIT_CODE_KILL) { setExitStatusCode(Constants.EXIT_CODE_KILL); } else { setExitStatusCode(Constants.EXIT_CODE_FAILURE); } }
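A minimal sketch of what the simplified after() could look like once the TaskRecordDao branch is gone, assuming the same Constants exit codes and the same enclosing AbstractTask class as the quoted method; this is illustrative, not the merged patch.

public void after() {
    int exitCode = getExitStatusCode();
    // With the data quality check removed, SUCCESS and KILL codes pass
    // through unchanged; any other code collapses to FAILURE, which is
    // exactly the non-success handling of the original method.
    if (exitCode != Constants.EXIT_CODE_SUCCESS && exitCode != Constants.EXIT_CODE_KILL) {
        setExitStatusCode(Constants.EXIT_CODE_FAILURE);
    }
}

This keeps the exit-code normalization while dropping the TaskRecordDao, ParamUtils, and TaskParametersUtils dependencies from the abstract class entirely.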
https://github.com/apache/dolphinscheduler/issues/5487
https://github.com/apache/dolphinscheduler/pull/5492
018f5c89f6ee1dbb8259a6036c4beb1874cd3f5c
bc22ae7c91c9cbd7c971796ba3a45358c2f11864
"2021-05-17T09:46:25Z"
java
"2021-05-18T09:00:03Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR; import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TaskRecordService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import springfox.documentation.annotations.ApiIgnore; /** * task record controller */ @ApiIgnore @RestController @RequestMapping("/projects/task-record") public class TaskRecordController extends BaseController { @Autowired TaskRecordService taskRecordService; /** * query task record list page * * @param loginUser login user * @param taskName task name * @param state state * @param sourceTable source table * @param destTable destination table * @param taskDate task date * @param startTime start time * @param endTime end time * @param pageNo page number * @param pageSize page size * @return task record list */ @GetMapping("/list-paging") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_TASK_RECORD_LIST_PAGING_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result queryTaskRecordListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "taskName", required = false) String taskName, @RequestParam(value = "state", required = false) String state, @RequestParam(value = "sourceTable", required = false) String sourceTable, @RequestParam(value = "destTable", required = false) String destTable, @RequestParam(value = "taskDate", required = false) String taskDate, @RequestParam(value = "startDate", required = false) String startTime, @RequestParam(value = "endDate", required = false) String endTime, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize ) { Map<String, Object> result = taskRecordService.queryTaskRecordListPaging(false, taskName, startTime, taskDate, sourceTable, destTable, endTime, state, pageNo, pageSize); return returnDataListPaging(result); } /** * query history task record list paging * * @param loginUser login user * @param 
taskName task name * @param state state * @param sourceTable source table * @param destTable destination table * @param taskDate task date * @param startTime start time * @param endTime end time * @param pageNo page number * @param pageSize page size * @return history task record list */ @GetMapping("/history-list-paging") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_TASK_RECORD_LIST_PAGING_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result queryHistoryTaskRecordListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "taskName", required = false) String taskName, @RequestParam(value = "state", required = false) String state, @RequestParam(value = "sourceTable", required = false) String sourceTable, @RequestParam(value = "destTable", required = false) String destTable, @RequestParam(value = "taskDate", required = false) String taskDate, @RequestParam(value = "startDate", required = false) String startTime, @RequestParam(value = "endDate", required = false) String endTime, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize ) { Map<String, Object> result = taskRecordService.queryTaskRecordListPaging(true, taskName, startTime, taskDate, sourceTable, destTable, endTime, state, pageNo, pageSize); return returnDataListPaging(result); } }