diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java index 36f903c25d308c058a725ba7b4c90fe3a0ca038c..ef1022755fbe5538bd616042a8caecba4a2f5a42 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java @@ -14,13 +14,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.alert.utils; import org.apache.dolphinscheduler.common.enums.ShowType; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.common.utils.*; - import org.apache.dolphinscheduler.plugin.model.AlertData; + import org.apache.http.HttpEntity; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; @@ -29,11 +30,17 @@ import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.io.IOException; -import java.util.*; +import java.util.Collection; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Enterprise WeChat utils @@ -41,25 +48,21 @@ import java.util.*; public class EnterpriseWeChatUtils { public static final Logger logger = LoggerFactory.getLogger(EnterpriseWeChatUtils.class); - + public static final String ENTERPRISE_WE_CHAT_AGENT_ID = 
PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID); + public static final String ENTERPRISE_WE_CHAT_USERS = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS); private static final String ENTERPRISE_WE_CHAT_CORP_ID = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_CORP_ID); - private static final String ENTERPRISE_WE_CHAT_SECRET = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_SECRET); - private static final String ENTERPRISE_WE_CHAT_TOKEN_URL = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TOKEN_URL); private static final String ENTERPRISE_WE_CHAT_TOKEN_URL_REPLACE = ENTERPRISE_WE_CHAT_TOKEN_URL == null ? null : ENTERPRISE_WE_CHAT_TOKEN_URL - .replaceAll("\\{corpId\\}", ENTERPRISE_WE_CHAT_CORP_ID) - .replaceAll("\\{secret\\}", ENTERPRISE_WE_CHAT_SECRET); - + .replaceAll("\\{corpId}", ENTERPRISE_WE_CHAT_CORP_ID) + .replaceAll("\\{secret}", ENTERPRISE_WE_CHAT_SECRET); private static final String ENTERPRISE_WE_CHAT_PUSH_URL = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_PUSH_URL); - private static final String ENTERPRISE_WE_CHAT_TEAM_SEND_MSG = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TEAM_SEND_MSG); - private static final String ENTERPRISE_WE_CHAT_USER_SEND_MSG = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG); - public static final String ENTERPRISE_WE_CHAT_AGENT_ID = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID); - - public static final String ENTERPRISE_WE_CHAT_USERS = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS); + private static final String agentIdRegExp = "\\{agentId}"; + private static final String msgRegExp = "\\{msg}"; + private static final String userRegExp = "\\{toUser}"; /** * get Enterprise WeChat is enable @@ -116,13 +119,13 @@ public class EnterpriseWeChatUtils { * * @param toParty the toParty * @param agentId the agentId - * @param msg the msg + * @param msg the msg * @return Enterprise WeChat send message */ public static String 
makeTeamSendMsg(String toParty, String agentId, String msg) { - return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty\\}", toParty) - .replaceAll("\\{agentId\\}", agentId) - .replaceAll("\\{msg\\}", msg); + return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty}", toParty) + .replaceAll(agentIdRegExp, agentId) + .replaceAll(msgRegExp, msg); } /** @@ -130,56 +133,56 @@ public class EnterpriseWeChatUtils { * * @param toParty the toParty * @param agentId the agentId - * @param msg the msg + * @param msg the msg * @return Enterprise WeChat send message */ public static String makeTeamSendMsg(Collection toParty, String agentId, String msg) { String listParty = FuncUtils.mkString(toParty, "|"); - return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty\\}", listParty) - .replaceAll("\\{agentId\\}", agentId) - .replaceAll("\\{msg\\}", msg); + return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty}", listParty) + .replaceAll(agentIdRegExp, agentId) + .replaceAll(msgRegExp, msg); } /** * make team single user message * - * @param toUser the toUser + * @param toUser the toUser * @param agentId the agentId - * @param msg the msg + * @param msg the msg * @return Enterprise WeChat send message */ public static String makeUserSendMsg(String toUser, String agentId, String msg) { - return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\{toUser\\}", toUser) - .replaceAll("\\{agentId\\}", agentId) - .replaceAll("\\{msg\\}", msg); + return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\{toUser}", toUser) + .replaceAll(agentIdRegExp, agentId) + .replaceAll(msgRegExp, msg); } /** * make team multi user message * - * @param toUser the toUser + * @param toUser the toUser * @param agentId the agentId - * @param msg the msg + * @param msg the msg * @return Enterprise WeChat send message */ public static String makeUserSendMsg(Collection toUser, String agentId, String msg) { String listUser = FuncUtils.mkString(toUser, "|"); - return 
ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\{toUser\\}", listUser) - .replaceAll("\\{agentId\\}", agentId) - .replaceAll("\\{msg\\}", msg); + return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll(userRegExp, listUser) + .replaceAll(agentIdRegExp, agentId) + .replaceAll(msgRegExp, msg); } /** * send Enterprise WeChat * * @param charset the charset - * @param data the data - * @param token the token + * @param data the data + * @param token the token * @return Enterprise WeChat resp, demo: {"errcode":0,"errmsg":"ok","invaliduser":""} * @throws IOException the IOException */ public static String sendEnterpriseWeChat(String charset, String data, String token) throws IOException { - String enterpriseWeChatPushUrlReplace = ENTERPRISE_WE_CHAT_PUSH_URL.replaceAll("\\{token\\}", token); + String enterpriseWeChatPushUrlReplace = ENTERPRISE_WE_CHAT_PUSH_URL.replaceAll("\\{token}", token); CloseableHttpClient httpClient = HttpClients.createDefault(); try { @@ -205,7 +208,7 @@ public class EnterpriseWeChatUtils { /** * convert table to markdown style * - * @param title the title + * @param title the title * @param content the content * @return markdown table content */ @@ -215,13 +218,13 @@ public class EnterpriseWeChatUtils { if (null != mapItemsList) { for (LinkedHashMap mapItems : mapItemsList) { - Set> entries = mapItems.entrySet(); - Iterator> iterator = entries.iterator(); + Set> entries = mapItems.entrySet(); + Iterator> iterator = entries.iterator(); StringBuilder t = new StringBuilder(String.format("`%s`%s", title, Constants.MARKDOWN_ENTER)); while (iterator.hasNext()) { - Map.Entry entry = iterator.next(); + Map.Entry entry = iterator.next(); t.append(Constants.MARKDOWN_QUOTE); t.append(entry.getKey()).append(":").append(entry.getValue()); t.append(Constants.MARKDOWN_ENTER); @@ -235,30 +238,31 @@ public class EnterpriseWeChatUtils { /** * convert text to markdown style * - * @param title the title + * @param title the title * @param content the content * @return 
markdown text */ public static String markdownText(String title, String content) { if (StringUtils.isNotEmpty(content)) { - List list; - try { - list = JSONUtils.toList(content, String.class); - } catch (Exception e) { - logger.error("json format exception", e); - return null; - } + List mapItemsList = JSONUtils.toList(content, LinkedHashMap.class); + if (null != mapItemsList) { + StringBuilder contents = new StringBuilder(100); + contents.append(String.format("`%s`%n", title)); + for (LinkedHashMap mapItems : mapItemsList) { + + Set> entries = mapItems.entrySet(); + Iterator> iterator = entries.iterator(); + while (iterator.hasNext()) { + Map.Entry entry = iterator.next(); + contents.append(Constants.MARKDOWN_QUOTE); + contents.append(entry.getKey()).append(":").append(entry.getValue()); + contents.append(Constants.MARKDOWN_ENTER); + } - StringBuilder contents = new StringBuilder(100); - contents.append(String.format("`%s`%n", title)); - for (String str : list) { - contents.append(Constants.MARKDOWN_QUOTE); - contents.append(str); - contents.append(Constants.MARKDOWN_ENTER); + } + return contents.toString(); } - return contents.toString(); - } return null; } @@ -278,4 +282,5 @@ public class EnterpriseWeChatUtils { return result; } + } diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java index 7b6cdd013b5b7f528624cf1e5af6c1c2d3f681f3..1a70c5becbd904e81e47b456b5bbc9085208ebea 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java +++ b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java @@ -14,36 +14,38 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.alert.utils; import org.apache.dolphinscheduler.common.enums.AlertType; import org.apache.dolphinscheduler.common.enums.ShowType; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.Alert; import org.apache.dolphinscheduler.plugin.model.AlertData; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; -import java.io.IOException; -import java.util.*; -import org.apache.dolphinscheduler.common.utils.*; - /** * Please manually modify the configuration file before testing. * file: alert.properties - * enterprise.wechat.corp.id - * enterprise.wechat.secret - * enterprise.wechat.token.url - * enterprise.wechat.push.url - * enterprise.wechat.send.msg - * enterprise.wechat.agent.id - * enterprise.wechat.users + * enterprise.wechat.corp.id + * enterprise.wechat.secret + * enterprise.wechat.token.url + * enterprise.wechat.push.url + * enterprise.wechat.send.msg + * enterprise.wechat.agent.id + * enterprise.wechat.users */ @PrepareForTest(PropertyUtils.class) @RunWith(PowerMockRunner.class) @@ -52,14 +54,18 @@ public class EnterpriseWeChatUtilsTest { private static final String toParty = "wwc99134b6fc1edb6"; private static final String enterpriseWechatSecret = "Uuv2KFrkdf7SeKOsTDCpsTkpawXBMNRhFy6VKX5FV"; private static final String enterpriseWechatAgentId = "1000004"; - private static final String enterpriseWechatUsers="LiGang,journey"; + private static final String enterpriseWechatUsers = "LiGang,journey"; private static final String msg = "hello world"; - private static final String 
enterpriseWechatTeamSendMsg = "{\\\"toparty\\\":\\\"{toParty}\\\",\\\"agentid\\\":\\\"{agentId}\\\",\\\"msgtype\\\":\\\"text\\\",\\\"text\\\":{\\\"content\\\":\\\"{msg}\\\"},\\\"safe\\\":\\\"0\\\"}"; - private static final String enterpriseWechatUserSendMsg = "{\\\"touser\\\":\\\"{toUser}\\\",\\\"agentid\\\":\\\"{agentId}\\\",\\\"msgtype\\\":\\\"markdown\\\",\\\"markdown\\\":{\\\"content\\\":\\\"{msg}\\\"}}"; + private static final String enterpriseWechatTeamSendMsg = "{\\\"toparty\\\":\\\"{toParty}\\\",\\\"agentid\\\":\\\"{agentId}\\\"" + + + ",\\\"msgtype\\\":\\\"text\\\",\\\"text\\\":{\\\"content\\\":\\\"{msg}\\\"},\\\"safe\\\":\\\"0\\\"}"; + private static final String enterpriseWechatUserSendMsg = "{\\\"touser\\\":\\\"{toUser}\\\",\\\"agentid\\\":\\\"{agentId}\\\"" + + + ",\\\"msgtype\\\":\\\"markdown\\\",\\\"markdown\\\":{\\\"content\\\":\\\"{msg}\\\"}}"; @Before - public void init(){ + public void init() { PowerMockito.mockStatic(PropertyUtils.class); Mockito.when(PropertyUtils.getBoolean(Constants.ENTERPRISE_WECHAT_ENABLE)).thenReturn(true); Mockito.when(PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG)).thenReturn(enterpriseWechatUserSendMsg); @@ -67,14 +73,13 @@ public class EnterpriseWeChatUtilsTest { } @Test - public void testIsEnable(){ + public void testIsEnable() { Boolean weChartEnable = EnterpriseWeChatUtils.isEnable(); Assert.assertTrue(weChartEnable); } - @Test - public void testMakeTeamSendMsg1(){ + public void testMakeTeamSendMsg1() { String sendMsg = EnterpriseWeChatUtils.makeTeamSendMsg(toParty, enterpriseWechatSecret, msg); Assert.assertTrue(sendMsg.contains(toParty)); Assert.assertTrue(sendMsg.contains(enterpriseWechatSecret)); @@ -82,9 +87,8 @@ public class EnterpriseWeChatUtilsTest { } - @Test - public void testMakeTeamSendMsg2(){ + public void testMakeTeamSendMsg2() { List parties = new ArrayList<>(); parties.add(toParty); parties.add("test1"); @@ -96,7 +100,7 @@ public class EnterpriseWeChatUtilsTest { } @Test - public 
void tesMakeUserSendMsg1(){ + public void tesMakeUserSendMsg1() { String sendMsg = EnterpriseWeChatUtils.makeUserSendMsg(enterpriseWechatUsers, enterpriseWechatAgentId, msg); Assert.assertTrue(sendMsg.contains(enterpriseWechatUsers)); @@ -105,7 +109,7 @@ public class EnterpriseWeChatUtilsTest { } @Test - public void tesMakeUserSendMsg2(){ + public void tesMakeUserSendMsg2() { List users = new ArrayList<>(); users.add("user1"); users.add("user2"); @@ -118,7 +122,7 @@ public class EnterpriseWeChatUtilsTest { } @Test - public void testMarkdownByAlertForText(){ + public void testMarkdownByAlertForText() { Alert alertForText = createAlertForText(); AlertData alertData = new AlertData(); alertData.setTitle(alertForText.getTitle()) @@ -129,7 +133,7 @@ public class EnterpriseWeChatUtilsTest { } @Test - public void testMarkdownByAlertForTable(){ + public void testMarkdownByAlertForTable() { Alert alertForText = createAlertForTable(); AlertData alertData = new AlertData(); alertData.setTitle(alertForText.getTitle()) @@ -139,17 +143,26 @@ public class EnterpriseWeChatUtilsTest { Assert.assertNotNull(result); } - private Alert createAlertForText(){ - String content ="[\"id:69\"," + - "\"name:UserBehavior-0--1193959466\"," + - "\"Job name: Start workflow\"," + - "\"State: SUCCESS\"," + - "\"Recovery:NO\"," + - "\"Run time: 1\"," + - "\"Start time: 2018-08-06 10:31:34.0\"," + - "\"End time: 2018-08-06 10:31:49.0\"," + - "\"Host: 192.168.xx.xx\"," + - "\"Notify group :4\"]"; + private Alert createAlertForText() { + String content = "[{\"id\":\"69\"," + + + "\"name\":\"UserBehavior-0--1193959466\"," + + + "\"Job name\":\"Start workflow\"," + + + "\"State\":\"SUCCESS\"," + + + "\"Recovery\":\"NO\"," + + + "\"Run time\":\"1\"," + + + "\"Start time\": \"2018-08-06 10:31:34.0\"," + + + "\"End time\": \"2018-08-06 10:31:49.0\"," + + + "\"Host\": \"192.168.xx.xx\"," + + + "\"Notify group\" :\"4\"}]"; Alert alert = new Alert(); alert.setTitle("Mysql Exception"); @@ -161,18 +174,18 @@ 
public class EnterpriseWeChatUtilsTest { return alert; } - private String list2String(){ + private String list2String() { LinkedHashMap map1 = new LinkedHashMap<>(); - map1.put("mysql service name","mysql200"); - map1.put("mysql address","192.168.xx.xx"); - map1.put("port","3306"); - map1.put("no index of number","80"); - map1.put("database client connections","190"); + map1.put("mysql service name", "mysql200"); + map1.put("mysql address", "192.168.xx.xx"); + map1.put("port", "3306"); + map1.put("no index of number", "80"); + map1.put("database client connections", "190"); LinkedHashMap map2 = new LinkedHashMap<>(); - map2.put("mysql service name","mysql210"); - map2.put("mysql address","192.168.xx.xx"); + map2.put("mysql service name", "mysql210"); + map2.put("mysql address", "192.168.xx.xx"); map2.put("port", "3306"); map2.put("no index of number", "10"); map2.put("database client connections", "90"); @@ -184,11 +197,11 @@ public class EnterpriseWeChatUtilsTest { return mapjson; } - private Alert createAlertForTable(){ + private Alert createAlertForTable() { Alert alert = new Alert(); alert.setTitle("Mysql Exception"); alert.setShowType(ShowType.TABLE); - String content= list2String(); + String content = list2String(); alert.setContent(content); alert.setAlertType(AlertType.EMAIL); alert.setAlertGroupId(1); @@ -196,77 +209,75 @@ public class EnterpriseWeChatUtilsTest { } - - -// @Test -// public void testSendSingleTeamWeChat() { -// try { -// String token = EnterpriseWeChatUtils.getToken(); -// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world"); -// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); -// -// String errmsg = JSONUtils.parseObject(resp).getString("errmsg"); -// Assert.assertEquals("ok",errmsg); -// } catch (IOException e) { -// e.printStackTrace(); -// } -// } -// -// @Test -// public void testSendMultiTeamWeChat() { -// -// try { -// String token = EnterpriseWeChatUtils.getToken(); -// 
String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world"); -// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); -// -// String errmsg = JSONUtils.parseObject(resp).getString("errmsg"); -// Assert.assertEquals("ok",errmsg); -// } catch (IOException e) { -// e.printStackTrace(); -// } -// } -// -// @Test -// public void testSendSingleUserWeChat() { -// try { -// String token = EnterpriseWeChatUtils.getToken(); -// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId.stream().findFirst().get(), agentId, "your meeting room has been booked and will be synced to the 'mailbox' later \n" + -// ">**matter details** \n" + -// ">matter:meeting
" + -// ">organizer:@miglioguan \n" + -// ">participant:@miglioguan、@kunliu、@jamdeezhou、@kanexiong、@kisonwang \n" + -// "> \n" + -// ">meeting room:Guangzhou TIT 1st Floor 301 \n" + -// ">date:May 18, 2018 \n" + -// ">time:9:00-11:00 am \n" + -// "> \n" + -// ">please attend the meeting on time\n" + -// "> \n" + -// ">to modify the meeting information, please click: [Modify Meeting Information](https://work.weixin.qq.com)\""); -// -// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); -// -// String errmsg = JSONUtils.parseObject(resp).getString("errmsg"); -// Assert.assertEquals("ok",errmsg); -// } catch (IOException e) { -// e.printStackTrace(); -// } -// } -// -// @Test -// public void testSendMultiUserWeChat() { -// try { -// String token = EnterpriseWeChatUtils.getToken(); -// -// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world"); -// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); -// -// String errmsg = JSONUtils.parseObject(resp).getString("errmsg"); -// Assert.assertEquals("ok",errmsg); -// } catch (IOException e) { -// e.printStackTrace(); -// } -// } + // @Test + // public void testSendSingleTeamWeChat() { + // try { + // String token = EnterpriseWeChatUtils.getToken(); + // String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world"); + // String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); + // + // String errmsg = JSONUtils.parseObject(resp).getString("errmsg"); + // Assert.assertEquals("ok",errmsg); + // } catch (IOException e) { + // e.printStackTrace(); + // } + // } + // + // @Test + // public void testSendMultiTeamWeChat() { + // + // try { + // String token = EnterpriseWeChatUtils.getToken(); + // String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world"); + // String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); + // + // String errmsg = 
JSONUtils.parseObject(resp).getString("errmsg"); + // Assert.assertEquals("ok",errmsg); + // } catch (IOException e) { + // e.printStackTrace(); + // } + // } + // + // @Test + // public void testSendSingleUserWeChat() { + // try { + // String token = EnterpriseWeChatUtils.getToken(); + // String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId.stream().findFirst().get(), agentId, "your meeting room has been booked and will be synced to the 'mailbox' later \n" + + // ">**matter details** \n" + + // ">matter:meeting
" + + // ">organizer:@miglioguan \n" + + // ">participant:@miglioguan、@kunliu、@jamdeezhou、@kanexiong、@kisonwang \n" + + // "> \n" + + // ">meeting room:Guangzhou TIT 1st Floor 301 \n" + + // ">date:May 18, 2018 \n" + + // ">time:9:00-11:00 am \n" + + // "> \n" + + // ">please attend the meeting on time\n" + + // "> \n" + + // ">to modify the meeting information, please click: [Modify Meeting Information](https://work.weixin.qq.com)\""); + // + // String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); + // + // String errmsg = JSONUtils.parseObject(resp).getString("errmsg"); + // Assert.assertEquals("ok",errmsg); + // } catch (IOException e) { + // e.printStackTrace(); + // } + // } + // + // @Test + // public void testSendMultiUserWeChat() { + // try { + // String token = EnterpriseWeChatUtils.getToken(); + // + // String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world"); + // String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); + // + // String errmsg = JSONUtils.parseObject(resp).getString("errmsg"); + // Assert.assertEquals("ok",errmsg); + // } catch (IOException e) { + // e.printStackTrace(); + // } + // } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java index 29f415bac27afbf83087048566c11427a6442b57..48cb53c5b2ca234c5c07f7737b9d3dd2e85ae0ed 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java @@ -14,32 +14,65 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.controller; -import com.fasterxml.jackson.core.JsonProcessingException; +import static org.apache.dolphinscheduler.api.enums.Status.BATCH_COPY_PROCESS_DEFINITION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.BATCH_MOVE_PROCESS_DEFINITION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROCESS_DEFINITION; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINITION_VERSION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_VERSIONS_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.RELEASE_PROCESS_DEFINITION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROCESS_DEFINITION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService; import 
org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; -import javax.servlet.http.HttpServletResponse; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.apache.dolphinscheduler.api.enums.Status.*; +import javax.servlet.http.HttpServletResponse; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; + +import com.fasterxml.jackson.core.JsonProcessingException; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; +import springfox.documentation.annotations.ApiIgnore; /** @@ -55,16 +88,19 @@ public class ProcessDefinitionController extends 
BaseController { @Autowired private ProcessDefinitionService processDefinitionService; + @Autowired + private ProcessDefinitionVersionService processDefinitionVersionService; + /** * create process definition * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param name process definition name - * @param json process definition json + * @param name process definition name + * @param json process definition json * @param description description - * @param locations locations for nodes - * @param connects connects for nodes + * @param locations locations for nodes + * @param connects connects for nodes * @return create result code */ @ApiOperation(value = "save", notes = "CREATE_PROCESS_DEFINITION_NOTES") @@ -86,8 +122,8 @@ public class ProcessDefinitionController extends BaseController { @RequestParam(value = "connects", required = true) String connects, @RequestParam(value = "description", required = false) String description) throws JsonProcessingException { - logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " + - "process_definition_json: {}, desc: {} locations:{}, connects:{}", + logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " + + "process_definition_json: {}, desc: {} locations:{}, connects:{}", loginUser.getUserName(), projectName, name, json, description, locations, connects); Map result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json, description, locations, connects); @@ -97,13 +133,13 @@ public class ProcessDefinitionController extends BaseController { /** * copy process definition * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param processDefinitionIds process definition ids + * @param processDefinitionIds process definition ids * @param targetProjectId target project id * @return copy result code */ 
- @ApiOperation(value = "copyProcessDefinition", notes= "COPY_PROCESS_DEFINITION_NOTES") + @ApiOperation(value = "copyProcessDefinition", notes = "COPY_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"), @ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer") @@ -114,7 +150,7 @@ public class ProcessDefinitionController extends BaseController { public Result copyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds, - @RequestParam(value = "targetProjectId",required = true) int targetProjectId) { + @RequestParam(value = "targetProjectId", required = true) int targetProjectId) { logger.info("batch copy process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), StringUtils.replaceNRTtoUnderline(projectName), @@ -122,19 +158,19 @@ public class ProcessDefinitionController extends BaseController { StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId))); return returnDataList( - processDefinitionService.batchCopyProcessDefinition(loginUser,projectName,processDefinitionIds,targetProjectId)); + processDefinitionService.batchCopyProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId)); } /** * move process definition * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param processDefinitionIds process definition ids + * @param processDefinitionIds process definition ids * @param targetProjectId target project id * @return move result code */ - @ApiOperation(value = 
"moveProcessDefinition", notes= "MOVE_PROCESS_DEFINITION_NOTES") + @ApiOperation(value = "moveProcessDefinition", notes = "MOVE_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"), @ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer") @@ -145,7 +181,7 @@ public class ProcessDefinitionController extends BaseController { public Result moveProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds, - @RequestParam(value = "targetProjectId",required = true) int targetProjectId) { + @RequestParam(value = "targetProjectId", required = true) int targetProjectId) { logger.info("batch move process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), StringUtils.replaceNRTtoUnderline(projectName), @@ -153,15 +189,15 @@ public class ProcessDefinitionController extends BaseController { StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId))); return returnDataList( - processDefinitionService.batchMoveProcessDefinition(loginUser,projectName,processDefinitionIds,targetProjectId)); + processDefinitionService.batchMoveProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId)); } /** * verify process definition name unique * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param name name + * @param name name * @return true if process definition name not exists, otherwise false */ @ApiOperation(value = "verify-name", notes = "VERIFY_PROCESS_DEFINITION_NAME_NOTES") @@ -172,8 +208,8 @@ 
public class ProcessDefinitionController extends BaseController { @ResponseStatus(HttpStatus.OK) @ApiException(VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR) public Result verifyProcessDefinitionName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "name", required = true) String name) { + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "name", required = true) String name) { logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}", loginUser.getUserName(), projectName, name); Map result = processDefinitionService.verifyProcessDefinitionName(loginUser, projectName, name); @@ -183,18 +219,18 @@ public class ProcessDefinitionController extends BaseController { /** * update process definition * - * @param loginUser login user - * @param projectName project name - * @param name process definition name - * @param id process definition id + * @param loginUser login user + * @param projectName project name + * @param name process definition name + * @param id process definition id * @param processDefinitionJson process definition json - * @param description description - * @param locations locations for nodes - * @param connects connects for nodes + * @param description description + * @param locations locations for nodes + * @param connects connects for nodes * @return update result code */ - @ApiOperation(value = "updateProcessDefinition", notes= "UPDATE_PROCESS_DEFINITION_NOTES") + @ApiOperation(value = "updateProcessDefinition", notes = "UPDATE_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), @ApiImplicitParam(name = "id", value = "PROCESS_DEFINITION_ID", required = true, dataType = 
"Int", example = "100"), @@ -207,33 +243,115 @@ public class ProcessDefinitionController extends BaseController { @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_PROCESS_DEFINITION_ERROR) public Result updateProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "name", required = true) String name, - @RequestParam(value = "id", required = true) int id, - @RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson, - @RequestParam(value = "locations", required = false) String locations, - @RequestParam(value = "connects", required = false) String connects, - @RequestParam(value = "description", required = false) String description) { - - logger.info("login user {}, update process define, project name: {}, process define name: {}, " + - "process_definition_json: {}, desc: {}, locations:{}, connects:{}", + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "name", required = true) String name, + @RequestParam(value = "id", required = true) int id, + @RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson, + @RequestParam(value = "locations", required = false) String locations, + @RequestParam(value = "connects", required = false) String connects, + @RequestParam(value = "description", required = false) String description) { + + logger.info("login user {}, update process define, project name: {}, process define name: {}, " + + "process_definition_json: {}, desc: {}, locations:{}, connects:{}", loginUser.getUserName(), projectName, name, processDefinitionJson, description, locations, connects); Map result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name, processDefinitionJson, description, locations, connects); 
return returnDataList(result); } + /** + * query process definition version paging list info + * + * @param loginUser login user info + * @param projectName the process definition project name + * @param pageNo the process definition version list current page number + * @param pageSize the process definition version list page size + * @param processDefinitionId the process definition id + * @return the process definition version list + */ + @ApiOperation(value = "queryProcessDefinitionVersions", notes = "QUERY_PROCESS_DEFINITION_VERSIONS_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") + }) + @GetMapping(value = "/versions") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_PROCESS_DEFINITION_VERSIONS_ERROR) + public Result queryProcessDefinitionVersions(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "pageNo") int pageNo, + @RequestParam(value = "pageSize") int pageSize, + @RequestParam(value = "processDefinitionId") int processDefinitionId) { + + Map result = processDefinitionVersionService.queryProcessDefinitionVersions(loginUser + , projectName, pageNo, pageSize, processDefinitionId); + return returnDataList(result); + } + + /** + * switch certain process definition version + * + * @param loginUser login user info + * @param projectName the process definition project name + * @param processDefinitionId the process definition id + * @param version the version user want to switch + * @return switch version result code + */ + @ApiOperation(value = 
"switchProcessDefinitionVersion", notes = "SWITCH_PROCESS_DEFINITION_VERSION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "version", value = "VERSION", required = true, dataType = "Long", example = "100") + }) + @GetMapping(value = "/version/switch") + @ResponseStatus(HttpStatus.OK) + @ApiException(SWITCH_PROCESS_DEFINITION_VERSION_ERROR) + public Result switchProcessDefinitionVersion(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "processDefinitionId") int processDefinitionId, + @RequestParam(value = "version") long version) { + + Map result = processDefinitionService.switchProcessDefinitionVersion(loginUser, projectName + , processDefinitionId, version); + return returnDataList(result); + } + + /** + * delete the certain process definition version by version and process definition id + * + * @param loginUser login user info + * @param projectName the process definition project name + * @param processDefinitionId process definition id + * @param version the process definition version user want to delete + * @return delete version result code + */ + @ApiOperation(value = "deleteProcessDefinitionVersion", notes = "DELETE_PROCESS_DEFINITION_VERSION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "version", value = "VERSION", required = true, dataType = "Long", example = "100") + }) + @GetMapping(value = "/version/delete") + @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_PROCESS_DEFINITION_VERSION_ERROR) + public Result deleteProcessDefinitionVersion(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) 
User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "processDefinitionId") int processDefinitionId, + @RequestParam(value = "version") long version) { + + Map result = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(loginUser, projectName, processDefinitionId, version); + return returnDataList(result); + } + /** * release process definition * - * @param loginUser login user - * @param projectName project name - * @param processId process definition id + * @param loginUser login user + * @param projectName project name + * @param processId process definition id * @param releaseState release state * @return release result code */ - - @ApiOperation(value = "releaseProcessDefinition", notes= "RELEASE_PROCESS_DEFINITION_NOTES") + @ApiOperation(value = "releaseProcessDefinition", notes = "RELEASE_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), @@ -243,9 +361,9 @@ public class ProcessDefinitionController extends BaseController { @ResponseStatus(HttpStatus.OK) @ApiException(RELEASE_PROCESS_DEFINITION_ERROR) public Result releaseProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "processId", required = true) int processId, - @RequestParam(value = "releaseState", required = true) int releaseState) { + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "processId", required = true) int processId, + @RequestParam(value = "releaseState", required = true) int releaseState) { 
logger.info("login user {}, release process definition, project name: {}, release state: {}", loginUser.getUserName(), projectName, releaseState); @@ -256,12 +374,12 @@ public class ProcessDefinitionController extends BaseController { /** * query datail of process definition * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param processId process definition id + * @param processId process definition id * @return process definition detail */ - @ApiOperation(value = "queryProcessDefinitionById", notes= "QUERY_PROCESS_DEFINITION_BY_ID_NOTES") + @ApiOperation(value = "queryProcessDefinitionById", notes = "QUERY_PROCESS_DEFINITION_BY_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") }) @@ -269,8 +387,8 @@ public class ProcessDefinitionController extends BaseController { @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR) public Result queryProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processId") Integer processId + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processId") Integer processId ) { logger.info("query detail of process definition, login user:{}, project name:{}, process definition id:{}", loginUser.getUserName(), projectName, processId); @@ -281,7 +399,7 @@ public class ProcessDefinitionController extends BaseController { /** * query Process definition list * - * @param loginUser login user + * @param loginUser login user * @param projectName project name * @return process definition list */ @@ -290,7 +408,7 @@ public class ProcessDefinitionController extends BaseController { @ResponseStatus(HttpStatus.OK) 
@ApiException(QUERY_PROCESS_DEFINITION_LIST) public Result queryProcessDefinitionList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName ) { logger.info("query process definition list, login user:{}, project name:{}", loginUser.getUserName(), projectName); @@ -301,15 +419,15 @@ public class ProcessDefinitionController extends BaseController { /** * query process definition list paging * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param searchVal search value - * @param pageNo page number - * @param pageSize page size - * @param userId user id + * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @param userId user id * @return process definition page */ - @ApiOperation(value = "queryProcessDefinitionListPaging", notes= "QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES") + @ApiOperation(value = "queryProcessDefinitionListPaging", notes = "QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", required = false, type = "String"), @@ -338,10 +456,10 @@ public class ProcessDefinitionController extends BaseController { /** * encapsulation treeview structure * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param id process definition id - * @param limit limit + * @param id process definition id + * @param limit limit * @return tree view json data */ @ApiOperation(value = "viewTree", notes = "VIEW_TREE_NOTES") @@ -363,8 +481,8 @@ public class ProcessDefinitionController extends BaseController { /** * get tasks 
list by process definition id * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionId process definition id * @return task list */ @@ -388,8 +506,8 @@ public class ProcessDefinitionController extends BaseController { /** * get tasks list by process definition id * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionIdList process definition id list * @return node list data */ @@ -414,8 +532,8 @@ public class ProcessDefinitionController extends BaseController { /** * delete process definition by id * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionId process definition id * @return delete result code */ @@ -439,8 +557,8 @@ public class ProcessDefinitionController extends BaseController { /** * batch delete process definition by ids * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionIds process definition id list * @return delete result code */ @@ -489,13 +607,13 @@ public class ProcessDefinitionController extends BaseController { /** * batch export process definition by ids * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionIds process definition ids - * @param response response + * @param response response */ - @ApiOperation(value = "batchExportProcessDefinitionByIds", notes= "BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES") + @ApiOperation(value = "batchExportProcessDefinitionByIds", notes = "BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionIds", value = 
"PROCESS_DEFINITION_ID", required = true, dataType = "String") }) @@ -526,7 +644,7 @@ public class ProcessDefinitionController extends BaseController { @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_PROCESS_DEFINITION_LIST) public Result queryProcessDefinitionAllByProjectId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("projectId") Integer projectId) { + @RequestParam("projectId") Integer projectId) { logger.info("query process definition list, login user:{}, project id:{}", loginUser.getUserName(), projectId); Map result = processDefinitionService.queryProcessDefinitionAllByProjectId(projectId); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java index a603ac050caa28bac77753569256896bce11664e..2676a774e76dde50c973d9deb4d3007661ea25d0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java @@ -14,8 +14,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_TENANT_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_TENANT_BY_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TENANT_LIST_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TENANT_LIST_PAGING_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_TENANT_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_TENANT_CODE_ERROR; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; @@ -24,20 +31,26 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; -import static org.apache.dolphinscheduler.api.enums.Status.*; +import io.swagger.annotations.Api; +import 
io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** @@ -57,10 +70,10 @@ public class TenantController extends BaseController { /** * create tenant * - * @param loginUser login user - * @param tenantCode tenant code - * @param tenantName tenant name - * @param queueId queue id + * @param loginUser login user + * @param tenantCode tenant code + * @param tenantName tenant name + * @param queueId queue id * @param description description * @return create result code */ @@ -92,8 +105,8 @@ public class TenantController extends BaseController { * * @param loginUser login user * @param searchVal search value - * @param pageNo page number - * @param pageSize page size + * @param pageNo page number + * @param pageSize page size * @return tenant list page */ @ApiOperation(value = "queryTenantlistPaging", notes = "QUERY_TENANT_LIST_PAGING_NOTES") @@ -141,11 +154,11 @@ public class TenantController extends BaseController { /** * udpate tenant * - * @param loginUser login user - * @param id tennat id - * @param tenantCode tennat code - * @param tenantName tennat name - * @param queueId queue id + * @param loginUser login user + * @param id tenant id + * @param tenantCode tenant code + * @param tenantName tenant name + * @param queueId queue id * @param description description * @return update result code */ @@ -177,7 +190,7 @@ public class TenantController extends BaseController { * delete tenant by id * * @param loginUser login user - * @param id tenant id + * @param id tenant id * @return delete result code */ @ApiOperation(value = "deleteTenantById", notes = "DELETE_TENANT_NOTES") @@ -195,11 +208,10 @@ public class TenantController extends BaseController { return returnDataList(result); } - /** * verify tenant code * - * @param loginUser login user + * @param loginUser login user * @param tenantCode tenant code * @return true if tenant
code can user, otherwise return false */ @@ -211,12 +223,10 @@ public class TenantController extends BaseController { @ResponseStatus(HttpStatus.OK) @ApiException(VERIFY_TENANT_CODE_ERROR) public Result verifyTenantCode(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "tenantCode") String tenantCode - ) { + @RequestParam(value = "tenantCode") String tenantCode) { logger.info("login user {}, verfiy tenant code: {}", loginUser.getUserName(), tenantCode); return tenantService.verifyTenantCode(tenantCode); } - } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java index 950d0a511d224151b56129e20289c3e00e5e07ab..e30da31ad4d4ec955ce5241df780058b7319ce72 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java @@ -14,14 +14,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.enums; -import org.springframework.context.i18n.LocaleContextHolder; +package org.apache.dolphinscheduler.api.enums; import java.util.Locale; +import org.springframework.context.i18n.LocaleContextHolder; + /** - * status enum + * status enum */ public enum Status { @@ -32,15 +33,15 @@ public enum Status { REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效"), TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid", "任务超时参数无效"), USER_NAME_EXIST(10003, "user name already exists", "用户名已存在"), - USER_NAME_NULL(10004,"user name is null", "用户名不能为空"), + USER_NAME_NULL(10004, "user name is null", "用户名不能为空"), HDFS_OPERATION_ERROR(10006, "hdfs operation error", "hdfs操作错误"), TASK_INSTANCE_NOT_FOUND(10008, "task instance not found", "任务实例不存在"), TENANT_NAME_EXIST(10009, "tenant code {0} already exists", "租户编码[{0}]已存在"), USER_NOT_EXIST(10010, "user {0} not exists", "用户[{0}]不存在"), ALERT_GROUP_NOT_EXIST(10011, "alarm group not found", "告警组不存在"), ALERT_GROUP_EXIST(10012, "alarm group already exists", "告警组名称已存在"), - USER_NAME_PASSWD_ERROR(10013,"user name or password error", "用户名或密码错误"), - LOGIN_SESSION_FAILED(10014,"create session failed!", "创建session失败"), + USER_NAME_PASSWD_ERROR(10013, "user name or password error", "用户名或密码错误"), + LOGIN_SESSION_FAILED(10014, "create session failed!", "创建session失败"), DATASOURCE_EXIST(10015, "data source name already exists", "数据源名称已存在"), DATASOURCE_CONNECT_FAILED(10016, "data source connection failed", "建立数据源连接失败"), TENANT_NOT_EXIST(10017, "tenant not exists", "租户不存在"), @@ -53,105 +54,105 @@ public enum Status { SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}", "调度配置定时表达式验证失败: {0}"), MASTER_NOT_EXISTS(10025, "master does not exist", "无可用master节点"), SCHEDULE_STATUS_UNKNOWN(10026, "unknown status: {0}", "未知状态: {0}"), - CREATE_ALERT_GROUP_ERROR(10027,"create alert group error", "创建告警组错误"), - 
QUERY_ALL_ALERTGROUP_ERROR(10028,"query all alertgroup error", "查询告警组错误"), - LIST_PAGING_ALERT_GROUP_ERROR(10029,"list paging alert group error", "分页查询告警组错误"), - UPDATE_ALERT_GROUP_ERROR(10030,"update alert group error", "更新告警组错误"), - DELETE_ALERT_GROUP_ERROR(10031,"delete alert group error", "删除告警组错误"), - ALERT_GROUP_GRANT_USER_ERROR(10032,"alert group grant user error", "告警组授权用户错误"), - CREATE_DATASOURCE_ERROR(10033,"create datasource error", "创建数据源错误"), - UPDATE_DATASOURCE_ERROR(10034,"update datasource error", "更新数据源错误"), - QUERY_DATASOURCE_ERROR(10035,"query datasource error", "查询数据源错误"), - CONNECT_DATASOURCE_FAILURE(10036,"connect datasource failure", "建立数据源连接失败"), - CONNECTION_TEST_FAILURE(10037,"connection test failure", "测试数据源连接失败"), - DELETE_DATA_SOURCE_FAILURE(10038,"delete data source failure", "删除数据源失败"), - VERIFY_DATASOURCE_NAME_FAILURE(10039,"verify datasource name failure", "验证数据源名称失败"), - UNAUTHORIZED_DATASOURCE(10040,"unauthorized datasource", "未经授权的数据源"), - AUTHORIZED_DATA_SOURCE(10041,"authorized data source", "授权数据源失败"), - LOGIN_SUCCESS(10042,"login success", "登录成功"), - USER_LOGIN_FAILURE(10043,"user login failure", "用户登录失败"), - LIST_WORKERS_ERROR(10044,"list workers error", "查询worker列表错误"), - LIST_MASTERS_ERROR(10045,"list masters error", "查询master列表错误"), - UPDATE_PROJECT_ERROR(10046,"update project error", "更新项目信息错误"), - QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047,"query project details by id error", "查询项目详细信息错误"), - CREATE_PROJECT_ERROR(10048,"create project error", "创建项目错误"), - LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049,"login user query project list paging error", "分页查询项目列表错误"), - DELETE_PROJECT_ERROR(10050,"delete project error", "删除项目错误"), - QUERY_UNAUTHORIZED_PROJECT_ERROR(10051,"query unauthorized project error", "查询未授权项目错误"), - QUERY_AUTHORIZED_PROJECT(10052,"query authorized project", "查询授权项目错误"), - QUERY_QUEUE_LIST_ERROR(10053,"query queue list error", "查询队列列表错误"), - CREATE_RESOURCE_ERROR(10054,"create resource error", "创建资源错误"), - 
UPDATE_RESOURCE_ERROR(10055,"update resource error", "更新资源错误"), - QUERY_RESOURCES_LIST_ERROR(10056,"query resources list error", "查询资源列表错误"), - QUERY_RESOURCES_LIST_PAGING(10057,"query resources list paging", "分页查询资源列表错误"), - DELETE_RESOURCE_ERROR(10058,"delete resource error", "删除资源错误"), - VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059,"verify resource by name and type error", "资源名称或类型验证错误"), - VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060,"view resource file online error", "查看资源文件错误"), - CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061,"create resource file online error", "创建资源文件错误"), - RESOURCE_FILE_IS_EMPTY(10062,"resource file is empty", "资源文件内容不能为空"), - EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063,"edit resource file online error", "更新资源文件错误"), - DOWNLOAD_RESOURCE_FILE_ERROR(10064,"download resource file error", "下载资源文件错误"), - CREATE_UDF_FUNCTION_ERROR(10065 ,"create udf function error", "创建UDF函数错误"), - VIEW_UDF_FUNCTION_ERROR( 10066,"view udf function error", "查询UDF函数错误"), - UPDATE_UDF_FUNCTION_ERROR(10067,"update udf function error", "更新UDF函数错误"), - QUERY_UDF_FUNCTION_LIST_PAGING_ERROR( 10068,"query udf function list paging error", "分页查询UDF函数列表错误"), - QUERY_DATASOURCE_BY_TYPE_ERROR( 10069,"query datasource by type error", "查询数据源信息错误"), - VERIFY_UDF_FUNCTION_NAME_ERROR( 10070,"verify udf function name error", "UDF函数名称验证错误"), - DELETE_UDF_FUNCTION_ERROR( 10071,"delete udf function error", "删除UDF函数错误"), - AUTHORIZED_FILE_RESOURCE_ERROR( 10072,"authorized file resource error", "授权资源文件错误"), - AUTHORIZE_RESOURCE_TREE( 10073,"authorize resource tree display error","授权资源目录树错误"), - UNAUTHORIZED_UDF_FUNCTION_ERROR( 10074,"unauthorized udf function error", "查询未授权UDF函数错误"), - AUTHORIZED_UDF_FUNCTION_ERROR(10075,"authorized udf function error", "授权UDF函数错误"), - CREATE_SCHEDULE_ERROR(10076,"create schedule error", "创建调度配置错误"), - UPDATE_SCHEDULE_ERROR(10077,"update schedule error", "更新调度配置错误"), - PUBLISH_SCHEDULE_ONLINE_ERROR(10078,"publish schedule online error", "上线调度配置错误"), - 
OFFLINE_SCHEDULE_ERROR(10079,"offline schedule error", "下线调度配置错误"), - QUERY_SCHEDULE_LIST_PAGING_ERROR(10080,"query schedule list paging error", "分页查询调度配置列表错误"), - QUERY_SCHEDULE_LIST_ERROR(10081,"query schedule list error", "查询调度配置列表错误"), - QUERY_TASK_LIST_PAGING_ERROR(10082,"query task list paging error", "分页查询任务列表错误"), - QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083,"query task record list paging error", "分页查询任务记录错误"), - CREATE_TENANT_ERROR(10084,"create tenant error", "创建租户错误"), - QUERY_TENANT_LIST_PAGING_ERROR(10085,"query tenant list paging error", "分页查询租户列表错误"), - QUERY_TENANT_LIST_ERROR(10086,"query tenant list error", "查询租户列表错误"), - UPDATE_TENANT_ERROR(10087,"update tenant error", "更新租户错误"), - DELETE_TENANT_BY_ID_ERROR(10088,"delete tenant by id error", "删除租户错误"), - VERIFY_TENANT_CODE_ERROR(10089,"verify tenant code error", "租户编码验证错误"), - CREATE_USER_ERROR(10090,"create user error", "创建用户错误"), - QUERY_USER_LIST_PAGING_ERROR(10091,"query user list paging error", "分页查询用户列表错误"), - UPDATE_USER_ERROR(10092,"update user error", "更新用户错误"), - DELETE_USER_BY_ID_ERROR(10093,"delete user by id error", "删除用户错误"), - GRANT_PROJECT_ERROR(10094,"grant project error", "授权项目错误"), - GRANT_RESOURCE_ERROR(10095,"grant resource error", "授权资源错误"), - GRANT_UDF_FUNCTION_ERROR(10096,"grant udf function error", "授权UDF函数错误"), - GRANT_DATASOURCE_ERROR(10097,"grant datasource error", "授权数据源错误"), - GET_USER_INFO_ERROR(10098,"get user info error", "获取用户信息错误"), - USER_LIST_ERROR(10099,"user list error", "查询用户列表错误"), - VERIFY_USERNAME_ERROR(10100,"verify username error", "用户名验证错误"), - UNAUTHORIZED_USER_ERROR(10101,"unauthorized user error", "查询未授权用户错误"), - AUTHORIZED_USER_ERROR(10102,"authorized user error", "查询授权用户错误"), - QUERY_TASK_INSTANCE_LOG_ERROR(10103,"view task instance log error", "查询任务实例日志错误"), - DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104,"download task instance log file error", "下载任务日志文件错误"), - CREATE_PROCESS_DEFINITION(10105,"create process definition", "创建工作流错误"), - 
VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106,"verify process definition name unique error", "工作流名称已存在"), - UPDATE_PROCESS_DEFINITION_ERROR(10107,"update process definition error", "更新工作流定义错误"), - RELEASE_PROCESS_DEFINITION_ERROR(10108,"release process definition error", "上线工作流错误"), - QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109,"query datail of process definition error", "查询工作流详细信息错误"), - QUERY_PROCESS_DEFINITION_LIST(10110,"query process definition list", "查询工作流列表错误"), - ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111,"encapsulation treeview structure error", "查询工作流树形图数据错误"), - GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112,"get tasks list by process definition id error", "查询工作流定义节点信息错误"), - QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113,"query process instance list paging error", "分页查询工作流实例列表错误"), - QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114,"query task list by process instance id error", "查询任务实例列表错误"), - UPDATE_PROCESS_INSTANCE_ERROR(10115,"update process instance error", "更新工作流实例错误"), - QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116,"query process instance by id error", "查询工作流实例错误"), - DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117,"delete process instance by id error", "删除工作流实例错误"), - QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118,"query sub process instance detail info by task id error", "查询子流程任务实例错误"), - QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119,"query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"), - QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120,"query process instance all variables error", "查询工作流自定义变量信息错误"), - ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121,"encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"), - QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query process definition list paging error", "分页查询工作流定义列表错误"), - SIGN_OUT_ERROR(10123,"sign out error", "退出错误"), - TENANT_CODE_HAS_ALREADY_EXISTS(10124,"tenant code 
has already exists", "租户编码已存在"), - IP_IS_EMPTY(10125,"ip is empty", "IP地址不能为空"), + CREATE_ALERT_GROUP_ERROR(10027, "create alert group error", "创建告警组错误"), + QUERY_ALL_ALERTGROUP_ERROR(10028, "query all alertgroup error", "查询告警组错误"), + LIST_PAGING_ALERT_GROUP_ERROR(10029, "list paging alert group error", "分页查询告警组错误"), + UPDATE_ALERT_GROUP_ERROR(10030, "update alert group error", "更新告警组错误"), + DELETE_ALERT_GROUP_ERROR(10031, "delete alert group error", "删除告警组错误"), + ALERT_GROUP_GRANT_USER_ERROR(10032, "alert group grant user error", "告警组授权用户错误"), + CREATE_DATASOURCE_ERROR(10033, "create datasource error", "创建数据源错误"), + UPDATE_DATASOURCE_ERROR(10034, "update datasource error", "更新数据源错误"), + QUERY_DATASOURCE_ERROR(10035, "query datasource error", "查询数据源错误"), + CONNECT_DATASOURCE_FAILURE(10036, "connect datasource failure", "建立数据源连接失败"), + CONNECTION_TEST_FAILURE(10037, "connection test failure", "测试数据源连接失败"), + DELETE_DATA_SOURCE_FAILURE(10038, "delete data source failure", "删除数据源失败"), + VERIFY_DATASOURCE_NAME_FAILURE(10039, "verify datasource name failure", "验证数据源名称失败"), + UNAUTHORIZED_DATASOURCE(10040, "unauthorized datasource", "未经授权的数据源"), + AUTHORIZED_DATA_SOURCE(10041, "authorized data source", "授权数据源失败"), + LOGIN_SUCCESS(10042, "login success", "登录成功"), + USER_LOGIN_FAILURE(10043, "user login failure", "用户登录失败"), + LIST_WORKERS_ERROR(10044, "list workers error", "查询worker列表错误"), + LIST_MASTERS_ERROR(10045, "list masters error", "查询master列表错误"), + UPDATE_PROJECT_ERROR(10046, "update project error", "更新项目信息错误"), + QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047, "query project details by id error", "查询项目详细信息错误"), + CREATE_PROJECT_ERROR(10048, "create project error", "创建项目错误"), + LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049, "login user query project list paging error", "分页查询项目列表错误"), + DELETE_PROJECT_ERROR(10050, "delete project error", "删除项目错误"), + QUERY_UNAUTHORIZED_PROJECT_ERROR(10051, "query unauthorized project error", "查询未授权项目错误"), + 
QUERY_AUTHORIZED_PROJECT(10052, "query authorized project", "查询授权项目错误"), + QUERY_QUEUE_LIST_ERROR(10053, "query queue list error", "查询队列列表错误"), + CREATE_RESOURCE_ERROR(10054, "create resource error", "创建资源错误"), + UPDATE_RESOURCE_ERROR(10055, "update resource error", "更新资源错误"), + QUERY_RESOURCES_LIST_ERROR(10056, "query resources list error", "查询资源列表错误"), + QUERY_RESOURCES_LIST_PAGING(10057, "query resources list paging", "分页查询资源列表错误"), + DELETE_RESOURCE_ERROR(10058, "delete resource error", "删除资源错误"), + VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059, "verify resource by name and type error", "资源名称或类型验证错误"), + VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060, "view resource file online error", "查看资源文件错误"), + CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061, "create resource file online error", "创建资源文件错误"), + RESOURCE_FILE_IS_EMPTY(10062, "resource file is empty", "资源文件内容不能为空"), + EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063, "edit resource file online error", "更新资源文件错误"), + DOWNLOAD_RESOURCE_FILE_ERROR(10064, "download resource file error", "下载资源文件错误"), + CREATE_UDF_FUNCTION_ERROR(10065, "create udf function error", "创建UDF函数错误"), + VIEW_UDF_FUNCTION_ERROR(10066, "view udf function error", "查询UDF函数错误"), + UPDATE_UDF_FUNCTION_ERROR(10067, "update udf function error", "更新UDF函数错误"), + QUERY_UDF_FUNCTION_LIST_PAGING_ERROR(10068, "query udf function list paging error", "分页查询UDF函数列表错误"), + QUERY_DATASOURCE_BY_TYPE_ERROR(10069, "query datasource by type error", "查询数据源信息错误"), + VERIFY_UDF_FUNCTION_NAME_ERROR(10070, "verify udf function name error", "UDF函数名称验证错误"), + DELETE_UDF_FUNCTION_ERROR(10071, "delete udf function error", "删除UDF函数错误"), + AUTHORIZED_FILE_RESOURCE_ERROR(10072, "authorized file resource error", "授权资源文件错误"), + AUTHORIZE_RESOURCE_TREE(10073, "authorize resource tree display error", "授权资源目录树错误"), + UNAUTHORIZED_UDF_FUNCTION_ERROR(10074, "unauthorized udf function error", "查询未授权UDF函数错误"), + AUTHORIZED_UDF_FUNCTION_ERROR(10075, "authorized udf function error", "授权UDF函数错误"), + 
CREATE_SCHEDULE_ERROR(10076, "create schedule error", "创建调度配置错误"), + UPDATE_SCHEDULE_ERROR(10077, "update schedule error", "更新调度配置错误"), + PUBLISH_SCHEDULE_ONLINE_ERROR(10078, "publish schedule online error", "上线调度配置错误"), + OFFLINE_SCHEDULE_ERROR(10079, "offline schedule error", "下线调度配置错误"), + QUERY_SCHEDULE_LIST_PAGING_ERROR(10080, "query schedule list paging error", "分页查询调度配置列表错误"), + QUERY_SCHEDULE_LIST_ERROR(10081, "query schedule list error", "查询调度配置列表错误"), + QUERY_TASK_LIST_PAGING_ERROR(10082, "query task list paging error", "分页查询任务列表错误"), + QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083, "query task record list paging error", "分页查询任务记录错误"), + CREATE_TENANT_ERROR(10084, "create tenant error", "创建租户错误"), + QUERY_TENANT_LIST_PAGING_ERROR(10085, "query tenant list paging error", "分页查询租户列表错误"), + QUERY_TENANT_LIST_ERROR(10086, "query tenant list error", "查询租户列表错误"), + UPDATE_TENANT_ERROR(10087, "update tenant error", "更新租户错误"), + DELETE_TENANT_BY_ID_ERROR(10088, "delete tenant by id error", "删除租户错误"), + VERIFY_TENANT_CODE_ERROR(10089, "verify tenant code error", "租户编码验证错误"), + CREATE_USER_ERROR(10090, "create user error", "创建用户错误"), + QUERY_USER_LIST_PAGING_ERROR(10091, "query user list paging error", "分页查询用户列表错误"), + UPDATE_USER_ERROR(10092, "update user error", "更新用户错误"), + DELETE_USER_BY_ID_ERROR(10093, "delete user by id error", "删除用户错误"), + GRANT_PROJECT_ERROR(10094, "grant project error", "授权项目错误"), + GRANT_RESOURCE_ERROR(10095, "grant resource error", "授权资源错误"), + GRANT_UDF_FUNCTION_ERROR(10096, "grant udf function error", "授权UDF函数错误"), + GRANT_DATASOURCE_ERROR(10097, "grant datasource error", "授权数据源错误"), + GET_USER_INFO_ERROR(10098, "get user info error", "获取用户信息错误"), + USER_LIST_ERROR(10099, "user list error", "查询用户列表错误"), + VERIFY_USERNAME_ERROR(10100, "verify username error", "用户名验证错误"), + UNAUTHORIZED_USER_ERROR(10101, "unauthorized user error", "查询未授权用户错误"), + AUTHORIZED_USER_ERROR(10102, "authorized user error", "查询授权用户错误"), + 
QUERY_TASK_INSTANCE_LOG_ERROR(10103, "view task instance log error", "查询任务实例日志错误"), + DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104, "download task instance log file error", "下载任务日志文件错误"), + CREATE_PROCESS_DEFINITION(10105, "create process definition", "创建工作流错误"), + VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106, "verify process definition name unique error", "工作流名称已存在"), + UPDATE_PROCESS_DEFINITION_ERROR(10107, "update process definition error", "更新工作流定义错误"), + RELEASE_PROCESS_DEFINITION_ERROR(10108, "release process definition error", "上线工作流错误"), + QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109, "query datail of process definition error", "查询工作流详细信息错误"), + QUERY_PROCESS_DEFINITION_LIST(10110, "query process definition list", "查询工作流列表错误"), + ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111, "encapsulation treeview structure error", "查询工作流树形图数据错误"), + GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112, "get tasks list by process definition id error", "查询工作流定义节点信息错误"), + QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113, "query process instance list paging error", "分页查询工作流实例列表错误"), + QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114, "query task list by process instance id error", "查询任务实例列表错误"), + UPDATE_PROCESS_INSTANCE_ERROR(10115, "update process instance error", "更新工作流实例错误"), + QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116, "query process instance by id error", "查询工作流实例错误"), + DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117, "delete process instance by id error", "删除工作流实例错误"), + QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118, "query sub process instance detail info by task id error", "查询子流程任务实例错误"), + QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119, "query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"), + QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120, "query process instance all variables error", "查询工作流自定义变量信息错误"), + ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121, 
"encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"), + QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122, "query process definition list paging error", "分页查询工作流定义列表错误"), + SIGN_OUT_ERROR(10123, "sign out error", "退出错误"), + TENANT_CODE_HAS_ALREADY_EXISTS(10124, "tenant code has already exists", "租户编码已存在"), + IP_IS_EMPTY(10125, "ip is empty", "IP地址不能为空"), SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}", "调度配置上线错误[{0}]"), CREATE_QUEUE_ERROR(10127, "create queue error", "创建队列错误"), QUEUE_NOT_EXIST(10128, "queue {0} not exists", "队列ID[{0}]不存在"), @@ -159,29 +160,40 @@ public enum Status { QUEUE_NAME_EXIST(10130, "queue name {0} already exists", "队列名称[{0}]已存在"), UPDATE_QUEUE_ERROR(10131, "update queue error", "更新队列信息错误"), NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required", "数据未变更,不需要更新队列信息"), - VERIFY_QUEUE_ERROR(10133,"verify queue error", "验证队列信息错误"), - NAME_NULL(10134,"name must be not null", "名称不能为空"), + VERIFY_QUEUE_ERROR(10133, "verify queue error", "验证队列信息错误"), + NAME_NULL(10134, "name must be not null", "名称不能为空"), NAME_EXIST(10135, "name {0} already exists", "名称[{0}]已存在"), SAVE_ERROR(10136, "save error", "保存错误"), DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!", "请先删除全部工作流定义"), - BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117,"batch delete process instance by ids {0} error", "批量删除工作流实例错误"), - PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error", "预览调度配置错误"), - PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error", "解析调度表达式错误"), - SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end", "开始时间不能和结束时间一样"), - DELETE_TENANT_BY_ID_FAIL(10142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"), - DELETE_TENANT_BY_ID_FAIL_DEFINES(10143,"delete tenant by id fail, for there are {0} process definitions using it", 
"删除租户失败,有[{0}]个工作流定义正在使用"), - DELETE_TENANT_BY_ID_FAIL_USERS(10144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"), - DELETE_WORKER_GROUP_BY_ID_FAIL(10145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"), - QUERY_WORKER_GROUP_FAIL(10146,"query worker group fail ", "查询worker分组失败"), - DELETE_WORKER_GROUP_FAIL(10147,"delete worker group fail ", "删除worker分组失败"), - USER_DISABLED(10148,"The current user is disabled", "当前用户已停用"), - COPY_PROCESS_DEFINITION_ERROR(10149,"copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"), - MOVE_PROCESS_DEFINITION_ERROR(10150,"move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"), - QUERY_USER_CREATED_PROJECT_ERROR(10151,"query user created project error error", "查询用户创建的项目错误"), - PROCESS_DEFINITION_IDS_IS_EMPTY(10152,"process definition ids is empty", "工作流IDS不能为空"), - BATCH_COPY_PROCESS_DEFINITION_ERROR(10153,"batch copy process definition error", "复制工作流错误"), - BATCH_MOVE_PROCESS_DEFINITION_ERROR(10154,"batch move process definition error", "移动工作流错误"), - QUERY_WORKFLOW_LINEAGE_ERROR(10155,"query workflow lineage error", "查询血缘失败"), + BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117, "batch delete process instance by ids {0} error", "批量删除工作流实例错误"), + PREVIEW_SCHEDULE_ERROR(10139, "preview schedule error", "预览调度配置错误"), + PARSE_TO_CRON_EXPRESSION_ERROR(10140, "parse cron to cron expression error", "解析调度表达式错误"), + SCHEDULE_START_TIME_END_TIME_SAME(10141, "The start time must not be the same as the end", "开始时间不能和结束时间一样"), + DELETE_TENANT_BY_ID_FAIL(10142, "delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"), + DELETE_TENANT_BY_ID_FAIL_DEFINES(10143, "delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"), + DELETE_TENANT_BY_ID_FAIL_USERS(10144, "delete tenant by id 
fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"), + DELETE_WORKER_GROUP_BY_ID_FAIL(10145, "delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"), + QUERY_WORKER_GROUP_FAIL(10146, "query worker group fail ", "查询worker分组失败"), + DELETE_WORKER_GROUP_FAIL(10147, "delete worker group fail ", "删除worker分组失败"), + USER_DISABLED(10148, "The current user is disabled", "当前用户已停用"), + COPY_PROCESS_DEFINITION_ERROR(10149, "copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"), + MOVE_PROCESS_DEFINITION_ERROR(10150, "move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"), + SWITCH_PROCESS_DEFINITION_VERSION_ERROR(10151, "Switch process definition version error", "切换工作流版本出错"), + SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR(10152 + , "Switch process definition version error: not exists process definition, [process definition id {0}]", "切换工作流版本出错:工作流不存在,[工作流id {0}]"), + SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR(10153 + , "Switch process definition version error: not exists process definition version, [process definition id {0}] [version number {1}]", "切换工作流版本出错:工作流版本信息不存在,[工作流id {0}] [版本号 {1}]"), + QUERY_PROCESS_DEFINITION_VERSIONS_ERROR(10154, "query process definition versions error", "查询工作流历史版本信息出错"), + QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR(10155 + , "query process definition versions error: [page number:{0}] < 1 or [page size:{1}] < 1", "查询工作流历史版本出错:[pageNo:{0}] < 1 或 [pageSize:{1}] < 1"), + DELETE_PROCESS_DEFINITION_VERSION_ERROR(10156, "delete process definition version error", "删除工作流历史版本出错"), + + QUERY_USER_CREATED_PROJECT_ERROR(10157, "query user created project error error", "查询用户创建的项目错误"), + PROCESS_DEFINITION_IDS_IS_EMPTY(10158, "process definition ids is empty", "工作流IDS不能为空"), + BATCH_COPY_PROCESS_DEFINITION_ERROR(10159, "batch copy process 
definition error", "复制工作流错误"), + BATCH_MOVE_PROCESS_DEFINITION_ERROR(10160, "batch move process definition error", "移动工作流错误"), + QUERY_WORKFLOW_LINEAGE_ERROR(10161, "query workflow lineage error", "查询血缘失败"), + UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"), UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"), @@ -194,10 +206,10 @@ public enum Status { HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"), RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"), RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists in hdfs!", "资源文件[{0}]在hdfs中不存在"), - UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}","udf函数绑定了资源文件[{0}]"), - RESOURCE_IS_USED(20014, "resource file is used by process definition","资源文件被上线的流程定义使用了"), - PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist","父资源文件不存在"), - RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource","请检查任务节点并移除无权限或者已删除的资源"), + UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"), + RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"), + PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"), + RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource", "请检查任务节点并移除无权限或者已删除的资源"), RESOURCE_IS_AUTHORIZED(20017, "resource is authorized to user {0},suffix not allowed to be modified", "资源文件已授权其他用户[{0}],后缀不允许修改"), USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"), @@ -214,52 +226,51 @@ public enum Status { PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit", "工作流定义[{0}]不允许修改"), PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is 
executing the command, please wait ...", "工作流实例[{0}]正在执行命令,请稍等..."), PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance", "工作流实例[{0}]不是子工作流实例"), - TASK_INSTANCE_STATE_COUNT_ERROR(50011,"task instance state count error", "查询各状态任务实例数错误"), - COUNT_PROCESS_INSTANCE_STATE_ERROR(50012,"count process instance state error", "查询各状态流程实例数错误"), - COUNT_PROCESS_DEFINITION_USER_ERROR(50013,"count process definition user error", "查询各用户流程定义数错误"), - START_PROCESS_INSTANCE_ERROR(50014,"start process instance error", "运行工作流实例错误"), - EXECUTE_PROCESS_INSTANCE_ERROR(50015,"execute process instance error", "操作工作流实例错误"), - CHECK_PROCESS_DEFINITION_ERROR(50016,"check process definition error", "检查工作流实例错误"), - QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017,"query recipients and copyers by process definition error", "查询收件人和抄送人错误"), - DATA_IS_NOT_VALID(50017,"data {0} not valid", "数据[{0}]无效"), - DATA_IS_NULL(50018,"data {0} is null", "数据[{0}]不能为空"), - PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle", "流程节点间存在循环依赖"), - PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node {0} parameter invalid", "流程节点[{0}]参数无效"), + TASK_INSTANCE_STATE_COUNT_ERROR(50011, "task instance state count error", "查询各状态任务实例数错误"), + COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error", "查询各状态流程实例数错误"), + COUNT_PROCESS_DEFINITION_USER_ERROR(50013, "count process definition user error", "查询各用户流程定义数错误"), + START_PROCESS_INSTANCE_ERROR(50014, "start process instance error", "运行工作流实例错误"), + EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error", "操作工作流实例错误"), + CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error", "检查工作流实例错误"), + QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017, "query recipients and copyers by process definition error", "查询收件人和抄送人错误"), + DATA_IS_NOT_VALID(50017, "data {0} not valid", "数据[{0}]无效"), + DATA_IS_NULL(50018, "data {0} is null", "数据[{0}]不能为空"), + 
PROCESS_NODE_HAS_CYCLE(50019, "process node has cycle", "流程节点间存在循环依赖"), + PROCESS_NODE_S_PARAMETER_INVALID(50020, "process node {0} parameter invalid", "流程节点[{0}]参数无效"), PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"), - DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022,"delete process definition by id error", "删除工作流定义错误"), - SCHEDULE_CRON_STATE_ONLINE(50023,"the status of schedule {0} is already on line", "调度配置[{0}]已上线"), - DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024,"delete schedule by id error", "删除调度配置错误"), - BATCH_DELETE_PROCESS_DEFINE_ERROR(50025,"batch delete process definition error", "批量删除工作流定义错误"), - BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026,"batch delete process definition by ids {0} error", "批量删除工作流定义[{0}]错误"), - TENANT_NOT_SUITABLE(50027,"there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"), - EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028,"export process definition by id error", "导出工作流定义错误"), - BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028,"batch export process definition by ids error", "批量导出工作流定义错误"), - IMPORT_PROCESS_DEFINE_ERROR(50029,"import process definition error", "导入工作流定义错误"), + DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022, "delete process definition by id error", "删除工作流定义错误"), + SCHEDULE_CRON_STATE_ONLINE(50023, "the status of schedule {0} is already on line", "调度配置[{0}]已上线"), + DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024, "delete schedule by id error", "删除调度配置错误"), + BATCH_DELETE_PROCESS_DEFINE_ERROR(50025, "batch delete process definition error", "批量删除工作流定义错误"), + BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026, "batch delete process definition by ids {0} error", "批量删除工作流定义[{0}]错误"), + TENANT_NOT_SUITABLE(50027, "there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"), + EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028, "export process definition by id error", "导出工作流定义错误"), + BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028, "batch export process definition 
by ids error", "批量导出工作流定义错误"), + IMPORT_PROCESS_DEFINE_ERROR(50029, "import process definition error", "导入工作流定义错误"), - HDFS_NOT_STARTUP(60001,"hdfs not startup", "hdfs未启用"), + HDFS_NOT_STARTUP(60001, "hdfs not startup", "hdfs未启用"), /** * for monitor */ - QUERY_DATABASE_STATE_ERROR(70001,"query database state error", "查询数据库状态错误"), - QUERY_ZOOKEEPER_STATE_ERROR(70002,"query zookeeper state error", "查询zookeeper状态错误"), - + QUERY_DATABASE_STATE_ERROR(70001, "query database state error", "查询数据库状态错误"), + QUERY_ZOOKEEPER_STATE_ERROR(70002, "query zookeeper state error", "查询zookeeper状态错误"), - CREATE_ACCESS_TOKEN_ERROR(70010,"create access token error", "创建访问token错误"), - GENERATE_TOKEN_ERROR(70011,"generate token error", "生成token错误"), - QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012,"query access token list paging error", "分页查询访问token列表错误"), - UPDATE_ACCESS_TOKEN_ERROR(70013,"update access token error", "更新访问token错误"), - DELETE_ACCESS_TOKEN_ERROR(70014,"delete access token error", "删除访问token错误"), + CREATE_ACCESS_TOKEN_ERROR(70010, "create access token error", "创建访问token错误"), + GENERATE_TOKEN_ERROR(70011, "generate token error", "生成token错误"), + QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012, "query access token list paging error", "分页查询访问token列表错误"), + UPDATE_ACCESS_TOKEN_ERROR(70013, "update access token error", "更新访问token错误"), + DELETE_ACCESS_TOKEN_ERROR(70014, "delete access token error", "删除访问token错误"), ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"), - COMMAND_STATE_COUNT_ERROR(80001,"task instance state count error", "查询各状态任务实例数错误"), - NEGTIVE_SIZE_NUMBER_ERROR(80002,"query size number error","查询size错误"), - START_TIME_BIGGER_THAN_END_TIME_ERROR(80003,"start time bigger than end time error","开始时间在结束时间之后错误"), - QUEUE_COUNT_ERROR(90001,"queue count error", "查询队列数据错误"), + COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error", "查询各状态任务实例数错误"), + NEGTIVE_SIZE_NUMBER_ERROR(80002, "query size number error", "查询size错误"), + 
START_TIME_BIGGER_THAN_END_TIME_ERROR(80003, "start time bigger than end time error", "开始时间在结束时间之后错误"), + QUEUE_COUNT_ERROR(90001, "queue count error", "查询队列数据错误"), - KERBEROS_STARTUP_STATE(100001,"get kerberos startup state error", "获取kerberos启动状态错误"), + KERBEROS_STARTUP_STATE(100001, "get kerberos startup state error", "获取kerberos启动状态错误"), ; private final int code; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java index cc18de439f298d20d87e8081fc5f13b2ac3717eb..f6f786b6b149a55a0e4645d01f6996763807f658 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java @@ -14,13 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; -import java.util.Map; -import javax.servlet.http.HttpServletResponse; import org.apache.dolphinscheduler.dao.entity.ProcessData; import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Map; + +import javax.servlet.http.HttpServletResponse; + import org.springframework.web.multipart.MultipartFile; + import com.fasterxml.jackson.core.JsonProcessingException; /** @@ -31,13 +36,13 @@ public interface ProcessDefinitionService { /** * create process definition * - * @param loginUser login user - * @param projectName project name - * @param name process definition name + * @param loginUser login user + * @param projectName project name + * @param name process definition name * @param processDefinitionJson process definition json - * @param desc description - * @param locations locations for nodes - * @param connects connects for nodes + * @param desc description + * @param locations locations for nodes + * @param connects connects for nodes * @return create result code * @throws JsonProcessingException JsonProcessingException */ @@ -52,7 +57,7 @@ public interface ProcessDefinitionService { /** * query process definition list * - * @param loginUser login user + * @param loginUser login user * @param projectName project name * @return definition list */ @@ -62,12 +67,12 @@ public interface ProcessDefinitionService { /** * query process definition list paging * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param searchVal search value - * @param pageNo page number - * @param pageSize page size - * @param userId user id + * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @param userId user id * @return process definition page */ Map queryProcessDefinitionListPaging(User loginUser, @@ -80,9 +85,9 @@ public interface ProcessDefinitionService { /** * query datail of process definition * - * @param loginUser 
login user + * @param loginUser login user * @param projectName project name - * @param processId process definition id + * @param processId process definition id * @return process definition detail */ @@ -92,41 +97,41 @@ public interface ProcessDefinitionService { /** * batch copy process definition + * * @param loginUser loginUser * @param projectName projectName * @param processDefinitionIds processDefinitionIds * @param targetProjectId targetProjectId - * @return */ Map batchCopyProcessDefinition(User loginUser, - String projectName, - String processDefinitionIds, - int targetProjectId); + String projectName, + String processDefinitionIds, + int targetProjectId); /** * batch move process definition + * * @param loginUser loginUser * @param projectName projectName * @param processDefinitionIds processDefinitionIds * @param targetProjectId targetProjectId - * @return */ Map batchMoveProcessDefinition(User loginUser, - String projectName, - String processDefinitionIds, - int targetProjectId); + String projectName, + String processDefinitionIds, + int targetProjectId); /** * update process definition * - * @param loginUser login user - * @param projectName project name - * @param name process definition name - * @param id process definition id + * @param loginUser login user + * @param projectName project name + * @param name process definition name + * @param id process definition id * @param processDefinitionJson process definition json - * @param desc description - * @param locations locations for nodes - * @param connects connects for nodes + * @param desc description + * @param locations locations for nodes + * @param connects connects for nodes * @return update result code */ Map updateProcessDefinition(User loginUser, @@ -139,9 +144,9 @@ public interface ProcessDefinitionService { /** * verify process definition name unique * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param name name + * @param name 
name * @return true if process definition name not exists, otherwise false */ Map verifyProcessDefinitionName(User loginUser, @@ -151,8 +156,8 @@ public interface ProcessDefinitionService { /** * delete process definition by id * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionId process definition id * @return delete result code */ @@ -163,9 +168,9 @@ public interface ProcessDefinitionService { /** * release process definition: online / offline * - * @param loginUser login user - * @param projectName project name - * @param id process definition id + * @param loginUser login user + * @param projectName project name + * @param id process definition id * @param releaseState release state * @return release result code */ @@ -177,21 +182,21 @@ public interface ProcessDefinitionService { /** * batch export process definition by ids * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionIds process definition ids - * @param response http servlet response + * @param response http servlet response */ void batchExportProcessDefinitionByIds(User loginUser, - String projectName, - String processDefinitionIds, - HttpServletResponse response); + String projectName, + String processDefinitionIds, + HttpServletResponse response); /** * import process definition * - * @param loginUser login user - * @param file process metadata json file + * @param loginUser login user + * @param file process metadata json file * @param currentProjectName current project name * @return import process */ @@ -202,7 +207,7 @@ public interface ProcessDefinitionService { /** * check the process definition node meets the specifications * - * @param processData process data + * @param processData process data * @param processDefinitionJson process definition json * @return check 
result code */ @@ -237,11 +242,23 @@ public interface ProcessDefinitionService { * Encapsulates the TreeView structure * * @param processId process definition id - * @param limit limit + * @param limit limit * @return tree view json data * @throws Exception exception */ Map viewTree(Integer processId, Integer limit) throws Exception; + + /** + * switch the defined process definition verison + * + * @param loginUser login user + * @param projectName project name + * @param processDefinitionId process definition id + * @param version the version user want to switch + * @return switch process definition version result code + */ + Map switchProcessDefinitionVersion(User loginUser, String projectName + , int processDefinitionId, long version); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java new file mode 100644 index 0000000000000000000000000000000000000000..5538194db7a01b29e043a7fe0c624ba71bd6321f --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionService.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion; +import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Map; + +public interface ProcessDefinitionVersionService { + + /** + * add the newest version of one process definition + * + * @param processDefinition the process definition that need to record version + * @return the newest version number of this process definition + */ + long addProcessDefinitionVersion(ProcessDefinition processDefinition); + + /** + * query the pagination versions info by one certain process definition id + * + * @param loginUser login user info to check auth + * @param projectName process definition project name + * @param pageNo page number + * @param pageSize page size + * @param processDefinitionId process definition id + * @return the pagination process definition versions info of the certain process definition + */ + Map queryProcessDefinitionVersions(User loginUser, String projectName, + int pageNo, int pageSize, int processDefinitionId); + + /** + * query one certain process definition version by version number and process definition id + * + * @param processDefinitionId process definition id + * @param version version number + * @return the process definition version info + */ + ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(int processDefinitionId, + long version); + + /** + * delete one certain process definition by version number and process definition id + * + * @param loginUser login user info to check auth + * @param projectName process definition project name + * @param processDefinitionId process definition id + * @param version version number + * @return delele result code + */ + Map deleteByProcessDefinitionIdAndVersion(User loginUser, 
String projectName, + int processDefinitionId, long version); +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java index 40c009aa68399002074beeba1adfa30b748bed8d..54f6e9042ee05d5f4e4858a59fa2e2858e34c7c8 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java @@ -14,8 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.api.service; +package org.apache.dolphinscheduler.api.service; import static org.apache.dolphinscheduler.common.Constants.DATA_LIST; import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT; @@ -24,19 +24,6 @@ import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE; import static org.apache.dolphinscheduler.common.Constants.TASK_LIST; -import java.io.BufferedReader; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.text.ParseException; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; import org.apache.dolphinscheduler.api.dto.gantt.GanttDto; import org.apache.dolphinscheduler.api.dto.gantt.Task; @@ -72,6 +59,21 @@ import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.utils.DagHelper; import org.apache.dolphinscheduler.service.process.ProcessService; 
+ +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -108,6 +110,9 @@ public class ProcessInstanceService extends BaseService { @Autowired ProcessDefinitionService processDefinitionService; + @Autowired + ProcessDefinitionVersionService processDefinitionVersionService; + @Autowired ExecutorService execService; @@ -118,18 +123,11 @@ public class ProcessInstanceService extends BaseService { LoggerService loggerService; - @Autowired UsersService usersService; /** * return top n SUCCESS process instance order by running time which started between startTime and endTime - * @param loginUser - * @param projectName - * @param size - * @param startTime - * @param endTime - * @return */ public Map queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime) { Map result = new HashMap<>(); @@ -155,7 +153,7 @@ public class ProcessInstanceService extends BaseService { return result; } Date end = DateUtils.stringToDate(endTime); - if(start == null || end == null) { + if (start == null || end == null) { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate"); return result; } @@ -169,6 +167,7 @@ public class ProcessInstanceService extends BaseService { putMsg(result, Status.SUCCESS); return result; } + /** * query process instance by id * @@ -214,7 +213,7 @@ public class ProcessInstanceService extends BaseService { */ public Map queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId, String startDate, String 
endDate, - String searchVal, String executorName,ExecutionStatus stateType, String host, + String searchVal, String executorName, ExecutionStatus stateType, String host, Integer pageNo, Integer pageSize) { Map result = new HashMap<>(); @@ -246,18 +245,18 @@ public class ProcessInstanceService extends BaseService { return result; } - Page page = new Page(pageNo, pageSize); + Page page = new Page<>(pageNo, pageSize); PageInfo pageInfo = new PageInfo(pageNo, pageSize); int executorId = usersService.getUserIdByName(executorName); IPage processInstanceList = processInstanceMapper.queryProcessInstanceListPaging(page, - project.getId(), processDefineId, searchVal, executorId,statusArray, host, start, end); + project.getId(), processDefineId, searchVal, executorId, statusArray, host, start, end); List processInstances = processInstanceList.getRecords(); - for(ProcessInstance processInstance: processInstances){ - processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(),processInstance.getEndTime())); + for (ProcessInstance processInstance : processInstances) { + processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime())); User executor = usersService.queryUser(processInstance.getExecutorId()); if (null != executor) { processInstance.setExecutorName(executor.getUserName()); @@ -271,8 +270,6 @@ public class ProcessInstanceService extends BaseService { return result; } - - /** * query task list by process instance id * @@ -305,14 +302,13 @@ public class ProcessInstanceService extends BaseService { /** * add dependent result for dependent task - * @param taskInstanceList */ private void addDependResultForTaskList(List taskInstanceList) throws IOException { - for(TaskInstance taskInstance: taskInstanceList){ - if(taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())){ + for (TaskInstance taskInstance : taskInstanceList) { + if 
(taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) { Result logResult = loggerService.queryLog( taskInstance.getId(), 0, 4098); - if(logResult.getCode() == Status.SUCCESS.ordinal()){ + if (logResult.getCode() == Status.SUCCESS.ordinal()) { String log = (String) logResult.getData(); Map resultMap = parseLogForDependentResult(log); taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap)); @@ -321,24 +317,24 @@ public class ProcessInstanceService extends BaseService { } } - public Map parseLogForDependentResult(String log) throws IOException { + public Map parseLogForDependentResult(String log) throws IOException { Map resultMap = new HashMap<>(); - if(StringUtils.isEmpty(log)){ + if (StringUtils.isEmpty(log)) { return resultMap; } BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes( - StandardCharsets.UTF_8)), StandardCharsets.UTF_8)); + StandardCharsets.UTF_8)), StandardCharsets.UTF_8)); String line; while ((line = br.readLine()) != null) { - if(line.contains(DEPENDENT_SPLIT)){ + if (line.contains(DEPENDENT_SPLIT)) { String[] tmpStringArray = line.split(":\\|\\|"); - if(tmpStringArray.length != 2){ + if (tmpStringArray.length != 2) { continue; } String dependResultString = tmpStringArray[1]; String[] dependStringArray = dependResultString.split(","); - if(dependStringArray.length != 2){ + if (dependStringArray.length != 2) { continue; } String key = dependStringArray[0].trim(); @@ -349,7 +345,6 @@ public class ProcessInstanceService extends BaseService { return resultMap; } - /** * query sub process instance detail info by task id * @@ -462,7 +457,7 @@ public class ProcessInstanceService extends BaseService { processInstance.setTimeout(timeout); Tenant tenant = processService.getTenantForProcess(processData.getTenantId(), processDefinition.getUserId()); - if(tenant != null){ + if (tenant != null) { processInstance.setTenantCode(tenant.getTenantCode()); } 
processInstance.setProcessInstanceJson(processInstanceJson); @@ -477,6 +472,11 @@ public class ProcessInstanceService extends BaseService { processDefinition.setLocations(locations); processDefinition.setConnects(connects); processDefinition.setTimeout(timeout); + processDefinition.setUpdateTime(new Date()); + + // add process definition version + long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition); + processDefinition.setVersion(version); updateDefine = processDefineMapper.updateById(processDefinition); } if (update > 0 && updateDefine > 0) { @@ -485,7 +485,6 @@ public class ProcessInstanceService extends BaseService { putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR); } - return result; } @@ -532,6 +531,7 @@ public class ProcessInstanceService extends BaseService { /** * delete process instance by id, at the same time,delete task instance and their mapping relation data + * * @param loginUser login user * @param projectName project name * @param processInstanceId process instance id @@ -554,13 +554,10 @@ public class ProcessInstanceService extends BaseService { return result; } - - processService.removeTaskLogFile(processInstanceId); // delete database cascade int delete = processService.deleteWorkProcessInstanceById(processInstanceId); - processService.deleteAllSubWorkProcessByParentId(processInstanceId); processService.deleteWorkProcessMapByParentId(processInstanceId); @@ -592,7 +589,6 @@ public class ProcessInstanceService extends BaseService { .getBusinessTime(processInstance.getCmdTypeIfComplement(), processInstance.getScheduleTime()); - String workflowInstanceJson = processInstance.getProcessInstanceJson(); ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class); @@ -603,10 +599,9 @@ public class ProcessInstanceService extends BaseService { List globalParams = new ArrayList<>(); if (userDefinedParams != null && userDefinedParams.length() > 0) { - globalParams = 
JSONUtils.toList(userDefinedParams, Property.class); + globalParams = JSONUtils.toList(userDefinedParams, Property.class); } - List taskNodeList = workflowData.getTasks(); // global param string @@ -618,7 +613,7 @@ public class ProcessInstanceService extends BaseService { } // local params - Map> localUserDefParams = new HashMap<>(); + Map> localUserDefParams = new HashMap<>(); for (TaskNode taskNode : taskNodeList) { String parameter = taskNode.getParams(); Map map = JSONUtils.toMap(parameter); @@ -627,9 +622,9 @@ public class ProcessInstanceService extends BaseService { localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams); List localParamsList = JSONUtils.toList(localParams, Property.class); - Map localParamsMap = new HashMap<>(); - localParamsMap.put("taskType",taskNode.getType()); - localParamsMap.put("localParamsList",localParamsList); + Map localParamsMap = new HashMap<>(); + localParamsMap.put("taskType", taskNode.getType()); + localParamsMap.put("localParamsList", localParamsList); if (CollectionUtils.isNotEmpty(localParamsList)) { localUserDefParams.put(taskNode.getName(), localParamsMap); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java index c5a09009d1c82227546ca8f0fa35278fbf32f91c..695b76b2bc7f53daf5e14ea8f7196219fd648201 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java @@ -17,8 +17,6 @@ package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; import 
org.apache.dolphinscheduler.common.Constants; @@ -32,11 +30,20 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.text.MessageFormat; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import java.text.MessageFormat; -import java.util.*; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * task instance service @@ -79,10 +86,10 @@ public class TaskInstanceService extends BaseService { * @param pageSize page size * @return task list page */ - public Map queryTaskListPaging(User loginUser, String projectName, - Integer processInstanceId, String taskName, String executorName, String startDate, - String endDate, String searchVal, ExecutionStatus stateType,String host, - Integer pageNo, Integer pageSize) { + public Map queryTaskListPaging(User loginUser, String projectName, + Integer processInstanceId, String taskName, String executorName, String startDate, + String endDate, String searchVal, ExecutionStatus stateType, String host, + Integer pageNo, Integer pageSize) { Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); @@ -93,23 +100,23 @@ public class TaskInstanceService extends BaseService { } int[] statusArray = null; - if(stateType != null){ + if (stateType != null) { statusArray = new int[]{stateType.ordinal()}; } Date start = null; Date end = null; - try { - if(StringUtils.isNotEmpty(startDate)){ - start = DateUtils.getScheduleDate(startDate); + if (StringUtils.isNotEmpty(startDate)) { + start = DateUtils.getScheduleDate(startDate); + if 
(start == null) { + return generateInvalidParamRes(result, "startDate"); } - if(StringUtils.isNotEmpty( endDate)){ - end = DateUtils.getScheduleDate(endDate); + } + if (StringUtils.isNotEmpty(endDate)) { + end = DateUtils.getScheduleDate(endDate); + if (end == null) { + return generateInvalidParamRes(result, "endDate"); } - } catch (Exception e) { - result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate")); - return result; } Page page = new Page(pageNo, pageSize); @@ -124,18 +131,30 @@ public class TaskInstanceService extends BaseService { exclusionSet.add("taskJson"); List taskInstanceList = taskInstanceIPage.getRecords(); - for(TaskInstance taskInstance : taskInstanceList){ + for (TaskInstance taskInstance : taskInstanceList) { taskInstance.setDuration(DateUtils.differSec(taskInstance.getStartTime(), taskInstance.getEndTime())); User executor = usersService.queryUser(taskInstance.getExecutorId()); if (null != executor) { taskInstance.setExecutorName(executor.getUserName()); } } - pageInfo.setTotalCount((int)taskInstanceIPage.getTotal()); - pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(),exclusionSet)); + pageInfo.setTotalCount((int) taskInstanceIPage.getTotal()); + pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(), exclusionSet)); result.put(Constants.DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); return result; } + + /*** + * generate {@link org.apache.dolphinscheduler.api.enums.Status#REQUEST_PARAMS_NOT_VALID_ERROR} res with param name + * @param result exist result map + * @param params invalid params name + * @return update result map + */ + private Map generateInvalidParamRes(Map result, String params) { + result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); + result.put(Constants.MSG, 
MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), params)); + return result; + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java index 586c1a1b53ebc6dee8acb0a1265cdca1e8d1d38b..8e83e22a3d6a202ac6cb6f4fb2f50a6c838b7a0a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java @@ -14,338 +14,85 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.HadoopUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; -import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; -import org.apache.dolphinscheduler.dao.mapper.TenantMapper; -import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; 
-import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; -import java.util.Date; -import java.util.HashMap; -import java.util.List; import java.util.Map; /** * tenant service */ -@Service -public class TenantService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(TenantService.class); - - @Autowired - private TenantMapper tenantMapper; - - @Autowired - private ProcessInstanceMapper processInstanceMapper; - - @Autowired - private ProcessDefinitionMapper processDefinitionMapper; - - @Autowired - private UserMapper userMapper; - - - - /** - * create tenant - * - * - * @param loginUser login user - * @param tenantCode tenant code - * @param tenantName tenant name - * @param queueId queue id - * @param desc description - * @return create result code - * @throws Exception exception - */ - @Transactional(rollbackFor = Exception.class) - public Map createTenant(User loginUser, - String tenantCode, - String tenantName, - int queueId, - String desc) throws Exception { - - Map result = new HashMap<>(); - result.put(Constants.STATUS, false); - if (checkAdmin(loginUser, result)) { - return result; - } - - if (checkTenantExists(tenantCode)){ - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, tenantCode); - return result; - } - - - Tenant tenant = new Tenant(); - Date now = new Date(); - - if (!tenantCode.matches("^[0-9a-zA-Z_.-]{1,}$") || tenantCode.startsWith("-") || tenantCode.startsWith(".")){ - putMsg(result, Status.VERIFY_TENANT_CODE_ERROR); - return result; - } - tenant.setTenantCode(tenantCode); - tenant.setTenantName(tenantName); - tenant.setQueueId(queueId); - tenant.setDescription(desc); - tenant.setCreateTime(now); - tenant.setUpdateTime(now); - - // save - tenantMapper.insert(tenant); - - // if hdfs startup - if (PropertyUtils.getResUploadStartupState()){ - createTenantDirIfNotExists(tenantCode); - } - - putMsg(result, Status.SUCCESS); - - return result; -} - - - - /** 
- * query tenant list paging - * - * @param loginUser login user - * @param searchVal search value - * @param pageNo page number - * @param pageSize page size - * @return tenant list page - */ - public Map queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - - Map result = new HashMap<>(); - if (checkAdmin(loginUser, result)) { - return result; - } - - Page page = new Page(pageNo, pageSize); - IPage tenantIPage = tenantMapper.queryTenantPaging(page, searchVal); - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotalCount((int)tenantIPage.getTotal()); - pageInfo.setLists(tenantIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * updateProcessInstance tenant - * - * @param loginUser login user - * @param id tennat id - * @param tenantCode tennat code - * @param tenantName tennat name - * @param queueId queue id - * @param desc description - * @return update result code - * @throws Exception exception - */ - public Map updateTenant(User loginUser,int id,String tenantCode, String tenantName, int queueId, String desc) throws Exception { - - Map result = new HashMap<>(); - result.put(Constants.STATUS, false); - - if (checkAdmin(loginUser, result)) { - return result; - } - - Tenant tenant = tenantMapper.queryById(id); +public interface TenantService { - if (tenant == null){ - putMsg(result, Status.TENANT_NOT_EXIST); - return result; - } - - // updateProcessInstance tenant /** - * if the tenant code is modified, the original resource needs to be copied to the new tenant. 
+ * create tenant + * + * @param loginUser login user + * @param tenantCode tenant code + * @param tenantName tenant name + * @param queueId queue id + * @param desc description + * @return create result code + * @throws Exception exception */ - if (!tenant.getTenantCode().equals(tenantCode)){ - if (checkTenantExists(tenantCode)){ - // if hdfs startup - if (PropertyUtils.getResUploadStartupState()){ - String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources"; - String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); - //init hdfs resource - HadoopUtils.getInstance().mkdir(resourcePath); - HadoopUtils.getInstance().mkdir(udfsPath); - } - }else { - putMsg(result, Status.TENANT_CODE_HAS_ALREADY_EXISTS); - return result; - } - } - - Date now = new Date(); - - if (StringUtils.isNotEmpty(tenantCode)){ - tenant.setTenantCode(tenantCode); - } - - if (StringUtils.isNotEmpty(tenantName)){ - tenant.setTenantName(tenantName); - } - - if (queueId != 0){ - tenant.setQueueId(queueId); - } - tenant.setDescription(desc); - tenant.setUpdateTime(now); - tenantMapper.updateById(tenant); - - result.put(Constants.STATUS, Status.SUCCESS); - result.put(Constants.MSG, Status.SUCCESS.getMsg()); - return result; - } - - /** - * delete tenant - * - * @param loginUser login user - * @param id tenant id - * @return delete result code - * @throws Exception exception - */ - @Transactional(rollbackFor = Exception.class) - public Map deleteTenantById(User loginUser, int id) throws Exception { - Map result = new HashMap<>(); - - if (checkAdmin(loginUser, result)) { - return result; - } - - Tenant tenant = tenantMapper.queryById(id); - if (tenant == null){ - putMsg(result, Status.TENANT_NOT_EXIST); - return result; - } - - List processInstances = getProcessInstancesByTenant(tenant); - if(CollectionUtils.isNotEmpty(processInstances)){ - putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL, processInstances.size()); - return result; - } - - List processDefinitions = 
processDefinitionMapper.queryDefinitionListByTenant(tenant.getId()); - if(CollectionUtils.isNotEmpty(processDefinitions)){ - putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, processDefinitions.size()); - return result; - } - - List userList = userMapper.queryUserListByTenant(tenant.getId()); - if(CollectionUtils.isNotEmpty(userList)){ - putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_USERS, userList.size()); - return result; - } - - // if resource upload startup - if (PropertyUtils.getResUploadStartupState()){ - String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode(); - - if (HadoopUtils.getInstance().exists(tenantPath)){ - HadoopUtils.getInstance().delete(tenantPath, true); - } - } - - tenantMapper.deleteById(id); - processInstanceMapper.updateProcessInstanceByTenantId(id, -1); - putMsg(result, Status.SUCCESS); - return result; - } + Map createTenant(User loginUser, + String tenantCode, + String tenantName, + int queueId, + String desc) throws Exception; - private List getProcessInstancesByTenant(Tenant tenant) { - return processInstanceMapper.queryByTenantIdAndStatus(tenant.getId(), org.apache.dolphinscheduler.common.Constants.NOT_TERMINATED_STATES); - } - - /** - * query tenant list - * - * @param loginUser login user - * @return tenant list - */ - public Map queryTenantList(User loginUser) { - - Map result = new HashMap<>(); - - List resourceList = tenantMapper.selectList(null); - result.put(Constants.DATA_LIST, resourceList); - putMsg(result, Status.SUCCESS); - - return result; - } - - /** - * query tenant list via tenant code - * @param tenantCode tenant code - * @return tenant list - */ - public Map queryTenantList(String tenantCode) { - Map result = new HashMap<>(); - - List resourceList = tenantMapper.queryByTenantCode(tenantCode); - if (CollectionUtils.isNotEmpty(resourceList)) { - result.put(Constants.DATA_LIST, resourceList); - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.TENANT_NOT_EXIST); - } + 
/** + * query tenant list paging + * + * @param loginUser login user + * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @return tenant list page + */ + Map queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize); - return result; - } + /** + * updateProcessInstance tenant + * + * @param loginUser login user + * @param id tennat id + * @param tenantCode tennat code + * @param tenantName tennat name + * @param queueId queue id + * @param desc description + * @return update result code + * @throws Exception exception + */ + Map updateTenant(User loginUser, int id, String tenantCode, String tenantName, int queueId, + String desc) throws Exception; - /** - * verify tenant code - * - * @param tenantCode tenant code - * @return true if tenant code can user, otherwise return false - */ - public Result verifyTenantCode(String tenantCode) { - Result result = new Result(); - if (checkTenantExists(tenantCode)) { - logger.error("tenant {} has exist, can't create again.", tenantCode); - putMsg(result, Status.TENANT_NAME_EXIST, tenantCode); - } else { - putMsg(result, Status.SUCCESS); - } - return result; - } + /** + * delete tenant + * + * @param loginUser login user + * @param id tenant id + * @return delete result code + * @throws Exception exception + */ + Map deleteTenantById(User loginUser, int id) throws Exception; + /** + * query tenant list + * + * @param loginUser login user + * @return tenant list + */ + Map queryTenantList(User loginUser); - /** - * check tenant exists - * - * @param tenantCode tenant code - * @return ture if the tenant code exists, otherwise return false - */ - private boolean checkTenantExists(String tenantCode) { - List tenants = tenantMapper.queryByTenantCode(tenantCode); - return CollectionUtils.isNotEmpty(tenants); - } + /** + * verify tenant code + * + * @param tenantCode tenant code + * @return true if tenant code can user, otherwise return false + */ + Result 
verifyTenantCode(String tenantCode); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java index 2c2bb3b7622bb37025930cea6b9745f11a25e37a..e205cd1880120b26f2d76f22a8fea928fbad5b20 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java @@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.BaseService; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.CheckUtils; @@ -56,6 +57,7 @@ import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; import org.apache.dolphinscheduler.dao.entity.ProcessData; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; @@ -125,6 +127,9 @@ public class ProcessDefinitionServiceImpl extends BaseService implements @Autowired private ProjectService projectService; + @Autowired + private ProcessDefinitionVersionService processDefinitionVersionService; + @Autowired private ProcessDefinitionMapper processDefineMapper; @@ -202,8 +207,17 @@ public 
class ProcessDefinitionServiceImpl extends BaseService implements processDefine.setCreateTime(now); processDefine.setUpdateTime(now); processDefine.setFlag(Flag.YES); + + // save the new process definition processDefineMapper.insert(processDefine); + // add process definition version + long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefine); + + processDefine.setVersion(version); + + processDefineMapper.updateVersionByProcessDefinitionId(processDefine.getId(), version); + // return processDefinition object with ID result.put(Constants.DATA_LIST, processDefineMapper.selectById(processDefine.getId())); putMsg(result, Status.SUCCESS); @@ -239,7 +253,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements return sb.toString(); } - /** * query process definition list * @@ -265,7 +278,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements return result; } - /** * query process definition list paging * @@ -311,7 +323,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements */ public Map queryProcessDefinitionById(User loginUser, String projectName, Integer processId) { - Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); @@ -398,9 +409,14 @@ public class ProcessDefinitionServiceImpl extends BaseService implements processDefine.setGlobalParamList(globalParamsList); processDefine.setUpdateTime(now); processDefine.setFlag(Flag.YES); + + // add process definition version + long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefine); + processDefine.setVersion(version); + if (processDefineMapper.updateById(processDefine) > 0) { putMsg(result, Status.SUCCESS); - + result.put(Constants.DATA_LIST, processDefineMapper.queryByDefineId(id)); } else { putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); } @@ -1058,7 +1074,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements } } - /** * check 
the process definition node meets the specifications * @@ -1127,7 +1142,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements return result; } - String processDefinitionJson = processDefinition.getProcessDefinitionJson(); ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); @@ -1185,7 +1199,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements } - /** * query process definition all by project id * @@ -1277,7 +1290,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements TaskNode taskNode = dag.getNode(nodeName); treeViewDto.setType(taskNode.getType()); - //set treeViewDto instances for (int i = limit - 1; i >= 0; i--) { ProcessInstance processInstance = processInstanceList.get(i); @@ -1334,7 +1346,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements return result; } - /** * Generate the DAG Graph based on the process definition id * @@ -1360,7 +1371,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements return new DAG<>(); } - /** * whether the graph has a ring * @@ -1525,6 +1535,66 @@ public class ProcessDefinitionServiceImpl extends BaseService implements return result; } + /** + * switch the defined process definition verison + * + * @param loginUser login user + * @param projectName project name + * @param processDefinitionId process definition id + * @param version the version user want to switch + * @return switch process definition version result code + */ + @Override + public Map switchProcessDefinitionVersion(User loginUser, String projectName + , int processDefinitionId, long version) { + + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + // check project auth + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + 
return checkResult; + } + + ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId); + if (Objects.isNull(processDefinition)) { + putMsg(result + , Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR + , processDefinitionId); + return result; + } + + ProcessDefinitionVersion processDefinitionVersion = processDefinitionVersionService + .queryByProcessDefinitionIdAndVersion(processDefinitionId, version); + if (Objects.isNull(processDefinitionVersion)) { + putMsg(result + , Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR + , processDefinitionId + , version); + return result; + } + + processDefinition.setVersion(processDefinitionVersion.getVersion()); + processDefinition.setProcessDefinitionJson(processDefinitionVersion.getProcessDefinitionJson()); + processDefinition.setDescription(processDefinitionVersion.getDescription()); + processDefinition.setLocations(processDefinitionVersion.getLocations()); + processDefinition.setConnects(processDefinitionVersion.getConnects()); + processDefinition.setTimeout(processDefinitionVersion.getTimeout()); + processDefinition.setGlobalParams(processDefinitionVersion.getGlobalParams()); + processDefinition.setUpdateTime(new Date()); + processDefinition.setReceivers(processDefinitionVersion.getReceivers()); + processDefinition.setReceiversCc(processDefinitionVersion.getReceiversCc()); + processDefinition.setResourceIds(processDefinitionVersion.getResourceIds()); + + if (processDefineMapper.updateById(processDefinition) > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR); + } + return result; + } + /** * do batch move process definition * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java new file mode 100644 index 0000000000000000000000000000000000000000..6364242190d13f465898f6fe4f5b4c021af0d29d --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionVersionServiceImpl.java @@ -0,0 +1,181 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.BaseService; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.google.common.collect.ImmutableMap; + +@Service +public class ProcessDefinitionVersionServiceImpl extends BaseService implements + ProcessDefinitionVersionService { + + @Autowired + private ProcessDefinitionVersionMapper processDefinitionVersionMapper; + + @Autowired + private ProjectService projectService; + + @Autowired + private ProjectMapper projectMapper; + + /** + * add the newest version of one process definition + * + * @param processDefinition the process definition that need to record version + * @return the newest version number of this process definition + */ + public long addProcessDefinitionVersion(ProcessDefinition processDefinition) { + + long version = this.queryMaxVersionByProcessDefinitionId(processDefinition.getId()) + 1; + + ProcessDefinitionVersion processDefinitionVersion = 
ProcessDefinitionVersion + .newBuilder() + .processDefinitionId(processDefinition.getId()) + .version(version) + .processDefinitionJson(processDefinition.getProcessDefinitionJson()) + .description(processDefinition.getDescription()) + .locations(processDefinition.getLocations()) + .connects(processDefinition.getConnects()) + .timeout(processDefinition.getTimeout()) + .globalParams(processDefinition.getGlobalParams()) + .createTime(processDefinition.getUpdateTime()) + .receivers(processDefinition.getReceivers()) + .receiversCc(processDefinition.getReceiversCc()) + .resourceIds(processDefinition.getResourceIds()) + .build(); + + processDefinitionVersionMapper.insert(processDefinitionVersion); + + return version; + } + + /** + * query the max version number by the process definition id + * + * @param processDefinitionId process definition id + * @return the max version number of this id + */ + private long queryMaxVersionByProcessDefinitionId(int processDefinitionId) { + Long maxVersion = processDefinitionVersionMapper.queryMaxVersionByProcessDefinitionId(processDefinitionId); + if (Objects.isNull(maxVersion)) { + return 0L; + } else { + return maxVersion; + } + } + + /** + * query the pagination versions info by one certain process definition id + * + * @param loginUser login user info to check auth + * @param projectName process definition project name + * @param pageNo page number + * @param pageSize page size + * @param processDefinitionId process definition id + * @return the pagination process definition versions info of the certain process definition + */ + public Map queryProcessDefinitionVersions(User loginUser, String projectName, int pageNo, int pageSize, int processDefinitionId) { + + Map result = new HashMap<>(); + + // check the if pageNo or pageSize less than 1 + if (pageNo <= 0 || pageSize <= 0) { + putMsg(result + , Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR + , pageNo + , pageSize); + return result; + } + + 
Project project = projectMapper.queryByName(projectName); + + // check project auth + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + Page page = new Page<>(pageNo, pageSize); + IPage processDefinitionVersionsPaging = processDefinitionVersionMapper.queryProcessDefinitionVersionsPaging(page, processDefinitionId); + List processDefinitionVersions = processDefinitionVersionsPaging.getRecords(); + pageInfo.setLists(processDefinitionVersions); + pageInfo.setTotalCount((int) processDefinitionVersionsPaging.getTotal()); + return ImmutableMap.of( + Constants.MSG, Status.SUCCESS.getMsg() + , Constants.STATUS, Status.SUCCESS + , Constants.DATA_LIST, pageInfo); + } + + /** + * query one certain process definition version by version number and process definition id + * + * @param processDefinitionId process definition id + * @param version version number + * @return the process definition version info + */ + public ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(int processDefinitionId, long version) { + return processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(processDefinitionId, version); + } + + /** + * delete one certain process definition by version number and process definition id + * + * @param loginUser login user info to check auth + * @param projectName process definition project name + * @param processDefinitionId process definition id + * @param version version number + * @return delele result code + */ + public Map deleteByProcessDefinitionIdAndVersion(User loginUser, String projectName, int processDefinitionId, long version) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + // check project auth + Map checkResult = 
projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion(processDefinitionId, version); + putMsg(result, Status.SUCCESS); + return result; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java new file mode 100644 index 0000000000000000000000000000000000000000..3a267bcc8c917b1cd3a3b38b5d41b6331f283bdf --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java @@ -0,0 +1,331 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.BaseService; +import org.apache.dolphinscheduler.api.service.TenantService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.TenantMapper; +import org.apache.dolphinscheduler.dao.mapper.UserMapper; + +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * tenant service + */ +@Service +public class TenantServiceImpl extends BaseService implements TenantService { + + private static final Logger logger = LoggerFactory.getLogger(TenantServiceImpl.class); + + @Autowired + private TenantMapper tenantMapper; + + @Autowired + private ProcessInstanceMapper processInstanceMapper; + + @Autowired + private 
ProcessDefinitionMapper processDefinitionMapper; + + @Autowired + private UserMapper userMapper; + + /** + * create tenant + * + * @param loginUser login user + * @param tenantCode tenant code + * @param tenantName tenant name + * @param queueId queue id + * @param desc description + * @return create result code + * @throws Exception exception + */ + @Transactional(rollbackFor = Exception.class) + public Map createTenant(User loginUser, + String tenantCode, + String tenantName, + int queueId, + String desc) throws Exception { + + Map result = new HashMap<>(5); + result.put(Constants.STATUS, false); + if (checkAdmin(loginUser, result)) { + return result; + } + + if (checkTenantExists(tenantCode)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, tenantCode); + return result; + } + + Tenant tenant = new Tenant(); + Date now = new Date(); + + if (!tenantCode.matches("^[0-9a-zA-Z_.-]{1,}$") || tenantCode.startsWith("-") || tenantCode.startsWith(".")) { + putMsg(result, Status.VERIFY_TENANT_CODE_ERROR); + return result; + } + tenant.setTenantCode(tenantCode); + tenant.setTenantName(tenantName); + tenant.setQueueId(queueId); + tenant.setDescription(desc); + tenant.setCreateTime(now); + tenant.setUpdateTime(now); + + // save + tenantMapper.insert(tenant); + + // if hdfs startup + if (PropertyUtils.getResUploadStartupState()) { + createTenantDirIfNotExists(tenantCode); + } + + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * query tenant list paging + * + * @param loginUser login user + * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @return tenant list page + */ + public Map queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { + + Map result = new HashMap<>(5); + if (checkAdmin(loginUser, result)) { + return result; + } + + Page page = new Page<>(pageNo, pageSize); + IPage tenantIPage = tenantMapper.queryTenantPaging(page, searchVal); + PageInfo pageInfo = new 
PageInfo<>(pageNo, pageSize); + pageInfo.setTotalCount((int) tenantIPage.getTotal()); + pageInfo.setLists(tenantIPage.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * updateProcessInstance tenant + * + * @param loginUser login user + * @param id tennat id + * @param tenantCode tennat code + * @param tenantName tennat name + * @param queueId queue id + * @param desc description + * @return update result code + * @throws Exception exception + */ + public Map updateTenant(User loginUser, int id, String tenantCode, String tenantName, int queueId, + String desc) throws Exception { + + Map result = new HashMap<>(5); + result.put(Constants.STATUS, false); + + if (checkAdmin(loginUser, result)) { + return result; + } + + Tenant tenant = tenantMapper.queryById(id); + + if (tenant == null) { + putMsg(result, Status.TENANT_NOT_EXIST); + return result; + } + + // updateProcessInstance tenant + /** + * if the tenant code is modified, the original resource needs to be copied to the new tenant. 
+ */ + if (!tenant.getTenantCode().equals(tenantCode)) { + if (checkTenantExists(tenantCode)) { + // if hdfs startup + if (PropertyUtils.getResUploadStartupState()) { + String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources"; + String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); + //init hdfs resource + HadoopUtils.getInstance().mkdir(resourcePath); + HadoopUtils.getInstance().mkdir(udfsPath); + } + } else { + putMsg(result, Status.TENANT_CODE_HAS_ALREADY_EXISTS); + return result; + } + } + + Date now = new Date(); + + if (StringUtils.isNotEmpty(tenantCode)) { + tenant.setTenantCode(tenantCode); + } + + if (StringUtils.isNotEmpty(tenantName)) { + tenant.setTenantName(tenantName); + } + + if (queueId != 0) { + tenant.setQueueId(queueId); + } + tenant.setDescription(desc); + tenant.setUpdateTime(now); + tenantMapper.updateById(tenant); + + result.put(Constants.STATUS, Status.SUCCESS); + result.put(Constants.MSG, Status.SUCCESS.getMsg()); + return result; + } + + /** + * delete tenant + * + * @param loginUser login user + * @param id tenant id + * @return delete result code + * @throws Exception exception + */ + @Transactional(rollbackFor = Exception.class) + public Map deleteTenantById(User loginUser, int id) throws Exception { + Map result = new HashMap<>(5); + + if (checkAdmin(loginUser, result)) { + return result; + } + + Tenant tenant = tenantMapper.queryById(id); + if (tenant == null) { + putMsg(result, Status.TENANT_NOT_EXIST); + return result; + } + + List processInstances = getProcessInstancesByTenant(tenant); + if (CollectionUtils.isNotEmpty(processInstances)) { + putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL, processInstances.size()); + return result; + } + + List processDefinitions = + processDefinitionMapper.queryDefinitionListByTenant(tenant.getId()); + if (CollectionUtils.isNotEmpty(processDefinitions)) { + putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, processDefinitions.size()); + return result; + } + + 
List userList = userMapper.queryUserListByTenant(tenant.getId()); + if (CollectionUtils.isNotEmpty(userList)) { + putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_USERS, userList.size()); + return result; + } + + // if resource upload startup + if (PropertyUtils.getResUploadStartupState()) { + String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode(); + + if (HadoopUtils.getInstance().exists(tenantPath)) { + HadoopUtils.getInstance().delete(tenantPath, true); + } + } + + tenantMapper.deleteById(id); + processInstanceMapper.updateProcessInstanceByTenantId(id, -1); + putMsg(result, Status.SUCCESS); + return result; + } + + private List getProcessInstancesByTenant(Tenant tenant) { + return processInstanceMapper.queryByTenantIdAndStatus(tenant.getId(), Constants.NOT_TERMINATED_STATES); + } + + /** + * query tenant list + * + * @param loginUser login user + * @return tenant list + */ + public Map queryTenantList(User loginUser) { + + Map result = new HashMap<>(5); + + List resourceList = tenantMapper.selectList(null); + result.put(Constants.DATA_LIST, resourceList); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * verify tenant code + * + * @param tenantCode tenant code + * @return true if tenant code can user, otherwise return false + */ + public Result verifyTenantCode(String tenantCode) { + Result result = new Result(); + if (checkTenantExists(tenantCode)) { + putMsg(result, Status.TENANT_NAME_EXIST, tenantCode); + } else { + putMsg(result, Status.SUCCESS); + } + return result; + } + + /** + * check tenant exists + * + * @param tenantCode tenant code + * @return ture if the tenant code exists, otherwise return false + */ + private boolean checkTenantExists(String tenantCode) { + List tenants = tenantMapper.queryByTenantCode(tenantCode); + return CollectionUtils.isNotEmpty(tenants); + } +} diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages.properties 
b/dolphinscheduler-api/src/main/resources/i18n/messages.properties index 2005d05fdee5d4c86190dc81759c93d75d288657..d1da3e94a83938b251108c845bd4916887cf9ed3 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages.properties @@ -258,3 +258,7 @@ COPY_PROCESS_DEFINITION_NOTES= copy process definition notes MOVE_PROCESS_DEFINITION_NOTES= move process definition notes TARGET_PROJECT_ID= target project id IS_COPY = is copy +DELETE_PROCESS_DEFINITION_VERSION_NOTES=delete process definition version +QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=query process definition versions +SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version +VERSION=version diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties index 2e079285ca3efbfa900fe0cd08649bee05004531..267f93b14d45cc8b6141d9c0f0487b46341556a2 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties @@ -258,3 +258,7 @@ COPY_PROCESS_DEFINITION_NOTES= copy process definition notes MOVE_PROCESS_DEFINITION_NOTES= move process definition notes TARGET_PROJECT_ID= target project id IS_COPY = is copy +DELETE_PROCESS_DEFINITION_VERSION_NOTES=delete process definition version +QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=query process definition versions +SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version +VERSION=version diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties index 31f70ef40c796dae2c9682268b29624cae0dd22a..16262e6bbc791ce68b700ce8202b9ef823610cba 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties @@ 
-256,4 +256,7 @@ COPY_PROCESS_DEFINITION_NOTES= 复制工作流定义 MOVE_PROCESS_DEFINITION_NOTES= 移动工作流定义 TARGET_PROJECT_ID= 目标项目ID IS_COPY = 是否复制 - +DELETE_PROCESS_DEFINITION_VERSION_NOTES=删除流程历史版本 +QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=查询流程历史版本信息 +SWITCH_PROCESS_DEFINITION_VERSION_NOTES=切换流程版本 +VERSION=版本号 diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java index ab0a3ce1f7cfc41046c2be518a8cbd6593da4640..f2a54a1a8889b8ca80adcc6d52244f96118ac786 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java @@ -14,10 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService; import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; @@ -25,8 +26,18 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.User; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.servlet.http.HttpServletResponse; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -38,12 +49,6 @@ import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.mock.web.MockHttpServletResponse; -import javax.servlet.http.HttpServletResponse; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; /** * process definition controller test @@ -59,10 +64,13 @@ public class ProcessDefinitionControllerTest { @Mock private ProcessDefinitionServiceImpl processDefinitionService; + @Mock + private ProcessDefinitionVersionService processDefinitionVersionService; + protected User user; @Before - public void before(){ + public void before() { User loginUser = new User(); loginUser.setId(1); loginUser.setUserType(UserType.GENERAL_USER); @@ -73,7 +81,11 @@ public class 
ProcessDefinitionControllerTest { @Test public void testCreateProcessDefinition() throws Exception { - String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; + String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\"" + + ":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\" + + "necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\"" + + ",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false}," + + "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"; String projectName = "test"; @@ -82,14 +94,14 @@ public class ProcessDefinitionControllerTest { String connects = "[]"; Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); - result.put("processDefinitionId",1); + result.put("processDefinitionId", 1); Mockito.when(processDefinitionService.createProcessDefinition(user, projectName, name, json, description, locations, connects)).thenReturn(result); Result response = processDefinitionController.createProcessDefinition(user, projectName, name, json, locations, connects, description); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } private void putMsg(Map result, Status status, Object... 
statusParams) { @@ -109,17 +121,21 @@ public class ProcessDefinitionControllerTest { String projectName = "test"; String name = "dag_test"; - Mockito.when(processDefinitionService.verifyProcessDefinitionName(user,projectName,name)).thenReturn(result); + Mockito.when(processDefinitionService.verifyProcessDefinitionName(user, projectName, name)).thenReturn(result); - Result response = processDefinitionController.verifyProcessDefinitionName(user,projectName,name); - Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(),response.getCode().intValue()); + Result response = processDefinitionController.verifyProcessDefinitionName(user, projectName, name); + Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(), response.getCode().intValue()); } @Test public void updateProcessDefinition() throws Exception { - String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; + String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\"" + + ",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"}" + + ",\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\"" + + ":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\"" + + ":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"; String projectName = "test"; String name = "dag_test"; @@ 
-128,14 +144,14 @@ public class ProcessDefinitionControllerTest { int id = 1; Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); - result.put("processDefinitionId",1); + result.put("processDefinitionId", 1); - Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id,name, json, + Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id, name, json, description, locations, connects)).thenReturn(result); - Result response = processDefinitionController.updateProcessDefinition(user, projectName, name,id, json, + Result response = processDefinitionController.updateProcessDefinition(user, projectName, name, id, json, locations, connects, description); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } @Test @@ -145,15 +161,19 @@ public class ProcessDefinitionControllerTest { Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); - Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal())).thenReturn(result); - Result response = processDefinitionController.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal()); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName, id, ReleaseState.OFFLINE.ordinal())).thenReturn(result); + Result response = processDefinitionController.releaseProcessDefinition(user, projectName, id, ReleaseState.OFFLINE.ordinal()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } @Test public void testQueryProcessDefinitionById() throws Exception { - String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho 
${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; + String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1" + + "\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}" + + "\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\"" + + ":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":" + + "\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"; String projectName = "test"; String name = "dag_test"; @@ -174,10 +194,10 @@ public class ProcessDefinitionControllerTest { putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, processDefinition); - Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName,id)).thenReturn(result); - Result response = processDefinitionController.queryProcessDefinitionById(user, projectName,id); + Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName, id)).thenReturn(result); + Result response = processDefinitionController.queryProcessDefinitionById(user, projectName, id); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } @Test @@ -190,10 +210,10 @@ public class ProcessDefinitionControllerTest { Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); - Mockito.when(processDefinitionService.batchCopyProcessDefinition(user,projectName,id,targetProjectId)).thenReturn(result); - Result response = 
processDefinitionController.copyProcessDefinition(user, projectName,id,targetProjectId); + Mockito.when(processDefinitionService.batchCopyProcessDefinition(user, projectName, id, targetProjectId)).thenReturn(result); + Result response = processDefinitionController.copyProcessDefinition(user, projectName, id, targetProjectId); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } @Test @@ -206,35 +226,37 @@ public class ProcessDefinitionControllerTest { Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); - Mockito.when(processDefinitionService.batchMoveProcessDefinition(user,projectName,id,targetProjectId)).thenReturn(result); - Result response = processDefinitionController.moveProcessDefinition(user, projectName,id,targetProjectId); + Mockito.when(processDefinitionService.batchMoveProcessDefinition(user, projectName, id, targetProjectId)).thenReturn(result); + Result response = processDefinitionController.moveProcessDefinition(user, projectName, id, targetProjectId); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } - @Test public void testQueryProcessDefinitionList() throws Exception { String projectName = "test"; - List resourceList = getDefinitionList(); + List resourceList = getDefinitionList(); Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, resourceList); - Mockito.when(processDefinitionService.queryProcessDefinitionList(user, projectName)).thenReturn(result); Result response = processDefinitionController.queryProcessDefinitionList(user, projectName); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } - public List getDefinitionList(){ + public List getDefinitionList() { List 
resourceList = new ArrayList<>(); - String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; + String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1" + + "\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}" + + "\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval" + + "\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\"" + + ":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"; String projectName = "test"; String name = "dag_test"; @@ -266,7 +288,7 @@ public class ProcessDefinitionControllerTest { resourceList.add(processDefinition); resourceList.add(processDefinition2); - return resourceList; + return resourceList; } @Test @@ -277,13 +299,13 @@ public class ProcessDefinitionControllerTest { Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); - Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName,id)).thenReturn(result); - Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName,id); + Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName, id)).thenReturn(result); + Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName, id); - 
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } - @Test + @Test public void testGetNodeListByDefinitionId() throws Exception { String projectName = "test"; int id = 1; @@ -292,9 +314,9 @@ public class ProcessDefinitionControllerTest { putMsg(result, Status.SUCCESS); Mockito.when(processDefinitionService.getTaskNodeListByDefinitionId(id)).thenReturn(result); - Result response = processDefinitionController.getNodeListByDefinitionId(user,projectName,id); + Result response = processDefinitionController.getNodeListByDefinitionId(user, projectName, id); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } @Test @@ -306,57 +328,57 @@ public class ProcessDefinitionControllerTest { putMsg(result, Status.SUCCESS); Mockito.when(processDefinitionService.getTaskNodeListByDefinitionIdList(idList)).thenReturn(result); - Result response = processDefinitionController.getNodeListByDefinitionIdList(user,projectName,idList); + Result response = processDefinitionController.getNodeListByDefinitionIdList(user, projectName, idList); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } @Test - public void testQueryProcessDefinitionAllByProjectId() throws Exception{ + public void testQueryProcessDefinitionAllByProjectId() throws Exception { int projectId = 1; - Map result = new HashMap<>(); - putMsg(result,Status.SUCCESS); + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS); Mockito.when(processDefinitionService.queryProcessDefinitionAllByProjectId(projectId)).thenReturn(result); - Result response = processDefinitionController.queryProcessDefinitionAllByProjectId(user,projectId); + Result response = 
processDefinitionController.queryProcessDefinitionAllByProjectId(user, projectId); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } @Test - public void testViewTree() throws Exception{ + public void testViewTree() throws Exception { String projectName = "test"; int processId = 1; int limit = 2; - Map result = new HashMap<>(); - putMsg(result,Status.SUCCESS); + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS); - Mockito.when(processDefinitionService.viewTree(processId,limit)).thenReturn(result); - Result response = processDefinitionController.viewTree(user,projectName,processId,limit); + Mockito.when(processDefinitionService.viewTree(processId, limit)).thenReturn(result); + Result response = processDefinitionController.viewTree(user, projectName, processId, limit); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } @Test - public void testQueryProcessDefinitionListPaging() throws Exception{ + public void testQueryProcessDefinitionListPaging() throws Exception { String projectName = "test"; int pageNo = 1; int pageSize = 10; String searchVal = ""; int userId = 1; - Map result = new HashMap<>(); - putMsg(result,Status.SUCCESS); - result.put(Constants.DATA_LIST,new PageInfo(1,10)); + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, new PageInfo(1, 10)); - Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user,projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result); - Result response = processDefinitionController.queryProcessDefinitionListPaging(user,projectName,pageNo,searchVal,userId,pageSize); + Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user, projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result); + Result 
response = processDefinitionController.queryProcessDefinitionListPaging(user, projectName, pageNo, searchVal, userId, pageSize); - Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } @Test - public void testBatchExportProcessDefinitionByIds() throws Exception{ + public void testBatchExportProcessDefinitionByIds() throws Exception { String processDefinitionIds = "1,2"; String projectName = "test"; @@ -365,4 +387,67 @@ public class ProcessDefinitionControllerTest { processDefinitionController.batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response); } + @Test + public void testQueryProcessDefinitionVersions() { + String projectName = "test"; + Map resultMap = new HashMap<>(); + putMsg(resultMap, Status.SUCCESS); + resultMap.put(Constants.DATA_LIST, new PageInfo(1, 10)); + Mockito.when(processDefinitionVersionService.queryProcessDefinitionVersions( + user + , projectName + , 1 + , 10 + , 1)) + .thenReturn(resultMap); + Result result = processDefinitionController.queryProcessDefinitionVersions( + user + , projectName + , 1 + , 10 + , 1); + + Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode()); + } + + @Test + public void testSwitchProcessDefinitionVersion() { + String projectName = "test"; + Map resultMap = new HashMap<>(); + putMsg(resultMap, Status.SUCCESS); + Mockito.when(processDefinitionService.switchProcessDefinitionVersion( + user + , projectName + , 1 + , 10)) + .thenReturn(resultMap); + Result result = processDefinitionController.switchProcessDefinitionVersion( + user + , projectName + , 1 + , 10); + + Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode()); + } + + @Test + public void testDeleteProcessDefinitionVersion() { + String projectName = "test"; + Map resultMap = new HashMap<>(); + putMsg(resultMap, Status.SUCCESS); + 
Mockito.when(processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion( + user + , projectName + , 1 + , 10)) + .thenReturn(resultMap); + Result result = processDefinitionController.deleteProcessDefinitionVersion( + user + , projectName + , 1 + , 10); + + Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode()); + } + } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java index 23d3986481599735a60214675d47a0f2b40844e9..d791cc37174c2eed81279e45e83506d0f38dbb9c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.dto.ProcessMeta; @@ -98,6 +99,9 @@ public class ProcessDefinitionServiceTest { @Mock private TaskInstanceMapper taskInstanceMapper; + @Mock + private ProcessDefinitionVersionService processDefinitionVersionService; + private static final String SHELL_JSON = "{\n" + " \"globalParams\": [\n" + " \n" @@ -390,7 +394,11 @@ public class ProcessDefinitionServiceTest { // instance exit ProcessDefinition definition = getProcessDefinition(); definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"); - definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"); + definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\"," + + "\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234" + + "\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\"," + + "\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false}," + + "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"); definition.setConnects("[]"); Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition); @@ -432,7 +440,11 @@ public class ProcessDefinitionServiceTest { ProcessDefinition definition = getProcessDefinition(); 
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"); - definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"); + definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\"" + + ",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234" + + "\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\"," + + "\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false}," + + "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"); definition.setConnects("[]"); // check target project result == null @@ -568,14 +580,14 @@ public class ProcessDefinitionServiceTest { //FIXME has function exit code 1 when exception //process definition offline -// List schedules = new ArrayList<>(); -// Schedule schedule = getSchedule(); -// schedules.add(schedule); -// Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules); -// Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1); -// Map offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1", -// 46, ReleaseState.OFFLINE.getCode()); -// Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS)); + // List schedules = new ArrayList<>(); + // Schedule schedule = getSchedule(); + // schedules.add(schedule); + // 
Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules); + // Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1); + // Map offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1", + // 46, ReleaseState.OFFLINE.getCode()); + // Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS)); } @Test @@ -850,9 +862,12 @@ public class ProcessDefinitionServiceTest { String projectName = "project_test1"; Project project = getProject(projectName); + ProcessDefinition processDefinition = getProcessDefinition(); + Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); - Mockito.when(processService.findProcessDefineById(1)).thenReturn(getProcessDefinition()); + Mockito.when(processService.findProcessDefineById(1)).thenReturn(processDefinition); + Mockito.when(processDefinitionVersionService.addProcessDefinitionVersion(processDefinition)).thenReturn(1L); String sqlDependentJson = "{\n" + " \"globalParams\": [\n" diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java new file mode 100644 index 0000000000000000000000000000000000000000..169ef2bcfe601226b3736b134b510d70e1e66d9c --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionVersionServiceTest.java @@ -0,0 +1,274 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionVersionServiceImpl; +import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; + +import java.text.MessageFormat; +import java.util.HashMap; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; + +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.google.common.collect.Lists; + +@RunWith(MockitoJUnitRunner.class) +public class ProcessDefinitionVersionServiceTest { + + @InjectMocks + private ProcessDefinitionVersionServiceImpl 
processDefinitionVersionService; + + @Mock + private ProcessDefinitionVersionMapper processDefinitionVersionMapper; + + @Mock + private ProjectMapper projectMapper; + + @Mock + private ProjectServiceImpl projectService; + + @Test + public void testAddProcessDefinitionVersion() { + long expectedVersion = 5L; + ProcessDefinition processDefinition = getProcessDefinition(); + Mockito.when(processDefinitionVersionMapper + .queryMaxVersionByProcessDefinitionId(processDefinition.getId())) + .thenReturn(expectedVersion); + + long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition); + + Assert.assertEquals(expectedVersion + 1, version); + } + + @Test + @SuppressWarnings("unchecked") + public void testQueryProcessDefinitionVersions() { + // pageNo <= 0 + int pageNo = -1; + int pageSize = 10; + int processDefinitionId = 66; + + String projectName = "project_test1"; + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map resultMap1 = processDefinitionVersionService.queryProcessDefinitionVersions( + loginUser + , projectName + , pageNo + , pageSize + , processDefinitionId); + Assert.assertEquals(Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR + , resultMap1.get(Constants.STATUS)); + + // pageSize <= 0 + pageNo = 1; + pageSize = -1; + Map resultMap2 = processDefinitionVersionService.queryProcessDefinitionVersions( + loginUser + , projectName + , pageNo + , pageSize + , processDefinitionId); + Assert.assertEquals(Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR + , resultMap2.get(Constants.STATUS)); + + Map res = new HashMap<>(); + putMsg(res, Status.PROJECT_NOT_FOUNT); + Project project = getProject(projectName); + Mockito.when(projectMapper.queryByName(projectName)) + .thenReturn(project); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)) + .thenReturn(res); + + // project auth fail + pageNo = 
1; + pageSize = 10; + Map resultMap3 = processDefinitionVersionService.queryProcessDefinitionVersions( + loginUser + , projectName + , pageNo + , pageSize + , processDefinitionId); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, resultMap3.get(Constants.STATUS)); + + putMsg(res, Status.SUCCESS); + + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)) + .thenReturn(res); + + ProcessDefinitionVersion processDefinitionVersion = getProcessDefinitionVersion(getProcessDefinition()); + + Mockito.when(processDefinitionVersionMapper + .queryProcessDefinitionVersionsPaging(Mockito.any(Page.class), Mockito.eq(processDefinitionId))) + .thenReturn(new Page() + .setRecords(Lists.newArrayList(processDefinitionVersion))); + + Map resultMap4 = processDefinitionVersionService.queryProcessDefinitionVersions( + loginUser + , projectName + , pageNo + , pageSize + , processDefinitionId); + Assert.assertEquals(Status.SUCCESS, resultMap4.get(Constants.STATUS)); + Assert.assertEquals(processDefinitionVersion + , ((PageInfo) resultMap4.get(Constants.DATA_LIST)) + .getLists().get(0)); + } + + @Test + public void testQueryByProcessDefinitionIdAndVersion() { + + ProcessDefinitionVersion expectedProcessDefinitionVersion = + getProcessDefinitionVersion(getProcessDefinition()); + + int processDefinitionId = 66; + long version = 10; + Mockito.when(processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(processDefinitionId, version)) + .thenReturn(expectedProcessDefinitionVersion); + + ProcessDefinitionVersion processDefinitionVersion = processDefinitionVersionService + .queryByProcessDefinitionIdAndVersion(processDefinitionId, version); + + Assert.assertEquals(expectedProcessDefinitionVersion, processDefinitionVersion); + } + + @Test + public void testDeleteByProcessDefinitionIdAndVersion() { + String projectName = "project_test1"; + int processDefinitionId = 66; + long version = 10; + Project project = getProject(projectName); + 
Mockito.when(projectMapper.queryByName(projectName)) + .thenReturn(project); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + // project auth fail + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)) + .thenReturn(new HashMap<>()); + + Map resultMap1 = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion( + loginUser + , projectName + , processDefinitionId + , version); + + Assert.assertEquals(0, resultMap1.size()); + + Map res = new HashMap<>(); + putMsg(res, Status.SUCCESS); + + Mockito.when(processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion(processDefinitionId, version)) + .thenReturn(1); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)) + .thenReturn(res); + + Map resultMap2 = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion( + loginUser + , projectName + , processDefinitionId + , version); + + Assert.assertEquals(Status.SUCCESS, resultMap2.get(Constants.STATUS)); + + } + + /** + * get mock processDefinitionVersion by processDefinition + * + * @return processDefinitionVersion + */ + private ProcessDefinitionVersion getProcessDefinitionVersion(ProcessDefinition processDefinition) { + return ProcessDefinitionVersion + .newBuilder() + .processDefinitionId(processDefinition.getId()) + .version(1) + .processDefinitionJson(processDefinition.getProcessDefinitionJson()) + .description(processDefinition.getDescription()) + .locations(processDefinition.getLocations()) + .connects(processDefinition.getConnects()) + .timeout(processDefinition.getTimeout()) + .globalParams(processDefinition.getGlobalParams()) + .createTime(processDefinition.getUpdateTime()) + .receivers(processDefinition.getReceivers()) + .receiversCc(processDefinition.getReceiversCc()) + .resourceIds(processDefinition.getResourceIds()) + .build(); + } + + /** + * get mock processDefinition + * + * @return ProcessDefinition + */ + 
private ProcessDefinition getProcessDefinition() { + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setId(66); + processDefinition.setName("test_pdf"); + processDefinition.setProjectId(2); + processDefinition.setTenantId(1); + processDefinition.setDescription(""); + + return processDefinition; + } + + /** + * get mock Project + * + * @param projectName projectName + * @return Project + */ + private Project getProject(String projectName) { + Project project = new Project(); + project.setId(1); + project.setName(projectName); + project.setUserId(1); + return project; + } + + private void putMsg(Map result, Status status, Object... statusParams) { + result.put(Constants.STATUS, status); + if (statusParams != null && statusParams.length > 0) { + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.put(Constants.MSG, status.getMsg()); + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java index 82031ca9ebd0daf020ad84e2defd88ba112c6b41..5511f69aebe16f1455f03a020282de4d8bb2fbc3 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; import static org.mockito.ArgumentMatchers.eq; @@ -87,6 +88,9 @@ public class ProcessInstanceServiceTest { @Mock ProcessDefinitionService processDefinitionService; + @Mock + ProcessDefinitionVersionService processDefinitionVersionService; + @Mock ExecutorService execService; @@ -99,12 +103,11 @@ public class ProcessInstanceServiceTest { @Mock UsersService usersService; - private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," + - "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," + - "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + - "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + - "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; - + private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," + + "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," + + "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + + "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + + "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; @Test public void testQueryProcessInstanceList() { @@ -265,19 +268,16 @@ public class ProcessInstanceServiceTest { Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } - @Test - public void testParseLogForDependentResult() { - String logString = "[INFO] 2019-03-19 17:11:08.475 org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172] - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n" + - "[INFO] 2019-03-19 17:11:08.476 
org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172] - task : 223_10739_452334 exit status code : 0\n" + - "[root@node2 current]# "; - try { - Map resultMap = - processInstanceService.parseLogForDependentResult(logString); - Assert.assertEquals(1, resultMap.size()); - } catch (IOException e) { - - } + public void testParseLogForDependentResult() throws IOException { + String logString = "[INFO] 2019-03-19 17:11:08.475 org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172]" + + " - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n" + + "[INFO] 2019-03-19 17:11:08.476 org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172]" + + " - task : 223_10739_452334 exit status code : 0\n" + + "[root@node2 current]# "; + Map resultMap = + processInstanceService.parseLogForDependentResult(logString); + Assert.assertEquals(1, resultMap.size()); } @Test @@ -371,6 +371,7 @@ public class ProcessInstanceServiceTest { when(processService.getTenantForProcess(Mockito.anyInt(), Mockito.anyInt())).thenReturn(tenant); when(processService.updateProcessInstance(processInstance)).thenReturn(1); when(processDefinitionService.checkProcessNodeList(Mockito.any(), eq(shellJson))).thenReturn(result); + when(processDefinitionVersionService.addProcessDefinitionVersion(processDefinition)).thenReturn(1L); Map processInstanceFinishRes = processInstanceService.updateProcessInstance(loginUser, projectName, 1, shellJson, "2020-02-21 00:00:00", true, Flag.YES, "", ""); Assert.assertEquals(Status.UPDATE_PROCESS_INSTANCE_ERROR, processInstanceFinishRes.get(Constants.STATUS)); @@ -401,6 +402,7 @@ public class ProcessInstanceServiceTest { when(projectMapper.queryByName(projectName)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); when(processService.findProcessInstanceDetailById(1)).thenReturn(null); + when(projectService.checkProjectAndAuth(loginUser, 
project, projectName)).thenReturn(result); Map processInstanceNullRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, 1); Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS)); @@ -559,5 +561,4 @@ public class ProcessInstanceServiceTest { } } - } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java index 99ec76a7455c1d16eba0d5f471973123203b04cd..85b23b3fcb4a948168678da2a9a52620b50938e0 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; @@ -35,9 +36,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.junit.After; import org.junit.Assert; -import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; @@ -53,7 +52,6 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @RunWith(MockitoJUnitRunner.class) public class ProjectServiceTest { - private static final Logger logger = LoggerFactory.getLogger(ProjectServiceTest.class); @InjectMocks @@ -73,17 +71,6 @@ public class ProjectServiceTest { private String userName = "ProjectServiceTest"; - @Before - public void setUp() { - - } - - - @After - public void after() { - - } - @Test public void testCreateProject() { @@ -105,7 +92,6 @@ public class ProjectServiceTest { logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - } @Test @@ 
-148,6 +134,21 @@ public class ProjectServiceTest { logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + Map result2 = new HashMap<>(); + + result2 = projectService.checkProjectAndAuth(loginUser, null, projectName); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result2.get(Constants.STATUS)); + + Project project1 = getProject(); + // USER_NO_OPERATION_PROJECT_PERM + project1.setUserId(2); + result2 = projectService.checkProjectAndAuth(loginUser, project1, projectName); + Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result2.get(Constants.STATUS)); + + //success + project1.setUserId(1); + projectService.checkProjectAndAuth(loginUser, project1, projectName); + } @Test @@ -225,7 +226,6 @@ public class ProjectServiceTest { logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - } @Test @@ -322,7 +322,6 @@ public class ProjectServiceTest { Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); } - private Project getProject() { Project project = new Project(); project.setId(1); @@ -337,7 +336,6 @@ public class ProjectServiceTest { return list; } - /** * create admin user */ @@ -369,13 +367,11 @@ public class ProjectServiceTest { return list; } - private String getDesc() { - return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe" + - ".deleteProjectRelation(projectId,userId)projectUserMappe" + - "r.deleteProjectRelation(projectId,userId)projectUserMapper" + - ".deleteProjectRelation(projectId,userId)projectUserMapper.deleteProjectRelation(projectId,userId)"; + return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe" + + ".deleteProjectRelation(projectId,userId)projectUserMappe" + + "r.deleteProjectRelation(projectId,userId)projectUserMapper" + + ".deleteProjectRelation(projectId,userId)projectUserMapper.deleteProjectRelation(projectId,userId)"; } - } diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java index f93ed05535768f0518ebe293e82845c58f0d0769..16547b3fd70df9d028e9e17e2ff7887c496543ab 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.api.service; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; @@ -74,15 +75,11 @@ public class TaskInstanceServiceTest { @Mock TaskInstanceMapper taskInstanceMapper; - @Mock - ProcessInstanceService processInstanceService; - @Mock UsersService usersService; @Test public void queryTaskListPaging() { - String projectName = "project_test1"; User loginUser = getAdminUser(); Map result = new HashMap<>(); @@ -95,7 +92,6 @@ public class TaskInstanceServiceTest { "test_user", "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); - //project putMsg(result, Status.SUCCESS, projectName); Project project = getProject(projectName); @@ -133,6 +129,23 @@ public class TaskInstanceServiceTest { Map executorNullRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", "test_user", "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); Assert.assertEquals(Status.SUCCESS, executorNullRes.get(Constants.STATUS)); + + //start/end date null + when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), + eq(0), Mockito.any(), eq("192.168.xx.xx"), any(), any())).thenReturn(pageReturn); + Map 
executorNullDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + "", null, null, "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); + Assert.assertEquals(Status.SUCCESS, executorNullDateRes.get(Constants.STATUS)); + + //start date error format + when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), + eq(0), Mockito.any(), eq("192.168.xx.xx"), any(), any())).thenReturn(pageReturn); + Map executorErrorStartDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + "", "error date", null, "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, executorErrorStartDateRes.get(Constants.STATUS)); + Map executorErrorEndDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + "", null, "error date", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, executorErrorEndDateRes.get(Constants.STATUS)); } /** diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java index f7f506b69beb5de0d39e0dc42a464460718f9f5d..5dcf59cf74cdb5875f5c84b81db313341b1aa15c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java @@ -14,14 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.service; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; -import java.util.Map; +package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.TenantServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -35,6 +32,12 @@ import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; + +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -54,54 +57,61 @@ public class TenantServiceTest { private static final Logger logger = LoggerFactory.getLogger(TenantServiceTest.class); @InjectMocks - private TenantService tenantService; + private TenantServiceImpl tenantService; + @Mock private TenantMapper tenantMapper; + @Mock private ProcessDefinitionMapper processDefinitionMapper; + @Mock private ProcessInstanceMapper processInstanceMapper; + @Mock private UserMapper userMapper; - private String tenantCode = "TenantServiceTest"; - private String tenantName = "TenantServiceTest"; + private static final String tenantCode = "TenantServiceTest"; + private static final String tenantName = "TenantServiceTest"; @Test - public void testCreateTenant(){ + public void testCreateTenant() { User loginUser = getLoginUser(); Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(getList()); try { //check tenantCode - Map result = tenantService.createTenant(getLoginUser(), "%!1111", tenantName, 1, "TenantServiceTest"); + Map result = + 
tenantService.createTenant(getLoginUser(), "%!1111", tenantName, 1, "TenantServiceTest"); logger.info(result.toString()); - Assert.assertEquals(Status.VERIFY_TENANT_CODE_ERROR,result.get(Constants.STATUS)); + Assert.assertEquals(Status.VERIFY_TENANT_CODE_ERROR, result.get(Constants.STATUS)); //check exist result = tenantService.createTenant(loginUser, tenantCode, tenantName, 1, "TenantServiceTest"); logger.info(result.toString()); - Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR,result.get(Constants.STATUS)); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); // success result = tenantService.createTenant(loginUser, "test", "test", 1, "TenantServiceTest"); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); - + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + } catch (Exception e) { - logger.error("create tenant error",e); - Assert.assertTrue(false); + logger.error("create tenant error", e); + Assert.fail(); } } @Test - public void testQueryTenantListPage(){ + @SuppressWarnings("unchecked") + public void testQueryTenantListPage() { - IPage page = new Page<>(1,10); + IPage page = new Page<>(1, 10); page.setRecords(getList()); page.setTotal(1L); - Mockito.when(tenantMapper.queryTenantPaging(Mockito.any(Page.class), Mockito.eq("TenantServiceTest"))).thenReturn(page); + Mockito.when(tenantMapper.queryTenantPaging(Mockito.any(Page.class), Mockito.eq("TenantServiceTest"))) + .thenReturn(page); Map result = tenantService.queryTenantList(getLoginUser(), "TenantServiceTest", 1, 10); logger.info(result.toString()); PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); @@ -110,87 +120,71 @@ public class TenantServiceTest { } @Test - public void testUpdateTenant(){ + public void testUpdateTenant() { Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); try { // id not exist - Map result = tenantService.updateTenant(getLoginUser(), 912222, 
tenantCode, tenantName, 1, "desc"); + Map result = + tenantService.updateTenant(getLoginUser(), 912222, tenantCode, tenantName, 1, "desc"); logger.info(result.toString()); // success - Assert.assertEquals(Status.TENANT_NOT_EXIST,result.get(Constants.STATUS)); + Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS)); result = tenantService.updateTenant(getLoginUser(), 1, tenantCode, "TenantServiceTest001", 1, "desc"); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } catch (Exception e) { - logger.error("update tenant error",e); - Assert.assertTrue(false); + logger.error("update tenant error", e); + Assert.fail(); } } @Test - public void testDeleteTenantById(){ + public void testDeleteTenantById() { Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); - Mockito.when(processInstanceMapper.queryByTenantIdAndStatus(1, Constants.NOT_TERMINATED_STATES)).thenReturn(getInstanceList()); + Mockito.when(processInstanceMapper.queryByTenantIdAndStatus(1, Constants.NOT_TERMINATED_STATES)) + .thenReturn(getInstanceList()); Mockito.when(processDefinitionMapper.queryDefinitionListByTenant(2)).thenReturn(getDefinitionsList()); - Mockito.when( userMapper.queryUserListByTenant(3)).thenReturn(getUserList()); + Mockito.when(userMapper.queryUserListByTenant(3)).thenReturn(getUserList()); try { //TENANT_NOT_EXIST - Map result = tenantService.deleteTenantById(getLoginUser(),12); + Map result = tenantService.deleteTenantById(getLoginUser(), 12); logger.info(result.toString()); - Assert.assertEquals(Status.TENANT_NOT_EXIST,result.get(Constants.STATUS)); + Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS)); //DELETE_TENANT_BY_ID_FAIL - result = tenantService.deleteTenantById(getLoginUser(),1); + result = tenantService.deleteTenantById(getLoginUser(), 1); logger.info(result.toString()); - 
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL,result.get(Constants.STATUS)); + Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL, result.get(Constants.STATUS)); //DELETE_TENANT_BY_ID_FAIL_DEFINES Mockito.when(tenantMapper.queryById(2)).thenReturn(getTenant(2)); - result = tenantService.deleteTenantById(getLoginUser(),2); + result = tenantService.deleteTenantById(getLoginUser(), 2); logger.info(result.toString()); - Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_DEFINES,result.get(Constants.STATUS)); + Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, result.get(Constants.STATUS)); //DELETE_TENANT_BY_ID_FAIL_USERS Mockito.when(tenantMapper.queryById(3)).thenReturn(getTenant(3)); - result = tenantService.deleteTenantById(getLoginUser(),3); + result = tenantService.deleteTenantById(getLoginUser(), 3); logger.info(result.toString()); - Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_USERS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_USERS, result.get(Constants.STATUS)); // success Mockito.when(tenantMapper.queryById(4)).thenReturn(getTenant(4)); - result = tenantService.deleteTenantById(getLoginUser(),4); + result = tenantService.deleteTenantById(getLoginUser(), 4); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } catch (Exception e) { - logger.error("delete tenant error",e); - Assert.assertTrue(false); + logger.error("delete tenant error", e); + Assert.fail(); } } @Test - public void testQueryTenantList(){ - - Mockito.when( tenantMapper.selectList(null)).thenReturn(getList()); - Map result = tenantService.queryTenantList(getLoginUser()); - logger.info(result.toString()); - List tenantList = (List) result.get(Constants.DATA_LIST); - Assert.assertTrue(CollectionUtils.isNotEmpty(tenantList)); - - Mockito.when( tenantMapper.queryByTenantCode("1")).thenReturn(getList()); - Map 
successRes = tenantService.queryTenantList("1"); - Assert.assertEquals(Status.SUCCESS,successRes.get(Constants.STATUS)); - - Mockito.when( tenantMapper.queryByTenantCode("1")).thenReturn(null); - Map tenantNotExistRes = tenantService.queryTenantList("1"); - Assert.assertEquals(Status.TENANT_NOT_EXIST,tenantNotExistRes.get(Constants.STATUS)); - } - - @Test - public void testVerifyTenantCode(){ + public void testVerifyTenantCode() { Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(getList()); // tenantCode not exist @@ -209,12 +203,10 @@ public class TenantServiceTest { Assert.assertEquals(resultString, result.getMsg()); } - /** * get user - * @return */ - private User getLoginUser(){ + private User getLoginUser() { User loginUser = new User(); loginUser.setUserType(UserType.ADMIN_USER); @@ -223,9 +215,8 @@ public class TenantServiceTest { /** * get list - * @return */ - private List getList(){ + private List getList() { List tenantList = new ArrayList<>(); tenantList.add(getTenant()); return tenantList; @@ -233,16 +224,15 @@ public class TenantServiceTest { /** * get tenant - * @return */ - private Tenant getTenant(){ + private Tenant getTenant() { return getTenant(1); } + /** * get tenant - * @return */ - private Tenant getTenant(int id){ + private Tenant getTenant(int id) { Tenant tenant = new Tenant(); tenant.setId(id); tenant.setTenantCode(tenantCode); @@ -250,25 +240,24 @@ public class TenantServiceTest { return tenant; } - private List getUserList(){ + private List getUserList() { List userList = new ArrayList<>(); userList.add(getLoginUser()); return userList; } - private List getInstanceList(){ + private List getInstanceList() { List processInstances = new ArrayList<>(); ProcessInstance processInstance = new ProcessInstance(); processInstances.add(processInstance); return processInstances; } - private List getDefinitionsList(){ + private List getDefinitionsList() { List processDefinitions = new ArrayList<>(); ProcessDefinition 
processDefinition = new ProcessDefinition(); processDefinitions.add(processDefinition); return processDefinitions; } - } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertEvent.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertEvent.java new file mode 100644 index 0000000000000000000000000000000000000000..0c8ed89fd7798549edbdcb8eac0e9f240df75bd0 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertEvent.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.common.enums; + +public enum AlertEvent { + + SERVER_DOWN,TIME_OUT +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertWarnLevel.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertWarnLevel.java new file mode 100644 index 0000000000000000000000000000000000000000..71579a9611fac3739596fe3698368a1a286ce21c --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AlertWarnLevel.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.common.enums; + +public enum AlertWarnLevel { + + MIDDLE,SERIOUS +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java index 65a078778e4057c7d544395611db878f2a3cce70..3e88a84ec2afc7a95e4fbf1434c5d4e9dbef1560 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java @@ -32,7 +32,7 @@ import java.io.IOException; import java.util.*; import static com.fasterxml.jackson.databind.DeserializationFeature.*; - +import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS; /** * json utils @@ -48,6 +48,7 @@ public class JSONUtils { .configure(FAIL_ON_UNKNOWN_PROPERTIES, false) .configure(ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true) .configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true) + .configure(REQUIRE_SETTERS_FOR_GETTERS, true) .setTimeZone(TimeZone.getDefault()) ; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java index 685d72c1e867587908a40f3d925b6ef834034a92..cd101f06b615d3d6ae3687ff4689ec4bee1ed470 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java @@ -17,22 +17,25 @@ package org.apache.dolphinscheduler.dao; +import org.apache.dolphinscheduler.common.enums.AlertEvent; import org.apache.dolphinscheduler.common.enums.AlertStatus; import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.common.enums.AlertWarnLevel; import org.apache.dolphinscheduler.common.enums.ShowType; import 
org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; import org.apache.dolphinscheduler.dao.entity.Alert; +import org.apache.dolphinscheduler.dao.entity.ProcessAlertContent; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.ServerAlertContent; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AlertMapper; import org.apache.dolphinscheduler.dao.mapper.UserAlertGroupMapper; import java.util.ArrayList; import java.util.Date; -import java.util.LinkedHashMap; import java.util.List; import org.slf4j.Logger; @@ -103,14 +106,12 @@ public class AlertDao extends AbstractBaseDao { */ public void sendServerStopedAlert(int alertgroupId, String host, String serverType) { Alert alert = new Alert(); - List serverStopList = new ArrayList<>(1); - LinkedHashMap serverStopedMap = new LinkedHashMap(); - serverStopedMap.put("type", serverType); - serverStopedMap.put("host", host); - serverStopedMap.put("event", "server down"); - serverStopedMap.put("warning level", "serious"); - serverStopList.add(serverStopedMap); - String content = JSONUtils.toJsonString(serverStopList); + List serverAlertContents = new ArrayList<>(1); + ServerAlertContent serverStopAlertContent = ServerAlertContent.newBuilder(). + type(serverType).host(host).event(AlertEvent.SERVER_DOWN).warningLevel(AlertWarnLevel.SERIOUS). 
+ build(); + serverAlertContents.add(serverStopAlertContent); + String content = JSONUtils.toJsonString(serverAlertContents); alert.setTitle("Fault tolerance warning"); saveTaskTimeoutAlert(alert, content, alertgroupId, null, null); } @@ -126,14 +127,15 @@ public class AlertDao extends AbstractBaseDao { String receivers = processDefinition.getReceivers(); String receiversCc = processDefinition.getReceiversCc(); Alert alert = new Alert(); - List processTimeoutList = new ArrayList<>(1); - LinkedHashMap processTimeoutMap = new LinkedHashMap(); - processTimeoutMap.put("id", String.valueOf(processInstance.getId())); - processTimeoutMap.put("name", processInstance.getName()); - processTimeoutMap.put("event", "timeout"); - processTimeoutMap.put("warnLevel", "middle"); - processTimeoutList.add(processTimeoutMap); - String content = JSONUtils.toJsonString(processTimeoutList); + List processAlertContentList = new ArrayList<>(1); + ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder() + .processId(processInstance.getId()) + .processName(processInstance.getName()) + .event(AlertEvent.TIME_OUT) + .warningLevel(AlertWarnLevel.MIDDLE) + .build(); + processAlertContentList.add(processAlertContent); + String content = JSONUtils.toJsonString(processAlertContentList); alert.setTitle("Process Timeout Warn"); saveTaskTimeoutAlert(alert, content, alertgroupId, receivers, receiversCc); } @@ -169,16 +171,17 @@ public class AlertDao extends AbstractBaseDao { public void sendTaskTimeoutAlert(int alertgroupId, String receivers, String receiversCc, int processInstanceId, String processInstanceName, int taskId, String taskName) { Alert alert = new Alert(); - List taskTimeoutList = new ArrayList<>(1); - LinkedHashMap taskTimeoutMap = new LinkedHashMap(); - taskTimeoutMap.put("process instance id", String.valueOf(processInstanceId)); - taskTimeoutMap.put("process name", processInstanceName); - taskTimeoutMap.put("task id", String.valueOf(taskId)); - taskTimeoutMap.put("task 
name", taskName); - taskTimeoutMap.put("event", "timeout"); - taskTimeoutMap.put("warnLevel", "middle"); - taskTimeoutList.add(taskTimeoutMap); - String content = JSONUtils.toJsonString(taskTimeoutList); + List processAlertContentList = new ArrayList<>(1); + ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder() + .processId(processInstanceId) + .processName(processInstanceName) + .taskId(taskId) + .taskName(taskName) + .event(AlertEvent.TIME_OUT) + .warningLevel(AlertWarnLevel.MIDDLE) + .build(); + processAlertContentList.add(processAlertContent); + String content = JSONUtils.toJsonString(processAlertContentList); alert.setTitle("Task Timeout Warn"); saveTaskTimeoutAlert(alert, content, alertgroupId, receivers, receiversCc); } @@ -210,4 +213,5 @@ public class AlertDao extends AbstractBaseDao { public AlertMapper getAlertMapper() { return alertMapper; } + } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessAlertContent.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessAlertContent.java new file mode 100644 index 0000000000000000000000000000000000000000..71058f4af8d93ea53db5a22a1390f3e1112406cf --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessAlertContent.java @@ -0,0 +1,236 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.entity; + +import org.apache.dolphinscheduler.common.enums.AlertEvent; +import org.apache.dolphinscheduler.common.enums.AlertWarnLevel; +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.Flag; + +import java.io.Serializable; +import java.util.Date; + +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonProperty; + +@JsonInclude(Include.NON_NULL) +public class ProcessAlertContent implements Serializable { + @JsonProperty("processId") + private int processId; + @JsonProperty("processName") + private String processName; + @JsonProperty("processType") + private CommandType processType; + @JsonProperty("processState") + private ExecutionStatus processState; + @JsonProperty("recovery") + private Flag recovery; + @JsonProperty("runTimes") + private int runTimes; + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + @JsonProperty("processStartTime") + private Date processStartTime; + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + @JsonProperty("processEndTime") + private Date processEndTime; + @JsonProperty("processHost") + private String processHost; + @JsonProperty("taskId") + private int taskId; + @JsonProperty("taskName") + private String taskName; + @JsonProperty("event") + private AlertEvent 
event; + @JsonProperty("warnLevel") + private AlertWarnLevel warnLevel; + @JsonProperty("taskType") + private String taskType; + @JsonProperty("retryTimes") + private int retryTimes; + @JsonProperty("taskState") + private ExecutionStatus taskState; + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + @JsonProperty("taskStartTime") + private Date taskStartTime; + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + @JsonProperty("taskEndTime") + private Date taskEndTime; + @JsonProperty("taskHost") + private String taskHost; + @JsonProperty("logPath") + private String logPath; + + private ProcessAlertContent(Builder builder) { + this.processId = builder.processId; + this.processName = builder.processName; + this.processType = builder.processType; + this.recovery = builder.recovery; + this.processState = builder.processState; + this.runTimes = builder.runTimes; + this.processStartTime = builder.processStartTime; + this.processEndTime = builder.processEndTime; + this.processHost = builder.processHost; + this.taskId = builder.taskId; + this.taskName = builder.taskName; + this.event = builder.event; + this.warnLevel = builder.warnLevel; + this.taskType = builder.taskType; + this.taskState = builder.taskState; + this.taskStartTime = builder.taskStartTime; + this.taskEndTime = builder.taskEndTime; + this.taskHost = builder.taskHost; + this.logPath = builder.logPath; + this.retryTimes = builder.retryTimes; + + } + + public static Builder newBuilder() { + return new Builder(); + } + + public static class Builder { + + private int processId; + private String processName; + private CommandType processType; + private Flag recovery; + private ExecutionStatus processState; + private int runTimes; + private Date processStartTime; + private Date processEndTime; + private String processHost; + private int taskId; + private String taskName; + private AlertEvent event; + private AlertWarnLevel warnLevel; + private String taskType; + private int 
retryTimes; + private ExecutionStatus taskState; + private Date taskStartTime; + private Date taskEndTime; + private String taskHost; + private String logPath; + + public Builder processId(int processId) { + this.processId = processId; + return this; + } + + public Builder processName(String processName) { + this.processName = processName; + return this; + } + + public Builder processType(CommandType processType) { + this.processType = processType; + return this; + } + + public Builder recovery(Flag recovery) { + this.recovery = recovery; + return this; + } + + public Builder processState(ExecutionStatus processState) { + this.processState = processState; + return this; + } + + public Builder runTimes(int runTimes) { + this.runTimes = runTimes; + return this; + } + + public Builder processStartTime(Date processStartTime) { + this.processStartTime = processStartTime; + return this; + } + + public Builder processEndTime(Date processEndTime) { + this.processEndTime = processEndTime; + return this; + } + + public Builder processHost(String processHost) { + this.processHost = processHost; + return this; + } + + public Builder taskId(int taskId) { + this.taskId = taskId; + return this; + } + + public Builder taskName(String taskName) { + this.taskName = taskName; + return this; + } + + public Builder event(AlertEvent event) { + this.event = event; + return this; + } + + public Builder warningLevel(AlertWarnLevel warnLevel) { + this.warnLevel = warnLevel; + return this; + } + + public Builder taskType(String taskType) { + this.taskType = taskType; + return this; + } + + public Builder retryTimes(int retryTimes) { + this.retryTimes = retryTimes; + return this; + } + + public Builder taskState(ExecutionStatus taskState) { + this.taskState = taskState; + return this; + } + + public Builder taskStartTime(Date taskStartTime) { + this.taskStartTime = taskStartTime; + return this; + } + + public Builder taskEndTime(Date taskEndTime) { + this.taskEndTime = taskEndTime; + return 
this; + } + + public Builder taskHost(String taskHost) { + this.taskHost = taskHost; + return this; + } + + public Builder logPath(String logPath) { + this.logPath = logPath; + return this; + } + + public ProcessAlertContent build() { + return new ProcessAlertContent(this); + } + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java index 36484829965c800f6db986d41c70fffecc20ca5b..56f6cfe905da51c7dad619f1077da16e3b13ae35 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java @@ -14,18 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.dao.entity; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import com.baomidou.mybatisplus.core.toolkit.StringUtils; -import com.fasterxml.jackson.annotation.JsonFormat; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.process.Property; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import java.util.ArrayList; import java.util.Date; @@ -33,6 +28,13 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import 
com.baomidou.mybatisplus.core.toolkit.StringUtils; +import com.fasterxml.jackson.annotation.JsonFormat; + /** * process definition @@ -54,7 +56,7 @@ public class ProcessDefinition { /** * version */ - private int version; + private long version; /** * release state : online/offline @@ -96,13 +98,13 @@ public class ProcessDefinition { /** * create time */ - @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") private Date createTime; /** * update time */ - @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") private Date updateTime; /** @@ -182,11 +184,11 @@ public class ProcessDefinition { this.name = name; } - public int getVersion() { + public long getVersion() { return version; } - public void setVersion(int version) { + public void setVersion(long version) { this.version = version; } @@ -276,9 +278,9 @@ public class ProcessDefinition { } public void setGlobalParams(String globalParams) { - if (globalParams == null){ + if (globalParams == null) { this.globalParamList = new ArrayList<>(); - }else { + } else { this.globalParamList = JSONUtils.toList(globalParams, Property.class); } this.globalParams = globalParams; @@ -295,7 +297,7 @@ public class ProcessDefinition { public Map getGlobalParamMap() { if (globalParamMap == null && StringUtils.isNotEmpty(globalParams)) { - List propList = JSONUtils.toList(globalParams,Property.class); + List propList = JSONUtils.toList(globalParams, Property.class); globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinitionVersion.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinitionVersion.java new file mode 100644 index 
0000000000000000000000000000000000000000..26779ba92597fd69a298f29f20632840f9cf08f2 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinitionVersion.java @@ -0,0 +1,329 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.entity; + +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import com.fasterxml.jackson.annotation.JsonFormat; + + +/** + * process definition version + */ +@TableName("t_ds_process_definition_version") +public class ProcessDefinitionVersion { + + /** + * id + */ + @TableId(value = "id", type = IdType.AUTO) + private int id; + + /** + * process definition id + */ + private int processDefinitionId; + + /** + * version + */ + private long version; + + /** + * definition json string + */ + private String processDefinitionJson; + + /** + * description + */ + private String description; + + /** + * receivers + */ + private String receivers; + + /** + * receivers cc + */ + private String receiversCc; + + /** + * process warning time out. 
unit: minute + */ + private int timeout; + + /** + * resource ids + */ + private String resourceIds; + + /** + * create time + */ + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + + /** + * user defined parameters + */ + private String globalParams; + + /** + * locations array for web + */ + private String locations; + + /** + * connects array for web + */ + private String connects; + + public String getGlobalParams() { + return globalParams; + } + + public void setGlobalParams(String globalParams) { + this.globalParams = globalParams; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getProcessDefinitionId() { + return processDefinitionId; + } + + public void setProcessDefinitionId(int processDefinitionId) { + this.processDefinitionId = processDefinitionId; + } + + public long getVersion() { + return version; + } + + public void setVersion(long version) { + this.version = version; + } + + public String getProcessDefinitionJson() { + return processDefinitionJson; + } + + public void setProcessDefinitionJson(String processDefinitionJson) { + this.processDefinitionJson = processDefinitionJson; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getLocations() { + return locations; + } + + public void setLocations(String locations) { + this.locations = locations; + } + + public String getConnects() { + return connects; + } + + public void setConnects(String connects) { + this.connects = connects; + } + + public String getReceivers() { + return receivers; + } + + public void setReceivers(String receivers) { + this.receivers = receivers; + } + + public String getReceiversCc() { + return receiversCc; 
+ } + + public void setReceiversCc(String receiversCc) { + this.receiversCc = receiversCc; + } + + public int getTimeout() { + return timeout; + } + + public void setTimeout(int timeout) { + this.timeout = timeout; + } + + public String getResourceIds() { + return resourceIds; + } + + public void setResourceIds(String resourceIds) { + this.resourceIds = resourceIds; + } + + @Override + public String toString() { + return "ProcessDefinitionVersion{" + + "id=" + id + + ", processDefinitionId=" + processDefinitionId + + ", version=" + version + + ", processDefinitionJson='" + processDefinitionJson + '\'' + + ", description='" + description + '\'' + + ", globalParams='" + globalParams + '\'' + + ", createTime=" + createTime + + ", locations='" + locations + '\'' + + ", connects='" + connects + '\'' + + ", receivers='" + receivers + '\'' + + ", receiversCc='" + receiversCc + '\'' + + ", timeout=" + timeout + + ", resourceIds='" + resourceIds + '\'' + + '}'; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public static final class Builder { + private int id; + private int processDefinitionId; + private long version; + private String processDefinitionJson; + private String description; + private String globalParams; + private Date createTime; + private String locations; + private String connects; + private String receivers; + private String receiversCc; + private int timeout; + private String resourceIds; + + private Builder() { + } + + public Builder id(int id) { + this.id = id; + return this; + } + + public Builder processDefinitionId(int processDefinitionId) { + this.processDefinitionId = processDefinitionId; + return this; + } + + public Builder version(long version) { + this.version = version; + return this; + } + + public Builder processDefinitionJson(String processDefinitionJson) { + this.processDefinitionJson = processDefinitionJson; + return this; + } + + public Builder description(String description) { + this.description = 
description; + return this; + } + + public Builder globalParams(String globalParams) { + this.globalParams = globalParams; + return this; + } + + public Builder createTime(Date createTime) { + this.createTime = createTime; + return this; + } + + public Builder locations(String locations) { + this.locations = locations; + return this; + } + + public Builder connects(String connects) { + this.connects = connects; + return this; + } + + public Builder receivers(String receivers) { + this.receivers = receivers; + return this; + } + + public Builder receiversCc(String receiversCc) { + this.receiversCc = receiversCc; + return this; + } + + public Builder timeout(int timeout) { + this.timeout = timeout; + return this; + } + + public Builder resourceIds(String resourceIds) { + this.resourceIds = resourceIds; + return this; + } + + public ProcessDefinitionVersion build() { + ProcessDefinitionVersion processDefinitionVersion = new ProcessDefinitionVersion(); + processDefinitionVersion.setId(id); + processDefinitionVersion.setProcessDefinitionId(processDefinitionId); + processDefinitionVersion.setVersion(version); + processDefinitionVersion.setProcessDefinitionJson(processDefinitionJson); + processDefinitionVersion.setDescription(description); + processDefinitionVersion.setGlobalParams(globalParams); + processDefinitionVersion.setCreateTime(createTime); + processDefinitionVersion.setLocations(locations); + processDefinitionVersion.setConnects(connects); + processDefinitionVersion.setReceivers(receivers); + processDefinitionVersion.setReceiversCc(receiversCc); + processDefinitionVersion.setTimeout(timeout); + processDefinitionVersion.setResourceIds(resourceIds); + return processDefinitionVersion; + } + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ServerAlertContent.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ServerAlertContent.java new file mode 100644 index 
0000000000000000000000000000000000000000..211863f73f4d9b402865832f95789c189b3ff087 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ServerAlertContent.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.entity; + +import org.apache.dolphinscheduler.common.enums.AlertEvent; +import org.apache.dolphinscheduler.common.enums.AlertWarnLevel; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class ServerAlertContent { + + /** + * server type :master or worker + */ + @JsonProperty("type") + final String type; + @JsonProperty("host") + final String host; + @JsonProperty("event") + final AlertEvent event; + @JsonProperty("warningLevel") + final AlertWarnLevel warningLevel; + + private ServerAlertContent(Builder builder) { + this.type = builder.type; + this.host = builder.host; + this.event = builder.event; + this.warningLevel = builder.warningLevel; + + } + + public static Builder newBuilder() { + return new Builder(); + } + + public static class Builder { + private String type; + + private String host; + + private AlertEvent event; + + private AlertWarnLevel warningLevel; + + public Builder type(String type) { + this.type = type; + return this; + } + + public Builder host(String host) { + this.host = host; + return this; + } + + public Builder event(AlertEvent event) { + this.event = event; + return this; + } + + public Builder warningLevel(AlertWarnLevel warningLevel) { + this.warningLevel = warningLevel; + return this; + } + + public ServerAlertContent build() { + return new ServerAlertContent(this); + } + } + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java index 4df93f2e9febb38b1dd004e01729a96801485165..86e3172f23f8185e370a8f7304c3a74bd05aeea0 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java @@ -14,18 +14,21 @@ * See the License for the specific language 
governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.dao.mapper; import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; + import org.apache.ibatis.annotations.MapKey; import org.apache.ibatis.annotations.Param; import java.util.List; import java.util.Map; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; + /** * process definition mapper interface */ @@ -34,6 +37,7 @@ public interface ProcessDefinitionMapper extends BaseMapper { /** * query process definition by name + * * @param projectId projectId * @param name name * @return process definition @@ -43,6 +47,7 @@ public interface ProcessDefinitionMapper extends BaseMapper { /** * query process definition by id + * * @param processDefineId processDefineId * @return process definition */ @@ -50,6 +55,7 @@ public interface ProcessDefinitionMapper extends BaseMapper { /** * process definition page + * * @param page page * @param searchVal searchVal * @param userId userId @@ -65,6 +71,7 @@ public interface ProcessDefinitionMapper extends BaseMapper { /** * query all process definition list + * * @param projectId projectId * @return process definition list */ @@ -72,6 +79,7 @@ public interface ProcessDefinitionMapper extends BaseMapper { /** * query process definition by ids + * * @param ids ids * @return process definition list */ @@ -79,6 +87,7 @@ public interface ProcessDefinitionMapper extends BaseMapper { /** * query process definition by tenant + * * @param tenantId tenantId * @return process definition list */ @@ -86,6 +95,7 @@ public interface ProcessDefinitionMapper extends BaseMapper { /** * count process definition group by user + * * @param userId userId * @param projectIds projectIds * @param isAdmin isAdmin @@ 
-98,6 +108,7 @@ public interface ProcessDefinitionMapper extends BaseMapper { /** * list all resource ids + * * @return resource ids list */ @MapKey("id") @@ -105,8 +116,17 @@ public interface ProcessDefinitionMapper extends BaseMapper { /** * list all resource ids by user id + * * @return resource ids list */ @MapKey("id") List> listResourcesByUser(@Param("userId") Integer userId); + + /** + * update process definition version by process definitionId + * + * @param processDefinitionId process definition id + * @param version version + */ + void updateVersionByProcessDefinitionId(@Param("processDefinitionId") int processDefinitionId, @Param("version") long version); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.java new file mode 100644 index 0000000000000000000000000000000000000000..27efda432738d50cdb00a4586998208d5c1bfd63 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion; + +import org.apache.ibatis.annotations.Param; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * process definition mapper interface + */ +public interface ProcessDefinitionVersionMapper extends BaseMapper { + + /** + * query max version by process definition id + * + * @param processDefinitionId process definition id + * @return the max version of this process definition id + */ + Long queryMaxVersionByProcessDefinitionId(@Param("processDefinitionId") int processDefinitionId); + + /** + * query the paging process definition version list by pagination info + * + * @param page pagination info + * @param processDefinitionId process definition id + * @return the paging process definition version list + */ + IPage queryProcessDefinitionVersionsPaging(Page page, + @Param("processDefinitionId") int processDefinitionId); + + /** + * query the certain process definition version info by process definition id and version number + * + * @param processDefinitionId process definition id + * @param version version number + * @return the process definition version info + */ + ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(@Param("processDefinitionId") int processDefinitionId, @Param("version") long version); + + /** + * delete the certain process definition version by process definition id and version number + * + * @param processDefinitionId process definition id + * @param version version number + * @return delete result + */ + int deleteByProcessDefinitionIdAndVersion(@Param("processDefinitionId") int processDefinitionId, @Param("version") long version); + +} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml 
b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml index 3e538a23e06639aa80312ca5e5a98b69485cfdbd..0481f7deab4f7ad18307d162b52e906e6a79a5f9 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml @@ -102,4 +102,10 @@ FROM t_ds_process_definition WHERE user_id = #{userId} and release_state = 1 and resource_ids is not null and resource_ids != '' + + + update t_ds_process_definition + set version = #{version} + where id = #{processDefinitionId} + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.xml new file mode 100644 index 0000000000000000000000000000000000000000..b2d0b859825d6a40389b943308b58d7dbb5f26d4 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.xml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + delete + from t_ds_process_definition_version + where process_definition_id = #{processDefinitionId} + and version = #{version} + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java index ad91e79fb59d06a41d85c0a4cff50e6928b69daf..c58c92b3bbbdbad6e708f22ad378b5ad3554b532 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java @@ -14,14 +14,22 @@ * See the License for the 
specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.dao.mapper; +package org.apache.dolphinscheduler.dao.mapper; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.Queue; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Date; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -31,9 +39,8 @@ import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.transaction.annotation.Transactional; -import java.util.Date; -import java.util.List; -import java.util.Map; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @RunWith(SpringRunner.class) @SpringBootTest @@ -59,9 +66,10 @@ public class ProcessDefinitionMapperTest { /** * insert + * * @return ProcessDefinition */ - private ProcessDefinition insertOne(){ + private ProcessDefinition insertOne() { //insertOne ProcessDefinition processDefinition = new ProcessDefinition(); processDefinition.setName("def 1"); @@ -77,9 +85,10 @@ public class ProcessDefinitionMapperTest { /** * insert + * * @return ProcessDefinition */ - private ProcessDefinition insertTwo(){ + private ProcessDefinition insertTwo() { //insertOne ProcessDefinition processDefinition = new ProcessDefinition(); 
processDefinition.setName("def 2"); @@ -95,7 +104,7 @@ public class ProcessDefinitionMapperTest { * test update */ @Test - public void testUpdate(){ + public void testUpdate() { //insertOne ProcessDefinition processDefinition = insertOne(); //update @@ -108,7 +117,7 @@ public class ProcessDefinitionMapperTest { * test delete */ @Test - public void testDelete(){ + public void testDelete() { ProcessDefinition processDefinition = insertOne(); int delete = processDefinitionMapper.deleteById(processDefinition.getId()); Assert.assertEquals(1, delete); @@ -175,8 +184,8 @@ public class ProcessDefinitionMapperTest { @Test public void testQueryDefineListPaging() { ProcessDefinition processDefinition = insertOne(); - Page page = new Page(1,3); - IPage processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(page, "def", 101, 1010,true); + Page page = new Page(1, 3); + IPage processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(page, "def", 101, 1010, true); Assert.assertNotEquals(processDefinitionIPage.getTotal(), 0); } @@ -186,7 +195,7 @@ public class ProcessDefinitionMapperTest { @Test public void testQueryAllDefinitionList() { ProcessDefinition processDefinition = insertOne(); - List processDefinitionIPage = processDefinitionMapper.queryAllDefinitionList(1010); + List processDefinitionIPage = processDefinitionMapper.queryAllDefinitionList(1010); Assert.assertNotEquals(processDefinitionIPage.size(), 0); } @@ -214,7 +223,7 @@ public class ProcessDefinitionMapperTest { @Test public void testCountDefinitionGroupByUser() { - User user= new User(); + User user = new User(); user.setUserName("user1"); user.setUserPassword("1"); user.setEmail("xx@123.com"); @@ -239,7 +248,7 @@ public class ProcessDefinitionMapperTest { } @Test - public void listResourcesTest(){ + public void listResourcesTest() { ProcessDefinition processDefinition = insertOne(); processDefinition.setResourceIds("3,5"); processDefinition.setReleaseState(ReleaseState.ONLINE); @@ 
-248,11 +257,22 @@ public class ProcessDefinitionMapperTest { } @Test - public void listResourcesByUserTest(){ + public void listResourcesByUserTest() { ProcessDefinition processDefinition = insertOne(); processDefinition.setResourceIds("3,5"); processDefinition.setReleaseState(ReleaseState.ONLINE); List> maps = processDefinitionMapper.listResourcesByUser(processDefinition.getUserId()); Assert.assertNotNull(maps); } + + @Test + public void testUpdateVersionByProcessDefinitionId() { + long expectedVersion = 10; + ProcessDefinition processDefinition = insertOne(); + processDefinition.setVersion(expectedVersion); + processDefinitionMapper.updateVersionByProcessDefinitionId( + processDefinition.getId(), processDefinition.getVersion()); + ProcessDefinition processDefinition1 = processDefinitionMapper.selectById(processDefinition.getId()); + Assert.assertEquals(expectedVersion, processDefinition1.getVersion()); + } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapperTest.java new file mode 100644 index 0000000000000000000000000000000000000000..e825e3384785aaed758b7faaa0609a5707471976 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapperTest.java @@ -0,0 +1,172 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
 You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion; + +import java.util.Date; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +@RunWith(SpringRunner.class) +@SpringBootTest +@Transactional +@Rollback(true) +public class ProcessDefinitionVersionMapperTest { + + + @Autowired + ProcessDefinitionMapper processDefinitionMapper; + + @Autowired + ProcessDefinitionVersionMapper processDefinitionVersionMapper; + + @Autowired + UserMapper userMapper; + + @Autowired + QueueMapper queueMapper; + + @Autowired + TenantMapper tenantMapper; + + @Autowired + ProjectMapper projectMapper; + + /** + * insert + * + * @return ProcessDefinitionVersion + */ + private ProcessDefinitionVersion insertOne() { + // insertOne + ProcessDefinitionVersion processDefinitionVersion + = new ProcessDefinitionVersion(); + processDefinitionVersion.setProcessDefinitionId(66); + processDefinitionVersion.setVersion(10); + processDefinitionVersion.setProcessDefinitionJson(StringUtils.EMPTY); + 
processDefinitionVersion.setDescription(StringUtils.EMPTY); + processDefinitionVersion.setGlobalParams(StringUtils.EMPTY); + processDefinitionVersion.setCreateTime(new Date()); + processDefinitionVersion.setLocations(StringUtils.EMPTY); + processDefinitionVersion.setConnects(StringUtils.EMPTY); + processDefinitionVersion.setReceivers(StringUtils.EMPTY); + processDefinitionVersion.setReceiversCc(StringUtils.EMPTY); + processDefinitionVersion.setTimeout(10); + processDefinitionVersion.setResourceIds("1,2"); + processDefinitionVersionMapper.insert(processDefinitionVersion); + return processDefinitionVersion; + } + + /** + * insert + * + * @return ProcessDefinitionVersion + */ + private ProcessDefinitionVersion insertTwo() { + // insertTwo + ProcessDefinitionVersion processDefinitionVersion + = new ProcessDefinitionVersion(); + processDefinitionVersion.setProcessDefinitionId(67); + processDefinitionVersion.setVersion(11); + processDefinitionVersion.setProcessDefinitionJson(StringUtils.EMPTY); + processDefinitionVersion.setDescription(StringUtils.EMPTY); + processDefinitionVersion.setGlobalParams(StringUtils.EMPTY); + processDefinitionVersion.setCreateTime(new Date()); + processDefinitionVersion.setLocations(StringUtils.EMPTY); + processDefinitionVersion.setConnects(StringUtils.EMPTY); + processDefinitionVersion.setReceivers(StringUtils.EMPTY); + processDefinitionVersion.setReceiversCc(StringUtils.EMPTY); + processDefinitionVersion.setTimeout(10); + processDefinitionVersion.setResourceIds("1,2"); + processDefinitionVersionMapper.insert(processDefinitionVersion); + return processDefinitionVersion; + } + + /** + * test insert + */ + @Test + public void testInsert() { + ProcessDefinitionVersion processDefinitionVersion = insertOne(); + Assert.assertTrue(processDefinitionVersion.getId() > 0); + } + + /** + * test query + */ + @Test + public void testQueryMaxVersionByProcessDefinitionId() { + ProcessDefinitionVersion processDefinitionVersion = insertOne(); + + Long version = 
processDefinitionVersionMapper.queryMaxVersionByProcessDefinitionId( + processDefinitionVersion.getProcessDefinitionId()); + // query + Assert.assertEquals(10, (long) version); + } + + @Test + public void testQueryProcessDefinitionVersionsPaging() { + insertOne(); + insertTwo(); + + Page page = new Page<>(1, 3); + + IPage processDefinitionVersionIPage = + processDefinitionVersionMapper.queryProcessDefinitionVersionsPaging(page, 10); + + Assert.assertTrue(processDefinitionVersionIPage.getSize() >= 2); + } + + @Test + public void testDeleteByProcessDefinitionIdAndVersion() { + ProcessDefinitionVersion processDefinitionVersion = insertOne(); + int i = processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion( + processDefinitionVersion.getProcessDefinitionId(), processDefinitionVersion.getVersion()); + Assert.assertEquals(1, i); + } + + @Test + public void testQueryByProcessDefinitionIdAndVersion() { + ProcessDefinitionVersion processDefinitionVersion1 = insertOne(); + ProcessDefinitionVersion processDefinitionVersion3 = processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion( + processDefinitionVersion1.getProcessDefinitionId(), 10); + + ProcessDefinitionVersion processDefinitionVersion2 = insertTwo(); + ProcessDefinitionVersion processDefinitionVersion4 = processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion( + processDefinitionVersion2.getProcessDefinitionId(), 11); + + Assert.assertEquals(processDefinitionVersion1.getProcessDefinitionId(), + processDefinitionVersion3.getProcessDefinitionId()); + Assert.assertEquals(processDefinitionVersion2.getProcessDefinitionId(), + processDefinitionVersion4.getProcessDefinitionId()); + + } + +} \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java index 
10e62d8d9bcc2db862dffadb2f6038bff91b39dd..92e3cae8574927254d1a3e8267f5ba71aef25c1c 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java @@ -18,10 +18,17 @@ package org.apache.dolphinscheduler.remote; import io.netty.bootstrap.Bootstrap; -import io.netty.channel.*; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelFutureListener; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.epoll.EpollEventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.SocketChannel; import io.netty.channel.socket.nio.NioSocketChannel; + import org.apache.dolphinscheduler.remote.codec.NettyDecoder; import org.apache.dolphinscheduler.remote.codec.NettyEncoder; import org.apache.dolphinscheduler.remote.command.Command; @@ -38,6 +45,8 @@ import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.CallerThreadExecutePolicy; import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; +import org.apache.dolphinscheduler.remote.utils.NettyUtils; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,7 +56,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; /** - * remoting netty client + * remoting netty client */ public class NettyRemotingClient { @@ -59,7 +68,7 @@ public class NettyRemotingClient { private final Bootstrap bootstrap = new Bootstrap(); /** - * encoder + * encoder */ private final NettyEncoder encoder = new NettyEncoder(); @@ -69,57 +78,69 @@ public class NettyRemotingClient { private final ConcurrentHashMap channels = new ConcurrentHashMap(128); 
/** - * started flag + * started flag */ private final AtomicBoolean isStarted = new AtomicBoolean(false); /** - * worker group + * worker group */ - private final NioEventLoopGroup workerGroup; + private final EventLoopGroup workerGroup; /** - * client config + * client config */ private final NettyClientConfig clientConfig; /** - * saync semaphore + * async semaphore */ private final Semaphore asyncSemaphore = new Semaphore(200, true); /** - * callback thread executor + * callback thread executor */ private final ExecutorService callbackExecutor; /** - * client handler + * client handler */ private final NettyClientHandler clientHandler; /** - * response future executor + * response future executor */ private final ScheduledExecutorService responseFutureExecutor; /** - * client init + * client init + * * @param clientConfig client config */ - public NettyRemotingClient(final NettyClientConfig clientConfig){ + public NettyRemotingClient(final NettyClientConfig clientConfig) { this.clientConfig = clientConfig; - this.workerGroup = new NioEventLoopGroup(clientConfig.getWorkerThreads(), new ThreadFactory() { - private AtomicInteger threadIndex = new AtomicInteger(0); + if (NettyUtils.useEpoll()) { + this.workerGroup = new EpollEventLoopGroup(clientConfig.getWorkerThreads(), new ThreadFactory() { + private AtomicInteger threadIndex = new AtomicInteger(0); - @Override - public Thread newThread(Runnable r) { - return new Thread(r, String.format("NettyClient_%d", this.threadIndex.incrementAndGet())); - } - }); + @Override + public Thread newThread(Runnable r) { + return new Thread(r, String.format("NettyClient_%d", this.threadIndex.incrementAndGet())); + } + }); + } else { + this.workerGroup = new NioEventLoopGroup(clientConfig.getWorkerThreads(), new ThreadFactory() { + private AtomicInteger threadIndex = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + return new Thread(r, String.format("NettyClient_%d", 
this.threadIndex.incrementAndGet())); + } + }); + } this.callbackExecutor = new ThreadPoolExecutor(5, 10, 1, TimeUnit.MINUTES, - new LinkedBlockingQueue<>(1000), new NamedThreadFactory("CallbackExecutor", 10), - new CallerThreadExecutePolicy()); + new LinkedBlockingQueue<>(1000), new NamedThreadFactory("CallbackExecutor", 10), + new CallerThreadExecutePolicy()); this.clientHandler = new NettyClientHandler(this, callbackExecutor); this.responseFutureExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("ResponseFutureExecutor")); @@ -128,26 +149,26 @@ public class NettyRemotingClient { } /** - * start + * start */ - private void start(){ + private void start() { this.bootstrap - .group(this.workerGroup) - .channel(NioSocketChannel.class) - .option(ChannelOption.SO_KEEPALIVE, clientConfig.isSoKeepalive()) - .option(ChannelOption.TCP_NODELAY, clientConfig.isTcpNoDelay()) - .option(ChannelOption.SO_SNDBUF, clientConfig.getSendBufferSize()) - .option(ChannelOption.SO_RCVBUF, clientConfig.getReceiveBufferSize()) - .handler(new ChannelInitializer() { - @Override - public void initChannel(SocketChannel ch) throws Exception { - ch.pipeline().addLast( - new NettyDecoder(), - clientHandler, - encoder); - } - }); + .group(this.workerGroup) + .channel(NioSocketChannel.class) + .option(ChannelOption.SO_KEEPALIVE, clientConfig.isSoKeepalive()) + .option(ChannelOption.TCP_NODELAY, clientConfig.isTcpNoDelay()) + .option(ChannelOption.SO_SNDBUF, clientConfig.getSendBufferSize()) + .option(ChannelOption.SO_RCVBUF, clientConfig.getReceiveBufferSize()) + .handler(new ChannelInitializer() { + @Override + public void initChannel(SocketChannel ch) throws Exception { + ch.pipeline().addLast( + new NettyDecoder(), + clientHandler, + encoder); + } + }); this.responseFutureExecutor.scheduleAtFixedRate(new Runnable() { @Override public void run() { @@ -159,10 +180,11 @@ public class NettyRemotingClient { } /** - * async send - * @param host host - * @param command 
command - * @param timeoutMillis timeoutMillis + * async send + * + * @param host host + * @param command command + * @param timeoutMillis timeoutMillis * @param invokeCallback callback function * @throws InterruptedException * @throws RemotingException @@ -182,22 +204,22 @@ public class NettyRemotingClient { * control concurrency number */ boolean acquired = this.asyncSemaphore.tryAcquire(timeoutMillis, TimeUnit.MILLISECONDS); - if(acquired){ + if (acquired) { final ReleaseSemaphore releaseSemaphore = new ReleaseSemaphore(this.asyncSemaphore); /** * response future */ final ResponseFuture responseFuture = new ResponseFuture(opaque, - timeoutMillis, - invokeCallback, - releaseSemaphore); + timeoutMillis, + invokeCallback, + releaseSemaphore); try { - channel.writeAndFlush(command).addListener(new ChannelFutureListener(){ + channel.writeAndFlush(command).addListener(new ChannelFutureListener() { @Override public void operationComplete(ChannelFuture future) throws Exception { - if(future.isSuccess()){ + if (future.isSuccess()) { responseFuture.setSendOk(true); return; } else { @@ -207,28 +229,29 @@ public class NettyRemotingClient { responseFuture.putResponse(null); try { responseFuture.executeInvokeCallback(); - } catch (Throwable ex){ + } catch (Throwable ex) { logger.error("execute callback error", ex); - } finally{ + } finally { responseFuture.release(); } } }); - } catch (Throwable ex){ + } catch (Throwable ex) { responseFuture.release(); throw new RemotingException(String.format("send command to host: %s failed", host), ex); } - } else{ + } else { String message = String.format("try to acquire async semaphore timeout: %d, waiting thread num: %d, total permits: %d", - timeoutMillis, asyncSemaphore.getQueueLength(), asyncSemaphore.availablePermits()); + timeoutMillis, asyncSemaphore.getQueueLength(), asyncSemaphore.availablePermits()); throw new RemotingTooMuchRequestException(message); } } /** * sync send - * @param host host - * @param command command + * + * 
@param host host + * @param command command * @param timeoutMillis timeoutMillis * @return command * @throws InterruptedException @@ -244,7 +267,7 @@ public class NettyRemotingClient { channel.writeAndFlush(command).addListener(new ChannelFutureListener() { @Override public void operationComplete(ChannelFuture future) throws Exception { - if(future.isSuccess()){ + if (future.isSuccess()) { responseFuture.setSendOk(true); return; } else { @@ -259,10 +282,10 @@ public class NettyRemotingClient { * sync wait for result */ Command result = responseFuture.waitResponse(); - if(result == null){ - if(responseFuture.isSendOK()){ + if (result == null) { + if (responseFuture.isSendOK()) { throw new RemotingTimeoutException(host.toString(), timeoutMillis, responseFuture.getCause()); - } else{ + } else { throw new RemotingException(host.toString(), responseFuture.getCause()); } } @@ -270,8 +293,9 @@ public class NettyRemotingClient { } /** - * send task - * @param host host + * send task + * + * @param host host * @param command command * @throws RemotingException */ @@ -296,33 +320,35 @@ public class NettyRemotingClient { } /** - * register processor + * register processor + * * @param commandType command type - * @param processor processor + * @param processor processor */ public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) { this.registerProcessor(commandType, processor, null); } /** - * register processor + * register processor * * @param commandType command type - * @param processor processor - * @param executor thread executor + * @param processor processor + * @param executor thread executor */ public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) { this.clientHandler.registerProcessor(commandType, processor, executor); } /** - * get channel + * get channel + * * @param host * @return */ public Channel getChannel(Host host) { Channel channel = 
channels.get(host); - if(channel != null && channel.isActive()){ + if (channel != null && channel.isActive()) { return channel; } return createChannel(host, true); @@ -330,17 +356,18 @@ public class NettyRemotingClient { /** * create channel - * @param host host + * + * @param host host * @param isSync sync flag * @return channel */ public Channel createChannel(Host host, boolean isSync) { ChannelFuture future; try { - synchronized (bootstrap){ + synchronized (bootstrap) { future = bootstrap.connect(new InetSocketAddress(host.getIp(), host.getPort())); } - if(isSync){ + if (isSync) { future.sync(); } if (future.isSuccess()) { @@ -358,16 +385,16 @@ public class NettyRemotingClient { * close */ public void close() { - if(isStarted.compareAndSet(true, false)){ + if (isStarted.compareAndSet(true, false)) { try { closeChannels(); - if(workerGroup != null){ + if (workerGroup != null) { this.workerGroup.shutdownGracefully(); } - if(callbackExecutor != null){ + if (callbackExecutor != null) { this.callbackExecutor.shutdownNow(); } - if(this.responseFutureExecutor != null){ + if (this.responseFutureExecutor != null) { this.responseFutureExecutor.shutdownNow(); } } catch (Exception ex) { @@ -378,9 +405,9 @@ public class NettyRemotingClient { } /** - * close channels + * close channels */ - private void closeChannels(){ + private void closeChannels() { for (Channel channel : this.channels.values()) { channel.close(); } @@ -389,11 +416,12 @@ public class NettyRemotingClient { /** * close channel + * * @param host host */ - public void closeChannel(Host host){ + public void closeChannel(Host host) { Channel channel = this.channels.remove(host); - if(channel != null){ + if (channel != null) { channel.close(); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java index 
3eed82b1e108562151885ba7ad54b04577f451ea..e96f22aafcc1e56a66d515b7bde6cad60fda9c86 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java @@ -22,9 +22,12 @@ import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; import io.netty.channel.ChannelPipeline; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.epoll.EpollEventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.channel.socket.nio.NioSocketChannel; + import org.apache.dolphinscheduler.remote.codec.NettyDecoder; import org.apache.dolphinscheduler.remote.codec.NettyEncoder; import org.apache.dolphinscheduler.remote.command.CommandType; @@ -32,6 +35,8 @@ import org.apache.dolphinscheduler.remote.config.NettyServerConfig; import org.apache.dolphinscheduler.remote.handler.NettyServerHandler; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.Constants; +import org.apache.dolphinscheduler.remote.utils.NettyUtils; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,44 +47,44 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; /** - * remoting netty server + * remoting netty server */ public class NettyRemotingServer { private final Logger logger = LoggerFactory.getLogger(NettyRemotingServer.class); /** - * server bootstrap + * server bootstrap */ private final ServerBootstrap serverBootstrap = new ServerBootstrap(); /** - * encoder + * encoder */ private final NettyEncoder encoder = new NettyEncoder(); /** - * default executor + * default executor */ private final ExecutorService defaultExecutor = Executors.newFixedThreadPool(Constants.CPUS); /** * boss group */ - 
private final NioEventLoopGroup bossGroup; + private final EventLoopGroup bossGroup; /** - * worker group + * worker group */ - private final NioEventLoopGroup workGroup; + private final EventLoopGroup workGroup; /** - * server config + * server config */ private final NettyServerConfig serverConfig; /** - * server handler + * server handler */ private final NettyServerHandler serverHandler = new NettyServerHandler(this); @@ -89,59 +94,78 @@ public class NettyRemotingServer { private final AtomicBoolean isStarted = new AtomicBoolean(false); /** - * server init + * server init * * @param serverConfig server config */ - public NettyRemotingServer(final NettyServerConfig serverConfig){ + public NettyRemotingServer(final NettyServerConfig serverConfig) { this.serverConfig = serverConfig; + if (NettyUtils.useEpoll()) { + this.bossGroup = new EpollEventLoopGroup(1, new ThreadFactory() { + private AtomicInteger threadIndex = new AtomicInteger(0); - this.bossGroup = new NioEventLoopGroup(1, new ThreadFactory() { - private AtomicInteger threadIndex = new AtomicInteger(0); + @Override + public Thread newThread(Runnable r) { + return new Thread(r, String.format("NettyServerBossThread_%d", this.threadIndex.incrementAndGet())); + } + }); - @Override - public Thread newThread(Runnable r) { - return new Thread(r, String.format("NettyServerBossThread_%d", this.threadIndex.incrementAndGet())); - } - }); + this.workGroup = new EpollEventLoopGroup(serverConfig.getWorkerThread(), new ThreadFactory() { + private AtomicInteger threadIndex = new AtomicInteger(0); - this.workGroup = new NioEventLoopGroup(serverConfig.getWorkerThread(), new ThreadFactory() { - private AtomicInteger threadIndex = new AtomicInteger(0); + @Override + public Thread newThread(Runnable r) { + return new Thread(r, String.format("NettyServerWorkerThread_%d", this.threadIndex.incrementAndGet())); + } + }); + } else { + this.bossGroup = new NioEventLoopGroup(1, new ThreadFactory() { + private AtomicInteger 
threadIndex = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + return new Thread(r, String.format("NettyServerBossThread_%d", this.threadIndex.incrementAndGet())); + } + }); - @Override - public Thread newThread(Runnable r) { - return new Thread(r, String.format("NettyServerWorkerThread_%d", this.threadIndex.incrementAndGet())); - } - }); + this.workGroup = new NioEventLoopGroup(serverConfig.getWorkerThread(), new ThreadFactory() { + private AtomicInteger threadIndex = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + return new Thread(r, String.format("NettyServerWorkerThread_%d", this.threadIndex.incrementAndGet())); + } + }); + } } /** - * server start + * server start */ - public void start(){ + public void start() { if (isStarted.compareAndSet(false, true)) { this.serverBootstrap - .group(this.bossGroup, this.workGroup) - .channel(NioServerSocketChannel.class) - .option(ChannelOption.SO_REUSEADDR, true) - .option(ChannelOption.SO_BACKLOG, serverConfig.getSoBacklog()) - .childOption(ChannelOption.SO_KEEPALIVE, serverConfig.isSoKeepalive()) - .childOption(ChannelOption.TCP_NODELAY, serverConfig.isTcpNoDelay()) - .childOption(ChannelOption.SO_SNDBUF, serverConfig.getSendBufferSize()) - .childOption(ChannelOption.SO_RCVBUF, serverConfig.getReceiveBufferSize()) - .childHandler(new ChannelInitializer() { - - @Override - protected void initChannel(NioSocketChannel ch) throws Exception { - initNettyChannel(ch); - } - }); + .group(this.bossGroup, this.workGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_REUSEADDR, true) + .option(ChannelOption.SO_BACKLOG, serverConfig.getSoBacklog()) + .childOption(ChannelOption.SO_KEEPALIVE, serverConfig.isSoKeepalive()) + .childOption(ChannelOption.TCP_NODELAY, serverConfig.isTcpNoDelay()) + .childOption(ChannelOption.SO_SNDBUF, serverConfig.getSendBufferSize()) + .childOption(ChannelOption.SO_RCVBUF, serverConfig.getReceiveBufferSize()) + 
.childHandler(new ChannelInitializer() { + + @Override + protected void initChannel(NioSocketChannel ch) throws Exception { + initNettyChannel(ch); + } + }); ChannelFuture future; try { future = serverBootstrap.bind(serverConfig.getListenPort()).sync(); } catch (Exception e) { - logger.error("NettyRemotingServer bind fail {}, exit",e.getMessage(), e); + logger.error("NettyRemotingServer bind fail {}, exit", e.getMessage(), e); throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort())); } if (future.isSuccess()) { @@ -155,11 +179,12 @@ public class NettyRemotingServer { } /** - * init netty channel + * init netty channel + * * @param ch socket channel * @throws Exception */ - private void initNettyChannel(NioSocketChannel ch) throws Exception{ + private void initNettyChannel(NioSocketChannel ch) throws Exception { ChannelPipeline pipeline = ch.pipeline(); pipeline.addLast("encoder", encoder); pipeline.addLast("decoder", new NettyDecoder()); @@ -167,27 +192,29 @@ public class NettyRemotingServer { } /** - * register processor + * register processor + * * @param commandType command type - * @param processor processor + * @param processor processor */ public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) { this.registerProcessor(commandType, processor, null); } /** - * register processor + * register processor * * @param commandType command type - * @param processor processor - * @param executor thread executor + * @param processor processor + * @param executor thread executor */ public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) { this.serverHandler.registerProcessor(commandType, processor, executor); } /** - * get default thread executor + * get default thread executor + * * @return thread executor */ public ExecutorService getDefaultExecutor() { @@ -195,12 +222,12 @@ public class 
NettyRemotingServer { } public void close() { - if(isStarted.compareAndSet(true, false)){ + if (isStarted.compareAndSet(true, false)) { try { - if(bossGroup != null){ + if (bossGroup != null) { this.bossGroup.shutdownGracefully(); } - if(workGroup != null){ + if (workGroup != null) { this.workGroup.shutdownGracefully(); } defaultExecutor.shutdown(); diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java index 48736ca6940ce47422ae41636616c04958561c1f..297d4041a79f3aaa443d2a926045ac2e4d3bf8af 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java @@ -42,4 +42,14 @@ public class Constants { public static final String LOCAL_ADDRESS = IPUtils.getFirstNoLoopbackIP4Address(); + /** + * netty epoll enable switch + */ + public static final String NETTY_EPOLL_ENABLE = System.getProperty("netty.epoll.enable"); + + /** + * OS Name + */ + public static final String OS_NAME = System.getProperty("os.name"); + } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/NettyUtils.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/NettyUtils.java new file mode 100644 index 0000000000000000000000000000000000000000..3e242bd95d2c4ef40b55bbfece37864a0116f4cd --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/NettyUtils.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.utils; + +import io.netty.channel.epoll.Epoll; + +/** + * NettyUtils + */ +public class NettyUtils { + + private NettyUtils() { + } + + public static boolean useEpoll() { + String osName = Constants.OS_NAME; + if (!osName.toLowerCase().contains("linux")) { + return false; + } + if (!Epoll.isAvailable()) { + return false; + } + String enableNettyEpoll = Constants.NETTY_EPOLL_ENABLE; + return Boolean.parseBoolean(enableNettyEpoll); + } + +} diff --git a/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyUtilTest.java b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyUtilTest.java new file mode 100644 index 0000000000000000000000000000000000000000..f5f60dc73507b85dc23cf49bf4afbd7fb80ed329 --- /dev/null +++ b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyUtilTest.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote; + +import org.apache.dolphinscheduler.remote.utils.NettyUtils; + +import org.junit.Assert; +import org.junit.Test; + +/** + * NettyUtilTest + */ +public class NettyUtilTest { + + @Test + public void testUserEpoll() { + System.setProperty("netty.epoll.enable", "false"); + Assert.assertFalse(NettyUtils.useEpoll()); + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistry.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistry.java index 040ea5a43fc6cb8ef7a63746d21ad1ffc00b1b76..01218e5d8b7df1b4afc87e8f186ab25f79269a7a 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistry.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistry.java @@ -14,8 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.master.registry; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.NetUtils; +import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.registry.HeartBeatTask; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.state.ConnectionState; +import org.apache.curator.framework.state.ConnectionStateListener; + import java.util.Date; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -23,15 +35,6 @@ import java.util.concurrent.TimeUnit; import javax.annotation.PostConstruct; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.state.ConnectionState; -import org.apache.curator.framework.state.ConnectionStateListener; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.NetUtils; -import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; -import org.apache.dolphinscheduler.server.master.config.MasterConfig; -import org.apache.dolphinscheduler.server.registry.HeartBeatTask; -import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -40,7 +43,7 @@ import org.springframework.stereotype.Service; import com.google.common.collect.Sets; /** - * master registry + * master registry */ @Service public class MasterRegistry { @@ -48,7 +51,7 @@ public class MasterRegistry { private final Logger logger = LoggerFactory.getLogger(MasterRegistry.class); /** - * zookeeper registry center + * zookeeper registry center */ @Autowired private ZookeeperRegistryCenter 
zookeeperRegistryCenter; @@ -65,19 +68,18 @@ public class MasterRegistry { private ScheduledExecutorService heartBeatExecutor; /** - * worker start time + * master start time */ private String startTime; - @PostConstruct - public void init(){ + public void init() { this.startTime = DateUtils.dateToString(new Date()); this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor")); } /** - * registry + * registry */ public void registry() { String address = NetUtils.getHost(); @@ -86,12 +88,12 @@ public class MasterRegistry { zookeeperRegistryCenter.getZookeeperCachedOperator().getZkClient().getConnectionStateListenable().addListener(new ConnectionStateListener() { @Override public void stateChanged(CuratorFramework client, ConnectionState newState) { - if(newState == ConnectionState.LOST){ + if (newState == ConnectionState.LOST) { logger.error("master : {} connection lost from zookeeper", address); - } else if(newState == ConnectionState.RECONNECTED){ + } else if (newState == ConnectionState.RECONNECTED) { logger.info("master : {} reconnected to zookeeper", address); zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, ""); - } else if(newState == ConnectionState.SUSPENDED){ + } else if (newState == ConnectionState.SUSPENDED) { logger.warn("master : {} connection SUSPENDED ", address); } } @@ -103,36 +105,35 @@ public class MasterRegistry { Sets.newHashSet(getMasterPath()), zookeeperRegistryCenter); - this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, masterHeartbeatInterval, masterHeartbeatInterval, TimeUnit.SECONDS); - logger.info("master node : {} registry to ZK successfully with heartBeatInterval : {}s", address, masterHeartbeatInterval); + this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, 0, masterHeartbeatInterval, TimeUnit.SECONDS); + logger.info("master node : {} registry to ZK path {} successfully with heartBeatInterval : {}s" + , address, localNodePath, 
masterHeartbeatInterval); } /** - * remove registry info + * remove registry info */ public void unRegistry() { String address = getLocalAddress(); String localNodePath = getMasterPath(); heartBeatExecutor.shutdownNow(); zookeeperRegistryCenter.getZookeeperCachedOperator().remove(localNodePath); - logger.info("master node : {} unRegistry to ZK.", address); + logger.info("master node : {} unRegistry from ZK path {}." + , address, localNodePath); } /** - * get master path - * @return + * get master path */ private String getMasterPath() { String address = getLocalAddress(); - String localNodePath = this.zookeeperRegistryCenter.getMasterPath() + "/" + address; - return localNodePath; + return this.zookeeperRegistryCenter.getMasterPath() + "/" + address; } /** - * get local address - * @return + * get local address */ - private String getLocalAddress(){ + private String getLocalAddress() { return NetUtils.getHost() + ":" + masterConfig.getListenPort(); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java index 08c602251913ce95fa13c7582bec334faf6c25a8..58ade83d6847c65e3207a38e833271715ecefc82 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java @@ -21,18 +21,17 @@ import org.apache.dolphinscheduler.common.enums.AlertType; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ShowType; import org.apache.dolphinscheduler.common.enums.WarningType; -import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.DaoFactory; import 
org.apache.dolphinscheduler.dao.entity.Alert; +import org.apache.dolphinscheduler.dao.entity.ProcessAlertContent; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import java.util.ArrayList; import java.util.Date; -import java.util.LinkedHashMap; import java.util.List; import org.slf4j.Logger; @@ -98,39 +97,40 @@ public class AlertManager { String res = ""; if (processInstance.getState().typeIsSuccess()) { - List successTaskList = new ArrayList<>(1); - LinkedHashMap successTaskMap = new LinkedHashMap(); - successTaskMap.put("id", String.valueOf(processInstance.getId())); - successTaskMap.put("name", processInstance.getName()); - successTaskMap.put("job type", getCommandCnName(processInstance.getCommandType())); - successTaskMap.put("state", processInstance.getState().toString()); - successTaskMap.put("recovery", processInstance.getRecovery().toString()); - successTaskMap.put("run time", String.valueOf(processInstance.getRunTimes())); - successTaskMap.put("start time", DateUtils.dateToString(processInstance.getStartTime())); - successTaskMap.put("end time", DateUtils.dateToString(processInstance.getEndTime())); - successTaskMap.put("host", processInstance.getHost()); - successTaskList.add(successTaskMap); + List successTaskList = new ArrayList<>(1); + ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder() + .processId(processInstance.getId()) + .processName(processInstance.getName()) + .processType(processInstance.getCommandType()) + .processState(processInstance.getState()) + .recovery(processInstance.getRecovery()) + .runTimes(processInstance.getRunTimes()) + .processStartTime(processInstance.getStartTime()) + .processEndTime(processInstance.getEndTime()) + .processHost(processInstance.getHost()) + .build(); + successTaskList.add(processAlertContent); res = JSONUtils.toJsonString(successTaskList); } else if 
(processInstance.getState().typeIsFailure()) { - List failedTaskList = new ArrayList<>(); - + List failedTaskList = new ArrayList<>(); for (TaskInstance task : taskInstances) { if (task.getState().typeIsSuccess()) { continue; } - LinkedHashMap failedTaskMap = new LinkedHashMap(); - failedTaskMap.put("process instance id", String.valueOf(processInstance.getId())); - failedTaskMap.put("process instance name", processInstance.getName()); - failedTaskMap.put("task id", String.valueOf(task.getId())); - failedTaskMap.put("task name", task.getName()); - failedTaskMap.put("task type", task.getTaskType()); - failedTaskMap.put("task state", task.getState().toString()); - failedTaskMap.put("task start time", DateUtils.dateToString(task.getStartTime())); - failedTaskMap.put("task end time", DateUtils.dateToString(task.getEndTime())); - failedTaskMap.put("host", task.getHost()); - failedTaskMap.put("log path", task.getLogPath()); - failedTaskList.add(failedTaskMap); + ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder() + .processId(processInstance.getId()) + .processName(processInstance.getName()) + .taskId(task.getId()) + .taskName(task.getName()) + .taskType(task.getTaskType()) + .taskState(task.getState()) + .taskStartTime(task.getStartTime()) + .taskEndTime(task.getEndTime()) + .taskHost(task.getHost()) + .logPath(task.getLogPath()) + .build(); + failedTaskList.add(processAlertContent); } res = JSONUtils.toJsonString(failedTaskList); } @@ -147,15 +147,16 @@ public class AlertManager { */ private String getWorkerToleranceContent(ProcessInstance processInstance, List toleranceTaskList) { - List> toleranceTaskInstanceList = new ArrayList<>(); + List toleranceTaskInstanceList = new ArrayList<>(); for (TaskInstance taskInstance : toleranceTaskList) { - LinkedHashMap toleranceWorkerContentMap = new LinkedHashMap(); - toleranceWorkerContentMap.put("process name", processInstance.getName()); - toleranceWorkerContentMap.put("task name", 
taskInstance.getName()); - toleranceWorkerContentMap.put("host", taskInstance.getHost()); - toleranceWorkerContentMap.put("task retry times", String.valueOf(taskInstance.getRetryTimes())); - toleranceTaskInstanceList.add(toleranceWorkerContentMap); + ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder() + .processName(processInstance.getName()) + .taskName(taskInstance.getName()) + .taskHost(taskInstance.getHost()) + .retryTimes(taskInstance.getRetryTimes()) + .build(); + toleranceTaskInstanceList.add(processAlertContent); } return JSONUtils.toJsonString(toleranceTaskInstanceList); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java index 310ab62bf1d763e1b6bc0da648d7006dcbce84fe..cf49285b9f1e26302be0f3b3df068ad6fcc8afe2 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java @@ -14,53 +14,52 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.utils; -import java.nio.charset.StandardCharsets; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.CommonUtils; +import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.commons.io.FileUtils; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.service.log.LogClientService; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; -import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; - /** - * mainly used to get the start command line of a process + * mainly used to get the start command line of a process. */ public class ProcessUtils { /** - * logger + * logger. */ - private final static Logger logger = LoggerFactory.getLogger(ProcessUtils.class); + private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class); /** * Initialization regularization, solve the problem of pre-compilation performance, - * avoid the thread safety problem of multi-thread operation + * avoid the thread safety problem of multi-thread operation. */ private static final Pattern MACPATTERN = Pattern.compile("-[+|-]-\\s(\\d+)"); private static final Pattern WINDOWSATTERN = Pattern.compile("(\\d+)"); /** - * build command line characters + * build command line characters. 
* @param commandList command list * @return command - * @throws IOException io exception */ - public static String buildCommandStr(List commandList) throws IOException { + public static String buildCommandStr(List commandList) { String cmdstr; String[] cmd = commandList.toArray(new String[commandList.size()]); SecurityManager security = System.getSecurityManager(); @@ -102,7 +101,6 @@ public class ProcessUtils { } } - cmdstr = createCommandLine( isShellFile(executablePath) ? VERIFICATION_CMD_BAT : VERIFICATION_WIN32, quoteString(executablePath), cmd); @@ -111,13 +109,12 @@ public class ProcessUtils { } /** - * get executable path + * get executable path. * * @param path path * @return executable path - * @throws IOException io exception */ - private static String getExecutablePath(String path) throws IOException { + private static String getExecutablePath(String path) { boolean pathIsQuoted = isQuoted(true, path, "Executable name has embedded quote, split the arguments"); File fileToRun = new File(pathIsQuoted ? path.substring(1, path.length() - 1) : path); @@ -125,7 +122,7 @@ public class ProcessUtils { } /** - * whether is shell file + * whether is shell file. * * @param executablePath executable path * @return true if endsWith .CMD or .BAT @@ -136,7 +133,7 @@ public class ProcessUtils { } /** - * quote string + * quote string. * * @param arg argument * @return format arg @@ -147,7 +144,7 @@ public class ProcessUtils { } /** - * get tokens from command + * get tokens from command. * * @param command command * @return token string array @@ -162,7 +159,7 @@ public class ProcessUtils { } /** - * Lazy Pattern + * Lazy Pattern. */ private static class LazyPattern { // Escape-support version: @@ -171,34 +168,29 @@ public class ProcessUtils { } /** - * verification cmd bat + * verification cmd bat. */ private static final int VERIFICATION_CMD_BAT = 0; /** - * verification win32 + * verification win32. 
*/ private static final int VERIFICATION_WIN32 = 1; /** - * verification legacy + * verification legacy. */ private static final int VERIFICATION_LEGACY = 2; /** - * escape verification + * escape verification. */ private static final char[][] ESCAPE_VERIFICATION = {{' ', '\t', '<', '>', '&', '|', '^'}, - {' ', '\t', '<', '>'}, {' ', '\t'}}; - - /** - * matcher - */ - private static Matcher matcher; + {' ', '\t', '<', '>'}, {' ', '\t'}}; /** - * create command line + * create command line. * @param verificationType verification type * @param executablePath executable path * @param cmd cmd @@ -227,7 +219,7 @@ public class ProcessUtils { } /** - * whether is quoted + * whether is quoted. * @param noQuotesInside * @param arg * @param errorMessage @@ -255,7 +247,7 @@ public class ProcessUtils { } /** - * whether needs escaping + * whether needs escaping. * * @param verificationType verification type * @param arg arg @@ -277,16 +269,14 @@ public class ProcessUtils { } /** - * kill yarn application + * kill yarn application. * * @param appIds app id list * @param logger logger * @param tenantCode tenant code * @param executePath execute path - * @throws IOException io exception */ - public static void cancelApplication(List appIds, Logger logger, String tenantCode,String executePath) - throws IOException { + public static void cancelApplication(List appIds, Logger logger, String tenantCode, String executePath) { if (appIds.size() > 0) { String appid = appIds.get(appIds.size() - 1); String commandFile = String @@ -324,17 +314,17 @@ public class ProcessUtils { } /** - * kill tasks according to different task types + * kill tasks according to different task types. 
* * @param taskExecutionContext taskExecutionContext */ public static void kill(TaskExecutionContext taskExecutionContext) { try { int processId = taskExecutionContext.getProcessId(); - if(processId == 0 ){ + if (processId == 0) { logger.error("process kill failed, process id :{}, task id:{}", processId, taskExecutionContext.getTaskInstanceId()); - return ; + return; } String cmd = String.format("sudo kill -9 %s", getPidsStr(processId)); @@ -352,13 +342,13 @@ public class ProcessUtils { } /** - * get pids str + * get pids str. * * @param processId process id * @return pids * @throws Exception exception */ - public static String getPidsStr(int processId)throws Exception{ + public static String getPidsStr(int processId) throws Exception { StringBuilder sb = new StringBuilder(); Matcher mat; // pstree pid get sub pids @@ -370,14 +360,14 @@ public class ProcessUtils { mat = WINDOWSATTERN.matcher(pids); } - while (mat.find()){ + while (mat.find()) { sb.append(mat.group(1)).append(" "); } return sb.toString().trim(); } /** - * find logs and kill yarn tasks + * find logs and kill yarn tasks. 
* * @param taskExecutionContext taskExecutionContext */ @@ -392,7 +382,7 @@ public class ProcessUtils { Constants.RPC_PORT, taskExecutionContext.getLogPath()); } finally { - if(logClient != null){ + if (logClient != null) { logClient.close(); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java index a6f0f1a29ee28903af95fdfc0633ca8524ff633f..3dedeced06c201228cc14ba75ef2f9c8e4df5cc5 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java @@ -312,14 +312,8 @@ public abstract class AbstractCommandExecutor { * @param commands process builder */ private void printCommand(List commands) { - String cmdStr; - - try { - cmdStr = ProcessUtils.buildCommandStr(commands); - logger.info("task run command:\n{}", cmdStr); - } catch (IOException e) { - logger.error(e.getMessage(), e); - } + String cmdStr = ProcessUtils.buildCommandStr(commands); + logger.info("task run command:\n{}", cmdStr); } /** diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java index ea822cbfb11ae609febefd79cf647e4f6a1e7bb6..7763e07314d8a4e59905d3a108c8bf7dfba6c3e0 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.server.master.registry; +import static 
org.apache.dolphinscheduler.common.Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH; + import org.apache.dolphinscheduler.remote.utils.Constants; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; @@ -24,6 +26,10 @@ import org.apache.dolphinscheduler.server.zk.SpringZKServer; import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient; import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; + +import java.util.List; +import java.util.concurrent.TimeUnit; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -31,10 +37,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringRunner; -import java.util.List; -import java.util.concurrent.TimeUnit; - -import static org.apache.dolphinscheduler.common.Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH; /** * master registry test */ @@ -65,6 +67,7 @@ public class MasterRegistryTest { @Test public void testUnRegistry() throws InterruptedException { + masterRegistry.init(); masterRegistry.registry(); TimeUnit.SECONDS.sleep(masterConfig.getMasterHeartbeatInterval() + 2); //wait heartbeat info write into zk node masterRegistry.unRegistry(); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java index 1e0adaad9b958b72c24939b53a4ece2ed3c8c43d..ace5cd847113234cfb7bf4a50fe45524e55161e9 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java @@ -40,11 +40,8 @@ public class 
ProcessUtilsTest { public void testBuildCommandStr() { List commands = new ArrayList<>(); commands.add("sudo"); - try { - Assert.assertEquals(ProcessUtils.buildCommandStr(commands), "sudo"); - } catch (IOException e) { - Assert.fail(e.getMessage()); - } + Assert.assertEquals(ProcessUtils.buildCommandStr(commands), "sudo"); + } } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue index e8e709f1d9bb3be747592595197ad052bc5feee2..466bfd4fefa31ef538b4fc602383f82ea485615a 100755 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue @@ -103,7 +103,7 @@ {{spinnerLoading ? 'Loading...' : $t('Save')}} + + {{spinnerLoading ? 'Loading...' : $t('Version Info')}} +
@@ -147,6 +158,7 @@ import { findComponentDownward } from '@/module/util/' import disabledState from '@/module/mixin/disabledState' import { mapActions, mapState, mapMutations } from 'vuex' + import mVersions from '../../projects/pages/definition/pages/list/_source/versions' let eventModel @@ -176,7 +188,7 @@ releaseState: String }, methods: { - ...mapActions('dag', ['saveDAGchart', 'updateInstance', 'updateDefinition', 'getTaskState']), + ...mapActions('dag', ['saveDAGchart', 'updateInstance', 'updateDefinition', 'getTaskState', 'switchProcessDefinitionVersion', 'getProcessDefinitionVersionsPage', 'deleteProcessDefinitionVersion']), ...mapMutations('dag', ['addTasks', 'cacheTasks', 'resetParams', 'setIsEditDag', 'setName', 'addConnects']), // DAG automatic layout @@ -196,6 +208,7 @@ ], Connector: 'Bezier', PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style + HoverPaintStyle: {stroke: '#ccc', strokeWidth: 3}, ConnectionOverlays: [ [ 'Arrow', @@ -369,6 +382,12 @@ this[this.type === 'instance' ? 
'updateInstance' : 'updateDefinition'](this.urlParam.id).then(res => { this.$message.success(res.msg) this.spinnerLoading = false + // Jump process definition + if (this.type === 'instance') { + this.$router.push({ path: `/projects/instance/list/${this.urlParam.id}?_t=${new Date().getTime()}` }) + } else { + this.$router.push({ path: `/projects/definition/list/${this.urlParam.id}?_t=${new Date().getTime()}` }) + } resolve() }).catch(e => { this.$message.error(e.msg || '') @@ -656,6 +675,135 @@ if(eventModel && this.taskId == $id){ eventModel.remove() } + }, + + /** + * query the process definition pagination version + */ + _version (item) { + let self = this + this.getProcessDefinitionVersionsPage({ + pageNo: 1, + pageSize: 10, + processDefinitionId: this.urlParam.id + }).then(res => { + let processDefinitionVersions = res.data.lists + let total = res.data.totalCount + let pageSize = res.data.pageSize + let pageNo = res.data.currentPage + if (this.versionsModel) { + this.versionsModel.remove() + } + this.versionsModel = this.$drawer({ + direction: 'right', + closable: true, + showMask: true, + escClose: true, + render (h) { + return h(mVersions, { + on: { + /** + * switch version in process definition version list + * + * @param version the version user want to change + * @param processDefinitionId the process definition id + * @param fromThis fromThis + */ + mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) { + + self.$store.state.dag.isSwitchVersion = true + + self.switchProcessDefinitionVersion({ + version: version, + processDefinitionId: processDefinitionId + }).then(res => { + self.$message.success($t('Switch Version Successfully')) + setTimeout(() => { + fromThis.$destroy() + self.versionsModel.remove() + }, 0) + self.$router.push({ path: `/projects/definition/list/${processDefinitionId}?_t=${new Date().getTime()}` }) + }).catch(e => { + self.$store.state.dag.isSwitchVersion = false + self.$message.error(e.msg || '') + }) + 
}, + + /** + * Paging event of process definition versions + * + * @param pageNo page number + * @param pageSize page size + * @param processDefinitionId the process definition id of page version + * @param fromThis fromThis + */ + mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionId, fromThis }) { + self.getProcessDefinitionVersionsPage({ + pageNo: pageNo, + pageSize: pageSize, + processDefinitionId: processDefinitionId + }).then(res => { + fromThis.processDefinitionVersions = res.data.lists + fromThis.total = res.data.totalCount + fromThis.pageSize = res.data.pageSize + fromThis.pageNo = res.data.currentPage + }).catch(e => { + self.$message.error(e.msg || '') + }) + }, + + /** + * delete one version of process definition + * + * @param version the version need to delete + * @param processDefinitionId the process definition id user want to delete + * @param fromThis fromThis + */ + mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) { + self.deleteProcessDefinitionVersion({ + version: version, + processDefinitionId: processDefinitionId + }).then(res => { + self.$message.success(res.msg || '') + fromThis.$emit('mVersionGetProcessDefinitionVersionsPage', { + pageNo: 1, + pageSize: 10, + processDefinitionId: processDefinitionId, + fromThis: fromThis + }) + }).catch(e => { + self.$message.error(e.msg || '') + }) + }, + + /** + * remove this drawer + * + * @param fromThis + */ + close ({ fromThis }) { + setTimeout(() => { + fromThis.$destroy() + self.versionsModel.remove() + }, 0) + } + }, + props: { + processDefinition: { + id: self.urlParam.id, + version: self.$store.state.dag.version + }, + processDefinitionVersions: processDefinitionVersions, + total: total, + pageNo: pageNo, + pageSize: pageSize + } + }) + } + }) + }).catch(e => { + this.$message.error(e.msg || '') + }) } }, watch: { @@ -684,6 +832,7 @@ ], Connector: 'Bezier', PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style + 
HoverPaintStyle: {stroke: '#ccc', strokeWidth: 3}, ConnectionOverlays: [ [ 'Arrow', diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js index 360a0fb3562302e9a50807411b997407c078a8b3..c1f77f6876cd1b641b106d0b5e415f74cb6cd45d 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js @@ -72,7 +72,7 @@ Affirm.paramVerification = (name) => { } } else { // View history direct jump - flag = name === 'projects-instance-details' ? true : !dagStore.isEditDag + flag = name === 'projects-instance-details' ? true : (dagStore.isSwitchVersion || !dagStore.isEditDag) } return flag } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue index b881a80875f8e4e412187f14e9de91f96c549b5a..017a38f31d5f44e375af8707a3f1a5feed059f82 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue @@ -99,6 +99,7 @@ name: 'udp', data () { return { + originalName: '', // dag name name: '', // dag description @@ -169,12 +170,15 @@ this.$emit('onUdp') } - // verify that the name exists - this.store.dispatch('dag/verifDAGName', this.name).then(res => { + if (this.originalName !== this.name) { + this.store.dispatch('dag/verifDAGName', this.name).then(res => { + _verif() + }).catch(e => { + this.$message.error(e.msg || '') + }) + } else { _verif() - }).catch(e => { - this.$message.error(e.msg || '') - }) + } }, /** * Close the popup @@ -196,6 +200,7 @@ this.udpList = dag.globalParams this.udpListCache = dag.globalParams this.name = dag.name + this.originalName = dag.name this.description = dag.description this.syncDefine = dag.syncDefine this.timeout = dag.timeout || 0 diff --git 
a/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/master.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/master.vue index 5b5032cf23a309c191c9408bd59813bce8beac5b..8182dac639ec9d7dc3890f7c4c15d08c5b8aa247 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/master.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/master.vue @@ -95,6 +95,7 @@ this.getMasterData().then(res => { this.masterList = _.map(res, (v, i) => { return _.assign(v, { + id: v.host + "_" + v.id, resInfo: JSON.parse(v.resInfo) }) }) diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/worker.vue b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/worker.vue index aa2deca77ea34cf966f84aa0560f0bc0514a608e..609987e871823937f002f676a619994010fff545 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/worker.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/worker.vue @@ -115,6 +115,7 @@ this.getWorkerData().then(res => { this.workerList = _.map(res, (v, i) => { return _.assign(v, { + id: v.host + "_" + v.id, resInfo: JSON.parse(v.resInfo) }) }) diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue index 577bb4f805059497dcb834a78a5a07b563e0ef32..5de30e90b4712f691d15a5981f18378bacfd977b 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue @@ -117,6 +117,7 @@ + @@ -148,6 +149,7 @@ import mTiming from './timing' import { mapActions } from 'vuex' import { publishStatus } from '@/conf/home/pages/dag/_source/config' + import mVersions from './versions' export default { name: 'definition-list', @@ -164,7 +166,7 @@ pageSize: 
Number }, methods: { - ...mapActions('dag', ['editProcessState', 'getStartCheck', 'getReceiver', 'deleteDefinition', 'batchDeleteDefinition','exportDefinition','copyProcess']), + ...mapActions('dag', ['editProcessState', 'getStartCheck', 'getReceiver', 'deleteDefinition', 'batchDeleteDefinition', 'exportDefinition', 'getProcessDefinitionVersionsPage', 'copyProcess', 'switchProcessDefinitionVersion', 'deleteProcessDefinitionVersion']), ...mapActions('security', ['getWorkerGroupsAll']), _rtPublishStatus (code) { return _.filter(publishStatus, v => v.code === code)[0].desc @@ -334,6 +336,125 @@ }) }, + _version (item) { + let self = this + this.getProcessDefinitionVersionsPage({ + pageNo: 1, + pageSize: 10, + processDefinitionId: item.id + }).then(res => { + let processDefinitionVersions = res.data.lists + let total = res.data.totalCount + let pageSize = res.data.pageSize + let pageNo = res.data.currentPage + if (this.versionsModel) { + this.versionsModel.remove() + } + this.versionsModel = this.$drawer({ + direction: 'right', + closable: true, + showMask: true, + escClose: true, + render (h) { + return h(mVersions, { + on: { + /** + * switch version in process definition version list + * + * @param version the version user want to change + * @param processDefinitionId the process definition id + * @param fromThis fromThis + */ + mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) { + self.switchProcessDefinitionVersion({ + version: version, + processDefinitionId: processDefinitionId + }).then(res => { + self.$message.success($t('Switch Version Successfully')) + setTimeout(() => { + fromThis.$destroy() + self.versionsModel.remove() + }, 0) + self.$router.push({ path: `/projects/definition/list/${processDefinitionId}` }) + }).catch(e => { + self.$message.error(e.msg || '') + }) + }, + + /** + * Paging event of process definition versions + * + * @param pageNo page number + * @param pageSize page size + * @param processDefinitionId the 
process definition id of page version + * @param fromThis fromThis + */ + mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionId, fromThis }) { + self.getProcessDefinitionVersionsPage({ + pageNo: pageNo, + pageSize: pageSize, + processDefinitionId: processDefinitionId + }).then(res => { + fromThis.processDefinitionVersions = res.data.lists + fromThis.total = res.data.totalCount + fromThis.pageSize = res.data.pageSize + fromThis.pageNo = res.data.currentPage + }).catch(e => { + self.$message.error(e.msg || '') + }) + }, + + /** + * delete one version of process definition + * + * @param version the version need to delete + * @param processDefinitionId the process definition id user want to delete + * @param fromThis fromThis + */ + mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) { + self.deleteProcessDefinitionVersion({ + version: version, + processDefinitionId: processDefinitionId + }).then(res => { + self.$message.success(res.msg || '') + fromThis.$emit('mVersionGetProcessDefinitionVersionsPage', { + pageNo: 1, + pageSize: 10, + processDefinitionId: processDefinitionId, + fromThis: fromThis + }) + }).catch(e => { + self.$message.error(e.msg || '') + }) + }, + + /** + * remove this drawer + * + * @param fromThis + */ + close ({ fromThis }) { + setTimeout(() => { + fromThis.$destroy() + self.versionsModel.remove() + }, 0) + } + }, + props: { + processDefinition: item, + processDefinitionVersions: processDefinitionVersions, + total: total, + pageNo: pageNo, + pageSize: pageSize + } + }) + } + }) + }).catch(e => { + this.$message.error(e.msg || '') + }) + }, + _batchExport () { this.exportDefinition({ processDefinitionIds: this.strSelectIds, @@ -423,6 +544,6 @@ }, mounted () { }, - components: { } + components: { mVersions } } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/versions.vue 
b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/versions.vue new file mode 100644 index 0000000000000000000000000000000000000000..891f0f62a7f6db1d4e9b05071b02c31c50eafd8c --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/versions.vue @@ -0,0 +1,255 @@ +/* +* Licensed to the Apache Software Foundation (ASF) under one or more +* contributor license agreements. See the NOTICE file distributed with +* this work for additional information regarding copyright ownership. +* The ASF licenses this file to You under the Apache License, Version 2.0 +* (the "License"); you may not use this file except in compliance with +* the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + + + + + diff --git a/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js index a549aafaa202b33a5d4c3cc978c8975975d0374e..be516042b0d47b157ea3df844f900a4baf8eb332 100644 --- a/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js +++ b/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js @@ -79,6 +79,46 @@ export default { }) }) }, + + /** + * get process definition versions pagination info + */ + getProcessDefinitionVersionsPage ({ state }, payload) { + return new Promise((resolve, reject) => { + io.get(`projects/${state.projectName}/process/versions`, payload, res => { + resolve(res) + }).catch(e => { + reject(e) + }) + }) + }, + + /** + * switch process definition version + */ + switchProcessDefinitionVersion ({ state }, payload) { + return new Promise((resolve, reject) => { + io.get(`projects/${state.projectName}/process/version/switch`, payload, res => { + resolve(res) + }).catch(e => { + reject(e) + }) + }) + }, + + /** + * delete process definition version + */ + deleteProcessDefinitionVersion ({ state }, payload) { + return new Promise((resolve, reject) => { + io.get(`projects/${state.projectName}/process/version/delete`, payload, res => { + resolve(res) + }).catch(e => { + reject(e) + }) + }) + }, + /** * Update process instance status */ @@ -126,6 +166,8 @@ export default { state.connects = JSON.parse(res.data.connects) // locations state.locations = JSON.parse(res.data.locations) + // version + state.version = res.data.version // Process definition const processDefinitionJson = JSON.parse(res.data.processDefinitionJson) // tasks info diff --git a/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js b/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js index e5e96131f51699704c31e3b354e225afdea0370d..02f90bbf170d59c865f9ee7bf981947f542560bb 100755 --- a/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js +++ b/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js @@ 
-135,6 +135,7 @@ export default { 'Child Node': 'Child Node', 'Please select a sub-Process': 'Please select a sub-Process', Edit: 'Edit', + 'Switch To This Version': 'Switch To This Version', 'Datasource Name': 'Datasource Name', 'Please enter datasource name': 'Please enter datasource name', IP: 'IP', @@ -159,8 +160,11 @@ export default { 'Create Time': 'Create Time', 'Update Time': 'Update Time', Operation: 'Operation', + 'Current Version': 'Current Version', 'Click to view': 'Click to view', 'Delete?': 'Delete?', + 'Switch Version Successfully': 'Switch Version Successfully', + 'Confirm Switch To This Version?': 'Confirm Switch To This Version?', Confirm: 'Confirm', 'Task status statistics': 'Task Status Statistics', Number: 'Number', @@ -288,6 +292,7 @@ export default { Rename: 'Rename', Download: 'Download', Export: 'Export', + 'Version Info': 'Version Info', Submit: 'Submit', 'Edit UDF Function': 'Edit UDF Function', type: 'type', diff --git a/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js b/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js index 26326d3fe2ea4a1c432985dda8faa24141749aa1..88ca2d1cc9cc72e6762b27be6e11957bb69a803d 100755 --- a/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js +++ b/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js @@ -138,6 +138,7 @@ export default { 'Child Node': '子节点', 'Please select a sub-Process': '请选择子工作流', Edit: '编辑', + 'Switch To This Version': '切换到该版本', 'Datasource Name': '数据源名称', 'Please enter datasource name': '请输入数据源名称', IP: 'IP主机名', @@ -162,8 +163,11 @@ export default { 'Create Time': '创建时间', 'Update Time': '更新时间', Operation: '操作', + 'Current Version': '当前版本', 'Click to view': '点击查看', 'Delete?': '确定删除吗?', + 'Switch Version Successfully': '切换版本成功', + 'Confirm Switch To This Version?': '确定切换到该版本吗?', Confirm: '确定', 'Task status statistics': '任务状态统计', Number: '数量', @@ -289,6 +293,7 @@ export default { Rename: '重命名', Download: '下载', Export: '导出', + 'Version Info': '版本信息', Submit: '提交', 'Edit UDF 
Function': '编辑UDF函数', type: '类型', diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/project/ProjectData.java b/e2e/src/test/java/org/apache/dolphinscheduler/data/project/ProjectData.java index fce247aa4d1ba322b3fedd24e4f8a7511cb4a35f..036a6771bd2a9d845478cbe5dc033c3f8e77cd02 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/data/project/ProjectData.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/project/ProjectData.java @@ -17,6 +17,9 @@ package org.apache.dolphinscheduler.data.project; public class ProjectData { + + public static final String CREATE_PROJECT_BUTTON = "创建项目"; + // create project name public static final String PROJECT_NAME = "selenium_project_1"; // create project description diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/project/WorkflowDefineData.java b/e2e/src/test/java/org/apache/dolphinscheduler/data/project/WorkflowDefineData.java index 0b3645f3292181106bfa2f4394b23c173babcb32..c9eb32e9483f18d3295f9ec117088eafe72f7e26 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/data/project/WorkflowDefineData.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/project/WorkflowDefineData.java @@ -20,6 +20,9 @@ public class WorkflowDefineData { /** * create workflow data */ + + public static final String workflow_define = "工作流定义"; + //input shell task name public static final String SHELL_TASK_NAME = "shell_task_selenium_1"; @@ -74,4 +77,13 @@ public class WorkflowDefineData { //input to add workflow global parameters value public static final String INPUT_ADD_WORKFLOW_GLOBAL_PARAMETERS_VALUES = "selenium_global_parameters_value_2"; + + /** + * workflowDefine list + */ + public static final String WORKFLOW_ONLINE_STATE = "上线"; + + public static final String WORKFLOW_OFFLINE_STATE = "下线"; + + } diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/security/AlertManageData.java b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/AlertManageData.java index 
aa470169fcab9e140da894f69aaa067c86f19a09..ae996a209ddc53869d9985203851cfe13993a4a3 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/data/security/AlertManageData.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/AlertManageData.java @@ -17,18 +17,14 @@ package org.apache.dolphinscheduler.data.security; public class AlertManageData { - /** - * Alert Name - */ + //Alert Name public static final String ALERT_NAME = "selenium_alert_Name"; - /** - * Alert Type - */ + public static final String CREATE_ALERT = "创建告警组"; + + // Alert Type public static final String ALERT_TYPE = "邮件"; - /** - * Alert Description - */ + //Alert Description public static final String DESCRIPTION = "create alert test"; public static final String ALERT_MANAGE = "告警组管理 - DolphinScheduler"; diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/security/QueueManageData.java b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/QueueManageData.java index 9fa9568a35e120e16f7a80e089cd7adb60318178..46f0d834d1d46a21f28205cca610951a2a9121d4 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/data/security/QueueManageData.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/QueueManageData.java @@ -22,6 +22,8 @@ public class QueueManageData { */ public static final String QUEUE_NAME = "selenium_queue_name"; + public static final String CREATE_QUEUE = "创建队列"; + /** * Create Queue Value */ @@ -30,12 +32,12 @@ public class QueueManageData { /** * Edit Queue Name */ - public static final String EDIT_QUEUE_NAME = "_edit"; + public static final String EDIT_QUEUE_NAME = "selenium_queue_value_edit"; /** * Edit Queue Value */ - public static final String EDIT_QUEUE_VALUE = "_edit"; + public static final String EDIT_QUEUE_VALUE = "selenium_queue_value_edit"; public static final String QUEUE_MANAGE = "队列管理 - DolphinScheduler"; diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/security/TokenManageData.java 
b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/TokenManageData.java index 6b6370f5507255a8d5269a04f2ee5e88c2ee6926..9dd625ec2593230bf99602e62b9496af2b7da72a 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/data/security/TokenManageData.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/TokenManageData.java @@ -18,7 +18,11 @@ package org.apache.dolphinscheduler.data.security; public class TokenManageData { public static final String TOKEN_MANAGE = "令牌管理 - DolphinScheduler"; + public static final String CREATE_TOKEN = "创建令牌"; + public static final String DATE = "2038-06-10 00:00:00"; + public static final String NAME = "admin"; + } diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/security/UserManageData.java b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/UserManageData.java index 10dcd45af7dbd54b7a3377ad2a91b570d6219585..ee7fc60bc4f7fb60d236e85ebc2048d5baf568d0 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/data/security/UserManageData.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/UserManageData.java @@ -31,6 +31,9 @@ public class UserManageData { public static final String USER_MANAGE = "用户管理 - DolphinScheduler"; + public static final String CREATE_USER_BUTTON = "创建用户"; + + /** * edit user */ diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/ProcessInstanceLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/ProcessInstanceLocator.java index 445ea3ee78adf788482690a751a67cb5e2af2d99..c881c276b6cba35352fd1704bfba64a7745a5662 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/ProcessInstanceLocator.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/ProcessInstanceLocator.java @@ -20,6 +20,9 @@ import org.openqa.selenium.By; public class ProcessInstanceLocator { // jump Process Instance page + // process instance state is success + public static final By 
PROCESS_INSTANCE_SUCCESS_STATE = By.xpath("//table/tr[2]/td[4]/span/em[@title['data-original-title']='成功']"); + //click Process Instance name public static final By CLICK_PROCESS_INSTANCE_NAME = By.xpath("//div[4]/div/ul/li[2]"); diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/ProjectLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/ProjectLocator.java index 8b26e673a66e4bc6b799f913d826c56f82cca73b..94af3ac54609126660c08778b1625144b5ed3c71 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/ProjectLocator.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/ProjectLocator.java @@ -34,6 +34,9 @@ public class ProjectLocator { //submit button public static final By SUBMIT_BUTTON = By.xpath("//div[3]/button[2]/span"); + // project name + public static final By LIST_PROJECT_NAME = By.xpath("//table/tr[2]/td[2]/span/a"); + //delete project button public static final By DELETE_PROJECT_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[9]/span/button"); diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/WorkflowDefineLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/WorkflowDefineLocator.java index a70b22eacf7a8d6e3037f375ef47d64d98aa841a..01fdb39e4aaf2ac816bb691c40e9846580eb943f 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/WorkflowDefineLocator.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/WorkflowDefineLocator.java @@ -49,10 +49,10 @@ public class WorkflowDefineLocator { public static final By INPUT_SHELL_TASK_NAME = By.xpath("//input"); //click stop run type - public static final By CLICK_STOP_RUN_TYPE = By.xpath("//label[2]/span/input"); + public static final By CLICK_STOP_RUN_TYPE = By.xpath("//label[2]/span[1]/input"); //click normal run type - public static final By CLICK_NORMAL_RUN_TYPE = By.xpath("//span/input"); + public static final By CLICK_NORMAL_RUN_TYPE 
= By.xpath("//label[1]/span[1]/input"); //input shell task description public static final By INPUT_SHELL_TASK_DESCRIPTION = By.xpath("//label/div/textarea"); @@ -182,23 +182,24 @@ public class WorkflowDefineLocator { /** * online workflow */ + public static final By WORKFLOW_STATE = By.xpath("//table/tr[2]/td[4]/span"); + + // click online workflow button - public static final By CLICK_ONLINE_WORKFLOW_BUTTON = By.xpath("//div[1]/div/table/tr[2]/td[10]/button[4]"); + public static final By CLICK_ONLINE_WORKFLOW_BUTTON = By.xpath("//button[@title['data-original-title']='上线']"); /** * offline workflow */ // click offline workflow button - public static final By CLICK_OFFLINE_WORKFLOW_BUTTON = By.xpath("//div[1]/div/table/tr[2]/td[10]/button[4]"); - + public static final By CLICK_OFFLINE_WORKFLOW_BUTTON = By.xpath("//button[@title['data-original-title']='下线']"); /** * delete workflow */ //click delete workflow button - public static final By DELETE_WORKFLOW_BOTTOM = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[10]/span/button"); + public static final By DELETE_WORKFLOW_BOTTOM = By.xpath("//table/tr[2]/td[10]/span/button"); //click confirm delete workflow button public static final By CONFIRM_DELETE_WORKFLOW_BOTTOM = By.xpath("//div[2]/div/button[2]/span"); - } diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/AlertManageLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/AlertManageLocator.java index e4dbe731df2e5438dc8dab9292fb15743455f083..82ac473e5b8bab820adc974281aaa5293e4d164c 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/AlertManageLocator.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/AlertManageLocator.java @@ -28,8 +28,9 @@ public class AlertManageLocator { public static final By INPUT_ALERT_DESCRIPTION = By.xpath("//textarea"); public static final By SUBMIT_ALERT = By.xpath("//div[3]/button[2]/span"); + public static final By ALERT_NAME = 
By.xpath("//table/tr[2]/td[2]/span"); + //delete alert locator public static final By DELETE_ALERT_BUTTON = By.xpath("//span/button"); public static final By CONFIRM_DELETE_ALERT_BUTTON = By.xpath("//div[2]/div/button[2]/span"); } - diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/QueueManageLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/QueueManageLocator.java index ee7d3992b5e265c9583761c597af930940d8f2c8..dd42a91db0d490d044d979e1a3820d9a6f5a223a 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/QueueManageLocator.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/QueueManageLocator.java @@ -26,6 +26,8 @@ public class QueueManageLocator { public static final By INPUT_QUEUE_VALUE = By.xpath("//div[2]/div[2]/div/input"); public static final By SUBMIT_QUEUE = By.xpath("//button[2]/span"); + public static final By LIST_QUEUE_NAME = By.xpath("//table/tr[2]/td[2]/span"); + //edit queue locator public static final By CLICK_EDIT_QUEUE = By.xpath("//td[6]/button/i"); } diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TenantManageLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TenantManageLocator.java index 7db883615cc1fa35fd3dd7187152701236ed0213..2f7a5f9ca31117e717fd74ceeadf2031253edf52 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TenantManageLocator.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TenantManageLocator.java @@ -38,4 +38,6 @@ public class TenantManageLocator{ public static final By DELETE_TENANT_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[8]/span/button"); public static final By CONFIRM_DELETE_TENANT_BUTTON = By.xpath("//div[2]/div/button[2]/span"); + + public static final By TENANT_CODE_FIRST = By.xpath("//table/tr[2]/td[2]/span"); } diff --git 
a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TokenManageLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TokenManageLocator.java index 0626ed8031f47e1f659627baac9c70288a1b92a2..d935dd54a077aba056a8880772bae2789b12a56d 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TokenManageLocator.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TokenManageLocator.java @@ -30,7 +30,11 @@ public class TokenManageLocator { public static final By CLICK_SUBMIT_BUTTON = By.xpath("//div[3]/button[2]/span"); + public static final By EDIT_TOKEN_BUTTON = By.xpath("//table/tr[2]/td[7]/button"); + //edit token + public static final By TOKEN = By.xpath("//table/tr[2]/td[1]/span"); + public static final By CLICK_EDIT_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[7]/button/i"); //delete token diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/UserManageLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/UserManageLocator.java index ef2ec9095c25a8f338a2116636619a912d7bb68c..ecfd285d6019b79d3e503abd34253b4aa9ce49c2 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/UserManageLocator.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/UserManageLocator.java @@ -52,13 +52,15 @@ public class UserManageLocator { /** * edit user */ + public static final By USER_NAME = By.xpath("//table/tr[2]/td[2]/span"); + public static final By EDIT_GENERAL_USER_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[11]/button"); public static final By EDIT_ADMIN_USER_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[3]/td[11]/button"); /** * delete user */ - public static final By DELETE_USER_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[11]/span[2]/button"); + public static final By DELETE_USER_BUTTON = By.xpath("//table/tr[3]/td[11]/span[2]/button"); - public static final By CONFIRM_DELETE_USER_BUTTON 
= By.xpath("//div[2]/div/button[2]/span"); + public static final By CONFIRM_DELETE_USER_BUTTON = By.xpath("//tr[3]/td[11]/span[2]/div/div[2]/div/button[2]/span"); } diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/project/ProcessInstancePage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/ProcessInstancePage.java index 4d1a1c253007e0891207bdd7b3e1c35e05e9f1e2..8c251afc84914910201aa2f1f1b951547135e442 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/page/project/ProcessInstancePage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/ProcessInstancePage.java @@ -34,6 +34,7 @@ public class ProcessInstancePage extends PageCommon { public boolean rerunWorkflowPage() throws InterruptedException { Thread.sleep(2000); clickTopElement(ProcessInstanceLocator.CLICK_PROCESS_INSTANCE_NAME); + locateElement(ProcessInstanceLocator.PROCESS_INSTANCE_SUCCESS_STATE); clickTopElement(ProcessInstanceLocator.CLICK_RERUN_BUTTON); return ifTitleContains(ProcessInstanceData.PROCESS_INSTANCE_TITLE); } diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/project/ProjectPage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/ProjectPage.java index 5c591d0b5d0154d822d89ed13d93cde0616f97ed..6a93094340ebdcafc304a0e00d418e698cbf076c 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/page/project/ProjectPage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/ProjectPage.java @@ -32,7 +32,6 @@ public class ProjectPage extends PageCommon { */ public boolean jumpProjectManagePage() throws InterruptedException { clickTopElement(ProjectLocator.PROJECT_MANAGE); - Thread.sleep(TestConstant.ONE_THOUSAND); return ifTitleContains(ProjectData.PROJECT_TITLE); } @@ -42,7 +41,7 @@ public class ProjectPage extends PageCommon { * @return Whether to enter the specified page after create project */ public boolean createProject() throws InterruptedException { - Thread.sleep(500); + 
ifTextExists(ProjectLocator.CREATE_PROJECT_BUTTON,ProjectData.CREATE_PROJECT_BUTTON); clickElement(ProjectLocator.CREATE_PROJECT_BUTTON); // input create project data @@ -53,7 +52,7 @@ public class ProjectPage extends PageCommon { clickButton(ProjectLocator.SUBMIT_BUTTON); // Whether to enter the specified page after submit - return ifTitleContains(ProjectData.PROJECT_TITLE); + return ifTextExists(ProjectLocator.LIST_PROJECT_NAME,ProjectData.PROJECT_NAME); } /** diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/project/RunWorkflowPage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/RunWorkflowPage.java index 9f14dde8fa93d9eb0b915ef583cf4f413d87632e..921e593e4426f9e81b2347dfeb0c354ce0e7ba5e 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/page/project/RunWorkflowPage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/RunWorkflowPage.java @@ -18,7 +18,9 @@ package org.apache.dolphinscheduler.page.project; import org.apache.dolphinscheduler.common.PageCommon; import org.apache.dolphinscheduler.data.project.RunWorkflowData; +import org.apache.dolphinscheduler.data.project.WorkflowDefineData; import org.apache.dolphinscheduler.locator.project.RunWorkflowLocator; +import org.apache.dolphinscheduler.locator.project.WorkflowDefineLocator; import org.openqa.selenium.WebDriver; public class RunWorkflowPage extends PageCommon { @@ -27,17 +29,17 @@ public class RunWorkflowPage extends PageCommon { } public boolean runWorkflow() throws InterruptedException { + // Determine whether the workflow status is online + ifTextExists(WorkflowDefineLocator.WORKFLOW_STATE, WorkflowDefineData.WORKFLOW_ONLINE_STATE); + // click run workflow button System.out.println("Click run workflow button"); - Thread.sleep(1000); clickButton(RunWorkflowLocator.CLICK_RUN_WORKFLOW_BUTTON); - Thread.sleep(1000); clickElement(RunWorkflowLocator.SELECT_FAILURE_STRATEGY_END); clickElement(RunWorkflowLocator.SELECT_FAILURE_STRATEGY_CONTINUE); 
clickElement(RunWorkflowLocator.CLICK_NOTICE_STRATEGY); clickElement(RunWorkflowLocator.SELECT_NOTICE_STRATEGY); - Thread.sleep(500); clickElement(RunWorkflowLocator.CLICK_PROCESS_PRIORITY); clickElement(RunWorkflowLocator.SELECT_PROCESS_PRIORITY_HIGHEST); clickElement(RunWorkflowLocator.CLICK_WORKER_GROUP); diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/project/WorkflowDefinePage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/WorkflowDefinePage.java index 83442562ca45a8e64a83063461970878bd28c46d..8826222189d220fcfbcb6a0ee9cffe71dcd22530 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/page/project/WorkflowDefinePage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/WorkflowDefinePage.java @@ -18,7 +18,9 @@ package org.apache.dolphinscheduler.page.project; import org.apache.dolphinscheduler.common.PageCommon; import org.apache.dolphinscheduler.constant.TestConstant; +import org.apache.dolphinscheduler.data.project.ProjectData; import org.apache.dolphinscheduler.data.project.WorkflowDefineData; +import org.apache.dolphinscheduler.locator.project.ProjectLocator; import org.apache.dolphinscheduler.locator.project.WorkflowDefineLocator; import org.openqa.selenium.WebDriver; @@ -32,9 +34,12 @@ public class WorkflowDefinePage extends PageCommon { */ public boolean jumpWorkflowPage() throws InterruptedException { + ifTextExists(ProjectLocator.LIST_PROJECT_NAME, ProjectData.PROJECT_NAME); + // click project name clickElement(WorkflowDefineLocator.CLICK_PROJECT_NAME); - Thread.sleep(TestConstant.ONE_THOUSAND); + + ifTextExists(WorkflowDefineLocator.CLICK_WORKFLOW_DEFINE,WorkflowDefineData.workflow_define); System.out.println("Click on workflow define to jump to workflow define page"); // click workflow define @@ -126,7 +131,6 @@ public class WorkflowDefinePage extends PageCommon { //click submit button clickElement(WorkflowDefineLocator.CLICK_SUBMIT_BUTTON); - Thread.sleep(TestConstant.ONE_THOUSAND); 
System.out.println("Task node set up successfully"); System.out.println("move to Dag Element "); moveToDragElement(WorkflowDefineLocator.MOUSE_MOVE_SHELL_AT_DAG,-300,-100); @@ -185,7 +189,6 @@ public class WorkflowDefinePage extends PageCommon { //delete workflow global parameters value clickElement(WorkflowDefineLocator.CLICK_DELETE_WORKFLOW_GLOBAL_PARAMETERS); - Thread.sleep(TestConstant.ONE_THOUSAND); //click add button System.out.println("submit workflow"); @@ -197,6 +200,9 @@ public class WorkflowDefinePage extends PageCommon { public boolean onlineWorkflow() throws InterruptedException { clickElement(WorkflowDefineLocator.CLICK_WORKFLOW_DEFINE); + // Determine whether the workflow status is offline + ifTextExists(WorkflowDefineLocator.WORKFLOW_STATE,WorkflowDefineData.WORKFLOW_OFFLINE_STATE); + // click online button System.out.println("Click online workflow button"); clickButton(WorkflowDefineLocator.CLICK_ONLINE_WORKFLOW_BUTTON); @@ -207,9 +213,11 @@ public class WorkflowDefinePage extends PageCommon { public boolean offlineWorkflow() throws InterruptedException { clickElement(WorkflowDefineLocator.CLICK_WORKFLOW_DEFINE); + // Determine whether the workflow status is online + ifTextExists(WorkflowDefineLocator.WORKFLOW_STATE,WorkflowDefineData.WORKFLOW_ONLINE_STATE); + // click offline button System.out.println("offline workflow"); - Thread.sleep(500); clickButton(WorkflowDefineLocator.CLICK_OFFLINE_WORKFLOW_BUTTON); return ifTitleContains(WorkflowDefineData.WORKFLOW_TITLE); @@ -218,8 +226,11 @@ public class WorkflowDefinePage extends PageCommon { public boolean deleteWorkflow() throws InterruptedException { //click delete workflow - Thread.sleep(500); clickElement(WorkflowDefineLocator.CLICK_WORKFLOW_DEFINE); + + // Determine whether the workflow status is offline + ifTextExists(WorkflowDefineLocator.WORKFLOW_STATE,WorkflowDefineData.WORKFLOW_OFFLINE_STATE); + clickButton(WorkflowDefineLocator.DELETE_WORKFLOW_BOTTOM); //click confirm delete project diff 
--git a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/AlertManagePage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/AlertManagePage.java index 4ea64dfc4c02a525db4335d97faf1f558f4173d6..541251e0dbacf21e5f5fbbad0540202254f20032 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/AlertManagePage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/AlertManagePage.java @@ -31,22 +31,21 @@ public class AlertManagePage extends PageCommon { } /** - * createTenant + * create alert * * @return Whether to enter the specified page after create tenant */ public boolean createAlert() throws InterruptedException { // click alert manage - Thread.sleep(500); System.out.println("start click alert manage button"); clickElement(AlertManageLocator.CLICK_ALERT_MANAGE); - Thread.sleep(500); + + //determine whether the create alert button exists + ifTextExists(AlertManageLocator.CLICK_CREATE_ALERT,AlertManageData.CREATE_ALERT); // click create alert button System.out.println("start click create alert button"); clickElement(AlertManageLocator.CLICK_CREATE_ALERT); - Thread.sleep(500); - // input alert data System.out.println("start input alert "); sendInput(AlertManageLocator.INPUT_ALERT_NAME, AlertManageData.ALERT_NAME); @@ -61,15 +60,17 @@ public class AlertManagePage extends PageCommon { clickButton(AlertManageLocator.SUBMIT_ALERT); // Whether to enter the specified page after submit - return ifTitleContains(AlertManageData.ALERT_MANAGE); + return ifTextExists(AlertManageLocator.ALERT_NAME, AlertManageData.ALERT_NAME); } public boolean deleteAlert() throws InterruptedException { - // click user manage + // click alert manage clickElement(AlertManageLocator.CLICK_ALERT_MANAGE); - // click delete user button + ifTextExists(AlertManageLocator.ALERT_NAME, AlertManageData.ALERT_NAME); + + // click delete alert button clickButton(AlertManageLocator.DELETE_ALERT_BUTTON); // click confirm delete button diff --git 
a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/QueueManagePage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/QueueManagePage.java index 95165aec1d74c75e90d6d966d598513d66f5fc3a..368202e9213baab5aa5d62860679987da0d96862 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/QueueManagePage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/QueueManagePage.java @@ -37,15 +37,15 @@ public class QueueManagePage extends PageCommon { */ public boolean createQueue() throws InterruptedException { // click queue manage - Thread.sleep(500); System.out.println("start click queue manage button"); clickElement(QueueManageLocator.CLICK_QUEUE_MANAGE); - Thread.sleep(500); + + //determine whether the create queue button exists + ifTextExists(QueueManageLocator.CLICK_CREATE_QUEUE,QueueManageData.CREATE_QUEUE); // click create queue button System.out.println("start click create queue button"); - clickElement(QueueManageLocator.CLICK_CREATE_QUEUE); - Thread.sleep(500); + clickButton(QueueManageLocator.CLICK_CREATE_QUEUE); // input queue data System.out.println("start input queue"); @@ -57,7 +57,7 @@ public class QueueManagePage extends PageCommon { clickButton(QueueManageLocator.SUBMIT_QUEUE); // Whether to enter the specified page after submit - return ifTitleContains(QueueManageData.QUEUE_MANAGE); + return ifTextExists(QueueManageLocator.LIST_QUEUE_NAME, QueueManageData.QUEUE_NAME); } @@ -68,22 +68,21 @@ public class QueueManagePage extends PageCommon { */ public boolean editQueue() throws InterruptedException { // click queue manage - Thread.sleep(1000); clickElement(QueueManageLocator.CLICK_QUEUE_MANAGE); - Thread.sleep(1000); + + ifTextExists(QueueManageLocator.LIST_QUEUE_NAME, QueueManageData.QUEUE_NAME); // click edit queue button - clickElement(QueueManageLocator.CLICK_EDIT_QUEUE); - Thread.sleep(1000); + clickButton(QueueManageLocator.CLICK_EDIT_QUEUE); // input queue data - 
sendInput(QueueManageLocator.INPUT_QUEUE_NAME, QueueManageData.EDIT_QUEUE_NAME); - sendInput(QueueManageLocator.INPUT_QUEUE_VALUE, QueueManageData.EDIT_QUEUE_VALUE); + clearSendInput(QueueManageLocator.INPUT_QUEUE_NAME, QueueManageData.EDIT_QUEUE_NAME); + clearSendInput(QueueManageLocator.INPUT_QUEUE_VALUE, QueueManageData.EDIT_QUEUE_VALUE); // click button clickButton(QueueManageLocator.SUBMIT_QUEUE); // Whether to enter the specified page after submit - return ifTitleContains(QueueManageData.QUEUE_MANAGE); + return ifTextExists(QueueManageLocator.LIST_QUEUE_NAME, QueueManageData.EDIT_QUEUE_NAME); } } diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TenantManagePage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TenantManagePage.java index 085bfa93a3a6fed3ea9aba0848a67c3aae9db800..40d964fa33ab66813b401b6a55ce24307ec2dde3 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TenantManagePage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TenantManagePage.java @@ -46,7 +46,7 @@ public class TenantManagePage extends PageCommon { * @return Whether to enter the specified page after create tenant */ public boolean createTenant() throws InterruptedException { - clickButton(TenantManageLocator.TENANT_MANAGE); + clickElement(TenantManageLocator.TENANT_MANAGE); //create tenant clickButton(TenantManageLocator.CREATE_TENANT_BUTTON); @@ -61,11 +61,11 @@ public class TenantManagePage extends PageCommon { clickButton(TenantManageLocator.SUBMIT_BUTTON); // Whether to enter the specified page after submit - return ifTitleContains(TenantManageData.TENANT_MANAGE); + return ifTextExists(TenantManageLocator.TENANT_CODE_FIRST, TenantManageData.TENANT_CODE); } public boolean deleteTenant() throws InterruptedException { - clickButton(TenantManageLocator.TENANT_MANAGE); + clickElement(TenantManageLocator.TENANT_MANAGE); // click delete button clickButton(TenantManageLocator.DELETE_TENANT_BUTTON); diff 
--git a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TokenManagePage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TokenManagePage.java index 2f8204ac470212271c5b3a08708eca2b4c43c44c..61fb93c570b6bc07554b79b1c372e5cecd47a06b 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TokenManagePage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TokenManagePage.java @@ -27,26 +27,24 @@ public class TokenManagePage extends PageCommon { } /** - * createTenant + * create token * - * @return Whether to enter the specified page after creat tenant + * @return Whether to enter the specified page after create token */ public boolean createToken() throws InterruptedException { //create token - Thread.sleep(1000); clickElement(TokenManageLocator.CLICK_TOKEN_MANAGE); - Thread.sleep(1000); + + //determine whether the create token button exists + ifTextExists(TokenManageLocator.CLICK_CREATE_TOKEN,TokenManageData.CREATE_TOKEN); // click create token button clickButton(TokenManageLocator.CLICK_CREATE_TOKEN); - Thread.sleep(1000); - - //selectDate(TokenManageLocator.js, TokenManageLocator.CLICK_TIME, TokenManageData.DATE); clickButton(TokenManageLocator.SELECT_USER); clickButton(TokenManageLocator.CLICK_GENERATE_TOKEN_BUTTON); - Thread.sleep(2500); + Thread.sleep(2000); // click button clickButton(TokenManageLocator.CLICK_SUBMIT_BUTTON); @@ -55,21 +53,25 @@ public class TokenManagePage extends PageCommon { return ifTitleContains(TokenManageData.TOKEN_MANAGE); } - - //edit token + /** + * edit token + * + * @return Whether to enter the specified page after edit token + */ public boolean editToken() throws InterruptedException { - // click token manage - clickElement(TokenManageLocator.CLICK_TOKEN_MANAGE); - Thread.sleep(1000); + // edit token + ifTextExists(TokenManageLocator.TOKEN, "1"); - // click create token button + // determine the existence of the editing token + 
locateElement(TokenManageLocator.EDIT_TOKEN_BUTTON); + + // click edit token button clickButton(TokenManageLocator.CLICK_EDIT_BUTTON); - Thread.sleep(1000); clickButton(TokenManageLocator.SELECT_USER); clickButton(TokenManageLocator.CLICK_GENERATE_TOKEN_BUTTON); - Thread.sleep(2500); + Thread.sleep(2000); // click button clickButton(TokenManageLocator.CLICK_SUBMIT_BUTTON); @@ -83,7 +85,6 @@ public class TokenManagePage extends PageCommon { public boolean deleteToken() throws InterruptedException { // click token manage clickElement(TokenManageLocator.CLICK_TOKEN_MANAGE); - Thread.sleep(1000); clickButton(TokenManageLocator.CLICK_DELETE_BUTTON); clickButton(TokenManageLocator.CLICK_CONFIRM_DELETE_BUTTON); diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/UserManagePage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/UserManagePage.java index 31f0445cd8faffae74c29b82e41575b4ea016e1c..367896a3cff404f1a1229f4e59482e26e190ea37 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/UserManagePage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/UserManagePage.java @@ -29,12 +29,15 @@ public class UserManagePage extends PageCommon { /** * create user * - * @return Whether to enter the specified page after creat tenant + * @return Whether to enter the specified page after create user */ public boolean createUser() throws InterruptedException { // click user manage clickElement(UserManageLocator.CLICK_USER_MANAGE); + //determine whether the create user button exists + ifTextExists(UserManageLocator.CLICK_CREATE_USER_BUTTON,UserManageData.CREATE_USER_BUTTON); + // click create user button clickButton(UserManageLocator.CLICK_CREATE_USER_BUTTON); @@ -64,9 +67,11 @@ public class UserManagePage extends PageCommon { //edit general user // click user manage System.out.println("start edit general user"); - Thread.sleep(500); clickElement(UserManageLocator.CLICK_USER_MANAGE); + //determine whether 
the general user exists + ifTextExists(UserManageLocator.USER_NAME,UserManageData.USERNAME); + // click edit user button clickButton(UserManageLocator.EDIT_GENERAL_USER_BUTTON ); @@ -97,13 +102,15 @@ public class UserManagePage extends PageCommon { /** * edit admin user */ - public boolean editAdminlUser() throws InterruptedException { + public boolean editAdminUser() throws InterruptedException { //edit admin user // click user manage System.out.println("start edit admin user"); - Thread.sleep(500); clickElement(UserManageLocator.CLICK_USER_MANAGE); + //determine whether the general user edit success + ifTextExists(UserManageLocator.USER_NAME,UserManageData.EDIT_USERNAME); + // click edit user button clickButton(UserManageLocator.EDIT_ADMIN_USER_BUTTON ); @@ -115,7 +122,6 @@ public class UserManagePage extends PageCommon { // click button clickButton(UserManageLocator.SUBMIT); System.out.println("end edit admin user"); - Thread.sleep(500); // Whether to enter the specified page after submit return ifTitleContains(UserManageData.USER_MANAGE); } diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestTokenManage.java b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestTokenManage.java index e46cf0d997d1842cc795334853a189d626604f77..816ae64c296151a2fc7126a9a591468740ddf075 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestTokenManage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestTokenManage.java @@ -24,7 +24,7 @@ public class TestTokenManage extends BaseTest { private TokenManagePage tokenManagePage; @Test(groups={"functionTests","token"},dependsOnGroups = { "login" },description = "TestTokenManage") - public void testUserManage() throws InterruptedException { + public void testTokenManage() throws InterruptedException { tokenManagePage = new TokenManagePage(driver); //generate token System.out.println("start create token"); diff --git 
a/e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestUserManage.java b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestUserManage.java index e464d7c57e8c1e8d3e34ad2fc02d4d1486bb94ff..2adc4e77697505d11f78c5f8da84ab87d0d4441f 100644 --- a/e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestUserManage.java +++ b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/testSecurity/TestUserManage.java @@ -39,7 +39,7 @@ public class TestUserManage extends BaseTest { //edit user System.out.println("start edit user"); assert userManagePage.editGeneralUser(); - assert userManagePage.editAdminlUser(); + assert userManagePage.editAdminUser(); System.out.println("end edit user"); System.out.println("==================================="); diff --git a/e2e/testng.xml b/e2e/testng.xml index 7e5dc5228b4ea830b00f543ed43d55d99b580ca1..e08c4da91765159b2b9802ce236287497ffe789d 100644 --- a/e2e/testng.xml +++ b/e2e/testng.xml @@ -53,20 +53,20 @@ - - - - - - + + + + + + + + + + + + + + @@ -78,7 +78,7 @@ - + diff --git a/pom.xml b/pom.xml index 2c6dc9d9faf9551223ad093fc72c474becea4dbd..0c20bd873fa5bab57e65ad3e984eb4d330ff5751 100644 --- a/pom.xml +++ b/pom.xml @@ -730,6 +730,7 @@ **/api/service/LoggerServiceTest.java **/api/service/MonitorServiceTest.java **/api/service/ProcessDefinitionServiceTest.java + **/api/service/ProcessDefinitionVersionServiceTest.java **/api/service/ProcessInstanceServiceTest.java **/api/service/ProjectServiceTest.java **/api/service/QueueServiceTest.java @@ -803,7 +804,8 @@ **/remote/JsonSerializerTest.java **/remote/RemoveTaskLogResponseCommandTest.java **/remote/RemoveTaskLogRequestCommandTest.java - + **/remote/NettyRemotingClientTest.java + **/remote/NettyUtilTest.java **/remote/ResponseFutureTest.java **/server/log/LoggerServerTest.java **/server/entity/SQLTaskExecutionContextTest.java @@ -818,7 +820,7 @@ **/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java 
**/server/master/dispatch/host/assign/RandomSelectorTest.java **/server/master/dispatch/host/assign/RoundRobinSelectorTest.java - + **/server/master/register/MasterRegistryTest.java **/server/master/AlertManagerTest.java **/server/master/MasterCommandTest.java **/server/master/DependentTaskTest.java @@ -853,6 +855,7 @@ **/dao/mapper/DataSourceUserMapperTest.java **/dao/mapper/ProcessDefinitionMapperTest.java + **/dao/mapper/ProcessDefinitionVersionMapperTest.java **/dao/mapper/ProcessInstanceMapMapperTest.java **/dao/mapper/ProcessInstanceMapperTest.java **/dao/mapper/ProjectMapperTest.java diff --git a/sql/dolphinscheduler-postgre.sql b/sql/dolphinscheduler-postgre.sql index 3a0b1843beb21766db7d991487662d83f10dc30c..1fedf05cfd66dcbc68c50e4755978f794fa78d41 100644 --- a/sql/dolphinscheduler-postgre.sql +++ b/sql/dolphinscheduler-postgre.sql @@ -315,6 +315,30 @@ CREATE TABLE t_ds_process_definition ( create index process_definition_index on t_ds_process_definition (project_id,id); +-- +-- Table structure for table t_ds_process_definition_version +-- + +DROP TABLE IF EXISTS t_ds_process_definition_version; +CREATE TABLE t_ds_process_definition_version ( + id int NOT NULL , + process_definition_id int NOT NULL , + version int DEFAULT NULL , + process_definition_json text , + description text , + global_params text , + locations text , + connects text , + receivers text , + receivers_cc text , + create_time timestamp DEFAULT NULL , + timeout int DEFAULT '0' , + resource_ids varchar(64), + PRIMARY KEY (id) +) ; + +create index process_definition_id_and_version on t_ds_process_definition_version (process_definition_id,version); + -- -- Table structure for table t_ds_process_instance -- @@ -693,6 +717,9 @@ ALTER TABLE t_ds_datasource ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_datasource DROP SEQUENCE IF EXISTS t_ds_process_definition_id_sequence; CREATE SEQUENCE t_ds_process_definition_id_sequence; ALTER TABLE t_ds_process_definition ALTER COLUMN id SET DEFAULT 
NEXTVAL('t_ds_process_definition_id_sequence'); +DROP SEQUENCE IF EXISTS t_ds_process_definition_version_id_sequence; +CREATE SEQUENCE t_ds_process_definition_version_id_sequence; +ALTER TABLE t_ds_process_definition_version ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_version_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_process_instance_id_sequence; CREATE SEQUENCE t_ds_process_instance_id_sequence; ALTER TABLE t_ds_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_instance_id_sequence'); diff --git a/sql/dolphinscheduler_mysql.sql b/sql/dolphinscheduler_mysql.sql index bb3dbb095a2642678b082e7fd9a43e0e40a10bed..70bb7cddfc4ac30f6f9ca71d9abb69cefd2589e0 100644 --- a/sql/dolphinscheduler_mysql.sql +++ b/sql/dolphinscheduler_mysql.sql @@ -423,6 +423,33 @@ CREATE TABLE `t_ds_process_definition` ( -- Records of t_ds_process_definition -- ---------------------------- +-- ---------------------------- +-- Table structure for t_ds_process_definition_version +-- ---------------------------- +DROP TABLE IF EXISTS `t_ds_process_definition_version`; +CREATE TABLE `t_ds_process_definition_version` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', + `process_definition_id` int(11) NOT NULL COMMENT 'process definition id', + `version` int(11) DEFAULT NULL COMMENT 'process definition version', + `process_definition_json` longtext COMMENT 'process definition json content', + `description` text, + `global_params` text COMMENT 'global parameters', + `locations` text COMMENT 'Node location information', + `connects` text COMMENT 'Node connection information', + `receivers` text COMMENT 'receivers', + `receivers_cc` text COMMENT 'cc', + `create_time` datetime DEFAULT NULL COMMENT 'create time', + `timeout` int(11) DEFAULT '0' COMMENT 'time out', + `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource ids', + PRIMARY KEY (`id`), + UNIQUE KEY `process_definition_id_and_version` (`process_definition_id`,`version`) USING BTREE, + KEY 
`process_definition_index` (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=84 DEFAULT CHARSET=utf8; + +-- ---------------------------- +-- Records of t_ds_process_definition_version +-- ---------------------------- + -- ---------------------------- -- Table structure for t_ds_process_instance -- ---------------------------- diff --git a/sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql b/sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql index 9eaf3f8d50370f51551cca4f845f9304a6cbe08f..e9f8b5b6b569ab46feec0c0d99ff6ad4dd463278 100644 --- a/sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql +++ b/sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql @@ -56,3 +56,34 @@ delimiter ; CALL uc_dolphin_T_t_ds_task_instance_A_delay_time(); DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_delay_time; +-- ct_dolphin_T_t_ds_process_definition_version +drop PROCEDURE if EXISTS ct_dolphin_T_t_ds_process_definition_version; +delimiter d// +CREATE PROCEDURE ct_dolphin_T_t_ds_process_definition_version() +BEGIN + CREATE TABLE `t_ds_process_definition_version` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', + `process_definition_id` int(11) NOT NULL COMMENT 'process definition id', + `version` int(11) DEFAULT NULL COMMENT 'process definition version', + `process_definition_json` longtext COMMENT 'process definition json content', + `description` text, + `global_params` text COMMENT 'global parameters', + `locations` text COMMENT 'Node location information', + `connects` text COMMENT 'Node connection information', + `receivers` text COMMENT 'receivers', + `receivers_cc` text COMMENT 'cc', + `create_time` datetime DEFAULT NULL COMMENT 'create time', + `timeout` int(11) DEFAULT '0' COMMENT 'time out', + `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource ids', + PRIMARY KEY (`id`), + UNIQUE KEY `process_definition_id_and_version` (`process_definition_id`,`version`) USING BTREE, + KEY `process_definition_index` (`id`) USING BTREE + ) ENGINE=InnoDB 
AUTO_INCREMENT=84 DEFAULT CHARSET=utf8; +END; + +d// + +delimiter ; +CALL ct_dolphin_T_t_ds_process_definition_version; +DROP PROCEDURE ct_dolphin_T_t_ds_process_definition_version; + diff --git a/sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql b/sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql index 9a65824238f98c7767d84c27c26ddc465fc3b247..52045f61f08c139789661d89a1969fd2fd7a9091 100644 --- a/sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql +++ b/sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql @@ -49,4 +49,34 @@ d// delimiter ; SELECT uc_dolphin_T_t_ds_task_instance_A_delay_time(); -DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_delay_time(); \ No newline at end of file +DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_delay_time(); + +-- ct_dolphin_T_t_ds_process_definition_version +delimiter d// +CREATE OR REPLACE FUNCTION ct_dolphin_T_t_ds_process_definition_version() RETURNS void AS $$ +BEGIN +CREATE TABLE t_ds_process_definition_version ( + id int NOT NULL , + process_definition_id int NOT NULL , + version int DEFAULT NULL , + process_definition_json text , + description text , + global_params text , + locations text , + connects text , + receivers text , + receivers_cc text , + create_time timestamp DEFAULT NULL , + timeout int DEFAULT '0' , + resource_ids varchar(64), + PRIMARY KEY (id) +) ; +create index process_definition_id_and_version on t_ds_process_definition_version (process_definition_id,version); + +END; +$$ LANGUAGE plpgsql; +d// + +delimiter ; +SELECT ct_dolphin_T_t_ds_process_definition_version(); +DROP FUNCTION IF EXISTS ct_dolphin_T_t_ds_process_definition_version(); \ No newline at end of file