From e60c86dd62b72fa972b7f9a81abe92d8f2c5f6ea Mon Sep 17 00:00:00 2001 From: gongzhongqiang <764629910@qq.com> Date: Sat, 2 Sep 2023 22:43:33 +0800 Subject: [PATCH 1/2] [Refactor] Refactor ApplicationService moving it to own package and creating two interfaces. (#3009) * Refactor ApplicationService moving it to own package and creating two interfaces. * rebase latest dev --- .../console/core/aspect/StreamParkAspect.java | 6 +- .../controller/ApplicationController.java | 48 +- .../ApplicationHistoryController.java | 18 +- .../core/controller/SavePointController.java | 6 +- .../core/service/ApplicationService.java | 128 -- .../alert/impl/AlertConfigServiceImpl.java | 6 +- .../application/ApplicationActionService.java | 77 + .../application/ApplicationInfoService.java | 238 +++ .../application/ApplicationManageService.java | 152 ++ .../impl/ApplicationActionServiceImpl.java | 740 +++++++ .../impl/ApplicationInfoServiceImpl.java | 508 +++++ .../impl/ApplicationManageServiceImpl.java | 753 +++++++ .../service/impl/AppBuildPipeServiceImpl.java | 64 +- .../impl/ApplicationBackUpServiceImpl.java | 10 +- .../service/impl/ApplicationServiceImpl.java | 1834 ----------------- .../service/impl/ExternalLinkServiceImpl.java | 9 +- .../service/impl/FlinkClusterServiceImpl.java | 9 +- .../service/impl/FlinkEnvServiceImpl.java | 6 +- .../core/service/impl/ProjectServiceImpl.java | 14 +- .../service/impl/ResourceServiceImpl.java | 6 +- .../service/impl/SavePointServiceImpl.java | 14 +- .../service/impl/VariableServiceImpl.java | 8 +- .../service/impl/YarnQueueServiceImpl.java | 6 +- .../core/task/CheckpointProcessor.java | 6 +- .../core/task/FlinkClusterWatcher.java | 8 +- .../console/core/task/FlinkHttpWatcher.java | 26 +- .../task/FlinkK8sChangeEventListener.java | 17 +- .../core/task/FlinkK8sWatcherWrapper.java | 8 +- .../system/service/impl/TeamServiceImpl.java | 6 +- .../system/service/impl/UserServiceImpl.java | 11 +- ...ava => ApplicationManageServiceITest.java} | 17 +- 
...java => ApplicationManageServiceTest.java} | 19 +- .../core/service/SavePointServiceTest.java | 7 +- .../console/core/service/UserServiceTest.java | 13 +- .../core/service/YarnQueueServiceTest.java | 17 +- 35 files changed, 2676 insertions(+), 2139 deletions(-) delete mode 100644 streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/ApplicationService.java create mode 100644 streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationActionService.java create mode 100644 streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java create mode 100644 streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java create mode 100644 streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java create mode 100644 streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java create mode 100644 streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java delete mode 100644 streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationServiceImpl.java rename streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/{ApplicationServiceITest.java => ApplicationManageServiceITest.java} (87%) rename streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/{ApplicationServiceTest.java => ApplicationManageServiceTest.java} (86%) diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/StreamParkAspect.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/StreamParkAspect.java index 2392885bd8..6d14119d5f 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/StreamParkAspect.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/StreamParkAspect.java @@ -26,8 +26,8 @@ import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.enums.PermissionType; import org.apache.streampark.console.core.enums.UserType; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.CommonService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.core.task.FlinkHttpWatcher; import org.apache.streampark.console.system.entity.AccessToken; import org.apache.streampark.console.system.entity.Member; @@ -61,7 +61,7 @@ public class StreamParkAspect { @Autowired private FlinkHttpWatcher flinkHttpWatcher; @Autowired private CommonService commonService; @Autowired private MemberService memberService; - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; @Pointcut( "execution(public" @@ -128,7 +128,7 @@ public RestResponse permissionAction(ProceedingJoinPoint joinPoint) throws Throw "Permission denied, only user belongs to this team can access this permission"); break; case APP: - Application app = applicationService.getById(paramId); + Application app = applicationManageService.getById(paramId); ApiAlertException.throwIfTrue(app == null, "Invalid operation, application is null"); member = memberService.findByUserName(app.getTeamId(), 
currentUser.getUsername()); ApiAlertException.throwIfTrue( diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java index 6a3e18a6b0..ed2a113522 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java @@ -35,8 +35,10 @@ import org.apache.streampark.console.core.service.AppBuildPipeService; import org.apache.streampark.console.core.service.ApplicationBackUpService; import org.apache.streampark.console.core.service.ApplicationLogService; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.ResourceService; +import org.apache.streampark.console.core.service.application.ApplicationActionService; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.flink.packer.pipeline.PipelineStatus; import org.apache.shiro.authz.annotation.RequiresPermissions; @@ -73,7 +75,9 @@ @RequestMapping("flink/app") public class ApplicationController { - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; + @Autowired private ApplicationActionService applicationActionService; + @Autowired private ApplicationInfoService applicationInfoService; @Autowired private ApplicationBackUpService backUpService; @@ -88,7 +92,7 @@ public class ApplicationController { @PostMapping("get") @RequiresPermissions("app:detail") public RestResponse get(Application app) { - 
Application application = applicationService.getApp(app); + Application application = applicationManageService.getApp(app); return RestResponse.success(application); } @@ -98,7 +102,7 @@ public RestResponse get(Application app) { @PostMapping("create") @RequiresPermissions("app:create") public RestResponse create(Application app) throws IOException { - boolean saved = applicationService.create(app); + boolean saved = applicationManageService.create(app); return RestResponse.success(saved); } @@ -124,7 +128,7 @@ public RestResponse create(Application app) throws IOException { @PostMapping(value = "copy") @RequiresPermissions("app:copy") public RestResponse copy(@Parameter(hidden = true) Application app) throws IOException { - Long id = applicationService.copy(app); + Long id = applicationManageService.copy(app); Map data = new HashMap<>(); data.put("id", Long.toString(id)); return id.equals(0L) @@ -138,14 +142,14 @@ public RestResponse copy(@Parameter(hidden = true) Application app) throws IOExc @PostMapping("update") @RequiresPermissions("app:update") public RestResponse update(Application app) { - applicationService.update(app); + applicationManageService.update(app); return RestResponse.success(true); } @Operation(summary = "Get applications dashboard data") @PostMapping("dashboard") public RestResponse dashboard(Long teamId) { - Map map = applicationService.dashboard(teamId); + Map map = applicationInfoService.dashboard(teamId); return RestResponse.success(map); } @@ -154,7 +158,7 @@ public RestResponse dashboard(Long teamId) { @PostMapping("list") @RequiresPermissions("app:view") public RestResponse list(Application app, RestRequest request) { - IPage applicationList = applicationService.page(app, request); + IPage applicationList = applicationManageService.page(app, request); List appRecords = applicationList.getRecords(); List appIds = appRecords.stream().map(Application::getId).collect(Collectors.toList()); Map pipeStates = 
appBuildPipeService.listPipelineStatus(appIds); @@ -191,7 +195,7 @@ public RestResponse list(Application app, RestRequest request) { @PostMapping("mapping") @RequiresPermissions("app:mapping") public RestResponse mapping(Application app) { - boolean flag = applicationService.mapping(app); + boolean flag = applicationInfoService.mapping(app); return RestResponse.success(flag); } @@ -201,7 +205,7 @@ public RestResponse mapping(Application app) { @PostMapping("revoke") @RequiresPermissions("app:release") public RestResponse revoke(Application app) { - applicationService.revoke(app); + applicationActionService.revoke(app); return RestResponse.success(); } @@ -241,8 +245,8 @@ public RestResponse revoke(Application app) { @RequiresPermissions("app:start") public RestResponse start(@Parameter(hidden = true) Application app) { try { - applicationService.checkEnv(app); - applicationService.start(app, false); + applicationInfoService.checkEnv(app); + applicationActionService.start(app, false); return RestResponse.success(true); } catch (Exception e) { return RestResponse.success(false).message(e.getMessage()); @@ -292,7 +296,7 @@ public RestResponse start(@Parameter(hidden = true) Application app) { @PostMapping(value = "cancel") @RequiresPermissions("app:cancel") public RestResponse cancel(@Parameter(hidden = true) Application app) throws Exception { - applicationService.cancel(app); + applicationActionService.cancel(app); return RestResponse.success(); } @@ -303,7 +307,7 @@ public RestResponse cancel(@Parameter(hidden = true) Application app) throws Exc @PostMapping("clean") @RequiresPermissions("app:clean") public RestResponse clean(Application app) { - applicationService.clean(app); + applicationManageService.clean(app); return RestResponse.success(true); } @@ -313,7 +317,7 @@ public RestResponse clean(Application app) { @PostMapping("forcedStop") @RequiresPermissions("app:cancel") public RestResponse forcedStop(Application app) { - applicationService.forcedStop(app); + 
applicationActionService.forcedStop(app); return RestResponse.success(); } @@ -326,28 +330,28 @@ public RestResponse yarn() { @Operation(summary = "Get application on yarn name") @PostMapping("name") public RestResponse yarnName(Application app) { - String yarnName = applicationService.getYarnName(app); + String yarnName = applicationInfoService.getYarnName(app); return RestResponse.success(yarnName); } @Operation(summary = "Check the application exist status") @PostMapping("checkName") public RestResponse checkName(Application app) { - AppExistsState exists = applicationService.checkExists(app); + AppExistsState exists = applicationInfoService.checkExists(app); return RestResponse.success(exists.get()); } @Operation(summary = "Get application conf") @PostMapping("readConf") public RestResponse readConf(Application app) throws IOException { - String config = applicationService.readConf(app); + String config = applicationInfoService.readConf(app); return RestResponse.success(config); } @Operation(summary = "Get application main-class") @PostMapping("main") public RestResponse getMain(Application application) { - String mainClass = applicationService.getMain(application); + String mainClass = applicationInfoService.getMain(application); return RestResponse.success(mainClass); } @@ -379,7 +383,7 @@ public RestResponse deleteOperationLog(ApplicationLog applicationLog) { @PostMapping("delete") @RequiresPermissions("app:delete") public RestResponse delete(Application app) throws InternalException { - Boolean deleted = applicationService.delete(app); + Boolean deleted = applicationManageService.delete(app); return RestResponse.success(deleted); } @@ -437,7 +441,7 @@ public RestResponse verifySchema(String path) { @Operation(summary = "Check the application savepoint path") @PostMapping("checkSavepointPath") public RestResponse checkSavepointPath(Application app) throws Exception { - String error = applicationService.checkSavepointPath(app); + String error = 
applicationInfoService.checkSavepointPath(app); if (error == null) { return RestResponse.success(true); } else { @@ -468,7 +472,7 @@ public RestResponse checkSavepointPath(Application app) throws Exception { }) @PostMapping(value = "k8sStartLog") public RestResponse k8sStartLog(Long id, Integer offset, Integer limit) throws Exception { - String resp = applicationService.k8sStartLog(id, offset, limit); + String resp = applicationInfoService.k8sStartLog(id, offset, limit); return RestResponse.success(resp); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationHistoryController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationHistoryController.java index 2ba542336a..324ca40660 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationHistoryController.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationHistoryController.java @@ -19,7 +19,7 @@ import org.apache.streampark.common.enums.ExecutionMode; import org.apache.streampark.console.base.domain.RestResponse; -import org.apache.streampark.console.core.service.ApplicationService; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; import org.apache.shiro.authz.annotation.RequiresPermissions; @@ -42,13 +42,13 @@ @RequestMapping("flink/history") public class ApplicationHistoryController { - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationInfoService applicationInfoService; @Operation(summary = "List the upload jar history records") @PostMapping("uploadJars") @RequiresPermissions("app:create") public RestResponse listUploadJars() { - List jars = applicationService.historyUploadJars(); + List jars = applicationInfoService.historyUploadJars(); 
return RestResponse.success(jars); } @@ -56,7 +56,7 @@ public RestResponse listUploadJars() { @PostMapping("k8sNamespaces") @RequiresPermissions("app:create") public RestResponse listK8sNamespace() { - List namespaces = applicationService.getRecentK8sNamespace(); + List namespaces = applicationInfoService.getRecentK8sNamespace(); return RestResponse.success(namespaces); } @@ -69,7 +69,7 @@ public RestResponse listSessionClusterId(int executionMode) { case KUBERNETES_NATIVE_SESSION: case YARN_SESSION: case REMOTE: - clusterIds = applicationService.getRecentK8sClusterId(executionMode); + clusterIds = applicationInfoService.getRecentK8sClusterId(executionMode); break; default: clusterIds = new ArrayList<>(0); @@ -82,7 +82,7 @@ public RestResponse listSessionClusterId(int executionMode) { @PostMapping("flinkBaseImages") @RequiresPermissions("app:create") public RestResponse listFlinkBaseImage() { - List images = applicationService.getRecentFlinkBaseImage(); + List images = applicationInfoService.getRecentFlinkBaseImage(); return RestResponse.success(images); } @@ -90,7 +90,7 @@ public RestResponse listFlinkBaseImage() { @PostMapping("flinkPodTemplates") @RequiresPermissions("app:create") public RestResponse listPodTemplate() { - List templates = applicationService.getRecentK8sPodTemplate(); + List templates = applicationInfoService.getRecentK8sPodTemplate(); return RestResponse.success(templates); } @@ -98,7 +98,7 @@ public RestResponse listPodTemplate() { @PostMapping("flinkJmPodTemplates") @RequiresPermissions("app:create") public RestResponse listJmPodTemplate() { - List templates = applicationService.getRecentK8sJmPodTemplate(); + List templates = applicationInfoService.getRecentK8sJmPodTemplate(); return RestResponse.success(templates); } @@ -106,7 +106,7 @@ public RestResponse listJmPodTemplate() { @PostMapping("flinkTmPodTemplates") @RequiresPermissions("app:create") public RestResponse listTmPodTemplate() { - List templates = 
applicationService.getRecentK8sTmPodTemplate(); + List templates = applicationInfoService.getRecentK8sTmPodTemplate(); return RestResponse.success(templates); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/SavePointController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/SavePointController.java index 95553826e2..e495473c1b 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/SavePointController.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/SavePointController.java @@ -23,8 +23,8 @@ import org.apache.streampark.console.core.annotation.ApiAccess; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.SavePoint; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.SavePointService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.shiro.authz.annotation.RequiresPermissions; @@ -50,7 +50,7 @@ @RequestMapping("flink/savepoint") public class SavePointController { - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; @Autowired private SavePointService savePointService; @@ -73,7 +73,7 @@ public RestResponse history(SavePoint savePoint, RestRequest request) { @RequiresPermissions("savepoint:delete") public RestResponse delete(Long id) throws InternalException { SavePoint savePoint = savePointService.getById(id); - Application application = applicationService.getById(savePoint.getAppId()); + Application application = applicationManageService.getById(savePoint.getAppId()); Boolean deleted = savePointService.delete(id, application); return 
RestResponse.success(deleted); } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/ApplicationService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/ApplicationService.java deleted file mode 100644 index a9c3ddbf1c..0000000000 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/ApplicationService.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.streampark.console.core.service; - -import org.apache.streampark.common.enums.ExecutionMode; -import org.apache.streampark.console.base.domain.RestRequest; -import org.apache.streampark.console.base.exception.ApplicationException; -import org.apache.streampark.console.core.entity.Application; -import org.apache.streampark.console.core.enums.AppExistsState; - -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.service.IService; - -import java.io.IOException; -import java.io.Serializable; -import java.util.Collection; -import java.util.List; -import java.util.Map; - -public interface ApplicationService extends IService { - - IPage page(Application app, RestRequest request); - - boolean existsByTeamId(Long teamId); - - boolean existsByUserId(Long userId); - - boolean create(Application app) throws IOException; - - Long copy(Application app) throws IOException; - - boolean update(Application app); - - void starting(Application app); - - void start(Application app, boolean auto) throws Exception; - - void restart(Application application) throws Exception; - - String getYarnName(Application app); - - AppExistsState checkExists(Application app); - - String checkSavepointPath(Application app) throws Exception; - - void cancel(Application app) throws Exception; - - void persistMetrics(Application application); - - void clean(Application app); - - String readConf(Application app) throws IOException; - - Application getApp(Application app); - - String getMain(Application application); - - boolean mapping(Application app); - - Map dashboard(Long teamId); - - /** set the latest to Effective, it will really become the current effective */ - void toEffective(Application application); - - void revoke(Application app) throws ApplicationException; - - Boolean delete(Application app); - - boolean checkEnv(Application app) throws ApplicationException; - - boolean checkAlter(Application application); - - void 
updateRelease(Application application); - - List getByProjectId(Long id); - - List getByTeamId(Long teamId); - - List getByTeamIdAndExecutionModes( - Long teamId, Collection executionModes); - - boolean checkBuildAndUpdate(Application app); - - void forcedStop(Application app); - - boolean existsRunningByClusterId(Long clusterId); - - boolean existsByClusterId(Long clusterId); - - Integer countByClusterId(Long clusterId); - - Integer countAffectedByClusterId(Long clusterId, String dbType); - - boolean existsByFlinkEnvId(Long flinkEnvId); - - List getRecentK8sNamespace(); - - List getRecentK8sClusterId(Integer executionMode); - - List getRecentFlinkBaseImage(); - - List getRecentK8sPodTemplate(); - - List getRecentK8sJmPodTemplate(); - - List getRecentK8sTmPodTemplate(); - - List historyUploadJars(); - - String k8sStartLog(Long id, Integer offset, Integer limit) throws Exception; - - void changeOwnership(Long userId, Long targetUserId); -} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/alert/impl/AlertConfigServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/alert/impl/AlertConfigServiceImpl.java index 9506d38d6d..feb07b4902 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/alert/impl/AlertConfigServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/alert/impl/AlertConfigServiceImpl.java @@ -24,8 +24,8 @@ import org.apache.streampark.console.core.entity.AlertConfig; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.mapper.AlertConfigMapper; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.alert.AlertConfigService; +import 
org.apache.streampark.console.core.service.application.ApplicationInfoService; import org.apache.commons.collections.CollectionUtils; @@ -47,7 +47,7 @@ public class AlertConfigServiceImpl extends ServiceImpl implements AlertConfigService { - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationInfoService applicationInfoService; @Override public IPage page(AlertConfigParams params, RestRequest request) { @@ -76,7 +76,7 @@ public boolean exist(AlertConfig alertConfig) { @Override public boolean deleteById(Long id) throws AlertException { long count = - applicationService.count( + applicationInfoService.count( new LambdaQueryWrapper().eq(id != null, Application::getAlertId, id)); if (count > 0) { throw new AlertException( diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationActionService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationActionService.java new file mode 100644 index 0000000000..3e77485f80 --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationActionService.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.streampark.console.core.service.application; + +import org.apache.streampark.console.base.exception.ApplicationException; +import org.apache.streampark.console.core.entity.Application; + +import com.baomidou.mybatisplus.extension.service.IService; + +/** + * This interface represents an Application Operation Service. It extends the IService interface for + * handling Application entities. + */ +public interface ApplicationActionService extends IService { + + /** + * This method is used to start the given application. + * + * @param app The application object to be started. + */ + void starting(Application app); + + /** + * Starts the specified application. + * + * @param app The application to start. + * @param auto True if the application should start automatically, False otherwise. + * @throws Exception If an error occurs while starting the application. + */ + void start(Application app, boolean auto) throws Exception; + + /** + * Restarts the given application. + * + * @param application The application to restart. + * @throws Exception If an error occurs while restarting the application. + */ + void restart(Application application) throws Exception; + + /** + * Revokes access for the given application. + * + * @param app The application for which access needs to be revoked. + * @throws ApplicationException if an error occurs while revoking access. + */ + void revoke(Application app) throws ApplicationException; + + /** + * Cancels the given application. Throws an exception if cancellation fails. 
+ * + * @param app the application to be canceled + * @throws Exception if cancellation fails + */ + void cancel(Application app) throws Exception; + + /** + * Forces the given application to stop. + * + * @param app the application to be stopped + */ + void forcedStop(Application app); +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java new file mode 100644 index 0000000000..093b1b0d44 --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java @@ -0,0 +1,238 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.streampark.console.core.service.application; + +import org.apache.streampark.console.base.exception.ApplicationException; +import org.apache.streampark.console.core.entity.Application; +import org.apache.streampark.console.core.enums.AppExistsState; + +import com.baomidou.mybatisplus.extension.service.IService; + +import java.io.IOException; +import java.io.Serializable; +import java.util.List; +import java.util.Map; + +/** + * This interface defines the methods that can be used for various utility operations related to an + * application. + */ +public interface ApplicationInfoService extends IService { + + /** + * Maps the given application. + * + * @param app The application to be mapped. + * @return True if the mapping was successful, false otherwise. + */ + boolean mapping(Application app); + + /** + * Checks the environment for the given application. + * + * @param app the application to check the environment for + * @return true if the environment is valid for the application, false otherwise + * @throws ApplicationException if an error occurs while checking the environment + */ + boolean checkEnv(Application app) throws ApplicationException; + + /** + * Checks the savepoint path for the given application. + * + * @param app the application to check the savepoint path for + * @return the check message + * @throws Exception if an error occurs while checking the savepoint path + */ + String checkSavepointPath(Application app) throws Exception; + + /** + * Checks if the given application meets the required alterations. + * + * @param application The application to be checked. + * @return True if the application meets the required alterations, false otherwise. + * @throws ApplicationException If an error occurs while checking the alterations. + */ + boolean checkAlter(Application application); + + /** + * Checks if a record exists in the database with the given team ID. + * + * @param teamId The ID of the team to check. 
+ * @return true if a record with the given team ID exists, false otherwise. + */ + boolean existsByTeamId(Long teamId); + + /** + * Checks if a record exists in the database with the given user ID. + * + * @param userId The ID of the user to check. + * @return true if a record with the given user ID exists, false otherwise. + */ + boolean existsByUserId(Long userId); + + /** + * Checks if a job exists for a given Flink environment ID. + * + * @param flinkEnvId The ID of the Flink environment. + * @return true if a job exists for the given Flink environment ID; otherwise, false. + */ + boolean existsByFlinkEnvId(Long flinkEnvId); + + /** + * Checks if a job is running for a given cluster ID. + * + * @param clusterId The ID of the cluster. + * @return true if a job is running for the given cluster ID; otherwise, false. + */ + boolean existsRunningByClusterId(Long clusterId); + + /** + * Checks if there is a job that is associated with the given cluster ID. + * + * @param clusterId The ID of the cluster. + * @return True if a job exists for the given cluster ID, false otherwise. + */ + boolean existsByClusterId(Long clusterId); + + /** + * Counts the number of items associated with the given cluster ID. + * + * @param clusterId The ID of the cluster. + * @return The number of items associated with the given cluster ID. + */ + Integer countByClusterId(Long clusterId); + + /** + * Counts the number of items associated with the given cluster ID and database type. + * + * @param clusterId The ID of the cluster. + * @param dbType The type of the database. + * @return The number of items associated with the given cluster ID and database type. + */ + Integer countAffectedByClusterId(Long clusterId, String dbType); + + /** + * Gets the YARN name for the given application. + * + * @param app The application for which to retrieve the YARN name. + * @return The YARN name of the application as a String. 
+ */ + String getYarnName(Application app); + + /** + * Checks if the given application exists in the system. + * + * @param app The application to check for existence. + * @return AppExistsState indicating the existence state of the application. + */ + AppExistsState checkExists(Application app); + + /** + * Persists the metrics of the given application. + * + * @param application The application which metrics need to be persisted. + */ + void persistMetrics(Application application); + + /** + * Reads the configuration for the given application and returns it as a String. + * + * @param app The application for which the configuration needs to be read. + * @return The configuration for the given application as a String. + * @throws IOException If an I/O error occurs while reading the configuration. + */ + String readConf(Application app) throws IOException; + + /** + * Retrieves the main configuration value for the given Application. + * + * @param application the Application object for which to fetch the main configuration value + * @return the main configuration value as a String + */ + String getMain(Application application); + + /** + * Returns the dashboard for the specified team. + * + * @param teamId the ID of the team + * @return a map containing the dashboard data + */ + Map dashboard(Long teamId); + + /** + * Retrieves the Kubernetes start log for a specific ID with an optional offset and limit. + * + * @param id The ID of the Kubernetes resource. + * @param offset The offset to start fetching log lines from. + * @param limit The maximum number of log lines to fetch. + * @return The Kubernetes start log as a string. + * @throws Exception if an error occurs while retrieving the log. + */ + String k8sStartLog(Long id, Integer offset, Integer limit) throws Exception; + + /** + * Retrieves the list of recent Kubernetes namespaces. + * + * @return The list of recent Kubernetes namespaces as a List of Strings. 
+ */ + List getRecentK8sNamespace(); + + /** + * Retrieves the list of recent K8s cluster IDs based on the specified execution mode. + * + * @param executionMode The execution mode to filter the recent K8s cluster IDs. 1: Production + * mode 2: Test mode 3: Development mode -1: All modes + * @return The list of recent K8s cluster IDs based on the specified execution mode. + */ + List getRecentK8sClusterId(Integer executionMode); + + /** + * Retrieves the list of recent Flink base images. + * + * @return a list of strings representing the recent Flink base images + */ + List getRecentFlinkBaseImage(); + + /** + * Retrieves the recent K8s pod templates. + * + * @return a List of Strings representing the recent K8s pod templates. + */ + List getRecentK8sPodTemplate(); + + /** + * Retrieves the list of recent Kubernetes Job Manager Pod templates. + * + * @return A List of string values representing the recent Kubernetes Job Manager Pod templates. + */ + List getRecentK8sJmPodTemplate(); + + /** + * Retrieves the list of recent K8s TM pod templates. + * + * @return The list of recent K8s TM pod templates as a List of String objects. + */ + List getRecentK8sTmPodTemplate(); + + /** + * Uploads a list of jars to the server for historical reference. + * + * @return A list of strings representing the names of the uploaded jars. 
+ */ + List historyUploadJars(); +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java new file mode 100644 index 0000000000..3d2c6f4fa2 --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java @@ -0,0 +1,152 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.streampark.console.core.service.application; + +import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.console.base.domain.RestRequest; +import org.apache.streampark.console.core.entity.Application; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.service.IService; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; + +/** + * The ApplicationManageService interface provides methods to manage applications information. 
It + * extends the IService interface with the Application entity. + */ +public interface ApplicationManageService extends IService { + + /** + * Retrieves a page of applications based on the provided parameters. + * + * @param app The application object to be used for filtering the results. + * @param request The REST request object containing additional parameters or headers. + * @return A page of Application objects based on the provided parameters. + */ + IPage page(Application app, RestRequest request); + + /** + * Creates a new application. + * + * @param app The application to create. + * @return True if the application was successfully created, false otherwise. + * @throws IOException If an I/O error occurs. + */ + boolean create(Application app) throws IOException; + + /** + * Copies the given Application. + * + * @param app the Application to be copied + * @return the size of the copied Application in bytes as a Long value + * @throws IOException if there was an error during the copy process + */ + Long copy(Application app) throws IOException; + + /** + * Updates the given application. + * + * @param app the application to be updated + * @return true if the update was successful, false otherwise + */ + boolean update(Application app); + + /** + * Sets the given application to be effective. + * + * @param application the application to be set effective + */ + void toEffective(Application application); + + /** + * Checks if the given application is ready to build and update. + * + * @param app the application to check for readiness + * @return true if the application is ready to build and update, false otherwise + */ + boolean checkBuildAndUpdate(Application app); + + /** + * Deletes the given Application from the system. + * + * @param app The Application to be deleted. + * @return True if the deletion was successful, false otherwise. + */ + Boolean delete(Application app); + + /** + * Retrieves the Application with the specified details from the system. 
+ * + * @param app The Application object containing the details of the Application to retrieve. + * @return The Application object that matches the specified details, or null if no matching + * Application is found. + */ + Application getApp(Application app); + + /** + * Updates the release of the given application. + * + * @param application The application to update the release for. + */ + void updateRelease(Application application); + + /** + * Cleans the application by performing necessary cleanup tasks. + * + * @param app The application to clean. + */ + void clean(Application app); + + /** + * Retrieves a list of applications by project ID. + * + * @param id The project ID to search for applications. + * @return A list of applications associated with the project ID. + */ + List getByProjectId(Long id); + + /** + * Changes the ownership of all applications associated with a user. + * + * @param userId The ID of the user whose applications will be changed. + * @param targetUserId The ID of the user who will become the new owner of the applications. + */ + void changeOwnership(Long userId, Long targetUserId); + + /** + * Retrieves a list of applications based on the specified team ID. + * + * @param teamId The ID of the team to retrieve the applications for. + * @return A list of Application objects associated with the given team ID. + */ + List getByTeamId(Long teamId); + + /** + * Retrieves a list of applications by team ID and execution modes. 
+ * + * @param teamId The ID of the team to filter by + * @param executionModes The collection of execution modes to filter by + * @return A list of applications that belong to the specified team and have the specified + * execution modes + */ + List getByTeamIdAndExecutionModes( + Long teamId, Collection executionModes); +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java new file mode 100644 index 0000000000..128893d81b --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java @@ -0,0 +1,740 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.streampark.console.core.service.application.impl; + +import org.apache.streampark.common.conf.ConfigConst; +import org.apache.streampark.common.conf.Workspace; +import org.apache.streampark.common.enums.DevelopmentMode; +import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ResolveOrder; +import org.apache.streampark.common.enums.RestoreMode; +import org.apache.streampark.common.fs.FsOperator; +import org.apache.streampark.common.util.CompletableFutureUtils; +import org.apache.streampark.common.util.DeflaterUtils; +import org.apache.streampark.common.util.HadoopUtils; +import org.apache.streampark.common.util.PropertiesUtils; +import org.apache.streampark.common.util.ThreadUtils; +import org.apache.streampark.common.util.Utils; +import org.apache.streampark.console.base.exception.ApiAlertException; +import org.apache.streampark.console.base.exception.ApplicationException; +import org.apache.streampark.console.core.entity.AppBuildPipeline; +import org.apache.streampark.console.core.entity.Application; +import org.apache.streampark.console.core.entity.ApplicationConfig; +import org.apache.streampark.console.core.entity.ApplicationLog; +import org.apache.streampark.console.core.entity.FlinkCluster; +import org.apache.streampark.console.core.entity.FlinkEnv; +import org.apache.streampark.console.core.entity.FlinkSql; +import org.apache.streampark.console.core.entity.Resource; +import org.apache.streampark.console.core.entity.SavePoint; +import org.apache.streampark.console.core.enums.CheckPointType; +import org.apache.streampark.console.core.enums.ConfigFileType; +import org.apache.streampark.console.core.enums.FlinkAppState; +import org.apache.streampark.console.core.enums.Operation; +import org.apache.streampark.console.core.enums.OptionState; +import org.apache.streampark.console.core.enums.ReleaseState; +import org.apache.streampark.console.core.mapper.ApplicationMapper; +import 
org.apache.streampark.console.core.service.AppBuildPipeService; +import org.apache.streampark.console.core.service.ApplicationBackUpService; +import org.apache.streampark.console.core.service.ApplicationConfigService; +import org.apache.streampark.console.core.service.ApplicationLogService; +import org.apache.streampark.console.core.service.CommonService; +import org.apache.streampark.console.core.service.FlinkClusterService; +import org.apache.streampark.console.core.service.FlinkEnvService; +import org.apache.streampark.console.core.service.FlinkSqlService; +import org.apache.streampark.console.core.service.ResourceService; +import org.apache.streampark.console.core.service.SavePointService; +import org.apache.streampark.console.core.service.SettingService; +import org.apache.streampark.console.core.service.VariableService; +import org.apache.streampark.console.core.service.application.ApplicationActionService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; +import org.apache.streampark.console.core.task.FlinkHttpWatcher; +import org.apache.streampark.flink.client.FlinkClient; +import org.apache.streampark.flink.client.bean.CancelRequest; +import org.apache.streampark.flink.client.bean.CancelResponse; +import org.apache.streampark.flink.client.bean.KubernetesSubmitParam; +import org.apache.streampark.flink.client.bean.SubmitRequest; +import org.apache.streampark.flink.client.bean.SubmitResponse; +import org.apache.streampark.flink.kubernetes.FlinkK8sWatcher; +import org.apache.streampark.flink.kubernetes.helper.KubernetesDeploymentHelper; +import org.apache.streampark.flink.kubernetes.ingress.IngressController; +import org.apache.streampark.flink.kubernetes.model.TrackId; +import org.apache.streampark.flink.packer.pipeline.BuildResult; +import org.apache.streampark.flink.packer.pipeline.ShadedBuildResponse; + +import org.apache.commons.lang3.StringUtils; +import org.apache.flink.api.common.JobID; +import 
org.apache.flink.configuration.CoreOptions; +import org.apache.flink.configuration.JobManagerOptions; +import org.apache.flink.configuration.MemorySize; +import org.apache.flink.configuration.RestOptions; +import org.apache.flink.runtime.jobgraph.SavepointConfigOptions; + +import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper; +import com.baomidou.mybatisplus.core.toolkit.Wrappers; +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import io.fabric8.kubernetes.client.KubernetesClientException; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.annotation.Transactional; + +import java.io.File; +import java.net.URI; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.CancellationException; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.Bridge.toTrackId; +import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.isKubernetesApp; + +@Slf4j +@Service +@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class) +public class ApplicationActionServiceImpl extends ServiceImpl + implements ApplicationActionService { + + private final ExecutorService executorService = + new ThreadPoolExecutor( + Runtime.getRuntime().availableProcessors() * 5, + Runtime.getRuntime().availableProcessors() * 10, + 60L, + TimeUnit.SECONDS, + new LinkedBlockingQueue<>(1024), + 
ThreadUtils.threadFactory("streampark-deploy-executor"), + new ThreadPoolExecutor.AbortPolicy()); + + @Autowired private ApplicationBackUpService backUpService; + @Autowired private ApplicationManageService applicationManageService; + + @Autowired private ApplicationConfigService configService; + + @Autowired private ApplicationLogService applicationLogService; + + @Autowired private FlinkEnvService flinkEnvService; + + @Autowired private FlinkSqlService flinkSqlService; + + @Autowired private SavePointService savePointService; + + @Autowired private SettingService settingService; + + @Autowired private CommonService commonService; + + @Autowired private FlinkK8sWatcher k8SFlinkTrackMonitor; + + @Autowired private AppBuildPipeService appBuildPipeService; + + @Autowired private FlinkClusterService flinkClusterService; + + @Autowired private VariableService variableService; + + @Autowired private ResourceService resourceService; + + private final Map> startFutureMap = + new ConcurrentHashMap<>(); + + private final Map> cancelFutureMap = + new ConcurrentHashMap<>(); + + @Override + public void revoke(Application appParma) throws ApplicationException { + Application application = getById(appParma.getId()); + ApiAlertException.throwIfNull( + application, + String.format("The application id=%s not found, revoke failed.", appParma.getId())); + + // 1) delete files that have been published to workspace + application.getFsOperator().delete(application.getAppHome()); + + // 2) rollback the files to the workspace + backUpService.revoke(application); + + // 3) restore related status + LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); + updateWrapper.eq(Application::getId, application.getId()); + if (application.isFlinkSqlJob()) { + updateWrapper.set(Application::getRelease, ReleaseState.FAILED.get()); + } else { + updateWrapper.set(Application::getRelease, ReleaseState.NEED_RELEASE.get()); + } + if (!application.isRunning()) { + 
updateWrapper.set(Application::getState, FlinkAppState.REVOKED.getValue()); + } + baseMapper.update(null, updateWrapper); + } + + @Override + public void restart(Application application) throws Exception { + this.cancel(application); + this.start(application, false); + } + + @Override + public void forcedStop(Application app) { + CompletableFuture startFuture = startFutureMap.remove(app.getId()); + CompletableFuture cancelFuture = cancelFutureMap.remove(app.getId()); + Application application = this.baseMapper.getApp(app); + if (isKubernetesApp(application)) { + KubernetesDeploymentHelper.watchPodTerminatedLog( + application.getK8sNamespace(), application.getJobName(), application.getJobId()); + KubernetesDeploymentHelper.deleteTaskDeployment( + application.getK8sNamespace(), application.getJobName()); + KubernetesDeploymentHelper.deleteTaskConfigMap( + application.getK8sNamespace(), application.getJobName()); + } + if (startFuture != null) { + startFuture.cancel(true); + } + if (cancelFuture != null) { + cancelFuture.cancel(true); + } + if (startFuture == null && cancelFuture == null) { + this.updateToStopped(app); + } + } + + @Override + public void cancel(Application appParam) throws Exception { + FlinkHttpWatcher.setOptionState(appParam.getId(), OptionState.CANCELLING); + Application application = getById(appParam.getId()); + application.setState(FlinkAppState.CANCELLING.getValue()); + + ApplicationLog applicationLog = new ApplicationLog(); + applicationLog.setOptionName(Operation.CANCEL.getValue()); + applicationLog.setAppId(application.getId()); + applicationLog.setJobManagerUrl(application.getJobManagerUrl()); + applicationLog.setOptionTime(new Date()); + applicationLog.setYarnAppId(application.getClusterId()); + + if (appParam.getSavePointed()) { + FlinkHttpWatcher.addSavepoint(application.getId()); + application.setOptionState(OptionState.SAVEPOINTING.getValue()); + } else { + application.setOptionState(OptionState.CANCELLING.getValue()); + } + + 
application.setOptionTime(new Date()); + this.baseMapper.updateById(application); + + Long userId = commonService.getUserId(); + if (!application.getUserId().equals(userId)) { + FlinkHttpWatcher.addCanceledApp(application.getId(), userId); + } + + FlinkEnv flinkEnv = flinkEnvService.getById(application.getVersionId()); + + // infer savepoint + String customSavepoint = null; + if (appParam.getSavePointed()) { + customSavepoint = appParam.getSavePoint(); + if (StringUtils.isBlank(customSavepoint)) { + customSavepoint = savePointService.getSavePointPath(appParam); + } + } + + String clusterId = null; + if (ExecutionMode.isKubernetesMode(application.getExecutionMode())) { + clusterId = application.getClusterId(); + } else if (ExecutionMode.isYarnMode(application.getExecutionMode())) { + if (ExecutionMode.YARN_SESSION.equals(application.getExecutionModeEnum())) { + FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); + ApiAlertException.throwIfNull( + cluster, + String.format( + "The yarn session clusterId=%s can't found, maybe the clusterId is wrong or the cluster has been deleted. Please contact the Admin.", + application.getFlinkClusterId())); + clusterId = cluster.getClusterId(); + } else { + clusterId = application.getAppId(); + } + } + + Map properties = new HashMap<>(); + + if (ExecutionMode.isRemoteMode(application.getExecutionModeEnum())) { + FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); + ApiAlertException.throwIfNull( + cluster, + String.format( + "The clusterId=%s cannot be find, maybe the clusterId is wrong or " + + "the cluster has been deleted. 
Please contact the Admin.", + application.getFlinkClusterId())); + URI activeAddress = cluster.getRemoteURI(); + properties.put(RestOptions.ADDRESS.key(), activeAddress.getHost()); + properties.put(RestOptions.PORT.key(), activeAddress.getPort()); + } + + CancelRequest cancelRequest = + new CancelRequest( + flinkEnv.getFlinkVersion(), + ExecutionMode.of(application.getExecutionMode()), + properties, + clusterId, + application.getJobId(), + appParam.getSavePointed(), + appParam.getDrain(), + customSavepoint, + appParam.getNativeFormat(), + application.getK8sNamespace()); + + final Date triggerTime = new Date(); + CompletableFuture cancelFuture = + CompletableFuture.supplyAsync(() -> FlinkClient.cancel(cancelRequest), executorService); + + cancelFutureMap.put(application.getId(), cancelFuture); + + CompletableFutureUtils.runTimeout( + cancelFuture, + 10L, + TimeUnit.MINUTES, + cancelResponse -> { + applicationLog.setSuccess(true); + if (cancelResponse != null && cancelResponse.savePointDir() != null) { + String savePointDir = cancelResponse.savePointDir(); + log.info("savePoint path: {}", savePointDir); + SavePoint savePoint = new SavePoint(); + savePoint.setPath(savePointDir); + savePoint.setAppId(application.getId()); + savePoint.setLatest(true); + savePoint.setType(CheckPointType.SAVEPOINT.get()); + savePoint.setCreateTime(new Date()); + savePoint.setTriggerTime(triggerTime); + savePointService.save(savePoint); + } + if (isKubernetesApp(application)) { + k8SFlinkTrackMonitor.unWatching(toTrackId(application)); + } + }, + e -> { + if (e.getCause() instanceof CancellationException) { + updateToStopped(application); + } else { + log.error("stop flink job fail.", e); + application.setOptionState(OptionState.NONE.getValue()); + application.setState(FlinkAppState.FAILED.getValue()); + updateById(application); + + if (appParam.getSavePointed()) { + savePointService.expire(application.getId()); + } + + // re-tracking flink job on kubernetes and logging exception + if 
(isKubernetesApp(application)) { + TrackId id = toTrackId(application); + k8SFlinkTrackMonitor.unWatching(id); + k8SFlinkTrackMonitor.doWatching(id); + } else { + FlinkHttpWatcher.unWatching(application.getId()); + } + + String exception = Utils.stringifyException(e); + applicationLog.setException(exception); + applicationLog.setSuccess(false); + } + }) + .whenComplete( + (t, e) -> { + cancelFutureMap.remove(application.getId()); + applicationLogService.save(applicationLog); + }); + } + + /** + * Setup task is starting (for webUI "state" display) + * + * @param application + */ + @Override + public void starting(Application application) { + application.setState(FlinkAppState.STARTING.getValue()); + application.setOptionTime(new Date()); + updateById(application); + } + + @Override + @Transactional(rollbackFor = {Exception.class}) + public void start(Application appParam, boolean auto) throws Exception { + final Application application = getById(appParam.getId()); + Utils.notNull(application); + if (!application.isCanBeStart()) { + throw new ApiAlertException("[StreamPark] The application cannot be started repeatedly."); + } + + FlinkEnv flinkEnv = flinkEnvService.getByIdOrDefault(application.getVersionId()); + if (flinkEnv == null) { + throw new ApiAlertException("[StreamPark] can no found flink version"); + } + + // if manually started, clear the restart flag + if (!auto) { + application.setRestartCount(0); + } else { + if (!application.isNeedRestartOnFailed()) { + return; + } + appParam.setSavePointed(true); + application.setRestartCount(application.getRestartCount() + 1); + } + + starting(application); + application.setAllowNonRestored(appParam.getAllowNonRestored()); + + String appConf; + String flinkUserJar = null; + String jobId = new JobID().toHexString(); + ApplicationLog applicationLog = new ApplicationLog(); + applicationLog.setOptionName(Operation.START.getValue()); + applicationLog.setAppId(application.getId()); + applicationLog.setOptionTime(new 
Date()); + + // set the latest to Effective, (it will only become the current effective at this time) + applicationManageService.toEffective(application); + + ApplicationConfig applicationConfig = configService.getEffective(application.getId()); + ExecutionMode executionMode = ExecutionMode.of(application.getExecutionMode()); + ApiAlertException.throwIfNull( + executionMode, "ExecutionMode can't be null, start application failed."); + if (application.isCustomCodeJob()) { + if (application.isUploadJob()) { + appConf = + String.format( + "json://{\"%s\":\"%s\"}", + ConfigConst.KEY_FLINK_APPLICATION_MAIN_CLASS(), application.getMainClass()); + } else { + switch (application.getApplicationType()) { + case STREAMPARK_FLINK: + ConfigFileType fileType = ConfigFileType.of(applicationConfig.getFormat()); + if (fileType != null && !fileType.equals(ConfigFileType.UNKNOWN)) { + appConf = + String.format("%s://%s", fileType.getTypeName(), applicationConfig.getContent()); + } else { + throw new IllegalArgumentException( + "application' config type error,must be ( yaml| properties| hocon )"); + } + break; + case APACHE_FLINK: + appConf = + String.format( + "json://{\"%s\":\"%s\"}", + ConfigConst.KEY_FLINK_APPLICATION_MAIN_CLASS(), application.getMainClass()); + break; + default: + throw new IllegalArgumentException( + "[StreamPark] ApplicationType must be (StreamPark flink | Apache flink)... 
"); + } + } + + if (ExecutionMode.YARN_APPLICATION.equals(executionMode)) { + switch (application.getApplicationType()) { + case STREAMPARK_FLINK: + flinkUserJar = + String.format( + "%s/%s", application.getAppLib(), application.getModule().concat(".jar")); + break; + case APACHE_FLINK: + flinkUserJar = String.format("%s/%s", application.getAppHome(), application.getJar()); + if (!FsOperator.hdfs().exists(flinkUserJar)) { + Resource resource = + resourceService.findByResourceName(application.getTeamId(), application.getJar()); + if (resource != null && StringUtils.isNotBlank(resource.getFilePath())) { + flinkUserJar = + String.format( + "%s/%s", + application.getAppHome(), new File(resource.getFilePath()).getName()); + } + } + break; + default: + throw new IllegalArgumentException( + "[StreamPark] ApplicationType must be (StreamPark flink | Apache flink)... "); + } + } + } else if (application.isFlinkSqlJob()) { + FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), false); + Utils.notNull(flinkSql); + // 1) dist_userJar + String sqlDistJar = commonService.getSqlClientJar(flinkEnv); + // 2) appConfig + appConf = + applicationConfig == null + ? 
null + : String.format("yaml://%s", applicationConfig.getContent()); + // 3) client + if (ExecutionMode.YARN_APPLICATION.equals(executionMode)) { + String clientPath = Workspace.remote().APP_CLIENT(); + flinkUserJar = String.format("%s/%s", clientPath, sqlDistJar); + } + } else { + throw new UnsupportedOperationException("Unsupported..."); + } + + Map extraParameter = new HashMap<>(0); + if (application.isFlinkSqlJob()) { + FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), true); + // Get the sql of the replaced placeholder + String realSql = variableService.replaceVariable(application.getTeamId(), flinkSql.getSql()); + flinkSql.setSql(DeflaterUtils.zipString(realSql)); + extraParameter.put(ConfigConst.KEY_FLINK_SQL(null), flinkSql.getSql()); + } + + KubernetesSubmitParam kubernetesSubmitParam = + new KubernetesSubmitParam( + application.getClusterId(), + application.getK8sNamespace(), + application.getK8sRestExposedTypeEnum()); + + AppBuildPipeline buildPipeline = appBuildPipeService.getById(application.getId()); + + Utils.notNull(buildPipeline); + + BuildResult buildResult = buildPipeline.getBuildResult(); + if (ExecutionMode.YARN_APPLICATION.equals(executionMode)) { + buildResult = new ShadedBuildResponse(null, flinkUserJar, true); + } + + // Get the args after placeholder replacement + String applicationArgs = + variableService.replaceVariable(application.getTeamId(), application.getArgs()); + + String pyflinkFilePath = ""; + Resource resource = + resourceService.findByResourceName(application.getTeamId(), application.getJar()); + if (resource != null + && StringUtils.isNotBlank(resource.getFilePath()) + && resource.getFilePath().endsWith(ConfigConst.PYTHON_SUFFIX())) { + pyflinkFilePath = resource.getFilePath(); + } + + SubmitRequest submitRequest = + new SubmitRequest( + flinkEnv.getFlinkVersion(), + ExecutionMode.of(application.getExecutionMode()), + getProperties(application), + flinkEnv.getFlinkConf(), + 
DevelopmentMode.of(application.getJobType()), + application.getId(), + jobId, + application.getJobName(), + appConf, + application.getApplicationType(), + getSavePointed(appParam), + appParam.getRestoreMode() == null ? null : RestoreMode.of(appParam.getRestoreMode()), + applicationArgs, + pyflinkFilePath, + buildResult, + kubernetesSubmitParam, + extraParameter); + + CompletableFuture future = + CompletableFuture.supplyAsync(() -> FlinkClient.submit(submitRequest), executorService); + + startFutureMap.put(application.getId(), future); + + CompletableFutureUtils.runTimeout( + future, + 2L, + TimeUnit.MINUTES, + submitResponse -> { + if (submitResponse.flinkConfig() != null) { + String jmMemory = + submitResponse.flinkConfig().get(ConfigConst.KEY_FLINK_JM_PROCESS_MEMORY()); + if (jmMemory != null) { + application.setJmMemory(MemorySize.parse(jmMemory).getMebiBytes()); + } + String tmMemory = + submitResponse.flinkConfig().get(ConfigConst.KEY_FLINK_TM_PROCESS_MEMORY()); + if (tmMemory != null) { + application.setTmMemory(MemorySize.parse(tmMemory).getMebiBytes()); + } + } + application.setAppId(submitResponse.clusterId()); + if (StringUtils.isNoneEmpty(submitResponse.jobId())) { + application.setJobId(submitResponse.jobId()); + } + + if (StringUtils.isNoneEmpty(submitResponse.jobManagerUrl())) { + application.setJobManagerUrl(submitResponse.jobManagerUrl()); + applicationLog.setJobManagerUrl(submitResponse.jobManagerUrl()); + } + applicationLog.setYarnAppId(submitResponse.clusterId()); + application.setStartTime(new Date()); + application.setEndTime(null); + if (isKubernetesApp(application)) { + application.setRelease(ReleaseState.DONE.get()); + } + updateById(application); + + // if start completed, will be added task to tracking queue + if (isKubernetesApp(application)) { + k8SFlinkTrackMonitor.doWatching(toTrackId(application)); + } else { + FlinkHttpWatcher.setOptionState(appParam.getId(), OptionState.STARTING); + FlinkHttpWatcher.doWatching(application); + } + + 
applicationLog.setSuccess(true); + // set savepoint to expire + savePointService.expire(application.getId()); + }, + e -> { + if (e.getCause() instanceof CancellationException) { + updateToStopped(application); + } else { + String exception = Utils.stringifyException(e); + applicationLog.setException(exception); + applicationLog.setSuccess(false); + Application app = getById(appParam.getId()); + app.setState(FlinkAppState.FAILED.getValue()); + app.setOptionState(OptionState.NONE.getValue()); + updateById(app); + if (isKubernetesApp(app)) { + k8SFlinkTrackMonitor.unWatching(toTrackId(app)); + } else { + FlinkHttpWatcher.unWatching(appParam.getId()); + } + } + }) + .whenComplete( + (t, e) -> { + if (ExecutionMode.isKubernetesApplicationMode(application.getExecutionMode())) { + String domainName = settingService.getIngressModeDefault(); + if (StringUtils.isNotBlank(domainName)) { + try { + IngressController.configureIngress( + domainName, application.getClusterId(), application.getK8sNamespace()); + } catch (KubernetesClientException kubernetesClientException) { + log.info( + "Failed to create ingress, stack info:{}", + kubernetesClientException.getMessage()); + applicationLog.setException(e.getMessage()); + applicationLog.setSuccess(false); + applicationLogService.save(applicationLog); + application.setState(FlinkAppState.FAILED.getValue()); + application.setOptionState(OptionState.NONE.getValue()); + updateById(application); + return; + } + } + } + + applicationLogService.save(applicationLog); + startFutureMap.remove(application.getId()); + }); + } + + private Map getProperties(Application application) { + Map properties = new HashMap<>(application.getOptionMap()); + if (ExecutionMode.isRemoteMode(application.getExecutionModeEnum())) { + FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); + ApiAlertException.throwIfNull( + cluster, + String.format( + "The clusterId=%s can't be find, maybe the clusterId is wrong or " + + "the cluster 
has been deleted. Please contact the Admin.", + application.getFlinkClusterId())); + URI activeAddress = cluster.getRemoteURI(); + properties.put(RestOptions.ADDRESS.key(), activeAddress.getHost()); + properties.put(RestOptions.PORT.key(), activeAddress.getPort()); + } else if (ExecutionMode.isYarnMode(application.getExecutionModeEnum())) { + if (ExecutionMode.YARN_SESSION.equals(application.getExecutionModeEnum())) { + FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); + ApiAlertException.throwIfNull( + cluster, + String.format( + "The yarn session clusterId=%s cannot be find, maybe the clusterId is wrong or " + + "the cluster has been deleted. Please contact the Admin.", + application.getFlinkClusterId())); + properties.put(ConfigConst.KEY_YARN_APP_ID(), cluster.getClusterId()); + } else { + String yarnQueue = + (String) application.getHotParamsMap().get(ConfigConst.KEY_YARN_APP_QUEUE()); + String yarnLabelExpr = + (String) application.getHotParamsMap().get(ConfigConst.KEY_YARN_APP_NODE_LABEL()); + Optional.ofNullable(yarnQueue) + .ifPresent(yq -> properties.put(ConfigConst.KEY_YARN_APP_QUEUE(), yq)); + Optional.ofNullable(yarnLabelExpr) + .ifPresent(yLabel -> properties.put(ConfigConst.KEY_YARN_APP_NODE_LABEL(), yLabel)); + } + } else if (ExecutionMode.isKubernetesMode(application.getExecutionModeEnum())) { + properties.put(ConfigConst.KEY_K8S_IMAGE_PULL_POLICY(), "Always"); + } + + if (ExecutionMode.isKubernetesApplicationMode(application.getExecutionMode())) { + try { + HadoopUtils.yarnClient(); + properties.put(JobManagerOptions.ARCHIVE_DIR.key(), Workspace.ARCHIVES_FILE_PATH()); + } catch (Exception e) { + // skip + } + } + + if (application.getAllowNonRestored()) { + properties.put(SavepointConfigOptions.SAVEPOINT_IGNORE_UNCLAIMED_STATE.key(), true); + } + + Map dynamicProperties = + PropertiesUtils.extractDynamicPropertiesAsJava(application.getDynamicProperties()); + properties.putAll(dynamicProperties); + ResolveOrder 
resolveOrder = ResolveOrder.of(application.getResolveOrder()); + if (resolveOrder != null) { + properties.put(CoreOptions.CLASSLOADER_RESOLVE_ORDER.key(), resolveOrder.getName()); + } + + return properties; + } + + private void updateToStopped(Application app) { + Application application = getById(app); + application.setOptionState(OptionState.NONE.getValue()); + application.setState(FlinkAppState.CANCELED.getValue()); + application.setOptionTime(new Date()); + updateById(application); + savePointService.expire(application.getId()); + // re-tracking flink job on kubernetes and logging exception + if (isKubernetesApp(application)) { + TrackId id = toTrackId(application); + k8SFlinkTrackMonitor.unWatching(id); + k8SFlinkTrackMonitor.doWatching(id); + } else { + FlinkHttpWatcher.unWatching(application.getId()); + } + } + + private String getSavePointed(Application appParam) { + if (appParam.getSavePointed()) { + if (appParam.getSavePoint() == null) { + SavePoint savePoint = savePointService.getLatest(appParam.getId()); + if (savePoint != null) { + return savePoint.getPath(); + } + } else { + return appParam.getSavePoint(); + } + } + return null; + } +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java new file mode 100644 index 0000000000..8ded940f7f --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java @@ -0,0 +1,508 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.streampark.console.core.service.application.impl; + +import org.apache.streampark.common.conf.Workspace; +import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.fs.LfsOperator; +import org.apache.streampark.common.util.Utils; +import org.apache.streampark.common.util.YarnUtils; +import org.apache.streampark.console.base.exception.ApiAlertException; +import org.apache.streampark.console.base.exception.ApiDetailException; +import org.apache.streampark.console.base.exception.ApplicationException; +import org.apache.streampark.console.base.util.WebUtils; +import org.apache.streampark.console.core.entity.Application; +import org.apache.streampark.console.core.entity.FlinkCluster; +import org.apache.streampark.console.core.entity.FlinkEnv; +import org.apache.streampark.console.core.entity.Project; +import org.apache.streampark.console.core.enums.AppExistsState; +import org.apache.streampark.console.core.enums.FlinkAppState; +import org.apache.streampark.console.core.mapper.ApplicationMapper; +import org.apache.streampark.console.core.metrics.flink.JobsOverview; +import org.apache.streampark.console.core.runner.EnvInitializer; +import org.apache.streampark.console.core.service.FlinkClusterService; +import org.apache.streampark.console.core.service.FlinkEnvService; +import org.apache.streampark.console.core.service.SavePointService; 
+import org.apache.streampark.console.core.service.application.ApplicationInfoService; +import org.apache.streampark.console.core.task.FlinkClusterWatcher; +import org.apache.streampark.console.core.task.FlinkHttpWatcher; +import org.apache.streampark.flink.core.conf.ParameterCli; +import org.apache.streampark.flink.kubernetes.FlinkK8sWatcher; +import org.apache.streampark.flink.kubernetes.helper.KubernetesDeploymentHelper; +import org.apache.streampark.flink.kubernetes.model.FlinkMetricCV; + +import org.apache.commons.lang3.StringUtils; + +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.io.File; +import java.io.IOException; +import java.io.Serializable; +import java.net.URI; +import java.util.Arrays; +import java.util.Base64; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.jar.Manifest; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import static org.apache.streampark.common.enums.StorageType.LFS; +import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.Bridge.toTrackId; +import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.isKubernetesApp; + +@Slf4j +@Service +public class ApplicationInfoServiceImpl extends ServiceImpl + implements ApplicationInfoService { + + private static final int DEFAULT_HISTORY_RECORD_LIMIT = 25; + + private static final int DEFAULT_HISTORY_POD_TMPL_RECORD_LIMIT = 5; + + private static final Pattern JOB_NAME_PATTERN = + Pattern.compile("^[.\\x{4e00}-\\x{9fa5}A-Za-z\\d_\\-\\s]+$"); + + private static final Pattern SINGLE_SPACE_PATTERN = 
Pattern.compile("^\\S+(\\s\\S+)*$"); + + @Autowired private FlinkEnvService flinkEnvService; + + @Autowired private SavePointService savePointService; + + @Autowired private EnvInitializer envInitializer; + + @Autowired private FlinkK8sWatcher k8SFlinkTrackMonitor; + + @Autowired private FlinkClusterService flinkClusterService; + + @Autowired private FlinkClusterWatcher flinkClusterWatcher; + + @Override + public Map dashboard(Long teamId) { + JobsOverview.Task overview = new JobsOverview.Task(); + Integer totalJmMemory = 0; + Integer totalTmMemory = 0; + Integer totalTm = 0; + Integer totalSlot = 0; + Integer availableSlot = 0; + Integer runningJob = 0; + + // stat metrics from other than kubernetes mode + for (Application app : FlinkHttpWatcher.getWatchingApps()) { + if (!teamId.equals(app.getTeamId())) { + continue; + } + if (app.getJmMemory() != null) { + totalJmMemory += app.getJmMemory(); + } + if (app.getTmMemory() != null) { + totalTmMemory += app.getTmMemory() * (app.getTotalTM() == null ? 
1 : app.getTotalTM()); + } + if (app.getTotalTM() != null) { + totalTm += app.getTotalTM(); + } + if (app.getTotalSlot() != null) { + totalSlot += app.getTotalSlot(); + } + if (app.getAvailableSlot() != null) { + availableSlot += app.getAvailableSlot(); + } + if (app.getState() == FlinkAppState.RUNNING.getValue()) { + runningJob++; + } + JobsOverview.Task task = app.getOverview(); + if (task != null) { + overview.setTotal(overview.getTotal() + task.getTotal()); + overview.setCreated(overview.getCreated() + task.getCreated()); + overview.setScheduled(overview.getScheduled() + task.getScheduled()); + overview.setDeploying(overview.getDeploying() + task.getDeploying()); + overview.setRunning(overview.getRunning() + task.getRunning()); + overview.setFinished(overview.getFinished() + task.getFinished()); + overview.setCanceling(overview.getCanceling() + task.getCanceling()); + overview.setCanceled(overview.getCanceled() + task.getCanceled()); + overview.setFailed(overview.getFailed() + task.getFailed()); + overview.setReconciling(overview.getReconciling() + task.getReconciling()); + } + } + + // merge metrics from flink kubernetes cluster + FlinkMetricCV k8sMetric = k8SFlinkTrackMonitor.getAccGroupMetrics(teamId.toString()); + if (k8sMetric != null) { + totalJmMemory += k8sMetric.totalJmMemory(); + totalTmMemory += k8sMetric.totalTmMemory(); + totalTm += k8sMetric.totalTm(); + totalSlot += k8sMetric.totalSlot(); + availableSlot += k8sMetric.availableSlot(); + runningJob += k8sMetric.runningJob(); + overview.setTotal(overview.getTotal() + k8sMetric.totalJob()); + overview.setRunning(overview.getRunning() + k8sMetric.runningJob()); + overview.setFinished(overview.getFinished() + k8sMetric.finishedJob()); + overview.setCanceled(overview.getCanceled() + k8sMetric.cancelledJob()); + overview.setFailed(overview.getFailed() + k8sMetric.failedJob()); + } + + // result json + Map map = new HashMap<>(8); + map.put("task", overview); + map.put("jmMemory", totalJmMemory); + 
map.put("tmMemory", totalTmMemory); + map.put("totalTM", totalTm); + map.put("availableSlot", availableSlot); + map.put("totalSlot", totalSlot); + map.put("runningJob", runningJob); + + return map; + } + + @Override + public boolean checkEnv(Application appParam) throws ApplicationException { + Application application = getById(appParam.getId()); + try { + FlinkEnv flinkEnv; + if (application.getVersionId() != null) { + flinkEnv = flinkEnvService.getByIdOrDefault(application.getVersionId()); + } else { + flinkEnv = flinkEnvService.getDefault(); + } + if (flinkEnv == null) { + return false; + } + envInitializer.checkFlinkEnv(application.getStorageType(), flinkEnv); + envInitializer.storageInitialize(application.getStorageType()); + + if (ExecutionMode.YARN_SESSION.equals(application.getExecutionModeEnum()) + || ExecutionMode.REMOTE.equals(application.getExecutionModeEnum())) { + FlinkCluster flinkCluster = flinkClusterService.getById(application.getFlinkClusterId()); + boolean conned = flinkClusterWatcher.verifyClusterConnection(flinkCluster); + if (!conned) { + throw new ApiAlertException("the target cluster is unavailable, please check!"); + } + } + return true; + } catch (Exception e) { + log.error(Utils.stringifyException(e)); + throw new ApiDetailException(e); + } + } + + @Override + public boolean checkAlter(Application application) { + Long appId = application.getId(); + FlinkAppState state = FlinkAppState.of(application.getState()); + if (!FlinkAppState.CANCELED.equals(state)) { + return false; + } + long cancelUserId = FlinkHttpWatcher.getCanceledJobUserId(appId); + long appUserId = application.getUserId(); + return cancelUserId != -1 && cancelUserId != appUserId; + } + + @Override + public boolean existsByTeamId(Long teamId) { + return baseMapper.exists( + new LambdaQueryWrapper().eq(Application::getTeamId, teamId)); + } + + @Override + public boolean existsByUserId(Long userId) { + return baseMapper.exists( + new 
LambdaQueryWrapper().eq(Application::getUserId, userId)); + } + + @Override + public boolean existsRunningByClusterId(Long clusterId) { + return baseMapper.existsRunningJobByClusterId(clusterId) + || FlinkHttpWatcher.getWatchingApps().stream() + .anyMatch( + application -> + clusterId.equals(application.getFlinkClusterId()) + && FlinkAppState.RUNNING.equals(application.getFlinkAppStateEnum())); + } + + @Override + public boolean existsByClusterId(Long clusterId) { + return baseMapper.exists( + new LambdaQueryWrapper().eq(Application::getFlinkClusterId, clusterId)); + } + + @Override + public Integer countByClusterId(Long clusterId) { + return baseMapper + .selectCount( + new LambdaQueryWrapper().eq(Application::getFlinkClusterId, clusterId)) + .intValue(); + } + + @Override + public Integer countAffectedByClusterId(Long clusterId, String dbType) { + return baseMapper.countAffectedByClusterId(clusterId, dbType); + } + + @Override + public boolean existsByFlinkEnvId(Long flinkEnvId) { + return baseMapper.exists( + new LambdaQueryWrapper().eq(Application::getVersionId, flinkEnvId)); + } + + @Override + public List getRecentK8sNamespace() { + return baseMapper.getRecentK8sNamespace(DEFAULT_HISTORY_RECORD_LIMIT); + } + + @Override + public List getRecentK8sClusterId(Integer executionMode) { + return baseMapper.getRecentK8sClusterId(executionMode, DEFAULT_HISTORY_RECORD_LIMIT); + } + + @Override + public List getRecentFlinkBaseImage() { + return baseMapper.getRecentFlinkBaseImage(DEFAULT_HISTORY_RECORD_LIMIT); + } + + @Override + public List getRecentK8sPodTemplate() { + return baseMapper.getRecentK8sPodTemplate(DEFAULT_HISTORY_POD_TMPL_RECORD_LIMIT); + } + + @Override + public List getRecentK8sJmPodTemplate() { + return baseMapper.getRecentK8sJmPodTemplate(DEFAULT_HISTORY_POD_TMPL_RECORD_LIMIT); + } + + @Override + public List getRecentK8sTmPodTemplate() { + return baseMapper.getRecentK8sTmPodTemplate(DEFAULT_HISTORY_POD_TMPL_RECORD_LIMIT); + } + + @Override + public 
List historyUploadJars() { + return Arrays.stream(LfsOperator.listDir(Workspace.of(LFS).APP_UPLOADS())) + .filter(File::isFile) + .sorted(Comparator.comparingLong(File::lastModified).reversed()) + .map(File::getName) + .filter(fn -> fn.endsWith(".jar")) + .limit(DEFAULT_HISTORY_RECORD_LIMIT) + .collect(Collectors.toList()); + } + + @Override + public String k8sStartLog(Long id, Integer offset, Integer limit) throws Exception { + Application application = getById(id); + ApiAlertException.throwIfNull( + application, String.format("The application id=%s can't be found.", id)); + if (ExecutionMode.isKubernetesMode(application.getExecutionModeEnum())) { + CompletableFuture future = + CompletableFuture.supplyAsync( + () -> + KubernetesDeploymentHelper.watchDeploymentLog( + application.getK8sNamespace(), + application.getJobName(), + application.getJobId())); + + return future + .exceptionally( + e -> { + String errorLog = + String.format( + "%s/%s_err.log", + WebUtils.getAppTempDir().getAbsolutePath(), application.getJobId()); + File file = new File(errorLog); + if (file.exists() && file.isFile()) { + return file.getAbsolutePath(); + } + return null; + }) + .thenApply( + path -> { + if (!future.isDone()) { + future.cancel(true); + } + if (org.apache.streampark.common.util.FileUtils.exists(path)) { + return org.apache.streampark.common.util.FileUtils.tailOf(path, offset, limit); + } + return null; + }) + .toCompletableFuture() + .get(5, TimeUnit.SECONDS); + } else { + throw new ApiAlertException( + "Job executionMode must be kubernetes-session|kubernetes-application."); + } + } + + @Override + public String getYarnName(Application appParam) { + String[] args = new String[2]; + args[0] = "--name"; + args[1] = appParam.getConfig(); + return ParameterCli.read(args); + } + + /** + * Check if the current jobName and other key identifiers already exist in the database and + * yarn/k8s. + * + * @param appParam The application to check for existence. 
+ * @return The state of the application's existence. + */ + @Override + public AppExistsState checkExists(Application appParam) { + + if (!checkJobName(appParam.getJobName())) { + return AppExistsState.INVALID; + } + + boolean existsByJobName = this.existsByJobName(appParam.getJobName()); + + if (appParam.getId() != null) { + Application app = getById(appParam.getId()); + if (app.getJobName().equals(appParam.getJobName())) { + return AppExistsState.NO; + } + + if (existsByJobName) { + return AppExistsState.IN_DB; + } + + // has stopped status + if (FlinkAppState.isEndState(app.getState())) { + // check whether jobName exists on yarn + if (ExecutionMode.isYarnMode(appParam.getExecutionMode()) + && YarnUtils.isContains(appParam.getJobName())) { + return AppExistsState.IN_YARN; + } + // check whether clusterId, namespace, jobId on kubernetes + else if (ExecutionMode.isKubernetesMode(appParam.getExecutionMode()) + && k8SFlinkTrackMonitor.checkIsInRemoteCluster(toTrackId(appParam))) { + return AppExistsState.IN_KUBERNETES; + } + } + } else { + if (existsByJobName) { + return AppExistsState.IN_DB; + } + + // check whether jobName exists on yarn + if (ExecutionMode.isYarnMode(appParam.getExecutionMode()) + && YarnUtils.isContains(appParam.getJobName())) { + return AppExistsState.IN_YARN; + } + // check whether clusterId, namespace, jobId on kubernetes + else if (ExecutionMode.isKubernetesMode(appParam.getExecutionMode()) + && k8SFlinkTrackMonitor.checkIsInRemoteCluster(toTrackId(appParam))) { + return AppExistsState.IN_KUBERNETES; + } + } + return AppExistsState.NO; + } + + private boolean existsByJobName(String jobName) { + return baseMapper.exists( + new LambdaQueryWrapper().eq(Application::getJobName, jobName)); + } + + @Override + public String readConf(Application appParam) throws IOException { + File file = new File(appParam.getConfig()); + String conf = org.apache.streampark.common.util.FileUtils.readFile(file); + return 
Base64.getEncoder().encodeToString(conf.getBytes()); + } + + @Override + public String getMain(Application application) { + File jarFile; + if (application.getProjectId() == null) { + jarFile = new File(application.getJar()); + } else { + Project project = new Project(); + project.setId(application.getProjectId()); + String modulePath = + project.getDistHome().getAbsolutePath().concat("/").concat(application.getModule()); + jarFile = new File(modulePath, application.getJar()); + } + Manifest manifest = Utils.getJarManifest(jarFile); + return manifest.getMainAttributes().getValue("Main-Class"); + } + + @Override + public boolean mapping(Application appParam) { + boolean mapping = this.baseMapper.mapping(appParam); + Application application = getById(appParam.getId()); + if (isKubernetesApp(application)) { + k8SFlinkTrackMonitor.doWatching(toTrackId(application)); + } else { + FlinkHttpWatcher.doWatching(application); + } + return mapping; + } + + @Override + public String checkSavepointPath(Application appParam) throws Exception { + String savepointPath = appParam.getSavePoint(); + if (StringUtils.isBlank(savepointPath)) { + savepointPath = savePointService.getSavePointPath(appParam); + } + + if (StringUtils.isNotBlank(savepointPath)) { + final URI uri = URI.create(savepointPath); + final String scheme = uri.getScheme(); + final String pathPart = uri.getPath(); + String error = null; + if (scheme == null) { + error = + "This state.savepoints.dir value " + + savepointPath + + " scheme (hdfs://, file://, etc) of is null. Please specify the file system scheme explicitly in the URI."; + } else if (pathPart == null) { + error = + "This state.savepoints.dir value " + + savepointPath + + " path part to store the checkpoint data in is null. 
Please specify a directory path for the checkpoint data."; + } else if (pathPart.isEmpty() || "/".equals(pathPart)) { + error = + "This state.savepoints.dir value " + + savepointPath + + " Cannot use the root directory for checkpoints."; + } + return error; + } else { + return "When custom savepoint is not set, state.savepoints.dir needs to be set in properties or flink-conf.yaml of application"; + } + } + + @Override + public void persistMetrics(Application appParam) { + this.baseMapper.persistMetrics(appParam); + } + + private Boolean checkJobName(String jobName) { + if (!StringUtils.isEmpty(jobName.trim())) { + return JOB_NAME_PATTERN.matcher(jobName).matches() + && SINGLE_SPACE_PATTERN.matcher(jobName).matches(); + } + return false; + } +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java new file mode 100644 index 0000000000..222b8967e9 --- /dev/null +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java @@ -0,0 +1,753 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.streampark.console.core.service.application.impl; + +import org.apache.streampark.common.conf.Workspace; +import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.StorageType; +import org.apache.streampark.common.fs.HdfsOperator; +import org.apache.streampark.common.util.DeflaterUtils; +import org.apache.streampark.console.base.domain.RestRequest; +import org.apache.streampark.console.base.exception.ApiAlertException; +import org.apache.streampark.console.base.mybatis.pager.MybatisPager; +import org.apache.streampark.console.base.util.CommonUtils; +import org.apache.streampark.console.base.util.ObjectUtils; +import org.apache.streampark.console.base.util.WebUtils; +import org.apache.streampark.console.core.entity.Application; +import org.apache.streampark.console.core.entity.ApplicationConfig; +import org.apache.streampark.console.core.entity.FlinkSql; +import org.apache.streampark.console.core.entity.Resource; +import org.apache.streampark.console.core.enums.CandidateType; +import org.apache.streampark.console.core.enums.ChangeTypeEnum; +import org.apache.streampark.console.core.enums.FlinkAppState; +import org.apache.streampark.console.core.enums.OptionState; +import org.apache.streampark.console.core.enums.ReleaseState; +import org.apache.streampark.console.core.mapper.ApplicationMapper; +import org.apache.streampark.console.core.service.AppBuildPipeService; +import org.apache.streampark.console.core.service.ApplicationBackUpService; +import 
org.apache.streampark.console.core.service.ApplicationConfigService; +import org.apache.streampark.console.core.service.ApplicationLogService; +import org.apache.streampark.console.core.service.CommonService; +import org.apache.streampark.console.core.service.EffectiveService; +import org.apache.streampark.console.core.service.FlinkSqlService; +import org.apache.streampark.console.core.service.ProjectService; +import org.apache.streampark.console.core.service.ResourceService; +import org.apache.streampark.console.core.service.SavePointService; +import org.apache.streampark.console.core.service.SettingService; +import org.apache.streampark.console.core.service.YarnQueueService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; +import org.apache.streampark.console.core.task.FlinkHttpWatcher; +import org.apache.streampark.flink.kubernetes.FlinkK8sWatcher; + +import org.apache.commons.lang3.StringUtils; + +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.core.toolkit.Wrappers; +import com.baomidou.mybatisplus.core.toolkit.support.SFunction; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import com.google.common.annotations.VisibleForTesting; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.annotation.Transactional; + +import javax.annotation.Nonnull; +import javax.annotation.PostConstruct; + +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Date; +import 
java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.Bridge.toTrackId; +import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.isKubernetesApp; + +@Slf4j +@Service +@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class) +public class ApplicationManageServiceImpl extends ServiceImpl + implements ApplicationManageService { + + private static final String ERROR_APP_QUEUE_HINT = + "Queue label '%s' isn't available for teamId '%d', please add it into the team first."; + + @Autowired private ProjectService projectService; + + @Autowired private ApplicationBackUpService backUpService; + + @Autowired private ApplicationConfigService configService; + + @Autowired private ApplicationLogService applicationLogService; + + @Autowired private FlinkSqlService flinkSqlService; + + @Autowired private SavePointService savePointService; + + @Autowired private EffectiveService effectiveService; + + @Autowired private SettingService settingService; + + @Autowired private CommonService commonService; + + @Autowired private FlinkK8sWatcher k8SFlinkTrackMonitor; + + @Autowired private AppBuildPipeService appBuildPipeService; + + @Autowired private YarnQueueService yarnQueueService; + + @Autowired private ResourceService resourceService; + + @PostConstruct + public void resetOptionState() { + this.baseMapper.resetOptionState(); + } + + @Override + public void toEffective(Application application) { + // set latest to Effective + ApplicationConfig config = configService.getLatest(application.getId()); + if (config != null) { + this.configService.toEffective(application.getId(), config.getId()); + } + if (application.isFlinkSqlJob()) { + FlinkSql flinkSql = flinkSqlService.getCandidate(application.getId(), null); + if (flinkSql != null) { + flinkSqlService.toEffective(application.getId(), flinkSql.getId()); + // clean 
candidate + flinkSqlService.cleanCandidate(flinkSql.getId()); + } + } + } + + @Override + @Transactional(rollbackFor = {Exception.class}) + public Boolean delete(Application paramApp) { + + Application application = getById(paramApp.getId()); + + // 1) remove flink sql + flinkSqlService.removeApp(application.getId()); + + // 2) remove log + applicationLogService.removeApp(application.getId()); + + // 3) remove config + configService.removeApp(application.getId()); + + // 4) remove effective + effectiveService.removeApp(application.getId()); + + // remove related hdfs + // 5) remove backup + backUpService.removeApp(application); + + // 6) remove savepoint + savePointService.removeApp(application); + + // 7) remove BuildPipeline + appBuildPipeService.removeApp(application.getId()); + + // 8) remove app + removeApp(application); + + if (isKubernetesApp(application)) { + k8SFlinkTrackMonitor.unWatching(toTrackId(application)); + } else { + FlinkHttpWatcher.unWatching(paramApp.getId()); + } + return true; + } + + private void removeApp(Application application) { + Long appId = application.getId(); + removeById(appId); + try { + application + .getFsOperator() + .delete(application.getWorkspace().APP_WORKSPACE().concat("/").concat(appId.toString())); + // try to delete yarn-application, and leave no trouble. 
+ String path = + Workspace.of(StorageType.HDFS).APP_WORKSPACE().concat("/").concat(appId.toString()); + if (HdfsOperator.exists(path)) { + HdfsOperator.delete(path); + } + } catch (Exception e) { + // skip + } + } + + @Override + public IPage page(Application appParam, RestRequest request) { + if (appParam.getTeamId() == null) { + return null; + } + Page page = new MybatisPager().getDefaultPage(request); + if (CommonUtils.notEmpty(appParam.getStateArray())) { + if (Arrays.stream(appParam.getStateArray()) + .anyMatch(x -> x == FlinkAppState.FINISHED.getValue())) { + Integer[] newArray = + CommonUtils.arrayInsertIndex( + appParam.getStateArray(), + appParam.getStateArray().length, + FlinkAppState.POS_TERMINATED.getValue()); + appParam.setStateArray(newArray); + } + } + this.baseMapper.page(page, appParam); + List records = page.getRecords(); + long now = System.currentTimeMillis(); + List newRecords = + records.stream() + .peek( + record -> { + // status of flink job on kubernetes mode had been automatically persisted to db + // in time. + if (isKubernetesApp(record)) { + // set duration + String restUrl = k8SFlinkTrackMonitor.getRemoteRestUrl(toTrackId(record)); + record.setFlinkRestUrl(restUrl); + if (record.getTracking() == 1 + && record.getStartTime() != null + && record.getStartTime().getTime() > 0) { + record.setDuration(now - record.getStartTime().getTime()); + } + } + }) + .collect(Collectors.toList()); + page.setRecords(newRecords); + return page; + } + + @Override + public void changeOwnership(Long userId, Long targetUserId) { + LambdaUpdateWrapper updateWrapper = + new LambdaUpdateWrapper() + .eq(Application::getUserId, userId) + .set(Application::getUserId, targetUserId); + this.baseMapper.update(null, updateWrapper); + } + + @SneakyThrows + @Override + @Transactional(rollbackFor = {Exception.class}) + public boolean create(Application appParam) { + ApiAlertException.throwIfNull( + appParam.getTeamId(), "The teamId can't be null. 
Create application failed."); + appParam.setUserId(commonService.getUserId()); + appParam.setState(FlinkAppState.ADDED.getValue()); + appParam.setRelease(ReleaseState.NEED_RELEASE.get()); + appParam.setOptionState(OptionState.NONE.getValue()); + appParam.setCreateTime(new Date()); + appParam.setDefaultModeIngress(settingService.getIngressModeDefault()); + + boolean success = validateQueueIfNeeded(appParam); + ApiAlertException.throwIfFalse( + success, + String.format(ERROR_APP_QUEUE_HINT, appParam.getYarnQueue(), appParam.getTeamId())); + + appParam.doSetHotParams(); + if (appParam.isUploadJob()) { + String jarPath = + String.format( + "%s/%d/%s", Workspace.local().APP_UPLOADS(), appParam.getTeamId(), appParam.getJar()); + if (!new File(jarPath).exists()) { + Resource resource = + resourceService.findByResourceName(appParam.getTeamId(), appParam.getJar()); + if (resource != null && StringUtils.isNotBlank(resource.getFilePath())) { + jarPath = resource.getFilePath(); + } + } + appParam.setJarCheckSum(org.apache.commons.io.FileUtils.checksumCRC32(new File(jarPath))); + } + + if (save(appParam)) { + if (appParam.isFlinkSqlJob()) { + FlinkSql flinkSql = new FlinkSql(appParam); + flinkSqlService.create(flinkSql); + } + if (appParam.getConfig() != null) { + configService.create(appParam, true); + } + return true; + } else { + throw new ApiAlertException("create application failed"); + } + } + + private boolean existsByJobName(String jobName) { + return baseMapper.exists( + new LambdaQueryWrapper().eq(Application::getJobName, jobName)); + } + + @SuppressWarnings("checkstyle:WhitespaceAround") + @Override + @SneakyThrows + @Transactional(rollbackFor = {Exception.class}) + public Long copy(Application appParam) { + boolean existsByJobName = this.existsByJobName(appParam.getJobName()); + ApiAlertException.throwIfFalse( + !existsByJobName, + "[StreamPark] Application names can't be repeated, copy application failed."); + + Application oldApp = getById(appParam.getId()); + 
Application newApp = new Application(); + String jobName = appParam.getJobName(); + + newApp.setJobName(jobName); + newApp.setClusterId( + ExecutionMode.isSessionMode(oldApp.getExecutionModeEnum()) + ? oldApp.getClusterId() + : jobName); + newApp.setArgs(appParam.getArgs() != null ? appParam.getArgs() : oldApp.getArgs()); + newApp.setVersionId(oldApp.getVersionId()); + + newApp.setFlinkClusterId(oldApp.getFlinkClusterId()); + newApp.setRestartSize(oldApp.getRestartSize()); + newApp.setJobType(oldApp.getJobType()); + newApp.setOptions(oldApp.getOptions()); + newApp.setDynamicProperties(oldApp.getDynamicProperties()); + newApp.setResolveOrder(oldApp.getResolveOrder()); + newApp.setExecutionMode(oldApp.getExecutionMode()); + newApp.setFlinkImage(oldApp.getFlinkImage()); + newApp.setK8sNamespace(oldApp.getK8sNamespace()); + newApp.setK8sRestExposedType(oldApp.getK8sRestExposedType()); + newApp.setK8sPodTemplate(oldApp.getK8sPodTemplate()); + newApp.setK8sJmPodTemplate(oldApp.getK8sJmPodTemplate()); + newApp.setK8sTmPodTemplate(oldApp.getK8sTmPodTemplate()); + newApp.setK8sHadoopIntegration(oldApp.getK8sHadoopIntegration()); + newApp.setDescription(oldApp.getDescription()); + newApp.setAlertId(oldApp.getAlertId()); + newApp.setCpFailureAction(oldApp.getCpFailureAction()); + newApp.setCpFailureRateInterval(oldApp.getCpFailureRateInterval()); + newApp.setCpMaxFailureInterval(oldApp.getCpMaxFailureInterval()); + newApp.setMainClass(oldApp.getMainClass()); + newApp.setAppType(oldApp.getAppType()); + newApp.setResourceFrom(oldApp.getResourceFrom()); + newApp.setProjectId(oldApp.getProjectId()); + newApp.setModule(oldApp.getModule()); + newApp.setUserId(commonService.getUserId()); + newApp.setState(FlinkAppState.ADDED.getValue()); + newApp.setRelease(ReleaseState.NEED_RELEASE.get()); + newApp.setOptionState(OptionState.NONE.getValue()); + newApp.setCreateTime(new Date()); + newApp.setHotParams(oldApp.getHotParams()); + + newApp.setJar(oldApp.getJar()); + 
newApp.setJarCheckSum(oldApp.getJarCheckSum()); + newApp.setTags(oldApp.getTags()); + newApp.setTeamId(oldApp.getTeamId()); + + boolean saved = save(newApp); + if (saved) { + if (newApp.isFlinkSqlJob()) { + FlinkSql copyFlinkSql = flinkSqlService.getLatestFlinkSql(appParam.getId(), true); + newApp.setFlinkSql(copyFlinkSql.getSql()); + newApp.setTeamResource(copyFlinkSql.getTeamResource()); + newApp.setDependency(copyFlinkSql.getDependency()); + FlinkSql flinkSql = new FlinkSql(newApp); + flinkSqlService.create(flinkSql); + } + ApplicationConfig copyConfig = configService.getEffective(appParam.getId()); + if (copyConfig != null) { + ApplicationConfig config = new ApplicationConfig(); + config.setAppId(newApp.getId()); + config.setFormat(copyConfig.getFormat()); + config.setContent(copyConfig.getContent()); + config.setCreateTime(new Date()); + config.setVersion(1); + configService.save(config); + configService.setLatestOrEffective(true, config.getId(), newApp.getId()); + } + return newApp.getId(); + } else { + throw new ApiAlertException( + "create application from copy failed, copy source app: " + oldApp.getJobName()); + } + } + + @Override + @Transactional(rollbackFor = {Exception.class}) + public boolean update(Application appParam) { + Application application = getById(appParam.getId()); + + boolean success = validateQueueIfNeeded(application, appParam); + ApiAlertException.throwIfFalse( + success, + String.format(ERROR_APP_QUEUE_HINT, appParam.getYarnQueue(), appParam.getTeamId())); + + application.setRelease(ReleaseState.NEED_RELEASE.get()); + + // 1) jar job jar file changed + if (application.isUploadJob()) { + if (!Objects.equals(application.getJar(), appParam.getJar())) { + application.setBuild(true); + } else { + File jarFile = new File(WebUtils.getAppTempDir(), appParam.getJar()); + if (jarFile.exists()) { + try { + long checkSum = org.apache.commons.io.FileUtils.checksumCRC32(jarFile); + if (!Objects.equals(checkSum, application.getJarCheckSum())) { + 
application.setBuild(true); + } + } catch (IOException e) { + log.error("Error in checksumCRC32 for {}.", jarFile); + throw new RuntimeException(e); + } + } + } + } + + // 2) k8s podTemplate changed. + if (application.getBuild() && ExecutionMode.isKubernetesMode(appParam.getExecutionMode())) { + if (ObjectUtils.trimNoEquals( + application.getK8sRestExposedType(), appParam.getK8sRestExposedType()) + || ObjectUtils.trimNoEquals( + application.getK8sJmPodTemplate(), appParam.getK8sJmPodTemplate()) + || ObjectUtils.trimNoEquals( + application.getK8sTmPodTemplate(), appParam.getK8sTmPodTemplate()) + || ObjectUtils.trimNoEquals( + application.getK8sPodTemplates(), appParam.getK8sPodTemplates()) + || ObjectUtils.trimNoEquals( + application.getK8sHadoopIntegration(), appParam.getK8sHadoopIntegration()) + || ObjectUtils.trimNoEquals(application.getFlinkImage(), appParam.getFlinkImage())) { + application.setBuild(true); + } + } + + // 3) flink version changed + if (!application.getBuild() + && !Objects.equals(application.getVersionId(), appParam.getVersionId())) { + application.setBuild(true); + } + + // 4) yarn application mode change + if (!application.getBuild()) { + if (!application.getExecutionMode().equals(appParam.getExecutionMode())) { + if (appParam.getExecutionModeEnum().equals(ExecutionMode.YARN_APPLICATION) + || application.getExecutionModeEnum().equals(ExecutionMode.YARN_APPLICATION)) { + application.setBuild(true); + } + } + } + + appParam.setJobType(application.getJobType()); + // changes to the following parameters need to be re-release to take effect + application.setJobName(appParam.getJobName()); + application.setVersionId(appParam.getVersionId()); + application.setArgs(appParam.getArgs()); + application.setOptions(appParam.getOptions()); + application.setDynamicProperties(appParam.getDynamicProperties()); + application.setResolveOrder(appParam.getResolveOrder()); + application.setExecutionMode(appParam.getExecutionMode()); + 
application.setClusterId(appParam.getClusterId()); + application.setFlinkImage(appParam.getFlinkImage()); + application.setK8sNamespace(appParam.getK8sNamespace()); + application.updateHotParams(appParam); + application.setK8sRestExposedType(appParam.getK8sRestExposedType()); + application.setK8sPodTemplate(appParam.getK8sPodTemplate()); + application.setK8sJmPodTemplate(appParam.getK8sJmPodTemplate()); + application.setK8sTmPodTemplate(appParam.getK8sTmPodTemplate()); + application.setK8sHadoopIntegration(appParam.getK8sHadoopIntegration()); + + // changes to the following parameters do not affect running tasks + application.setModifyTime(new Date()); + application.setDescription(appParam.getDescription()); + application.setAlertId(appParam.getAlertId()); + application.setRestartSize(appParam.getRestartSize()); + application.setCpFailureAction(appParam.getCpFailureAction()); + application.setCpFailureRateInterval(appParam.getCpFailureRateInterval()); + application.setCpMaxFailureInterval(appParam.getCpMaxFailureInterval()); + application.setTags(appParam.getTags()); + + switch (appParam.getExecutionModeEnum()) { + case YARN_APPLICATION: + case YARN_PER_JOB: + case KUBERNETES_NATIVE_APPLICATION: + application.setFlinkClusterId(null); + break; + case REMOTE: + case YARN_SESSION: + case KUBERNETES_NATIVE_SESSION: + application.setFlinkClusterId(appParam.getFlinkClusterId()); + break; + default: + break; + } + + // Flink Sql job... + if (application.isFlinkSqlJob()) { + updateFlinkSqlJob(application, appParam); + return true; + } + + if (application.isStreamParkJob()) { + configService.update(appParam, application.isRunning()); + } else { + application.setJar(appParam.getJar()); + application.setMainClass(appParam.getMainClass()); + } + this.updateById(application); + return true; + } + + /** + * update FlinkSql type jobs, there are 3 aspects to consider
+ * 1. Flink SQL has changed
+ * 2. dependency has changed
+ * 3. parameter has changed
+ * + * @param application + * @param appParam + */ + private void updateFlinkSqlJob(Application application, Application appParam) { + FlinkSql effectiveFlinkSql = flinkSqlService.getEffective(application.getId(), true); + if (effectiveFlinkSql == null) { + effectiveFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); + flinkSqlService.removeById(effectiveFlinkSql.getId()); + FlinkSql sql = new FlinkSql(appParam); + flinkSqlService.create(sql); + application.setBuild(true); + } else { + // get previous flink sql and decode + FlinkSql copySourceFlinkSql = flinkSqlService.getById(appParam.getSqlId()); + ApiAlertException.throwIfNull( + copySourceFlinkSql, "Flink sql is null, update flink sql job failed."); + copySourceFlinkSql.decode(); + + // get submit flink sql + FlinkSql targetFlinkSql = new FlinkSql(appParam); + + // judge sql and dependency has changed + ChangeTypeEnum changeTypeEnum = copySourceFlinkSql.checkChange(targetFlinkSql); + + log.info("updateFlinkSqlJob changeTypeEnum: {}", changeTypeEnum); + + // if has been changed + if (changeTypeEnum.hasChanged()) { + // check if there is a candidate version for the newly added record + FlinkSql newFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); + // If the candidate version of the new record exists, it will be deleted directly, + // and only one candidate version will be retained. If the new candidate version is not + // effective, + // if it is edited again and the next record comes in, the previous candidate version will + // be deleted. 
+ if (newFlinkSql != null) { + // delete all records about candidates + flinkSqlService.removeById(newFlinkSql.getId()); + } + FlinkSql historyFlinkSql = + flinkSqlService.getCandidate(application.getId(), CandidateType.HISTORY); + // remove candidate flags that already exist but are set as candidates + if (historyFlinkSql != null) { + flinkSqlService.cleanCandidate(historyFlinkSql.getId()); + } + FlinkSql sql = new FlinkSql(appParam); + flinkSqlService.create(sql); + if (changeTypeEnum.isDependencyChanged()) { + application.setBuild(true); + } + } else { + // judge version has changed + boolean versionChanged = !effectiveFlinkSql.getId().equals(appParam.getSqlId()); + if (versionChanged) { + // sql and dependency not changed, but version changed, means that rollback to the version + CandidateType type = CandidateType.HISTORY; + flinkSqlService.setCandidate(type, appParam.getId(), appParam.getSqlId()); + application.setRelease(ReleaseState.NEED_ROLLBACK.get()); + application.setBuild(true); + } + } + } + this.updateById(application); + this.configService.update(appParam, application.isRunning()); + } + + @Override + public void updateRelease(Application application) { + LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); + updateWrapper.eq(Application::getId, application.getId()); + updateWrapper.set(Application::getRelease, application.getRelease()); + updateWrapper.set(Application::getBuild, application.getBuild()); + if (application.getOptionState() != null) { + updateWrapper.set(Application::getOptionState, application.getOptionState()); + } + this.update(updateWrapper); + } + + @Override + public List getByProjectId(Long id) { + return baseMapper.getByProjectId(id); + } + + @Override + public List getByTeamId(Long teamId) { + return baseMapper.getByTeamId(teamId); + } + + @Override + public List getByTeamIdAndExecutionModes( + Long teamId, @Nonnull Collection executionModes) { + return getBaseMapper() + .selectList( + new LambdaQueryWrapper() + 
.eq((SFunction) Application::getTeamId, teamId) + .in( + Application::getExecutionMode, + executionModes.stream() + .map(ExecutionMode::getMode) + .collect(Collectors.toSet()))); + } + + @Override + public boolean checkBuildAndUpdate(Application application) { + boolean build = application.getBuild(); + if (!build) { + LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); + updateWrapper.eq(Application::getId, application.getId()); + if (application.isRunning()) { + updateWrapper.set(Application::getRelease, ReleaseState.NEED_RESTART.get()); + } else { + updateWrapper.set(Application::getRelease, ReleaseState.DONE.get()); + updateWrapper.set(Application::getOptionState, OptionState.NONE.getValue()); + } + this.update(updateWrapper); + + // backup + if (application.isFlinkSqlJob()) { + FlinkSql newFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); + if (!application.isNeedRollback() && newFlinkSql != null) { + backUpService.backup(application, newFlinkSql); + } + } + + // If the current task is not running, or the task has just been added, + // directly set the candidate version to the official version + FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), false); + if (!application.isRunning() || flinkSql == null) { + this.toEffective(application); + } + } + return build; + } + + @Override + public void clean(Application appParam) { + appParam.setRelease(ReleaseState.DONE.get()); + this.updateRelease(appParam); + } + + @Override + public Application getApp(Application appParam) { + Application application = this.baseMapper.getApp(appParam); + ApplicationConfig config = configService.getEffective(appParam.getId()); + config = config == null ? 
configService.getLatest(appParam.getId()) : config; + if (config != null) { + config.setToApplication(application); + } + if (application.isFlinkSqlJob()) { + FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), true); + if (flinkSql == null) { + flinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); + flinkSql.setSql(DeflaterUtils.unzipString(flinkSql.getSql())); + } + flinkSql.setToApplication(application); + } else { + if (application.isCICDJob()) { + String path = + this.projectService.getAppConfPath(application.getProjectId(), application.getModule()); + application.setConfPath(path); + } + } + // add flink web url info for k8s-mode + if (isKubernetesApp(application)) { + String restUrl = k8SFlinkTrackMonitor.getRemoteRestUrl(toTrackId(application)); + application.setFlinkRestUrl(restUrl); + + // set duration + long now = System.currentTimeMillis(); + if (application.getTracking() == 1 + && application.getStartTime() != null + && application.getStartTime().getTime() > 0) { + application.setDuration(now - application.getStartTime().getTime()); + } + } + + application.setYarnQueueByHotParams(); + + return application; + } + + /** + * Check queue label validation when create the application if needed. + * + * @param appParam the app to create. + * @return true if validate it successfully, false else. + */ + @VisibleForTesting + public boolean validateQueueIfNeeded(Application appParam) { + yarnQueueService.checkQueueLabel(appParam.getExecutionModeEnum(), appParam.getYarnQueue()); + if (!isYarnNotDefaultQueue(appParam)) { + return true; + } + return yarnQueueService.existByTeamIdQueueLabel(appParam.getTeamId(), appParam.getYarnQueue()); + } + + /** + * Check queue label validation when update the application if needed. + * + * @param oldApp the old app to update. + * @param newApp the new app payload. + * @return true if validate it successfully, false else. 
+ */ + @VisibleForTesting + public boolean validateQueueIfNeeded(Application oldApp, Application newApp) { + yarnQueueService.checkQueueLabel(newApp.getExecutionModeEnum(), newApp.getYarnQueue()); + if (!isYarnNotDefaultQueue(newApp)) { + return true; + } + + oldApp.setYarnQueueByHotParams(); + if (ExecutionMode.isYarnPerJobOrAppMode(newApp.getExecutionModeEnum()) + && StringUtils.equals(oldApp.getYarnQueue(), newApp.getYarnQueue())) { + return true; + } + return yarnQueueService.existByTeamIdQueueLabel(newApp.getTeamId(), newApp.getYarnQueue()); + } + + /** + * Judge the execution mode whether is the Yarn PerJob or Application mode with not default or + * empty queue label. + * + * @param application application entity. + * @return If the executionMode is (Yarn PerJob or application mode) and the queue label is not + * (empty or default), return true, false else. + */ + private boolean isYarnNotDefaultQueue(Application application) { + return ExecutionMode.isYarnPerJobOrAppMode(application.getExecutionModeEnum()) + && !yarnQueueService.isDefaultQueue(application.getYarnQueue()); + } +} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java index a20f5a08c4..8fd23043fc 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java @@ -49,13 +49,15 @@ import org.apache.streampark.console.core.service.ApplicationBackUpService; import org.apache.streampark.console.core.service.ApplicationConfigService; import org.apache.streampark.console.core.service.ApplicationLogService; -import 
org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.CommonService; import org.apache.streampark.console.core.service.FlinkEnvService; import org.apache.streampark.console.core.service.FlinkSqlService; import org.apache.streampark.console.core.service.MessageService; import org.apache.streampark.console.core.service.ResourceService; import org.apache.streampark.console.core.service.SettingService; +import org.apache.streampark.console.core.service.application.ApplicationActionService; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.core.task.FlinkHttpWatcher; import org.apache.streampark.flink.packer.docker.DockerConf; import org.apache.streampark.flink.packer.maven.Artifact; @@ -131,7 +133,9 @@ public class AppBuildPipeServiceImpl @Autowired private MessageService messageService; - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; + + @Autowired private ApplicationInfoService applicationInfoService; @Autowired private ApplicationLogService applicationLogService; @@ -172,7 +176,7 @@ public boolean buildApplication(Long appId, boolean forceBuild) { // check the build environment checkBuildEnv(appId, forceBuild); - Application app = applicationService.getById(appId); + Application app = applicationManageService.getById(appId); ApplicationLog applicationLog = new ApplicationLog(); applicationLog.setOptionName(RELEASE.getValue()); applicationLog.setAppId(app.getId()); @@ -181,7 +185,7 @@ public boolean buildApplication(Long appId, boolean forceBuild) { // check if you need to go through the build process (if the jar and pom have changed, // you need to go through the build process, if other common parameters are modified, // you don't need to go through the build 
process) - boolean needBuild = applicationService.checkBuildAndUpdate(app); + boolean needBuild = applicationManageService.checkBuildAndUpdate(app); if (!needBuild) { applicationLog.setSuccess(true); applicationLogService.save(applicationLog); @@ -218,14 +222,14 @@ public void onStart(PipeSnapshot snapshot) { saveEntity(buildPipeline); app.setRelease(ReleaseState.RELEASING.get()); - applicationService.updateRelease(app); + applicationManageService.updateRelease(app); if (flinkHttpWatcher.isWatchingApp(app.getId())) { flinkHttpWatcher.init(); } // 1) checkEnv - applicationService.checkEnv(app); + applicationInfoService.checkEnv(app); // 2) some preparatory work String appUploads = app.getWorkspace().APP_UPLOADS(); @@ -313,7 +317,7 @@ public void onFinish(PipeSnapshot snapshot, BuildResult result) { // If the current task is not running, or the task has just been added, directly set // the candidate version to the official version if (app.isFlinkSqlJob()) { - applicationService.toEffective(app); + applicationManageService.toEffective(app); } else { if (app.isStreamParkJob()) { ApplicationConfig config = applicationConfigService.getLatest(app.getId()); @@ -350,7 +354,7 @@ public void onFinish(PipeSnapshot snapshot, BuildResult result) { applicationLog.setException(Utils.stringifyException(snapshot.error().exception())); applicationLog.setSuccess(false); } - applicationService.updateRelease(app); + applicationManageService.updateRelease(app); applicationLogService.save(applicationLog); if (flinkHttpWatcher.isWatchingApp(app.getId())) { flinkHttpWatcher.init(); @@ -399,7 +403,7 @@ public void onDockerPushProgressChange(DockerPushSnapshot snapshot) { * @return */ private void checkBuildEnv(Long appId, boolean forceBuild) { - Application app = applicationService.getById(appId); + Application app = applicationManageService.getById(appId); // 1) check flink version FlinkEnv env = flinkEnvService.getById(app.getVersionId()); @@ -408,7 +412,7 @@ private void 
checkBuildEnv(Long appId, boolean forceBuild) { checkVersion, "Unsupported flink version:" + env.getFlinkVersion().version()); // 2) check env - boolean envOk = applicationService.checkEnv(app); + boolean envOk = applicationInfoService.checkEnv(app); ApiAlertException.throwIfFalse( envOk, "Check flink env failed, please check the flink version of this job"); @@ -514,7 +518,7 @@ private BuildPipeline createPipelineInstance(@Nonnull Application app) { } } - /** copy from {@link ApplicationServiceImpl#start(Application, boolean)} */ + /** copy from {@link ApplicationActionService#start(Application, boolean)} */ private String retrieveFlinkUserJar(FlinkEnv flinkEnv, Application app) { switch (app.getDevelopmentMode()) { case CUSTOM_CODE: @@ -626,13 +630,12 @@ private DependencyInfo getMergedDependencyInfo(Application application) { JacksonUtils.read(resource.getResource(), String[].class); Arrays.stream(groupElements) .forEach( - resourceIdInGroup -> { - mergeDependency( - application, - mvnArtifacts, - jarLibs, - resourceService.getById(resourceIdInGroup)); - }); + resourceIdInGroup -> + mergeDependency( + application, + mvnArtifacts, + jarLibs, + resourceService.getById(resourceIdInGroup))); } catch (JsonProcessingException e) { throw new ApiAlertException("Parse resource group failed.", e); } @@ -654,21 +657,20 @@ private static void mergeDependency( dependency .getPom() .forEach( - pom -> { - mvnArtifacts.add( - new Artifact( - pom.getGroupId(), - pom.getArtifactId(), - pom.getVersion(), - pom.getClassifier())); - }); + pom -> + mvnArtifacts.add( + new Artifact( + pom.getGroupId(), + pom.getArtifactId(), + pom.getVersion(), + pom.getClassifier()))); dependency .getJar() .forEach( - jar -> { - jarLibs.add( - String.format( - "%s/%d/%s", Workspace.local().APP_UPLOADS(), application.getTeamId(), jar)); - }); + jar -> + jarLibs.add( + String.format( + "%s/%d/%s", + Workspace.local().APP_UPLOADS(), application.getTeamId(), jar))); } } diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java index 5622ee3885..2b7779abc2 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java @@ -31,9 +31,9 @@ import org.apache.streampark.console.core.mapper.ApplicationBackUpMapper; import org.apache.streampark.console.core.service.ApplicationBackUpService; import org.apache.streampark.console.core.service.ApplicationConfigService; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.EffectiveService; import org.apache.streampark.console.core.service.FlinkSqlService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper; @@ -53,7 +53,7 @@ public class ApplicationBackUpServiceImpl extends ServiceImpl implements ApplicationBackUpService { - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; @Autowired private ApplicationConfigService configService; @@ -74,7 +74,7 @@ public IPage page(ApplicationBackUp backUp, RestRequest reque @Transactional(rollbackFor = {Exception.class}) public void rollback(ApplicationBackUp backParam) { - Application application = applicationService.getById(backParam.getAppId()); + Application application = applicationManageService.getById(backParam.getAppId()); FsOperator fsOperator = 
application.getFsOperator(); // backup files not exist @@ -119,7 +119,7 @@ public void rollback(ApplicationBackUp backParam) { fsOperator.copyDir(backParam.getPath(), application.getAppHome()); // update restart status - applicationService.update( + applicationManageService.update( new UpdateWrapper() .lambda() .eq(Application::getId, application.getId()) @@ -194,7 +194,7 @@ private ApplicationBackUp getFlinkSqlBackup(Long appId, Long sqlId) { public Boolean delete(Long id) throws InternalException { ApplicationBackUp backUp = getById(id); try { - Application application = applicationService.getById(backUp.getAppId()); + Application application = applicationManageService.getById(backUp.getAppId()); application.getFsOperator().delete(backUp.getPath()); removeById(id); return true; diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationServiceImpl.java deleted file mode 100644 index c76540eb4b..0000000000 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationServiceImpl.java +++ /dev/null @@ -1,1834 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.streampark.console.core.service.impl; - -import org.apache.streampark.common.conf.ConfigConst; -import org.apache.streampark.common.conf.Workspace; -import org.apache.streampark.common.enums.DevelopmentMode; -import org.apache.streampark.common.enums.ExecutionMode; -import org.apache.streampark.common.enums.ResolveOrder; -import org.apache.streampark.common.enums.RestoreMode; -import org.apache.streampark.common.enums.StorageType; -import org.apache.streampark.common.fs.FsOperator; -import org.apache.streampark.common.fs.HdfsOperator; -import org.apache.streampark.common.fs.LfsOperator; -import org.apache.streampark.common.util.CompletableFutureUtils; -import org.apache.streampark.common.util.DeflaterUtils; -import org.apache.streampark.common.util.FileUtils; -import org.apache.streampark.common.util.HadoopUtils; -import org.apache.streampark.common.util.PropertiesUtils; -import org.apache.streampark.common.util.ThreadUtils; -import org.apache.streampark.common.util.Utils; -import org.apache.streampark.common.util.YarnUtils; -import org.apache.streampark.console.base.domain.RestRequest; -import org.apache.streampark.console.base.exception.ApiAlertException; -import org.apache.streampark.console.base.exception.ApiDetailException; -import org.apache.streampark.console.base.exception.ApplicationException; -import org.apache.streampark.console.base.mybatis.pager.MybatisPager; -import org.apache.streampark.console.base.util.CommonUtils; -import org.apache.streampark.console.base.util.ObjectUtils; -import 
org.apache.streampark.console.base.util.WebUtils; -import org.apache.streampark.console.core.entity.AppBuildPipeline; -import org.apache.streampark.console.core.entity.Application; -import org.apache.streampark.console.core.entity.ApplicationConfig; -import org.apache.streampark.console.core.entity.ApplicationLog; -import org.apache.streampark.console.core.entity.FlinkCluster; -import org.apache.streampark.console.core.entity.FlinkEnv; -import org.apache.streampark.console.core.entity.FlinkSql; -import org.apache.streampark.console.core.entity.Project; -import org.apache.streampark.console.core.entity.Resource; -import org.apache.streampark.console.core.entity.SavePoint; -import org.apache.streampark.console.core.enums.AppExistsState; -import org.apache.streampark.console.core.enums.CandidateType; -import org.apache.streampark.console.core.enums.ChangeTypeEnum; -import org.apache.streampark.console.core.enums.CheckPointType; -import org.apache.streampark.console.core.enums.ConfigFileType; -import org.apache.streampark.console.core.enums.FlinkAppState; -import org.apache.streampark.console.core.enums.Operation; -import org.apache.streampark.console.core.enums.OptionState; -import org.apache.streampark.console.core.enums.ReleaseState; -import org.apache.streampark.console.core.mapper.ApplicationMapper; -import org.apache.streampark.console.core.metrics.flink.JobsOverview; -import org.apache.streampark.console.core.runner.EnvInitializer; -import org.apache.streampark.console.core.service.AppBuildPipeService; -import org.apache.streampark.console.core.service.ApplicationBackUpService; -import org.apache.streampark.console.core.service.ApplicationConfigService; -import org.apache.streampark.console.core.service.ApplicationLogService; -import org.apache.streampark.console.core.service.ApplicationService; -import org.apache.streampark.console.core.service.CommonService; -import org.apache.streampark.console.core.service.EffectiveService; -import 
org.apache.streampark.console.core.service.FlinkClusterService; -import org.apache.streampark.console.core.service.FlinkEnvService; -import org.apache.streampark.console.core.service.FlinkSqlService; -import org.apache.streampark.console.core.service.ProjectService; -import org.apache.streampark.console.core.service.ResourceService; -import org.apache.streampark.console.core.service.SavePointService; -import org.apache.streampark.console.core.service.SettingService; -import org.apache.streampark.console.core.service.VariableService; -import org.apache.streampark.console.core.service.YarnQueueService; -import org.apache.streampark.console.core.task.FlinkClusterWatcher; -import org.apache.streampark.console.core.task.FlinkHttpWatcher; -import org.apache.streampark.flink.client.FlinkClient; -import org.apache.streampark.flink.client.bean.CancelRequest; -import org.apache.streampark.flink.client.bean.CancelResponse; -import org.apache.streampark.flink.client.bean.KubernetesSubmitParam; -import org.apache.streampark.flink.client.bean.SubmitRequest; -import org.apache.streampark.flink.client.bean.SubmitResponse; -import org.apache.streampark.flink.core.conf.ParameterCli; -import org.apache.streampark.flink.kubernetes.FlinkK8sWatcher; -import org.apache.streampark.flink.kubernetes.helper.KubernetesDeploymentHelper; -import org.apache.streampark.flink.kubernetes.ingress.IngressController; -import org.apache.streampark.flink.kubernetes.model.FlinkMetricCV; -import org.apache.streampark.flink.kubernetes.model.TrackId; -import org.apache.streampark.flink.packer.pipeline.BuildResult; -import org.apache.streampark.flink.packer.pipeline.ShadedBuildResponse; - -import org.apache.commons.lang3.StringUtils; -import org.apache.flink.api.common.JobID; -import org.apache.flink.configuration.CoreOptions; -import org.apache.flink.configuration.JobManagerOptions; -import org.apache.flink.configuration.MemorySize; -import org.apache.flink.configuration.RestOptions; -import 
org.apache.flink.runtime.jobgraph.SavepointConfigOptions; - -import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; -import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.core.toolkit.Wrappers; -import com.baomidou.mybatisplus.core.toolkit.support.SFunction; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; -import com.google.common.annotations.VisibleForTesting; -import io.fabric8.kubernetes.client.KubernetesClientException; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; - -import javax.annotation.Nonnull; -import javax.annotation.PostConstruct; - -import java.io.File; -import java.io.IOException; -import java.io.Serializable; -import java.net.URI; -import java.util.Arrays; -import java.util.Base64; -import java.util.Collection; -import java.util.Comparator; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.concurrent.CancellationException; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.jar.Manifest; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -import static org.apache.streampark.common.enums.StorageType.LFS; -import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.Bridge.toTrackId; 
-import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.isKubernetesApp; - -@Slf4j -@Service -@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class) -public class ApplicationServiceImpl extends ServiceImpl - implements ApplicationService { - - private static final String ERROR_APP_QUEUE_HINT = - "Queue label '%s' isn't available for teamId '%d', please add it into the team first."; - - private static final int DEFAULT_HISTORY_RECORD_LIMIT = 25; - - private static final int DEFAULT_HISTORY_POD_TMPL_RECORD_LIMIT = 5; - - private final ExecutorService executorService = - new ThreadPoolExecutor( - Runtime.getRuntime().availableProcessors() * 5, - Runtime.getRuntime().availableProcessors() * 10, - 60L, - TimeUnit.SECONDS, - new LinkedBlockingQueue<>(1024), - ThreadUtils.threadFactory("streampark-deploy-executor"), - new ThreadPoolExecutor.AbortPolicy()); - - private static final Pattern JOB_NAME_PATTERN = - Pattern.compile("^[.\\x{4e00}-\\x{9fa5}A-Za-z\\d_\\-\\s]+$"); - - private static final Pattern SINGLE_SPACE_PATTERN = Pattern.compile("^\\S+(\\s\\S+)*$"); - - @Autowired private ProjectService projectService; - - @Autowired private ApplicationBackUpService backUpService; - - @Autowired private ApplicationConfigService configService; - - @Autowired private ApplicationLogService applicationLogService; - - @Autowired private FlinkEnvService flinkEnvService; - - @Autowired private FlinkSqlService flinkSqlService; - - @Autowired private SavePointService savePointService; - - @Autowired private EffectiveService effectiveService; - - @Autowired private SettingService settingService; - - @Autowired private CommonService commonService; - - @Autowired private EnvInitializer envInitializer; - - @Autowired private FlinkK8sWatcher k8SFlinkTrackMonitor; - - @Autowired private AppBuildPipeService appBuildPipeService; - - @Autowired private FlinkClusterService flinkClusterService; - - @Autowired private 
VariableService variableService; - - @Autowired private YarnQueueService yarnQueueService; - - @Autowired private FlinkClusterWatcher flinkClusterWatcher; - - @Autowired private ResourceService resourceService; - - @PostConstruct - public void resetOptionState() { - this.baseMapper.resetOptionState(); - } - - private final Map> startFutureMap = - new ConcurrentHashMap<>(); - - private final Map> cancelFutureMap = - new ConcurrentHashMap<>(); - - @Override - public Map dashboard(Long teamId) { - JobsOverview.Task overview = new JobsOverview.Task(); - Integer totalJmMemory = 0; - Integer totalTmMemory = 0; - Integer totalTm = 0; - Integer totalSlot = 0; - Integer availableSlot = 0; - Integer runningJob = 0; - - // stat metrics from other than kubernetes mode - for (Application app : FlinkHttpWatcher.getWatchingApps()) { - if (!teamId.equals(app.getTeamId())) { - continue; - } - if (app.getJmMemory() != null) { - totalJmMemory += app.getJmMemory(); - } - if (app.getTmMemory() != null) { - totalTmMemory += app.getTmMemory() * (app.getTotalTM() == null ? 
1 : app.getTotalTM()); - } - if (app.getTotalTM() != null) { - totalTm += app.getTotalTM(); - } - if (app.getTotalSlot() != null) { - totalSlot += app.getTotalSlot(); - } - if (app.getAvailableSlot() != null) { - availableSlot += app.getAvailableSlot(); - } - if (app.getState() == FlinkAppState.RUNNING.getValue()) { - runningJob++; - } - JobsOverview.Task task = app.getOverview(); - if (task != null) { - overview.setTotal(overview.getTotal() + task.getTotal()); - overview.setCreated(overview.getCreated() + task.getCreated()); - overview.setScheduled(overview.getScheduled() + task.getScheduled()); - overview.setDeploying(overview.getDeploying() + task.getDeploying()); - overview.setRunning(overview.getRunning() + task.getRunning()); - overview.setFinished(overview.getFinished() + task.getFinished()); - overview.setCanceling(overview.getCanceling() + task.getCanceling()); - overview.setCanceled(overview.getCanceled() + task.getCanceled()); - overview.setFailed(overview.getFailed() + task.getFailed()); - overview.setReconciling(overview.getReconciling() + task.getReconciling()); - } - } - - // merge metrics from flink kubernetes cluster - FlinkMetricCV k8sMetric = k8SFlinkTrackMonitor.getAccGroupMetrics(teamId.toString()); - if (k8sMetric != null) { - totalJmMemory += k8sMetric.totalJmMemory(); - totalTmMemory += k8sMetric.totalTmMemory(); - totalTm += k8sMetric.totalTm(); - totalSlot += k8sMetric.totalSlot(); - availableSlot += k8sMetric.availableSlot(); - runningJob += k8sMetric.runningJob(); - overview.setTotal(overview.getTotal() + k8sMetric.totalJob()); - overview.setRunning(overview.getRunning() + k8sMetric.runningJob()); - overview.setFinished(overview.getFinished() + k8sMetric.finishedJob()); - overview.setCanceled(overview.getCanceled() + k8sMetric.cancelledJob()); - overview.setFailed(overview.getFailed() + k8sMetric.failedJob()); - } - - // result json - Map map = new HashMap<>(8); - map.put("task", overview); - map.put("jmMemory", totalJmMemory); - 
map.put("tmMemory", totalTmMemory); - map.put("totalTM", totalTm); - map.put("availableSlot", availableSlot); - map.put("totalSlot", totalSlot); - map.put("runningJob", runningJob); - - return map; - } - - @Override - public void toEffective(Application application) { - // set latest to Effective - ApplicationConfig config = configService.getLatest(application.getId()); - if (config != null) { - this.configService.toEffective(application.getId(), config.getId()); - } - if (application.isFlinkSqlJob()) { - FlinkSql flinkSql = flinkSqlService.getCandidate(application.getId(), null); - if (flinkSql != null) { - flinkSqlService.toEffective(application.getId(), flinkSql.getId()); - // clean candidate - flinkSqlService.cleanCandidate(flinkSql.getId()); - } - } - } - - @Override - public void revoke(Application appParma) throws ApplicationException { - Application application = getById(appParma.getId()); - ApiAlertException.throwIfNull( - application, - String.format("The application id=%s not found, revoke failed.", appParma.getId())); - - // 1) delete files that have been published to workspace - application.getFsOperator().delete(application.getAppHome()); - - // 2) rollback the files to the workspace - backUpService.revoke(application); - - // 3) restore related status - LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); - updateWrapper.eq(Application::getId, application.getId()); - if (application.isFlinkSqlJob()) { - updateWrapper.set(Application::getRelease, ReleaseState.FAILED.get()); - } else { - updateWrapper.set(Application::getRelease, ReleaseState.NEED_RELEASE.get()); - } - if (!application.isRunning()) { - updateWrapper.set(Application::getState, FlinkAppState.REVOKED.getValue()); - } - baseMapper.update(null, updateWrapper); - } - - @Override - @Transactional(rollbackFor = {Exception.class}) - public Boolean delete(Application paramApp) { - - Application application = getById(paramApp.getId()); - - // 1) remove flink sql - 
flinkSqlService.removeApp(application.getId()); - - // 2) remove log - applicationLogService.removeApp(application.getId()); - - // 3) remove config - configService.removeApp(application.getId()); - - // 4) remove effective - effectiveService.removeApp(application.getId()); - - // remove related hdfs - // 5) remove backup - backUpService.removeApp(application); - - // 6) remove savepoint - savePointService.removeApp(application); - - // 7) remove BuildPipeline - appBuildPipeService.removeApp(application.getId()); - - // 8) remove app - removeApp(application); - - if (isKubernetesApp(application)) { - k8SFlinkTrackMonitor.unWatching(toTrackId(application)); - } else { - FlinkHttpWatcher.unWatching(paramApp.getId()); - } - return true; - } - - @Override - public void restart(Application application) throws Exception { - this.cancel(application); - this.start(application, false); - } - - @Override - public boolean checkEnv(Application appParam) throws ApplicationException { - Application application = getById(appParam.getId()); - try { - FlinkEnv flinkEnv; - if (application.getVersionId() != null) { - flinkEnv = flinkEnvService.getByIdOrDefault(application.getVersionId()); - } else { - flinkEnv = flinkEnvService.getDefault(); - } - if (flinkEnv == null) { - return false; - } - envInitializer.checkFlinkEnv(application.getStorageType(), flinkEnv); - envInitializer.storageInitialize(application.getStorageType()); - - if (ExecutionMode.YARN_SESSION.equals(application.getExecutionModeEnum()) - || ExecutionMode.REMOTE.equals(application.getExecutionModeEnum())) { - FlinkCluster flinkCluster = flinkClusterService.getById(application.getFlinkClusterId()); - boolean conned = flinkClusterWatcher.verifyClusterConnection(flinkCluster); - if (!conned) { - throw new ApiAlertException("the target cluster is unavailable, please check!"); - } - } - return true; - } catch (Exception e) { - log.error(Utils.stringifyException(e)); - throw new ApiDetailException(e); - } - } - - @Override 
- public boolean checkAlter(Application application) { - Long appId = application.getId(); - FlinkAppState state = FlinkAppState.of(application.getState()); - if (!FlinkAppState.CANCELED.equals(state)) { - return false; - } - long cancelUserId = FlinkHttpWatcher.getCanceledJobUserId(appId); - long appUserId = application.getUserId(); - return cancelUserId != -1 && cancelUserId != appUserId; - } - - private void removeApp(Application application) { - Long appId = application.getId(); - removeById(appId); - try { - application - .getFsOperator() - .delete(application.getWorkspace().APP_WORKSPACE().concat("/").concat(appId.toString())); - // try to delete yarn-application, and leave no trouble. - String path = - Workspace.of(StorageType.HDFS).APP_WORKSPACE().concat("/").concat(appId.toString()); - if (HdfsOperator.exists(path)) { - HdfsOperator.delete(path); - } - } catch (Exception e) { - // skip - } - } - - @Override - public IPage page(Application appParam, RestRequest request) { - if (appParam.getTeamId() == null) { - return null; - } - Page page = new MybatisPager().getDefaultPage(request); - if (CommonUtils.notEmpty(appParam.getStateArray())) { - if (Arrays.stream(appParam.getStateArray()) - .anyMatch(x -> x == FlinkAppState.FINISHED.getValue())) { - Integer[] newArray = - CommonUtils.arrayInsertIndex( - appParam.getStateArray(), - appParam.getStateArray().length, - FlinkAppState.POS_TERMINATED.getValue()); - appParam.setStateArray(newArray); - } - } - this.baseMapper.page(page, appParam); - List records = page.getRecords(); - long now = System.currentTimeMillis(); - List newRecords = - records.stream() - .peek( - record -> { - // status of flink job on kubernetes mode had been automatically persisted to db - // in time. 
- if (isKubernetesApp(record)) { - // set duration - String restUrl = k8SFlinkTrackMonitor.getRemoteRestUrl(toTrackId(record)); - record.setFlinkRestUrl(restUrl); - if (record.getTracking() == 1 - && record.getStartTime() != null - && record.getStartTime().getTime() > 0) { - record.setDuration(now - record.getStartTime().getTime()); - } - } - }) - .collect(Collectors.toList()); - page.setRecords(newRecords); - return page; - } - - @Override - public boolean existsByTeamId(Long teamId) { - return baseMapper.exists( - new LambdaQueryWrapper().eq(Application::getTeamId, teamId)); - } - - @Override - public boolean existsByUserId(Long userId) { - return baseMapper.exists( - new LambdaQueryWrapper().eq(Application::getUserId, userId)); - } - - @Override - public boolean existsRunningByClusterId(Long clusterId) { - return baseMapper.existsRunningJobByClusterId(clusterId) - || FlinkHttpWatcher.getWatchingApps().stream() - .anyMatch( - application -> - clusterId.equals(application.getFlinkClusterId()) - && FlinkAppState.RUNNING.equals(application.getFlinkAppStateEnum())); - } - - @Override - public boolean existsByClusterId(Long clusterId) { - return baseMapper.exists( - new LambdaQueryWrapper().eq(Application::getFlinkClusterId, clusterId)); - } - - @Override - public Integer countByClusterId(Long clusterId) { - return baseMapper - .selectCount( - new LambdaQueryWrapper().eq(Application::getFlinkClusterId, clusterId)) - .intValue(); - } - - @Override - public Integer countAffectedByClusterId(Long clusterId, String dbType) { - return baseMapper.countAffectedByClusterId(clusterId, dbType); - } - - @Override - public boolean existsByFlinkEnvId(Long flinkEnvId) { - return baseMapper.exists( - new LambdaQueryWrapper().eq(Application::getVersionId, flinkEnvId)); - } - - @Override - public List getRecentK8sNamespace() { - return baseMapper.getRecentK8sNamespace(DEFAULT_HISTORY_RECORD_LIMIT); - } - - @Override - public List getRecentK8sClusterId(Integer executionMode) { - return 
baseMapper.getRecentK8sClusterId(executionMode, DEFAULT_HISTORY_RECORD_LIMIT); - } - - @Override - public List getRecentFlinkBaseImage() { - return baseMapper.getRecentFlinkBaseImage(DEFAULT_HISTORY_RECORD_LIMIT); - } - - @Override - public List getRecentK8sPodTemplate() { - return baseMapper.getRecentK8sPodTemplate(DEFAULT_HISTORY_POD_TMPL_RECORD_LIMIT); - } - - @Override - public List getRecentK8sJmPodTemplate() { - return baseMapper.getRecentK8sJmPodTemplate(DEFAULT_HISTORY_POD_TMPL_RECORD_LIMIT); - } - - @Override - public List getRecentK8sTmPodTemplate() { - return baseMapper.getRecentK8sTmPodTemplate(DEFAULT_HISTORY_POD_TMPL_RECORD_LIMIT); - } - - @Override - public List historyUploadJars() { - return Arrays.stream(LfsOperator.listDir(Workspace.of(LFS).APP_UPLOADS())) - .filter(File::isFile) - .sorted(Comparator.comparingLong(File::lastModified).reversed()) - .map(File::getName) - .filter(fn -> fn.endsWith(".jar")) - .limit(DEFAULT_HISTORY_RECORD_LIMIT) - .collect(Collectors.toList()); - } - - @Override - public String k8sStartLog(Long id, Integer offset, Integer limit) throws Exception { - Application application = getById(id); - ApiAlertException.throwIfNull( - application, String.format("The application id=%s can't be found.", id)); - if (ExecutionMode.isKubernetesMode(application.getExecutionModeEnum())) { - CompletableFuture future = - CompletableFuture.supplyAsync( - () -> - KubernetesDeploymentHelper.watchDeploymentLog( - application.getK8sNamespace(), - application.getJobName(), - application.getJobId())); - - return future - .exceptionally( - e -> { - String errorLog = - String.format( - "%s/%s_err.log", - WebUtils.getAppTempDir().getAbsolutePath(), application.getJobId()); - File file = new File(errorLog); - if (file.exists() && file.isFile()) { - return file.getAbsolutePath(); - } - return null; - }) - .thenApply( - path -> { - if (!future.isDone()) { - future.cancel(true); - } - if (FileUtils.exists(path)) { - return FileUtils.tailOf(path, offset, 
limit); - } - return null; - }) - .toCompletableFuture() - .get(5, TimeUnit.SECONDS); - } else { - throw new ApiAlertException( - "Job executionMode must be kubernetes-session|kubernetes-application."); - } - } - - @Override - public void changeOwnership(Long userId, Long targetUserId) { - LambdaUpdateWrapper updateWrapper = - new LambdaUpdateWrapper() - .eq(Application::getUserId, userId) - .set(Application::getUserId, targetUserId); - this.baseMapper.update(null, updateWrapper); - } - - @Override - public String getYarnName(Application appParam) { - String[] args = new String[2]; - args[0] = "--name"; - args[1] = appParam.getConfig(); - return ParameterCli.read(args); - } - - /** - * Check if the current jobName and other key identifiers already exist in the database and - * yarn/k8s. - * - * @param appParam The application to check for existence. - * @return The state of the application's existence. - */ - @Override - public AppExistsState checkExists(Application appParam) { - - if (!checkJobName(appParam.getJobName())) { - return AppExistsState.INVALID; - } - - boolean existsByJobName = this.existsByJobName(appParam.getJobName()); - - if (appParam.getId() != null) { - Application app = getById(appParam.getId()); - if (app.getJobName().equals(appParam.getJobName())) { - return AppExistsState.NO; - } - - if (existsByJobName) { - return AppExistsState.IN_DB; - } - - // has stopped status - if (FlinkAppState.isEndState(app.getState())) { - // check whether jobName exists on yarn - if (ExecutionMode.isYarnMode(appParam.getExecutionMode()) - && YarnUtils.isContains(appParam.getJobName())) { - return AppExistsState.IN_YARN; - } - // check whether clusterId, namespace, jobId on kubernetes - else if (ExecutionMode.isKubernetesMode(appParam.getExecutionMode()) - && k8SFlinkTrackMonitor.checkIsInRemoteCluster(toTrackId(appParam))) { - return AppExistsState.IN_KUBERNETES; - } - } - } else { - if (existsByJobName) { - return AppExistsState.IN_DB; - } - - // check whether 
jobName exists on yarn - if (ExecutionMode.isYarnMode(appParam.getExecutionMode()) - && YarnUtils.isContains(appParam.getJobName())) { - return AppExistsState.IN_YARN; - } - // check whether clusterId, namespace, jobId on kubernetes - else if (ExecutionMode.isKubernetesMode(appParam.getExecutionMode()) - && k8SFlinkTrackMonitor.checkIsInRemoteCluster(toTrackId(appParam))) { - return AppExistsState.IN_KUBERNETES; - } - } - return AppExistsState.NO; - } - - @SneakyThrows - @Override - @Transactional(rollbackFor = {Exception.class}) - public boolean create(Application appParam) { - ApiAlertException.throwIfNull( - appParam.getTeamId(), "The teamId can't be null. Create application failed."); - appParam.setUserId(commonService.getUserId()); - appParam.setState(FlinkAppState.ADDED.getValue()); - appParam.setRelease(ReleaseState.NEED_RELEASE.get()); - appParam.setOptionState(OptionState.NONE.getValue()); - appParam.setCreateTime(new Date()); - appParam.setDefaultModeIngress(settingService.getIngressModeDefault()); - - boolean success = validateQueueIfNeeded(appParam); - ApiAlertException.throwIfFalse( - success, - String.format(ERROR_APP_QUEUE_HINT, appParam.getYarnQueue(), appParam.getTeamId())); - - appParam.doSetHotParams(); - if (appParam.isUploadJob()) { - String jarPath = - String.format( - "%s/%d/%s", Workspace.local().APP_UPLOADS(), appParam.getTeamId(), appParam.getJar()); - if (!new File(jarPath).exists()) { - Resource resource = - resourceService.findByResourceName(appParam.getTeamId(), appParam.getJar()); - if (resource != null && StringUtils.isNotBlank(resource.getFilePath())) { - jarPath = resource.getFilePath(); - } - } - appParam.setJarCheckSum(org.apache.commons.io.FileUtils.checksumCRC32(new File(jarPath))); - } - - if (save(appParam)) { - if (appParam.isFlinkSqlJob()) { - FlinkSql flinkSql = new FlinkSql(appParam); - flinkSqlService.create(flinkSql); - } - if (appParam.getConfig() != null) { - configService.create(appParam, true); - } - return true; - 
} else { - throw new ApiAlertException("create application failed"); - } - } - - private boolean existsByJobName(String jobName) { - return baseMapper.exists( - new LambdaQueryWrapper().eq(Application::getJobName, jobName)); - } - - @SuppressWarnings("checkstyle:WhitespaceAround") - @Override - @SneakyThrows - @Transactional(rollbackFor = {Exception.class}) - public Long copy(Application appParam) { - boolean existsByJobName = this.existsByJobName(appParam.getJobName()); - ApiAlertException.throwIfFalse( - !existsByJobName, - "[StreamPark] Application names can't be repeated, copy application failed."); - - Application oldApp = getById(appParam.getId()); - Application newApp = new Application(); - String jobName = appParam.getJobName(); - - newApp.setJobName(jobName); - newApp.setClusterId( - ExecutionMode.isSessionMode(oldApp.getExecutionModeEnum()) - ? oldApp.getClusterId() - : jobName); - newApp.setArgs(appParam.getArgs() != null ? appParam.getArgs() : oldApp.getArgs()); - newApp.setVersionId(oldApp.getVersionId()); - - newApp.setFlinkClusterId(oldApp.getFlinkClusterId()); - newApp.setRestartSize(oldApp.getRestartSize()); - newApp.setJobType(oldApp.getJobType()); - newApp.setOptions(oldApp.getOptions()); - newApp.setDynamicProperties(oldApp.getDynamicProperties()); - newApp.setResolveOrder(oldApp.getResolveOrder()); - newApp.setExecutionMode(oldApp.getExecutionMode()); - newApp.setFlinkImage(oldApp.getFlinkImage()); - newApp.setK8sNamespace(oldApp.getK8sNamespace()); - newApp.setK8sRestExposedType(oldApp.getK8sRestExposedType()); - newApp.setK8sPodTemplate(oldApp.getK8sPodTemplate()); - newApp.setK8sJmPodTemplate(oldApp.getK8sJmPodTemplate()); - newApp.setK8sTmPodTemplate(oldApp.getK8sTmPodTemplate()); - newApp.setK8sHadoopIntegration(oldApp.getK8sHadoopIntegration()); - newApp.setDescription(oldApp.getDescription()); - newApp.setAlertId(oldApp.getAlertId()); - newApp.setCpFailureAction(oldApp.getCpFailureAction()); - 
newApp.setCpFailureRateInterval(oldApp.getCpFailureRateInterval()); - newApp.setCpMaxFailureInterval(oldApp.getCpMaxFailureInterval()); - newApp.setMainClass(oldApp.getMainClass()); - newApp.setAppType(oldApp.getAppType()); - newApp.setResourceFrom(oldApp.getResourceFrom()); - newApp.setProjectId(oldApp.getProjectId()); - newApp.setModule(oldApp.getModule()); - newApp.setUserId(commonService.getUserId()); - newApp.setState(FlinkAppState.ADDED.getValue()); - newApp.setRelease(ReleaseState.NEED_RELEASE.get()); - newApp.setOptionState(OptionState.NONE.getValue()); - newApp.setCreateTime(new Date()); - newApp.setHotParams(oldApp.getHotParams()); - - newApp.setJar(oldApp.getJar()); - newApp.setJarCheckSum(oldApp.getJarCheckSum()); - newApp.setTags(oldApp.getTags()); - newApp.setTeamId(oldApp.getTeamId()); - - boolean saved = save(newApp); - if (saved) { - if (newApp.isFlinkSqlJob()) { - FlinkSql copyFlinkSql = flinkSqlService.getLatestFlinkSql(appParam.getId(), true); - newApp.setFlinkSql(copyFlinkSql.getSql()); - newApp.setTeamResource(copyFlinkSql.getTeamResource()); - newApp.setDependency(copyFlinkSql.getDependency()); - FlinkSql flinkSql = new FlinkSql(newApp); - flinkSqlService.create(flinkSql); - } - ApplicationConfig copyConfig = configService.getEffective(appParam.getId()); - if (copyConfig != null) { - ApplicationConfig config = new ApplicationConfig(); - config.setAppId(newApp.getId()); - config.setFormat(copyConfig.getFormat()); - config.setContent(copyConfig.getContent()); - config.setCreateTime(new Date()); - config.setVersion(1); - configService.save(config); - configService.setLatestOrEffective(true, config.getId(), newApp.getId()); - } - return newApp.getId(); - } else { - throw new ApiAlertException( - "create application from copy failed, copy source app: " + oldApp.getJobName()); - } - } - - @Override - @Transactional(rollbackFor = {Exception.class}) - public boolean update(Application appParam) { - Application application = getById(appParam.getId()); 
- - boolean success = validateQueueIfNeeded(application, appParam); - ApiAlertException.throwIfFalse( - success, - String.format(ERROR_APP_QUEUE_HINT, appParam.getYarnQueue(), appParam.getTeamId())); - - application.setRelease(ReleaseState.NEED_RELEASE.get()); - - // 1) jar job jar file changed - if (application.isUploadJob()) { - if (!Objects.equals(application.getJar(), appParam.getJar())) { - application.setBuild(true); - } else { - File jarFile = new File(WebUtils.getAppTempDir(), appParam.getJar()); - if (jarFile.exists()) { - try { - long checkSum = org.apache.commons.io.FileUtils.checksumCRC32(jarFile); - if (!Objects.equals(checkSum, application.getJarCheckSum())) { - application.setBuild(true); - } - } catch (IOException e) { - log.error("Error in checksumCRC32 for {}.", jarFile); - throw new RuntimeException(e); - } - } - } - } - - // 2) k8s podTemplate changed. - if (application.getBuild() && ExecutionMode.isKubernetesMode(appParam.getExecutionMode())) { - if (ObjectUtils.trimNoEquals( - application.getK8sRestExposedType(), appParam.getK8sRestExposedType()) - || ObjectUtils.trimNoEquals( - application.getK8sJmPodTemplate(), appParam.getK8sJmPodTemplate()) - || ObjectUtils.trimNoEquals( - application.getK8sTmPodTemplate(), appParam.getK8sTmPodTemplate()) - || ObjectUtils.trimNoEquals( - application.getK8sPodTemplates(), appParam.getK8sPodTemplates()) - || ObjectUtils.trimNoEquals( - application.getK8sHadoopIntegration(), appParam.getK8sHadoopIntegration()) - || ObjectUtils.trimNoEquals(application.getFlinkImage(), appParam.getFlinkImage())) { - application.setBuild(true); - } - } - - // 3) flink version changed - if (!application.getBuild() - && !Objects.equals(application.getVersionId(), appParam.getVersionId())) { - application.setBuild(true); - } - - // 4) yarn application mode change - if (!application.getBuild()) { - if (!application.getExecutionMode().equals(appParam.getExecutionMode())) { - if 
(appParam.getExecutionModeEnum().equals(ExecutionMode.YARN_APPLICATION) - || application.getExecutionModeEnum().equals(ExecutionMode.YARN_APPLICATION)) { - application.setBuild(true); - } - } - } - - appParam.setJobType(application.getJobType()); - // changes to the following parameters need to be re-release to take effect - application.setJobName(appParam.getJobName()); - application.setVersionId(appParam.getVersionId()); - application.setArgs(appParam.getArgs()); - application.setOptions(appParam.getOptions()); - application.setDynamicProperties(appParam.getDynamicProperties()); - application.setResolveOrder(appParam.getResolveOrder()); - application.setExecutionMode(appParam.getExecutionMode()); - application.setClusterId(appParam.getClusterId()); - application.setFlinkImage(appParam.getFlinkImage()); - application.setK8sNamespace(appParam.getK8sNamespace()); - application.updateHotParams(appParam); - application.setK8sRestExposedType(appParam.getK8sRestExposedType()); - application.setK8sPodTemplate(appParam.getK8sPodTemplate()); - application.setK8sJmPodTemplate(appParam.getK8sJmPodTemplate()); - application.setK8sTmPodTemplate(appParam.getK8sTmPodTemplate()); - application.setK8sHadoopIntegration(appParam.getK8sHadoopIntegration()); - - // changes to the following parameters do not affect running tasks - application.setModifyTime(new Date()); - application.setDescription(appParam.getDescription()); - application.setAlertId(appParam.getAlertId()); - application.setRestartSize(appParam.getRestartSize()); - application.setCpFailureAction(appParam.getCpFailureAction()); - application.setCpFailureRateInterval(appParam.getCpFailureRateInterval()); - application.setCpMaxFailureInterval(appParam.getCpMaxFailureInterval()); - application.setTags(appParam.getTags()); - - switch (appParam.getExecutionModeEnum()) { - case YARN_APPLICATION: - case YARN_PER_JOB: - case KUBERNETES_NATIVE_APPLICATION: - application.setFlinkClusterId(null); - break; - case REMOTE: - case 
YARN_SESSION: - case KUBERNETES_NATIVE_SESSION: - application.setFlinkClusterId(appParam.getFlinkClusterId()); - break; - default: - break; - } - - // Flink Sql job... - if (application.isFlinkSqlJob()) { - updateFlinkSqlJob(application, appParam); - return true; - } - - if (application.isStreamParkJob()) { - configService.update(appParam, application.isRunning()); - } else { - application.setJar(appParam.getJar()); - application.setMainClass(appParam.getMainClass()); - } - this.updateById(application); - return true; - } - - /** - * update FlinkSql type jobs, there are 3 aspects to consider
- * 1. flink sql has changed
- * 2. dependency has changed
- * 3. parameter has changed
- * - * @param application - * @param appParam - */ - private void updateFlinkSqlJob(Application application, Application appParam) { - FlinkSql effectiveFlinkSql = flinkSqlService.getEffective(application.getId(), true); - if (effectiveFlinkSql == null) { - effectiveFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); - flinkSqlService.removeById(effectiveFlinkSql.getId()); - FlinkSql sql = new FlinkSql(appParam); - flinkSqlService.create(sql); - application.setBuild(true); - } else { - // get previous flink sql and decode - FlinkSql copySourceFlinkSql = flinkSqlService.getById(appParam.getSqlId()); - ApiAlertException.throwIfNull( - copySourceFlinkSql, "Flink sql is null, update flink sql job failed."); - copySourceFlinkSql.decode(); - - // get submit flink sql - FlinkSql targetFlinkSql = new FlinkSql(appParam); - - // judge sql and dependency has changed - ChangeTypeEnum changeTypeEnum = copySourceFlinkSql.checkChange(targetFlinkSql); - - log.info("updateFlinkSqlJob changeTypeEnum: {}", changeTypeEnum); - - // if has been changed - if (changeTypeEnum.hasChanged()) { - // check if there is a candidate version for the newly added record - FlinkSql newFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); - // If the candidate version of the new record exists, it will be deleted directly, - // and only one candidate version will be retained. If the new candidate version is not - // effective, - // if it is edited again and the next record comes in, the previous candidate version will - // be deleted. 
- if (newFlinkSql != null) { - // delete all records about candidates - flinkSqlService.removeById(newFlinkSql.getId()); - } - FlinkSql historyFlinkSql = - flinkSqlService.getCandidate(application.getId(), CandidateType.HISTORY); - // remove candidate flags that already exist but are set as candidates - if (historyFlinkSql != null) { - flinkSqlService.cleanCandidate(historyFlinkSql.getId()); - } - FlinkSql sql = new FlinkSql(appParam); - flinkSqlService.create(sql); - if (changeTypeEnum.isDependencyChanged()) { - application.setBuild(true); - } - } else { - // judge version has changed - boolean versionChanged = !effectiveFlinkSql.getId().equals(appParam.getSqlId()); - if (versionChanged) { - // sql and dependency not changed, but version changed, means that rollback to the version - CandidateType type = CandidateType.HISTORY; - flinkSqlService.setCandidate(type, appParam.getId(), appParam.getSqlId()); - application.setRelease(ReleaseState.NEED_ROLLBACK.get()); - application.setBuild(true); - } - } - } - this.updateById(application); - this.configService.update(appParam, application.isRunning()); - } - - @Override - public void updateRelease(Application application) { - LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); - updateWrapper.eq(Application::getId, application.getId()); - updateWrapper.set(Application::getRelease, application.getRelease()); - updateWrapper.set(Application::getBuild, application.getBuild()); - if (application.getOptionState() != null) { - updateWrapper.set(Application::getOptionState, application.getOptionState()); - } - this.update(updateWrapper); - } - - @Override - public List getByProjectId(Long id) { - return baseMapper.getByProjectId(id); - } - - @Override - public List getByTeamId(Long teamId) { - return baseMapper.getByTeamId(teamId); - } - - @Override - public List getByTeamIdAndExecutionModes( - Long teamId, @Nonnull Collection executionModes) { - return getBaseMapper() - .selectList( - new LambdaQueryWrapper() - 
.eq((SFunction) Application::getTeamId, teamId) - .in( - Application::getExecutionMode, - executionModes.stream() - .map(ExecutionMode::getMode) - .collect(Collectors.toSet()))); - } - - @Override - public boolean checkBuildAndUpdate(Application application) { - boolean build = application.getBuild(); - if (!build) { - LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); - updateWrapper.eq(Application::getId, application.getId()); - if (application.isRunning()) { - updateWrapper.set(Application::getRelease, ReleaseState.NEED_RESTART.get()); - } else { - updateWrapper.set(Application::getRelease, ReleaseState.DONE.get()); - updateWrapper.set(Application::getOptionState, OptionState.NONE.getValue()); - } - this.update(updateWrapper); - - // backup - if (application.isFlinkSqlJob()) { - FlinkSql newFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); - if (!application.isNeedRollback() && newFlinkSql != null) { - backUpService.backup(application, newFlinkSql); - } - } - - // If the current task is not running, or the task has just been added, - // directly set the candidate version to the official version - FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), false); - if (!application.isRunning() || flinkSql == null) { - this.toEffective(application); - } - } - return build; - } - - @Override - public void forcedStop(Application app) { - CompletableFuture startFuture = startFutureMap.remove(app.getId()); - CompletableFuture cancelFuture = cancelFutureMap.remove(app.getId()); - Application application = this.baseMapper.getApp(app); - if (isKubernetesApp(application)) { - KubernetesDeploymentHelper.watchPodTerminatedLog( - application.getK8sNamespace(), application.getJobName(), application.getJobId()); - KubernetesDeploymentHelper.deleteTaskDeployment( - application.getK8sNamespace(), application.getJobName()); - KubernetesDeploymentHelper.deleteTaskConfigMap( - application.getK8sNamespace(), 
application.getJobName()); - } - if (startFuture != null) { - startFuture.cancel(true); - } - if (cancelFuture != null) { - cancelFuture.cancel(true); - } - if (startFuture == null && cancelFuture == null) { - this.updateToStopped(app); - } - } - - @Override - public void clean(Application appParam) { - appParam.setRelease(ReleaseState.DONE.get()); - this.updateRelease(appParam); - } - - @Override - public String readConf(Application appParam) throws IOException { - File file = new File(appParam.getConfig()); - String conf = FileUtils.readFile(file); - return Base64.getEncoder().encodeToString(conf.getBytes()); - } - - @Override - public Application getApp(Application appParam) { - Application application = this.baseMapper.getApp(appParam); - ApplicationConfig config = configService.getEffective(appParam.getId()); - config = config == null ? configService.getLatest(appParam.getId()) : config; - if (config != null) { - config.setToApplication(application); - } - if (application.isFlinkSqlJob()) { - FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), true); - if (flinkSql == null) { - flinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); - flinkSql.setSql(DeflaterUtils.unzipString(flinkSql.getSql())); - } - flinkSql.setToApplication(application); - } else { - if (application.isCICDJob()) { - String path = - this.projectService.getAppConfPath(application.getProjectId(), application.getModule()); - application.setConfPath(path); - } - } - // add flink web url info for k8s-mode - if (isKubernetesApp(application)) { - String restUrl = k8SFlinkTrackMonitor.getRemoteRestUrl(toTrackId(application)); - application.setFlinkRestUrl(restUrl); - - // set duration - long now = System.currentTimeMillis(); - if (application.getTracking() == 1 - && application.getStartTime() != null - && application.getStartTime().getTime() > 0) { - application.setDuration(now - application.getStartTime().getTime()); - } - } - - 
application.setYarnQueueByHotParams(); - - return application; - } - - @Override - public String getMain(Application application) { - File jarFile; - if (application.getProjectId() == null) { - jarFile = new File(application.getJar()); - } else { - Project project = new Project(); - project.setId(application.getProjectId()); - String modulePath = - project.getDistHome().getAbsolutePath().concat("/").concat(application.getModule()); - jarFile = new File(modulePath, application.getJar()); - } - Manifest manifest = Utils.getJarManifest(jarFile); - return manifest.getMainAttributes().getValue("Main-Class"); - } - - @Override - public boolean mapping(Application appParam) { - boolean mapping = this.baseMapper.mapping(appParam); - Application application = getById(appParam.getId()); - if (isKubernetesApp(application)) { - k8SFlinkTrackMonitor.doWatching(toTrackId(application)); - } else { - FlinkHttpWatcher.doWatching(application); - } - return mapping; - } - - @Override - public void cancel(Application appParam) throws Exception { - FlinkHttpWatcher.setOptionState(appParam.getId(), OptionState.CANCELLING); - Application application = getById(appParam.getId()); - application.setState(FlinkAppState.CANCELLING.getValue()); - - ApplicationLog applicationLog = new ApplicationLog(); - applicationLog.setOptionName(Operation.CANCEL.getValue()); - applicationLog.setAppId(application.getId()); - applicationLog.setJobManagerUrl(application.getJobManagerUrl()); - applicationLog.setOptionTime(new Date()); - applicationLog.setYarnAppId(application.getClusterId()); - - if (appParam.getSavePointed()) { - FlinkHttpWatcher.addSavepoint(application.getId()); - application.setOptionState(OptionState.SAVEPOINTING.getValue()); - } else { - application.setOptionState(OptionState.CANCELLING.getValue()); - } - - application.setOptionTime(new Date()); - this.baseMapper.updateById(application); - - Long userId = commonService.getUserId(); - if (!application.getUserId().equals(userId)) { - 
FlinkHttpWatcher.addCanceledApp(application.getId(), userId); - } - - FlinkEnv flinkEnv = flinkEnvService.getById(application.getVersionId()); - - // infer savepoint - String customSavepoint = null; - if (appParam.getSavePointed()) { - customSavepoint = appParam.getSavePoint(); - if (StringUtils.isBlank(customSavepoint)) { - customSavepoint = savePointService.getSavePointPath(appParam); - } - } - - String clusterId = null; - if (ExecutionMode.isKubernetesMode(application.getExecutionMode())) { - clusterId = application.getClusterId(); - } else if (ExecutionMode.isYarnMode(application.getExecutionMode())) { - if (ExecutionMode.YARN_SESSION.equals(application.getExecutionModeEnum())) { - FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); - ApiAlertException.throwIfNull( - cluster, - String.format( - "The yarn session clusterId=%s can't found, maybe the clusterId is wrong or the cluster has been deleted. Please contact the Admin.", - application.getFlinkClusterId())); - clusterId = cluster.getClusterId(); - } else { - clusterId = application.getAppId(); - } - } - - Map properties = new HashMap<>(); - - if (ExecutionMode.isRemoteMode(application.getExecutionModeEnum())) { - FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); - ApiAlertException.throwIfNull( - cluster, - String.format( - "The clusterId=%s cannot be find, maybe the clusterId is wrong or " - + "the cluster has been deleted. 
Please contact the Admin.", - application.getFlinkClusterId())); - URI activeAddress = cluster.getRemoteURI(); - properties.put(RestOptions.ADDRESS.key(), activeAddress.getHost()); - properties.put(RestOptions.PORT.key(), activeAddress.getPort()); - } - - CancelRequest cancelRequest = - new CancelRequest( - flinkEnv.getFlinkVersion(), - ExecutionMode.of(application.getExecutionMode()), - properties, - clusterId, - application.getJobId(), - appParam.getSavePointed(), - appParam.getDrain(), - customSavepoint, - appParam.getNativeFormat(), - application.getK8sNamespace()); - - final Date triggerTime = new Date(); - CompletableFuture cancelFuture = - CompletableFuture.supplyAsync(() -> FlinkClient.cancel(cancelRequest), executorService); - - cancelFutureMap.put(application.getId(), cancelFuture); - - CompletableFutureUtils.runTimeout( - cancelFuture, - 10L, - TimeUnit.MINUTES, - cancelResponse -> { - applicationLog.setSuccess(true); - if (cancelResponse != null && cancelResponse.savePointDir() != null) { - String savePointDir = cancelResponse.savePointDir(); - log.info("savePoint path: {}", savePointDir); - SavePoint savePoint = new SavePoint(); - savePoint.setPath(savePointDir); - savePoint.setAppId(application.getId()); - savePoint.setLatest(true); - savePoint.setType(CheckPointType.SAVEPOINT.get()); - savePoint.setCreateTime(new Date()); - savePoint.setTriggerTime(triggerTime); - savePointService.save(savePoint); - } - if (isKubernetesApp(application)) { - k8SFlinkTrackMonitor.unWatching(toTrackId(application)); - } - }, - e -> { - if (e.getCause() instanceof CancellationException) { - updateToStopped(application); - } else { - log.error("stop flink job fail.", e); - application.setOptionState(OptionState.NONE.getValue()); - application.setState(FlinkAppState.FAILED.getValue()); - updateById(application); - - if (appParam.getSavePointed()) { - savePointService.expire(application.getId()); - } - - // re-tracking flink job on kubernetes and logging exception - if 
(isKubernetesApp(application)) { - TrackId id = toTrackId(application); - k8SFlinkTrackMonitor.unWatching(id); - k8SFlinkTrackMonitor.doWatching(id); - } else { - FlinkHttpWatcher.unWatching(application.getId()); - } - - String exception = Utils.stringifyException(e); - applicationLog.setException(exception); - applicationLog.setSuccess(false); - } - }) - .whenComplete( - (t, e) -> { - cancelFutureMap.remove(application.getId()); - applicationLogService.save(applicationLog); - }); - } - - @Override - public String checkSavepointPath(Application appParam) throws Exception { - String savepointPath = appParam.getSavePoint(); - if (StringUtils.isBlank(savepointPath)) { - savepointPath = savePointService.getSavePointPath(appParam); - } - - if (StringUtils.isNotBlank(savepointPath)) { - final URI uri = URI.create(savepointPath); - final String scheme = uri.getScheme(); - final String pathPart = uri.getPath(); - String error = null; - if (scheme == null) { - error = - "This state.savepoints.dir value " - + savepointPath - + " scheme (hdfs://, file://, etc) of is null. Please specify the file system scheme explicitly in the URI."; - } else if (pathPart == null) { - error = - "This state.savepoints.dir value " - + savepointPath - + " path part to store the checkpoint data in is null. 
Please specify a directory path for the checkpoint data."; - } else if (pathPart.isEmpty() || "/".equals(pathPart)) { - error = - "This state.savepoints.dir value " - + savepointPath - + " Cannot use the root directory for checkpoints."; - } - return error; - } else { - return "When custom savepoint is not set, state.savepoints.dir needs to be set in properties or flink-conf.yaml of application"; - } - } - - @Override - public void persistMetrics(Application appParam) { - this.baseMapper.persistMetrics(appParam); - } - - /** - * Setup task is starting (for webUI "state" display) - * - * @param application - */ - @Override - public void starting(Application application) { - application.setState(FlinkAppState.STARTING.getValue()); - application.setOptionTime(new Date()); - updateById(application); - } - - @Override - @Transactional(rollbackFor = {Exception.class}) - public void start(Application appParam, boolean auto) throws Exception { - final Application application = getById(appParam.getId()); - Utils.notNull(application); - if (!application.isCanBeStart()) { - throw new ApiAlertException("[StreamPark] The application cannot be started repeatedly."); - } - - FlinkEnv flinkEnv = flinkEnvService.getByIdOrDefault(application.getVersionId()); - if (flinkEnv == null) { - throw new ApiAlertException("[StreamPark] can no found flink version"); - } - - // if manually started, clear the restart flag - if (!auto) { - application.setRestartCount(0); - } else { - if (!application.isNeedRestartOnFailed()) { - return; - } - appParam.setSavePointed(true); - application.setRestartCount(application.getRestartCount() + 1); - } - - starting(application); - application.setAllowNonRestored(appParam.getAllowNonRestored()); - - String appConf; - String flinkUserJar = null; - String jobId = new JobID().toHexString(); - ApplicationLog applicationLog = new ApplicationLog(); - applicationLog.setOptionName(Operation.START.getValue()); - applicationLog.setAppId(application.getId()); - 
applicationLog.setOptionTime(new Date()); - - // set the latest to Effective, (it will only become the current effective at this time) - this.toEffective(application); - - ApplicationConfig applicationConfig = configService.getEffective(application.getId()); - ExecutionMode executionMode = ExecutionMode.of(application.getExecutionMode()); - ApiAlertException.throwIfNull( - executionMode, "ExecutionMode can't be null, start application failed."); - if (application.isCustomCodeJob()) { - if (application.isUploadJob()) { - appConf = - String.format( - "json://{\"%s\":\"%s\"}", - ConfigConst.KEY_FLINK_APPLICATION_MAIN_CLASS(), application.getMainClass()); - } else { - switch (application.getApplicationType()) { - case STREAMPARK_FLINK: - ConfigFileType fileType = ConfigFileType.of(applicationConfig.getFormat()); - if (fileType != null && !fileType.equals(ConfigFileType.UNKNOWN)) { - appConf = - String.format("%s://%s", fileType.getTypeName(), applicationConfig.getContent()); - } else { - throw new IllegalArgumentException( - "application' config type error,must be ( yaml| properties| hocon )"); - } - break; - case APACHE_FLINK: - appConf = - String.format( - "json://{\"%s\":\"%s\"}", - ConfigConst.KEY_FLINK_APPLICATION_MAIN_CLASS(), application.getMainClass()); - break; - default: - throw new IllegalArgumentException( - "[StreamPark] ApplicationType must be (StreamPark flink | Apache flink)... 
"); - } - } - - if (ExecutionMode.YARN_APPLICATION.equals(executionMode)) { - switch (application.getApplicationType()) { - case STREAMPARK_FLINK: - flinkUserJar = - String.format( - "%s/%s", application.getAppLib(), application.getModule().concat(".jar")); - break; - case APACHE_FLINK: - flinkUserJar = String.format("%s/%s", application.getAppHome(), application.getJar()); - if (!FsOperator.hdfs().exists(flinkUserJar)) { - Resource resource = - resourceService.findByResourceName(application.getTeamId(), application.getJar()); - if (resource != null && StringUtils.isNotBlank(resource.getFilePath())) { - flinkUserJar = - String.format( - "%s/%s", - application.getAppHome(), new File(resource.getFilePath()).getName()); - } - } - break; - default: - throw new IllegalArgumentException( - "[StreamPark] ApplicationType must be (StreamPark flink | Apache flink)... "); - } - } - } else if (application.isFlinkSqlJob()) { - FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), false); - Utils.notNull(flinkSql); - // 1) dist_userJar - String sqlDistJar = commonService.getSqlClientJar(flinkEnv); - // 2) appConfig - appConf = - applicationConfig == null - ? 
null - : String.format("yaml://%s", applicationConfig.getContent()); - // 3) client - if (ExecutionMode.YARN_APPLICATION.equals(executionMode)) { - String clientPath = Workspace.remote().APP_CLIENT(); - flinkUserJar = String.format("%s/%s", clientPath, sqlDistJar); - } - } else { - throw new UnsupportedOperationException("Unsupported..."); - } - - Map extraParameter = new HashMap<>(0); - if (application.isFlinkSqlJob()) { - FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), true); - // Get the sql of the replaced placeholder - String realSql = variableService.replaceVariable(application.getTeamId(), flinkSql.getSql()); - flinkSql.setSql(DeflaterUtils.zipString(realSql)); - extraParameter.put(ConfigConst.KEY_FLINK_SQL(null), flinkSql.getSql()); - } - - KubernetesSubmitParam kubernetesSubmitParam = - new KubernetesSubmitParam( - application.getClusterId(), - application.getK8sNamespace(), - application.getK8sRestExposedTypeEnum()); - - AppBuildPipeline buildPipeline = appBuildPipeService.getById(application.getId()); - - Utils.notNull(buildPipeline); - - BuildResult buildResult = buildPipeline.getBuildResult(); - if (ExecutionMode.YARN_APPLICATION.equals(executionMode)) { - buildResult = new ShadedBuildResponse(null, flinkUserJar, true); - } - - // Get the args after placeholder replacement - String applicationArgs = - variableService.replaceVariable(application.getTeamId(), application.getArgs()); - - String pyflinkFilePath = ""; - Resource resource = - resourceService.findByResourceName(application.getTeamId(), application.getJar()); - if (resource != null - && StringUtils.isNotBlank(resource.getFilePath()) - && resource.getFilePath().endsWith(ConfigConst.PYTHON_SUFFIX())) { - pyflinkFilePath = resource.getFilePath(); - } - SubmitRequest submitRequest = - new SubmitRequest( - flinkEnv.getFlinkVersion(), - ExecutionMode.of(application.getExecutionMode()), - getProperties(application), - flinkEnv.getFlinkConf(), - 
DevelopmentMode.of(application.getJobType()), - application.getId(), - jobId, - application.getJobName(), - appConf, - application.getApplicationType(), - getSavePointed(appParam), - appParam.getRestoreMode() == null ? null : RestoreMode.of(appParam.getRestoreMode()), - applicationArgs, - pyflinkFilePath, - buildResult, - kubernetesSubmitParam, - extraParameter); - - CompletableFuture future = - CompletableFuture.supplyAsync(() -> FlinkClient.submit(submitRequest), executorService); - - startFutureMap.put(application.getId(), future); - - CompletableFutureUtils.runTimeout( - future, - 2L, - TimeUnit.MINUTES, - submitResponse -> { - if (submitResponse.flinkConfig() != null) { - String jmMemory = - submitResponse.flinkConfig().get(ConfigConst.KEY_FLINK_JM_PROCESS_MEMORY()); - if (jmMemory != null) { - application.setJmMemory(MemorySize.parse(jmMemory).getMebiBytes()); - } - String tmMemory = - submitResponse.flinkConfig().get(ConfigConst.KEY_FLINK_TM_PROCESS_MEMORY()); - if (tmMemory != null) { - application.setTmMemory(MemorySize.parse(tmMemory).getMebiBytes()); - } - } - application.setAppId(submitResponse.clusterId()); - if (StringUtils.isNoneEmpty(submitResponse.jobId())) { - application.setJobId(submitResponse.jobId()); - } - - if (StringUtils.isNoneEmpty(submitResponse.jobManagerUrl())) { - application.setJobManagerUrl(submitResponse.jobManagerUrl()); - applicationLog.setJobManagerUrl(submitResponse.jobManagerUrl()); - } - applicationLog.setYarnAppId(submitResponse.clusterId()); - application.setStartTime(new Date()); - application.setEndTime(null); - if (isKubernetesApp(application)) { - application.setRelease(ReleaseState.DONE.get()); - } - updateById(application); - - // if start completed, will be added task to tracking queue - if (isKubernetesApp(application)) { - k8SFlinkTrackMonitor.doWatching(toTrackId(application)); - } else { - FlinkHttpWatcher.setOptionState(appParam.getId(), OptionState.STARTING); - FlinkHttpWatcher.doWatching(application); - } - - 
applicationLog.setSuccess(true); - // set savepoint to expire - savePointService.expire(application.getId()); - }, - e -> { - if (e.getCause() instanceof CancellationException) { - updateToStopped(application); - } else { - String exception = Utils.stringifyException(e); - applicationLog.setException(exception); - applicationLog.setSuccess(false); - Application app = getById(appParam.getId()); - app.setState(FlinkAppState.FAILED.getValue()); - app.setOptionState(OptionState.NONE.getValue()); - updateById(app); - if (isKubernetesApp(app)) { - k8SFlinkTrackMonitor.unWatching(toTrackId(app)); - } else { - FlinkHttpWatcher.unWatching(appParam.getId()); - } - } - }) - .whenComplete( - (t, e) -> { - if (ExecutionMode.isKubernetesApplicationMode(application.getExecutionMode())) { - String domainName = settingService.getIngressModeDefault(); - if (StringUtils.isNotBlank(domainName)) { - try { - IngressController.configureIngress( - domainName, application.getClusterId(), application.getK8sNamespace()); - } catch (KubernetesClientException kubernetesClientException) { - log.info( - "Failed to create ingress, stack info:{}", - kubernetesClientException.getMessage()); - applicationLog.setException(e.getMessage()); - applicationLog.setSuccess(false); - applicationLogService.save(applicationLog); - application.setState(FlinkAppState.FAILED.getValue()); - application.setOptionState(OptionState.NONE.getValue()); - updateById(application); - return; - } - } - } - - applicationLogService.save(applicationLog); - startFutureMap.remove(application.getId()); - }); - } - - private Map getProperties(Application application) { - Map properties = new HashMap<>(application.getOptionMap()); - if (ExecutionMode.isRemoteMode(application.getExecutionModeEnum())) { - FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); - ApiAlertException.throwIfNull( - cluster, - String.format( - "The clusterId=%s can't be find, maybe the clusterId is wrong or " - + "the cluster 
has been deleted. Please contact the Admin.", - application.getFlinkClusterId())); - URI activeAddress = cluster.getRemoteURI(); - properties.put(RestOptions.ADDRESS.key(), activeAddress.getHost()); - properties.put(RestOptions.PORT.key(), activeAddress.getPort()); - } else if (ExecutionMode.isYarnMode(application.getExecutionModeEnum())) { - if (ExecutionMode.YARN_SESSION.equals(application.getExecutionModeEnum())) { - FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); - ApiAlertException.throwIfNull( - cluster, - String.format( - "The yarn session clusterId=%s cannot be find, maybe the clusterId is wrong or " - + "the cluster has been deleted. Please contact the Admin.", - application.getFlinkClusterId())); - properties.put(ConfigConst.KEY_YARN_APP_ID(), cluster.getClusterId()); - } else { - String yarnQueue = - (String) application.getHotParamsMap().get(ConfigConst.KEY_YARN_APP_QUEUE()); - String yarnLabelExpr = - (String) application.getHotParamsMap().get(ConfigConst.KEY_YARN_APP_NODE_LABEL()); - Optional.ofNullable(yarnQueue) - .ifPresent(yq -> properties.put(ConfigConst.KEY_YARN_APP_QUEUE(), yq)); - Optional.ofNullable(yarnLabelExpr) - .ifPresent(yLabel -> properties.put(ConfigConst.KEY_YARN_APP_NODE_LABEL(), yLabel)); - } - } else if (ExecutionMode.isKubernetesMode(application.getExecutionModeEnum())) { - properties.put(ConfigConst.KEY_K8S_IMAGE_PULL_POLICY(), "Always"); - } - - if (ExecutionMode.isKubernetesApplicationMode(application.getExecutionMode())) { - try { - HadoopUtils.yarnClient(); - properties.put(JobManagerOptions.ARCHIVE_DIR.key(), Workspace.ARCHIVES_FILE_PATH()); - } catch (Exception e) { - // skip - } - } - - if (application.getAllowNonRestored()) { - properties.put(SavepointConfigOptions.SAVEPOINT_IGNORE_UNCLAIMED_STATE.key(), true); - } - - Map dynamicProperties = - PropertiesUtils.extractDynamicPropertiesAsJava(application.getDynamicProperties()); - properties.putAll(dynamicProperties); - ResolveOrder 
resolveOrder = ResolveOrder.of(application.getResolveOrder()); - if (resolveOrder != null) { - properties.put(CoreOptions.CLASSLOADER_RESOLVE_ORDER.key(), resolveOrder.getName()); - } - - return properties; - } - - private void updateToStopped(Application app) { - Application application = getById(app); - application.setOptionState(OptionState.NONE.getValue()); - application.setState(FlinkAppState.CANCELED.getValue()); - application.setOptionTime(new Date()); - updateById(application); - savePointService.expire(application.getId()); - // re-tracking flink job on kubernetes and logging exception - if (isKubernetesApp(application)) { - TrackId id = toTrackId(application); - k8SFlinkTrackMonitor.unWatching(id); - k8SFlinkTrackMonitor.doWatching(id); - } else { - FlinkHttpWatcher.unWatching(application.getId()); - } - } - - private Boolean checkJobName(String jobName) { - if (!StringUtils.isEmpty(jobName.trim())) { - return JOB_NAME_PATTERN.matcher(jobName).matches() - && SINGLE_SPACE_PATTERN.matcher(jobName).matches(); - } - return false; - } - - private String getSavePointed(Application appParam) { - if (appParam.getSavePointed()) { - if (appParam.getSavePoint() == null) { - SavePoint savePoint = savePointService.getLatest(appParam.getId()); - if (savePoint != null) { - return savePoint.getPath(); - } - } else { - return appParam.getSavePoint(); - } - } - return null; - } - - /** - * Check queue label validation when create the application if needed. - * - * @param appParam the app to create. - * @return true if validate it successfully, false else. - */ - @VisibleForTesting - public boolean validateQueueIfNeeded(Application appParam) { - yarnQueueService.checkQueueLabel(appParam.getExecutionModeEnum(), appParam.getYarnQueue()); - if (!isYarnNotDefaultQueue(appParam)) { - return true; - } - return yarnQueueService.existByTeamIdQueueLabel(appParam.getTeamId(), appParam.getYarnQueue()); - } - - /** - * Check queue label validation when update the application if needed. 
- * - * @param oldApp the old app to update. - * @param newApp the new app payload. - * @return true if validate it successfully, false else. - */ - @VisibleForTesting - public boolean validateQueueIfNeeded(Application oldApp, Application newApp) { - yarnQueueService.checkQueueLabel(newApp.getExecutionModeEnum(), newApp.getYarnQueue()); - if (!isYarnNotDefaultQueue(newApp)) { - return true; - } - - oldApp.setYarnQueueByHotParams(); - if (ExecutionMode.isYarnPerJobOrAppMode(newApp.getExecutionModeEnum()) - && StringUtils.equals(oldApp.getYarnQueue(), newApp.getYarnQueue())) { - return true; - } - return yarnQueueService.existByTeamIdQueueLabel(newApp.getTeamId(), newApp.getYarnQueue()); - } - - /** - * Judge the execution mode whether is the Yarn PerJob or Application mode with not default or - * empty queue label. - * - * @param application application entity. - * @return If the executionMode is (Yarn PerJob or application mode) and the queue label is not - * (empty or default), return true, false else. 
- */ - private boolean isYarnNotDefaultQueue(Application application) { - return ExecutionMode.isYarnPerJobOrAppMode(application.getExecutionModeEnum()) - && !yarnQueueService.isDefaultQueue(application.getYarnQueue()); - } -} diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java index 41015e5442..4fc211d496 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java @@ -22,13 +22,13 @@ import org.apache.streampark.console.core.entity.ExternalLink; import org.apache.streampark.console.core.enums.PlaceholderType; import org.apache.streampark.console.core.mapper.ExternalLinkMapper; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.ExternalLinkService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; @@ -40,11 +40,12 @@ @Slf4j @Service +@RequiredArgsConstructor @Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class) public class ExternalLinkServiceImpl extends ServiceImpl implements ExternalLinkService { - 
@Autowired private ApplicationService applicationService; + private final ApplicationManageService applicationManageService; @Override public void create(ExternalLink externalLink) { @@ -73,7 +74,7 @@ public void delete(Long linkId) { @Override public List render(Long appId) { - Application app = applicationService.getById(appId); + Application app = applicationManageService.getById(appId); Utils.notNull(app, "Application doesn't exist"); List externalLink = this.list(); if (externalLink != null && externalLink.size() > 0) { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java index 636caac80d..3bbe93d946 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java @@ -26,11 +26,11 @@ import org.apache.streampark.console.core.bean.ResponseResult; import org.apache.streampark.console.core.entity.FlinkCluster; import org.apache.streampark.console.core.mapper.FlinkClusterMapper; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.CommonService; import org.apache.streampark.console.core.service.FlinkClusterService; import org.apache.streampark.console.core.service.FlinkEnvService; import org.apache.streampark.console.core.service.YarnQueueService; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; import org.apache.streampark.console.core.task.FlinkClusterWatcher; import org.apache.streampark.flink.client.FlinkClient; import org.apache.streampark.flink.client.bean.DeployRequest; @@ -90,7 +90,7 @@ public 
class FlinkClusterServiceImpl extends ServiceImpl implements FlinkEnvService { @Autowired private FlinkClusterService flinkClusterService; - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationInfoService applicationInfoService; /** * two places will be checked:
@@ -164,7 +164,7 @@ private void checkOrElseAlert(FlinkEnv flinkEnv) { // 3.check if it is being used by any application ApiAlertException.throwIfTrue( - applicationService.existsByFlinkEnvId(flinkEnv.getId()), + applicationInfoService.existsByFlinkEnvId(flinkEnv.getId()), "The flink home is still in use by some application, please check."); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ProjectServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ProjectServiceImpl.java index d48228706e..2904ed1774 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ProjectServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ProjectServiceImpl.java @@ -36,8 +36,8 @@ import org.apache.streampark.console.core.enums.GitCredential; import org.apache.streampark.console.core.enums.ReleaseState; import org.apache.streampark.console.core.mapper.ProjectMapper; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.ProjectService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.core.task.FlinkHttpWatcher; import org.apache.streampark.console.core.task.ProjectBuildTask; @@ -80,7 +80,7 @@ public class ProjectServiceImpl extends ServiceImpl implements ProjectService { - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; @Autowired private FlinkHttpWatcher flinkHttpWatcher; @@ -149,7 +149,7 @@ public boolean update(Project projectParam) { log.info( "update deploy by project: {}, appName:{}", project.getName(), app.getJobName()); app.setRelease(ReleaseState.NEED_CHECK.get()); - 
applicationService.updateRelease(app); + applicationManageService.updateRelease(app); }); } } @@ -164,7 +164,7 @@ public boolean delete(Long id) { Utils.notNull(project); LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper().eq(Application::getProjectId, id); - long count = applicationService.count(queryWrapper); + long count = applicationManageService.count(queryWrapper); if (count > 0) { return false; } @@ -211,7 +211,7 @@ public void build(Long id) throws Exception { }, fileLogger -> { List applications = - this.applicationService.getByProjectId(project.getId()); + this.applicationManageService.getByProjectId(project.getId()); applications.forEach( (app) -> { fileLogger.info( @@ -220,7 +220,7 @@ public void build(Long id) throws Exception { app.getJobName()); app.setRelease(ReleaseState.NEED_RELEASE.get()); app.setBuild(true); - this.applicationService.updateRelease(app); + this.applicationManageService.updateRelease(app); }); flinkHttpWatcher.init(); }); @@ -273,7 +273,7 @@ public String getAppConfPath(Long id, String module) { @Override public List getApplications(Project project) { - return this.applicationService.getByProjectId(project.getId()); + return this.applicationManageService.getByProjectId(project.getId()); } @Override diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java index 40314bca53..24ed747c89 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java @@ -36,10 +36,10 @@ import org.apache.streampark.console.core.entity.Resource; import org.apache.streampark.console.core.enums.ResourceType; 
import org.apache.streampark.console.core.mapper.ResourceMapper; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.CommonService; import org.apache.streampark.console.core.service.FlinkSqlService; import org.apache.streampark.console.core.service.ResourceService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.flink.packer.maven.Artifact; import org.apache.streampark.flink.packer.maven.MavenTool; @@ -90,7 +90,7 @@ public class ResourceServiceImpl extends ServiceImpl implements ResourceService { - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; @Autowired private CommonService commonService; @Autowired private FlinkSqlService flinkSqlService; @@ -491,7 +491,7 @@ private boolean isDependByApplications(Resource resource) { private List getResourceApplicationsById(Resource resource) { List dependApplications = new ArrayList<>(); - List applications = applicationService.getByTeamId(resource.getTeamId()); + List applications = applicationManageService.getByTeamId(resource.getTeamId()); Map applicationMap = applications.stream() .collect(Collectors.toMap(Application::getId, application -> application)); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java index 4622fbe414..20f7c31f45 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java @@ -39,10 +39,10 @@ import 
org.apache.streampark.console.core.mapper.SavePointMapper; import org.apache.streampark.console.core.service.ApplicationConfigService; import org.apache.streampark.console.core.service.ApplicationLogService; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.FlinkClusterService; import org.apache.streampark.console.core.service.FlinkEnvService; import org.apache.streampark.console.core.service.SavePointService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.core.task.FlinkHttpWatcher; import org.apache.streampark.flink.client.FlinkClient; import org.apache.streampark.flink.client.bean.SavepointResponse; @@ -93,7 +93,7 @@ public class SavePointServiceImpl extends ServiceImpl private static final String PLACEHOLDER_END = "}"; - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; @Autowired private FlinkSqlService flinkSqlService; @@ -136,7 +136,7 @@ public void updateVariable(Variable variable) { // set Application's field release to NEED_RESTART List applications = getDependApplicationsByCode(variable); if (CollectionUtils.isNotEmpty(applications)) { - applicationService.update( + applicationManageService.update( new UpdateWrapper() .lambda() .in( @@ -216,7 +216,7 @@ private boolean isDependByApplications(Variable variable) { private List getDependApplicationsByCode(Variable variable) { List dependApplications = new ArrayList<>(); - List applications = applicationService.getByTeamId(variable.getTeamId()); + List applications = applicationManageService.getByTeamId(variable.getTeamId()); Map applicationMap = applications.stream() .collect(Collectors.toMap(Application::getId, application -> application)); diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java index 0b7421353b..7961f5e063 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java @@ -26,9 +26,9 @@ import org.apache.streampark.console.core.entity.FlinkCluster; import org.apache.streampark.console.core.entity.YarnQueue; import org.apache.streampark.console.core.mapper.YarnQueueMapper; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.FlinkClusterService; import org.apache.streampark.console.core.service.YarnQueueService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -67,7 +67,7 @@ public class YarnQueueServiceImpl extends ServiceImpl appsReferenceQueueLabel = - applicationService + applicationManageService .getByTeamIdAndExecutionModes( teamId, Sets.newHashSet(ExecutionMode.YARN_APPLICATION, ExecutionMode.YARN_PER_JOB)) .stream() diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/CheckpointProcessor.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/CheckpointProcessor.java index 2c17f51f2b..eba7041b2e 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/CheckpointProcessor.java +++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/CheckpointProcessor.java @@ -22,9 +22,9 @@ import org.apache.streampark.console.core.enums.CheckPointStatus; import org.apache.streampark.console.core.enums.FailoverStrategy; import org.apache.streampark.console.core.metrics.flink.CheckPoints; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.SavePointService; import org.apache.streampark.console.core.service.alert.AlertService; +import org.apache.streampark.console.core.service.application.ApplicationActionService; import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Caffeine; @@ -63,7 +63,7 @@ public class CheckpointProcessor { private final Map checkPointFailedCache = new ConcurrentHashMap<>(0); - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationActionService applicationActionService; @Autowired private AlertService alertService; @@ -114,7 +114,7 @@ private void process(Application application, @Nonnull CheckPoints.CheckPoint ch break; case RESTART: try { - applicationService.restart(application); + applicationActionService.restart(application); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkClusterWatcher.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkClusterWatcher.java index 090e75e75b..f144248771 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkClusterWatcher.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkClusterWatcher.java @@ -29,9 +29,9 @@ import org.apache.streampark.console.core.entity.FlinkCluster; import 
org.apache.streampark.console.core.metrics.flink.Overview; import org.apache.streampark.console.core.metrics.yarn.YarnAppInfo; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.FlinkClusterService; import org.apache.streampark.console.core.service.alert.AlertService; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.yarn.api.records.YarnApplicationState; @@ -66,7 +66,7 @@ public class FlinkClusterWatcher { @Autowired private AlertService alertService; - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationInfoService applicationInfoService; private Long lastWatchTime = 0L; @@ -132,9 +132,9 @@ private void start() { private void alert(FlinkCluster cluster, ClusterState state) { if (cluster.getAlertId() != null) { - cluster.setAllJobs(applicationService.countByClusterId(cluster.getId())); + cluster.setAllJobs(applicationInfoService.countByClusterId(cluster.getId())); cluster.setAffectedJobs( - applicationService.countAffectedByClusterId( + applicationInfoService.countAffectedByClusterId( cluster.getId(), InternalConfigHolder.get(CommonConfig.SPRING_PROFILES_ACTIVE()))); cluster.setClusterState(state.getValue()); cluster.setEndTime(new Date()); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkHttpWatcher.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkHttpWatcher.java index ce271da594..ec15c3bda2 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkHttpWatcher.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkHttpWatcher.java @@ -32,10 +32,12 @@ import 
org.apache.streampark.console.core.metrics.flink.JobsOverview; import org.apache.streampark.console.core.metrics.flink.Overview; import org.apache.streampark.console.core.metrics.yarn.YarnAppInfo; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.FlinkClusterService; import org.apache.streampark.console.core.service.SavePointService; import org.apache.streampark.console.core.service.alert.AlertService; +import org.apache.streampark.console.core.service.application.ApplicationActionService; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.commons.lang3.StringUtils; import org.apache.flink.annotation.VisibleForTesting; @@ -74,7 +76,9 @@ @Component public class FlinkHttpWatcher { - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; + @Autowired private ApplicationActionService applicationActionService; + @Autowired private ApplicationInfoService applicationInfoService; @Autowired private AlertService alertService; @@ -162,7 +166,7 @@ public class FlinkHttpWatcher { public void init() { WATCHING_APPS.clear(); List applications = - applicationService.list( + applicationManageService.list( new LambdaQueryWrapper() .eq(Application::getTracking, 1) .notIn(Application::getExecutionMode, ExecutionMode.getKubernetesMode())); @@ -177,7 +181,7 @@ public void init() { public void doStop() { log.info( "FlinkHttpWatcher StreamPark Console will be shutdown,persistent application to database."); - WATCHING_APPS.forEach((k, v) -> applicationService.persistMetrics(v)); + WATCHING_APPS.forEach((k, v) -> applicationInfoService.persistMetrics(v)); } /** @@ -251,7 +255,7 @@ private void watch(Long id, Application application) { doAlert(application, FlinkAppState.of(application.getState())); if 
(appState.equals(FlinkAppState.FAILED)) { try { - applicationService.start(application, true); + applicationActionService.start(application, true); } catch (Exception e) { log.error(e.getMessage(), e); } @@ -389,7 +393,7 @@ private void handleRunningState( new LambdaUpdateWrapper() .eq(Application::getId, application.getId()) .set(Application::getRelease, ReleaseState.DONE.get()); - applicationService.update(updateWrapper); + applicationManageService.update(updateWrapper); break; default: break; @@ -423,7 +427,7 @@ private void doPersistMetrics(Application application, boolean stopWatch) { } else { WATCHING_APPS.put(application.getId(), application); } - applicationService.persistMetrics(application); + applicationInfoService.persistMetrics(application); } /** @@ -453,7 +457,7 @@ private void handleNotRunState( currentState.name()); cleanSavepoint(application); application.setState(currentState.getValue()); - if (StopFrom.NONE.equals(stopFrom) || applicationService.checkAlter(application)) { + if (StopFrom.NONE.equals(stopFrom) || applicationInfoService.checkAlter(application)) { if (StopFrom.NONE.equals(stopFrom)) { log.info( "FlinkHttpWatcher getFromFlinkRestApi, job cancel is not form StreamPark,savePoint expired!"); @@ -472,7 +476,7 @@ private void handleNotRunState( application.setState(FlinkAppState.FAILED.getValue()); doPersistMetrics(application, true); doAlert(application, FlinkAppState.FAILED); - applicationService.start(application, true); + applicationActionService.start(application, true); break; case RESTARTING: log.info( @@ -547,11 +551,11 @@ and the status is not obtained this time (flink rest server is closed), if (flinkAppState.equals(FlinkAppState.FAILED) || flinkAppState.equals(FlinkAppState.LOST) || (flinkAppState.equals(FlinkAppState.CANCELED) && StopFrom.NONE.equals(stopFrom)) - || applicationService.checkAlter(application)) { + || applicationInfoService.checkAlter(application)) { doAlert(application, flinkAppState); 
stopCanceledJob(application.getId()); if (flinkAppState.equals(FlinkAppState.FAILED)) { - applicationService.start(application, true); + applicationActionService.start(application, true); } } } catch (Exception e) { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sChangeEventListener.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sChangeEventListener.java index a9e9962f98..7f7972632d 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sChangeEventListener.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sChangeEventListener.java @@ -23,8 +23,9 @@ import org.apache.streampark.console.core.enums.FlinkAppState; import org.apache.streampark.console.core.enums.OptionState; import org.apache.streampark.console.core.metrics.flink.CheckPoints; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.alert.AlertService; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.flink.kubernetes.enums.FlinkJobState; import org.apache.streampark.flink.kubernetes.enums.FlinkK8sExecuteMode; import org.apache.streampark.flink.kubernetes.event.FlinkClusterMetricChangeEvent; @@ -56,7 +57,8 @@ @Component public class FlinkK8sChangeEventListener { - @Lazy @Autowired private ApplicationService applicationService; + @Lazy @Autowired private ApplicationManageService applicationManageService; + @Autowired private ApplicationInfoService applicationInfoService; @Lazy @Autowired private AlertService alertService; @@ -83,13 +85,13 @@ public void subscribeJobStatusChange(FlinkJobStatusChangeEvent event) { 
JobStatusCV jobStatus = event.jobStatus(); TrackId trackId = event.trackId(); // get pre application record - Application app = applicationService.getById(trackId.appId()); + Application app = applicationManageService.getById(trackId.appId()); if (app == null) { return; } // update application record setByJobStatusCV(app, jobStatus); - applicationService.persistMetrics(app); + applicationInfoService.persistMetrics(app); // email alerts when necessary FlinkAppState state = FlinkAppState.of(app.getState()); @@ -116,7 +118,7 @@ public void subscribeMetricsChange(FlinkClusterMetricChangeEvent event) { return; } - Application app = applicationService.getById(trackId.appId()); + Application app = applicationManageService.getById(trackId.appId()); if (app == null) { return; } @@ -128,7 +130,7 @@ public void subscribeMetricsChange(FlinkClusterMetricChangeEvent event) { app.setTotalSlot(metrics.totalSlot()); app.setAvailableSlot(metrics.availableSlot()); - applicationService.persistMetrics(app); + applicationInfoService.persistMetrics(app); } @SuppressWarnings("UnstableApiUsage") @@ -148,7 +150,8 @@ public void subscribeCheckpointChange(FlinkJobCheckpointChangeEvent event) { CheckPoints checkPoint = new CheckPoints(); checkPoint.setLatest(latest); - checkpointProcessor.process(applicationService.getById(event.trackId().appId()), checkPoint); + checkpointProcessor.process( + applicationManageService.getById(event.trackId().appId()), checkPoint); } private void setByJobStatusCV(Application app, JobStatusCV jobStatus) { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sWatcherWrapper.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sWatcherWrapper.java index af3b816d56..c385764ef1 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sWatcherWrapper.java +++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sWatcherWrapper.java @@ -19,7 +19,7 @@ import org.apache.streampark.common.enums.ExecutionMode; import org.apache.streampark.console.core.entity.Application; -import org.apache.streampark.console.core.service.ApplicationService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.flink.kubernetes.FlinkK8sWatcher; import org.apache.streampark.flink.kubernetes.FlinkK8sWatcherFactory; import org.apache.streampark.flink.kubernetes.FlinkTrackConfig; @@ -60,7 +60,7 @@ public class FlinkK8sWatcherWrapper { @Lazy @Autowired private FlinkK8sChangeEventListener flinkK8sChangeEventListener; - @Lazy @Autowired private ApplicationService applicationService; + @Lazy @Autowired private ApplicationManageService applicationManageService; /** Register FlinkTrackMonitor bean for tracking flink job on kubernetes. */ @Bean(destroyMethod = "close") @@ -97,7 +97,7 @@ private List getK8sWatchingApps() { .eq(Application::getTracking, 1) .in(Application::getExecutionMode, ExecutionMode.getKubernetesMode()); - List k8sApplication = applicationService.list(queryWrapper); + List k8sApplication = applicationManageService.list(queryWrapper); if (CollectionUtils.isEmpty(k8sApplication)) { return Lists.newArrayList(); } @@ -107,7 +107,7 @@ private List getK8sWatchingApps() { .filter(app -> !Bridge.toTrackId(app).isLegal()) .collect(Collectors.toList()); if (CollectionUtils.isNotEmpty(correctApps)) { - applicationService.saveOrUpdateBatch(correctApps); + applicationManageService.saveOrUpdateBatch(correctApps); } // filter out the application that should be tracking return k8sApplication.stream() diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java index e4899831c6..9d49c5c1a0 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java @@ -20,10 +20,10 @@ import org.apache.streampark.console.base.domain.RestRequest; import org.apache.streampark.console.base.exception.ApiAlertException; import org.apache.streampark.console.core.enums.UserType; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.CommonService; import org.apache.streampark.console.core.service.ProjectService; import org.apache.streampark.console.core.service.VariableService; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; import org.apache.streampark.console.system.entity.Team; import org.apache.streampark.console.system.entity.User; import org.apache.streampark.console.system.mapper.TeamMapper; @@ -52,7 +52,7 @@ public class TeamServiceImpl extends ServiceImpl implements Te @Autowired private UserService userService; - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationInfoService applicationInfoService; @Autowired private ProjectService projectService; @@ -99,7 +99,7 @@ public void deleteTeam(Long teamId) { if (team == null) { throw new ApiAlertException(String.format("The team[Id=%s] doesn't exists.", teamId)); } - if (applicationService.existsByTeamId(teamId)) { + if (applicationInfoService.existsByTeamId(teamId)) { throw new ApiAlertException( String.format( "Please delete the applications under the team[name=%s] first!", team.getTeamName())); diff --git 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java index 6622a959f6..a817f0c615 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java @@ -27,8 +27,9 @@ import org.apache.streampark.console.base.util.ShaHashUtils; import org.apache.streampark.console.base.util.WebUtils; import org.apache.streampark.console.core.enums.LoginType; -import org.apache.streampark.console.core.service.ApplicationService; import org.apache.streampark.console.core.service.ResourceService; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.system.authentication.JWTToken; import org.apache.streampark.console.system.authentication.JWTUtil; import org.apache.streampark.console.system.entity.Team; @@ -72,7 +73,9 @@ public class UserServiceImpl extends ServiceImpl implements Us @Autowired private MenuService menuService; - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; + + @Autowired private ApplicationInfoService applicationInfoService; @Autowired private ResourceService resourceService; @@ -141,7 +144,7 @@ private boolean needTransferResource(User existsUser, User user) { || User.STATUS_VALID.equals(user.getStatus())) { return false; } - return applicationService.existsByUserId(user.getUserId()) + return applicationInfoService.existsByUserId(user.getUserId()) || resourceService.existsByUserId(user.getUserId()); } @@ 
-270,7 +273,7 @@ public Map generateFrontendUserInfo(User user, Long teamId, JWTT @Override @Transactional(rollbackFor = Exception.class) public void transferResource(Long userId, Long targetUserId) { - applicationService.changeOwnership(userId, targetUserId); + applicationManageService.changeOwnership(userId, targetUserId); resourceService.changeOwnership(userId, targetUserId); } diff --git a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationServiceITest.java b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceITest.java similarity index 87% rename from streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationServiceITest.java rename to streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceITest.java index 950b2ce2a4..d32cd009de 100644 --- a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationServiceITest.java +++ b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceITest.java @@ -26,6 +26,8 @@ import org.apache.streampark.console.core.entity.FlinkSql; import org.apache.streampark.console.core.enums.FlinkAppState; import org.apache.streampark.console.core.enums.ReleaseState; +import org.apache.streampark.console.core.service.application.ApplicationActionService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.core.service.impl.FlinkClusterServiceImpl; import org.apache.streampark.console.core.task.FlinkHttpWatcher; import org.apache.streampark.testcontainer.flink.FlinkStandaloneSessionCluster; @@ -49,12 +51,13 @@ * Integration test for {@link * 
org.apache.streampark.console.core.service.impl.ApplicationServiceImpl}. */ -class ApplicationServiceITest extends SpringIntegrationTestBase { +class ApplicationManageServiceITest extends SpringIntegrationTestBase { static FlinkStandaloneSessionCluster cluster = FlinkStandaloneSessionCluster.builder().slotsNumPerTm(4).slf4jLogConsumer(null).build(); - @Autowired private ApplicationService appService; + @Autowired private ApplicationManageService applicationManageService; + @Autowired private ApplicationActionService applicationActionService; @Autowired private FlinkClusterService clusterService; @@ -78,7 +81,7 @@ static void teardown() { @AfterEach void clear() { - appService.getBaseMapper().delete(new QueryWrapper<>()); + applicationManageService.getBaseMapper().delete(new QueryWrapper<>()); clusterService.getBaseMapper().delete(new QueryWrapper<>()); envService.getBaseMapper().delete(new QueryWrapper<>()); appBuildPipeService.getBaseMapper().delete(new QueryWrapper<>()); @@ -104,7 +107,7 @@ void testStartAppOnRemoteSessionMode() throws Exception { Application appParam = new Application(); appParam.setId(100000L); appParam.setTeamId(100000L); - Application application = appService.getApp(appParam); + Application application = applicationManageService.getApp(appParam); application.setFlinkClusterId(1L); application.setSqlId(100000L); application.setVersionId(1L); @@ -118,14 +121,14 @@ void testStartAppOnRemoteSessionMode() throws Exception { sqlService.getBaseMapper().updateById(flinkSql); // Continue operations link. 
- appService.update(application); + applicationManageService.update(application); appBuildPipeService.buildApplication(100000L, false); CompletableFuture buildCompletableFuture = CompletableFuture.supplyAsync( () -> { while (true) { - Application app = appService.getById(100000L); + Application app = applicationManageService.getById(100000L); if (app != null && app.getReleaseState() == ReleaseState.DONE) { break; } @@ -134,7 +137,7 @@ void testStartAppOnRemoteSessionMode() throws Exception { }); buildCompletableFuture.get(); - appService.start(appService.getById(100000L), false); + applicationActionService.start(applicationManageService.getById(100000L), false); CompletableFuture completableFuture = CompletableFuture.supplyAsync( () -> { diff --git a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationServiceTest.java b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceTest.java similarity index 86% rename from streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationServiceTest.java rename to streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceTest.java index f6ce39a54a..3f67ae7ddc 100644 --- a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationServiceTest.java +++ b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceTest.java @@ -21,7 +21,9 @@ import org.apache.streampark.console.SpringUnitTestBase; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.YarnQueue; -import org.apache.streampark.console.core.service.impl.ApplicationServiceImpl; +import 
org.apache.streampark.console.core.service.application.ApplicationActionService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; +import org.apache.streampark.console.core.service.application.impl.ApplicationManageServiceImpl; import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import org.junit.jupiter.api.AfterEach; @@ -35,15 +37,15 @@ import static org.assertj.core.api.Assertions.assertThat; /** org.apache.streampark.console.core.service.ApplicationServiceUnitTest. */ -class ApplicationServiceTest extends SpringUnitTestBase { - - @Autowired private ApplicationService applicationService; +class ApplicationManageServiceTest extends SpringUnitTestBase { + @Autowired private ApplicationManageService applicationManageService; + @Autowired private ApplicationActionService applicationActionService; @Autowired private YarnQueueService yarnQueueService; @AfterEach void cleanTestRecordsInDatabase() { - applicationService.remove(new QueryWrapper<>()); + applicationManageService.remove(new QueryWrapper<>()); yarnQueueService.remove(new QueryWrapper<>()); } @@ -81,7 +83,7 @@ void testRevoke() { app.setDrain(false); app.setAllowNonRestored(false); - Assertions.assertDoesNotThrow(() -> applicationService.updateRelease(app)); + Assertions.assertDoesNotThrow(() -> applicationManageService.updateRelease(app)); } @Test @@ -93,12 +95,13 @@ void testStart() throws Exception { application.setSavePointed(false); application.setAllowNonRestored(false); - applicationService.start(application, false); + applicationActionService.start(application, false); } @Test void testCheckQueueValidationIfNeeded() { - ApplicationServiceImpl applicationServiceImpl = (ApplicationServiceImpl) applicationService; + ApplicationManageServiceImpl applicationServiceImpl = + (ApplicationManageServiceImpl) applicationManageService; // ------- Test it for the create operation. 
------- final String queueLabel = "queue1@label1"; diff --git a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/SavePointServiceTest.java b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/SavePointServiceTest.java index 93968dd6f1..5d37b6200a 100644 --- a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/SavePointServiceTest.java +++ b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/SavePointServiceTest.java @@ -28,6 +28,7 @@ import org.apache.streampark.console.core.entity.FlinkEnv; import org.apache.streampark.console.core.enums.ConfigFileType; import org.apache.streampark.console.core.enums.EffectiveType; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.core.service.impl.SavePointServiceImpl; import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; @@ -56,7 +57,7 @@ class SavePointServiceTest extends SpringUnitTestBase { @Autowired private FlinkEnvService flinkEnvService; @Autowired private FlinkClusterService flinkClusterService; - @Autowired ApplicationService applicationService; + @Autowired ApplicationManageService applicationManageService; @AfterEach void cleanTestRecordsInDatabase() { @@ -65,7 +66,7 @@ void cleanTestRecordsInDatabase() { effectiveService.remove(new QueryWrapper<>()); flinkEnvService.remove(new QueryWrapper<>()); flinkClusterService.remove(new QueryWrapper<>()); - applicationService.remove(new QueryWrapper<>()); + applicationManageService.remove(new QueryWrapper<>()); } /** @@ -149,7 +150,7 @@ void testGetSavepointFromDeployLayer() throws JsonProcessingException { application.setTeamId(teamId); application.setVersionId(idOfFlinkEnv); application.setExecutionMode(ExecutionMode.YARN_APPLICATION.getMode()); - 
applicationService.save(application); + applicationManageService.save(application); FlinkEnv flinkEnv = new FlinkEnv(); flinkEnv.setFlinkName("mockFlinkName"); diff --git a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/UserServiceTest.java b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/UserServiceTest.java index 1c05924639..cd742c58e5 100644 --- a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/UserServiceTest.java +++ b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/UserServiceTest.java @@ -24,6 +24,8 @@ import org.apache.streampark.console.core.enums.EngineType; import org.apache.streampark.console.core.enums.ResourceType; import org.apache.streampark.console.core.enums.UserType; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.system.entity.User; import org.apache.streampark.console.system.service.UserService; @@ -39,7 +41,8 @@ @Transactional class UserServiceTest extends SpringUnitTestBase { @Autowired private UserService userService; - @Autowired private ApplicationService applicationService; + @Autowired private ApplicationManageService applicationManageService; + @Autowired private ApplicationInfoService applicationInfoService; @Autowired private ResourceService resourceService; @Test @@ -107,7 +110,7 @@ void testTransferResource() { Application app = new Application(); app.setUserId(user.getUserId()); app.setTeamId(1L); - applicationService.save(app); + applicationManageService.save(app); User targetUser = new User(); targetUser.setUsername("test0"); @@ -117,15 +120,15 @@ void testTransferResource() { targetUser.setStatus(User.STATUS_VALID); userService.save(targetUser); - 
Assertions.assertTrue(applicationService.existsByUserId(user.getUserId())); + Assertions.assertTrue(applicationInfoService.existsByUserId(user.getUserId())); Assertions.assertTrue(resourceService.existsByUserId(user.getUserId())); userService.transferResource(user.getUserId(), targetUser.getUserId()); - Assertions.assertFalse(applicationService.existsByUserId(user.getUserId())); + Assertions.assertFalse(applicationInfoService.existsByUserId(user.getUserId())); Assertions.assertFalse(resourceService.existsByUserId(user.getUserId())); - Assertions.assertTrue(applicationService.existsByUserId(targetUser.getUserId())); + Assertions.assertTrue(applicationInfoService.existsByUserId(targetUser.getUserId())); Assertions.assertTrue(resourceService.existsByUserId(targetUser.getUserId())); } } diff --git a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/YarnQueueServiceTest.java b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/YarnQueueServiceTest.java index 9a91d7b68a..85c5f4432f 100644 --- a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/YarnQueueServiceTest.java +++ b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/YarnQueueServiceTest.java @@ -23,6 +23,7 @@ import org.apache.streampark.console.base.exception.ApiAlertException; import org.apache.streampark.console.core.bean.ResponseResult; import org.apache.streampark.console.core.entity.YarnQueue; +import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.core.service.impl.YarnQueueServiceImpl; import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; @@ -51,14 +52,14 @@ class YarnQueueServiceTest extends SpringUnitTestBase { @Autowired private FlinkClusterService flinkClusterService; - @Autowired private ApplicationService 
applicationService; + @Autowired private ApplicationManageService applicationManageService; @Autowired private YarnQueueService yarnQueueService; @AfterEach void cleanTestRecordsInDatabase() { flinkClusterService.remove(new QueryWrapper<>()); - applicationService.remove(new QueryWrapper<>()); + applicationManageService.remove(new QueryWrapper<>()); yarnQueueService.remove(new QueryWrapper<>()); } @@ -260,24 +261,26 @@ void testCheckNotReferencedByApplications() { yarnQueueServiceImpl.checkNotReferencedByApplications(targetTeamId, queueLabel, operation); // Test for existed applications that don't belong to the same team, not in yarn mode. - applicationService.save(mockYarnModeJobApp(2L, "app1", null, ExecutionMode.REMOTE)); + applicationManageService.save(mockYarnModeJobApp(2L, "app1", null, ExecutionMode.REMOTE)); yarnQueueServiceImpl.checkNotReferencedByApplications(targetTeamId, queueLabel, operation); // Test for existed applications that don't belong to the same team, in yarn mode - applicationService.save(mockYarnModeJobApp(2L, "app2", null, ExecutionMode.YARN_APPLICATION)); + applicationManageService.save( + mockYarnModeJobApp(2L, "app2", null, ExecutionMode.YARN_APPLICATION)); yarnQueueServiceImpl.checkNotReferencedByApplications(targetTeamId, queueLabel, operation); // Test for existed applications that belong to the same team, but not in yarn mode. - applicationService.save(mockYarnModeJobApp(targetTeamId, "app3", null, ExecutionMode.REMOTE)); + applicationManageService.save( + mockYarnModeJobApp(targetTeamId, "app3", null, ExecutionMode.REMOTE)); yarnQueueServiceImpl.checkNotReferencedByApplications(targetTeamId, queueLabel, operation); // Test for existed applications that belong to the same team, but without yarn queue value. 
- applicationService.save( + applicationManageService.save( mockYarnModeJobApp(targetTeamId, "app4", null, ExecutionMode.YARN_PER_JOB)); yarnQueueServiceImpl.checkNotReferencedByApplications(targetTeamId, queueLabel, operation); // Test for existed applications, some apps belong to the same team, but others don't belong to. - applicationService.save( + applicationManageService.save( mockYarnModeJobApp(targetTeamId, "app5", queueLabel, ExecutionMode.YARN_PER_JOB)); assertThatThrownBy( () -> From 6a5e6e4e97dbf5f3fc745df7b36292acc2fd4888 Mon Sep 17 00:00:00 2001 From: benjobs Date: Sat, 2 Sep 2023 21:16:40 -0500 Subject: [PATCH 2/2] [Improve] Application service minor improvement (#3015) * [Improve] application service minor improvement * [Improve] applicationService minor improvement * minor improvement * minor improvement * minor improvement --- .../controller/ApplicationController.java | 47 +---------- .../application/ApplicationActionService.java | 27 +++---- .../application/ApplicationInfoService.java | 36 ++++----- .../application/ApplicationManageService.java | 40 +++++----- .../impl/ApplicationActionServiceImpl.java | 49 ++++++------ .../impl/ApplicationInfoServiceImpl.java | 20 ++--- .../impl/ApplicationManageServiceImpl.java | 77 ++++++++++++------- .../ApplicationManageServiceITest.java | 3 +- 8 files changed, 138 insertions(+), 161 deletions(-) diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java index ed2a113522..048f95f2cc 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java @@ -26,20 +26,17 @@ import 
org.apache.streampark.console.core.annotation.ApiAccess; import org.apache.streampark.console.core.annotation.AppUpdated; import org.apache.streampark.console.core.annotation.PermissionAction; -import org.apache.streampark.console.core.bean.AppControl; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.ApplicationBackUp; import org.apache.streampark.console.core.entity.ApplicationLog; import org.apache.streampark.console.core.enums.AppExistsState; import org.apache.streampark.console.core.enums.PermissionType; -import org.apache.streampark.console.core.service.AppBuildPipeService; import org.apache.streampark.console.core.service.ApplicationBackUpService; import org.apache.streampark.console.core.service.ApplicationLogService; import org.apache.streampark.console.core.service.ResourceService; import org.apache.streampark.console.core.service.application.ApplicationActionService; import org.apache.streampark.console.core.service.application.ApplicationInfoService; import org.apache.streampark.console.core.service.application.ApplicationManageService; -import org.apache.streampark.flink.packer.pipeline.PipelineStatus; import org.apache.shiro.authz.annotation.RequiresPermissions; @@ -63,10 +60,7 @@ import java.io.IOException; import java.io.Serializable; import java.net.URI; -import java.util.HashMap; -import java.util.List; import java.util.Map; -import java.util.stream.Collectors; @Tag(name = "FLINK_APPLICATION_TAG") @Slf4j @@ -76,15 +70,15 @@ public class ApplicationController { @Autowired private ApplicationManageService applicationManageService; + @Autowired private ApplicationActionService applicationActionService; + @Autowired private ApplicationInfoService applicationInfoService; @Autowired private ApplicationBackUpService backUpService; @Autowired private ApplicationLogService applicationLogService; - @Autowired private AppBuildPipeService appBuildPipeService; - @Autowired private ResourceService 
resourceService; @Operation(summary = "Get application") @@ -128,12 +122,8 @@ public RestResponse create(Application app) throws IOException { @PostMapping(value = "copy") @RequiresPermissions("app:copy") public RestResponse copy(@Parameter(hidden = true) Application app) throws IOException { - Long id = applicationManageService.copy(app); - Map data = new HashMap<>(); - data.put("id", Long.toString(id)); - return id.equals(0L) - ? RestResponse.success(false).data(data) - : RestResponse.success(true).data(data); + applicationManageService.copy(app); + return RestResponse.success(); } @Operation(summary = "Update application") @@ -159,34 +149,6 @@ public RestResponse dashboard(Long teamId) { @RequiresPermissions("app:view") public RestResponse list(Application app, RestRequest request) { IPage applicationList = applicationManageService.page(app, request); - List appRecords = applicationList.getRecords(); - List appIds = appRecords.stream().map(Application::getId).collect(Collectors.toList()); - Map pipeStates = appBuildPipeService.listPipelineStatus(appIds); - - // add building pipeline status info and app control info - appRecords = - appRecords.stream() - .peek( - e -> { - if (pipeStates.containsKey(e.getId())) { - e.setBuildStatus(pipeStates.get(e.getId()).getCode()); - } - }) - .peek( - e -> { - AppControl appControl = - new AppControl() - .setAllowBuild( - e.getBuildStatus() == null - || !PipelineStatus.running.getCode().equals(e.getBuildStatus())) - .setAllowStart( - !e.shouldBeTrack() - && PipelineStatus.success.getCode().equals(e.getBuildStatus())) - .setAllowStop(e.isRunning()); - e.setAppControl(appControl); - }) - .collect(Collectors.toList()); - applicationList.setRecords(appRecords); return RestResponse.success(applicationList); } @@ -245,7 +207,6 @@ public RestResponse revoke(Application app) { @RequiresPermissions("app:start") public RestResponse start(@Parameter(hidden = true) Application app) { try { - applicationInfoService.checkEnv(app); 
applicationActionService.start(app, false); return RestResponse.success(true); } catch (Exception e) { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationActionService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationActionService.java index 3e77485f80..e2936cb73d 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationActionService.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationActionService.java @@ -28,50 +28,43 @@ */ public interface ApplicationActionService extends IService { - /** - * This method is used to start the given application. - * - * @param app The application object to be started. - */ - void starting(Application app); - /** * Starts the specified application. * - * @param app The application to start. + * @param appParam The application to start. * @param auto True if the application should start automatically, False otherwise. * @throws Exception If an error occurs while starting the application. */ - void start(Application app, boolean auto) throws Exception; + void start(Application appParam, boolean auto) throws Exception; /** * Restarts the given application. * - * @param application The application to restart. + * @param appParam The application to restart. * @throws Exception If an error occurs while restarting the application. */ - void restart(Application application) throws Exception; + void restart(Application appParam) throws Exception; /** * Revokes access for the given application. * - * @param app The application for which access needs to be revoked. + * @param appParam The application for which access needs to be revoked. * @throws ApplicationException if an error occurs while revoking access. 
*/ - void revoke(Application app) throws ApplicationException; + void revoke(Application appParam) throws ApplicationException; /** * Cancels the given application. Throws an exception if cancellation fails. * - * @param app the application to be canceled + * @param appParam the application to be canceled * @throws Exception if cancellation fails */ - void cancel(Application app) throws Exception; + void cancel(Application appParam) throws Exception; /** * Forces the given application to stop. * - * @param app the application to be stopped + * @param appParam the application to be stopped */ - void forcedStop(Application app); + void forcedStop(Application appParam); } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java index 093b1b0d44..57fbdef59d 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java @@ -37,37 +37,37 @@ public interface ApplicationInfoService extends IService { /** * Maps the given application. * - * @param app The application to be mapped. + * @param appParam The application to be mapped. * @return True if the mapping was successful, false otherwise. */ - boolean mapping(Application app); + boolean mapping(Application appParam); /** * Checks the environment for the given application. 
* - * @param app the application to check the environment for + * @param appParam the application to check the environment for * @return true if the environment is valid for the application, false otherwise * @throws ApplicationException if an error occurs while checking the environment */ - boolean checkEnv(Application app) throws ApplicationException; + boolean checkEnv(Application appParam) throws ApplicationException; /** * Checks the savepoint path for the given application. * - * @param app the application to check the savepoint path for + * @param appParam the application to check the savepoint path for * @return the check message * @throws Exception if an error occurs while checking the savepoint path */ - String checkSavepointPath(Application app) throws Exception; + String checkSavepointPath(Application appParam) throws Exception; /** * Checks if the given application meets the required alterations. * - * @param application The application to be checked. + * @param appParam The application to be checked. * @return True if the application meets the required alterations, false otherwise. * @throws ApplicationException If an error occurs while checking the alterations. */ - boolean checkAlter(Application application); + boolean checkAlter(Application appParam); /** * Checks if a record exists in the database with the given team ID. @@ -129,42 +129,42 @@ public interface ApplicationInfoService extends IService { /** * Gets the YARN name for the given application. * - * @param app The application for which to retrieve the YARN name. + * @param appParam The application for which to retrieve the YARN name. * @return The YARN name of the application as a String. */ - String getYarnName(Application app); + String getYarnName(Application appParam); /** * Checks if the given application exists in the system. * - * @param app The application to check for existence. + * @param appParam The application to check for existence. 
* @return AppExistsState indicating the existence state of the application. */ - AppExistsState checkExists(Application app); + AppExistsState checkExists(Application appParam); /** * Persists the metrics of the given application. * - * @param application The application which metrics need to be persisted. + * @param appParam The application which metrics need to be persisted. */ - void persistMetrics(Application application); + void persistMetrics(Application appParam); /** * Reads the configuration for the given application and returns it as a String. * - * @param app The application for which the configuration needs to be read. + * @param appParam The application for which the configuration needs to be read. * @return The configuration for the given application as a String. * @throws IOException If an I/O error occurs while reading the configuration. */ - String readConf(Application app) throws IOException; + String readConf(Application appParam) throws IOException; /** * Retrieves the main configuration value for the given Application. * - * @param application the Application object for which to fetch the main configuration value + * @param appParam the Application object for which to fetch the main configuration value * @return the main configuration value as a String */ - String getMain(Application application); + String getMain(Application appParam); /** * Returns the dashboard for the specified team. 
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java index 3d2c6f4fa2..d55564111e 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java @@ -37,83 +37,83 @@ public interface ApplicationManageService extends IService { /** * Retrieves a page of applications based on the provided parameters. * - * @param app The application object to be used for filtering the results. + * @param appParam The application object to be used for filtering the results. * @param request The REST request object containing additional parameters or headers. * @return A page of Application objects based on the provided parameters. */ - IPage page(Application app, RestRequest request); + IPage page(Application appParam, RestRequest request); /** * Creates a new application. * - * @param app The application to create. + * @param appParam The application to create. * @return True if the application was successfully created, false otherwise. * @throws IOException If an I/O error occurs. */ - boolean create(Application app) throws IOException; + boolean create(Application appParam) throws IOException; /** * Copies the given Application. 
* - * @param app the Application to be copied + * @param appParam the Application to be copied * @return the size of the copied Application in bytes as a Long value * @throws IOException if there was an error during the copy process */ - Long copy(Application app) throws IOException; + Long copy(Application appParam) throws IOException; /** * Updates the given application. * - * @param app the application to be updated + * @param appParam the application to be updated * @return true if the update was successful, false otherwise */ - boolean update(Application app); + boolean update(Application appParam); /** * Sets the given application to be effective. * - * @param application the application to be set effective + * @param appParam the application to be set effective */ - void toEffective(Application application); + void toEffective(Application appParam); /** * Checks if the given application is ready to build and update. * - * @param app the application to check for readiness + * @param appParam the application to check for readiness * @return true if the application is ready to build and update, false otherwise */ - boolean checkBuildAndUpdate(Application app); + boolean checkBuildAndUpdate(Application appParam); /** * Deletes the given Application from the system. * - * @param app The Application to be deleted. + * @param appParam The Application to be deleted. * @return True if the deletion was successful, false otherwise. */ - Boolean delete(Application app); + Boolean delete(Application appParam); /** * Retrieves the Application with the specified details from the system. * - * @param app The Application object containing the details of the Application to retrieve. + * @param appParam The Application object containing the details of the Application to retrieve. * @return The Application object that matches the specified details, or null if no matching * Application is found. 
*/ - Application getApp(Application app); + Application getApp(Application appParam); /** * Updates the release of the given application. * - * @param application The application to update the release for. + * @param appParam The application to update the release for. */ - void updateRelease(Application application); + void updateRelease(Application appParam); /** * Cleans the application by performing necessary cleanup tasks. * - * @param app The application to clean. + * @param appParam The application to clean. */ - void clean(Application app); + void clean(Application appParam); /** * Retrieves a list of applications by project ID. diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java index 128893d81b..9d6926a9a8 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java @@ -61,6 +61,7 @@ import org.apache.streampark.console.core.service.SettingService; import org.apache.streampark.console.core.service.VariableService; import org.apache.streampark.console.core.service.application.ApplicationActionService; +import org.apache.streampark.console.core.service.application.ApplicationInfoService; import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.core.task.FlinkHttpWatcher; import org.apache.streampark.flink.client.FlinkClient; @@ -130,6 +131,8 @@ public class ApplicationActionServiceImpl extends ServiceImpl(); @Override - public void revoke(Application appParma) 
throws ApplicationException { - Application application = getById(appParma.getId()); + public void revoke(Application appParam) throws ApplicationException { + Application application = getById(appParam.getId()); ApiAlertException.throwIfNull( application, - String.format("The application id=%s not found, revoke failed.", appParma.getId())); + String.format("The application id=%s not found, revoke failed.", appParam.getId())); // 1) delete files that have been published to workspace application.getFsOperator().delete(application.getAppHome()); @@ -188,16 +191,16 @@ public void revoke(Application appParma) throws ApplicationException { } @Override - public void restart(Application application) throws Exception { - this.cancel(application); - this.start(application, false); + public void restart(Application appParam) throws Exception { + this.cancel(appParam); + this.start(appParam, false); } @Override - public void forcedStop(Application app) { - CompletableFuture startFuture = startFutureMap.remove(app.getId()); - CompletableFuture cancelFuture = cancelFutureMap.remove(app.getId()); - Application application = this.baseMapper.getApp(app); + public void forcedStop(Application appParam) { + CompletableFuture startFuture = startFutureMap.remove(appParam.getId()); + CompletableFuture cancelFuture = cancelFutureMap.remove(appParam.getId()); + Application application = this.baseMapper.getApp(appParam); if (isKubernetesApp(application)) { KubernetesDeploymentHelper.watchPodTerminatedLog( application.getK8sNamespace(), application.getJobName(), application.getJobId()); @@ -213,7 +216,7 @@ public void forcedStop(Application app) { cancelFuture.cancel(true); } if (startFuture == null && cancelFuture == null) { - this.updateToStopped(app); + this.updateToStopped(appParam); } } @@ -363,23 +366,12 @@ public void cancel(Application appParam) throws Exception { }); } - /** - * Setup task is starting (for webUI "state" display) - * - * @param application - */ - @Override - public 
void starting(Application application) { - application.setState(FlinkAppState.STARTING.getValue()); - application.setOptionTime(new Date()); - updateById(application); - } - @Override @Transactional(rollbackFor = {Exception.class}) public void start(Application appParam, boolean auto) throws Exception { final Application application = getById(appParam.getId()); - Utils.notNull(application); + ApiAlertException.throwIfNull(application, "[StreamPark] application is not exists."); + if (!application.isCanBeStart()) { throw new ApiAlertException("[StreamPark] The application cannot be started repeatedly."); } @@ -389,6 +381,13 @@ public void start(Application appParam, boolean auto) throws Exception { throw new ApiAlertException("[StreamPark] can no found flink version"); } + applicationInfoService.checkEnv(appParam); + + // update state to starting + application.setState(FlinkAppState.STARTING.getValue()); + application.setOptionTime(new Date()); + updateById(application); + // if manually started, clear the restart flag if (!auto) { application.setRestartCount(0); @@ -399,8 +398,6 @@ public void start(Application appParam, boolean auto) throws Exception { appParam.setSavePointed(true); application.setRestartCount(application.getRestartCount() + 1); } - - starting(application); application.setAllowNonRestored(appParam.getAllowNonRestored()); String appConf; diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java index 8ded940f7f..b26de031f1 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java +++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java @@ -209,14 +209,14 @@ public boolean checkEnv(Application appParam) throws ApplicationException { } @Override - public boolean checkAlter(Application application) { - Long appId = application.getId(); - FlinkAppState state = FlinkAppState.of(application.getState()); + public boolean checkAlter(Application appParam) { + Long appId = appParam.getId(); + FlinkAppState state = FlinkAppState.of(appParam.getState()); if (!FlinkAppState.CANCELED.equals(state)) { return false; } long cancelUserId = FlinkHttpWatcher.getCanceledJobUserId(appId); - long appUserId = application.getUserId(); + long appUserId = appParam.getUserId(); return cancelUserId != -1 && cancelUserId != appUserId; } @@ -432,16 +432,16 @@ public String readConf(Application appParam) throws IOException { } @Override - public String getMain(Application application) { + public String getMain(Application appParam) { File jarFile; - if (application.getProjectId() == null) { - jarFile = new File(application.getJar()); + if (appParam.getProjectId() == null) { + jarFile = new File(appParam.getJar()); } else { Project project = new Project(); - project.setId(application.getProjectId()); + project.setId(appParam.getProjectId()); String modulePath = - project.getDistHome().getAbsolutePath().concat("/").concat(application.getModule()); - jarFile = new File(modulePath, application.getJar()); + project.getDistHome().getAbsolutePath().concat("/").concat(appParam.getModule()); + jarFile = new File(modulePath, appParam.getJar()); } Manifest manifest = Utils.getJarManifest(jarFile); return manifest.getMainAttributes().getValue("Main-Class"); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java index 222b8967e9..eeabbaad8f 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java @@ -28,6 +28,7 @@ import org.apache.streampark.console.base.util.CommonUtils; import org.apache.streampark.console.base.util.ObjectUtils; import org.apache.streampark.console.base.util.WebUtils; +import org.apache.streampark.console.core.bean.AppControl; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.ApplicationConfig; import org.apache.streampark.console.core.entity.FlinkSql; @@ -53,6 +54,7 @@ import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.console.core.task.FlinkHttpWatcher; import org.apache.streampark.flink.kubernetes.FlinkK8sWatcher; +import org.apache.streampark.flink.packer.pipeline.PipelineStatus; import org.apache.commons.lang3.StringUtils; @@ -80,6 +82,7 @@ import java.util.Collection; import java.util.Date; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; @@ -127,16 +130,16 @@ public void resetOptionState() { } @Override - public void toEffective(Application application) { + public void toEffective(Application appParam) { // set latest to Effective - ApplicationConfig config = configService.getLatest(application.getId()); + ApplicationConfig config = configService.getLatest(appParam.getId()); if (config != null) { - this.configService.toEffective(application.getId(), config.getId()); + this.configService.toEffective(appParam.getId(), config.getId()); } - if 
(application.isFlinkSqlJob()) { - FlinkSql flinkSql = flinkSqlService.getCandidate(application.getId(), null); + if (appParam.isFlinkSqlJob()) { + FlinkSql flinkSql = flinkSqlService.getCandidate(appParam.getId(), null); if (flinkSql != null) { - flinkSqlService.toEffective(application.getId(), flinkSql.getId()); + flinkSqlService.toEffective(appParam.getId(), flinkSql.getId()); // clean candidate flinkSqlService.cleanCandidate(flinkSql.getId()); } @@ -145,9 +148,9 @@ public void toEffective(Application application) { @Override @Transactional(rollbackFor = {Exception.class}) - public Boolean delete(Application paramApp) { + public Boolean delete(Application appParam) { - Application application = getById(paramApp.getId()); + Application application = getById(appParam.getId()); // 1) remove flink sql flinkSqlService.removeApp(application.getId()); @@ -177,7 +180,7 @@ public Boolean delete(Application paramApp) { if (isKubernetesApp(application)) { k8SFlinkTrackMonitor.unWatching(toTrackId(application)); } else { - FlinkHttpWatcher.unWatching(paramApp.getId()); + FlinkHttpWatcher.unWatching(appParam.getId()); } return true; } @@ -220,6 +223,10 @@ public IPage page(Application appParam, RestRequest request) { this.baseMapper.page(page, appParam); List records = page.getRecords(); long now = System.currentTimeMillis(); + + List appIds = records.stream().map(Application::getId).collect(Collectors.toList()); + Map pipeStates = appBuildPipeService.listPipelineStatus(appIds); + List newRecords = records.stream() .peek( @@ -236,6 +243,24 @@ record -> { record.setDuration(now - record.getStartTime().getTime()); } } + if (pipeStates.containsKey(record.getId())) { + record.setBuildStatus(pipeStates.get(record.getId()).getCode()); + } + + AppControl appControl = + new AppControl() + .setAllowBuild( + record.getBuildStatus() == null + || !PipelineStatus.running + .getCode() + .equals(record.getBuildStatus())) + .setAllowStart( + !record.shouldBeTrack() + && 
PipelineStatus.success + .getCode() + .equals(record.getBuildStatus())) + .setAllowStop(record.isRunning()); + record.setAppControl(appControl); }) .collect(Collectors.toList()); page.setRecords(newRecords); @@ -587,13 +612,13 @@ private void updateFlinkSqlJob(Application application, Application appParam) { } @Override - public void updateRelease(Application application) { + public void updateRelease(Application appParam) { LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); - updateWrapper.eq(Application::getId, application.getId()); - updateWrapper.set(Application::getRelease, application.getRelease()); - updateWrapper.set(Application::getBuild, application.getBuild()); - if (application.getOptionState() != null) { - updateWrapper.set(Application::getOptionState, application.getOptionState()); + updateWrapper.eq(Application::getId, appParam.getId()); + updateWrapper.set(Application::getRelease, appParam.getRelease()); + updateWrapper.set(Application::getBuild, appParam.getBuild()); + if (appParam.getOptionState() != null) { + updateWrapper.set(Application::getOptionState, appParam.getOptionState()); } this.update(updateWrapper); } @@ -623,12 +648,12 @@ public List getByTeamIdAndExecutionModes( } @Override - public boolean checkBuildAndUpdate(Application application) { - boolean build = application.getBuild(); + public boolean checkBuildAndUpdate(Application appParam) { + boolean build = appParam.getBuild(); if (!build) { LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); - updateWrapper.eq(Application::getId, application.getId()); - if (application.isRunning()) { + updateWrapper.eq(Application::getId, appParam.getId()); + if (appParam.isRunning()) { updateWrapper.set(Application::getRelease, ReleaseState.NEED_RESTART.get()); } else { updateWrapper.set(Application::getRelease, ReleaseState.DONE.get()); @@ -637,18 +662,18 @@ public boolean checkBuildAndUpdate(Application application) { this.update(updateWrapper); // backup - if 
(application.isFlinkSqlJob()) { - FlinkSql newFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); - if (!application.isNeedRollback() && newFlinkSql != null) { - backUpService.backup(application, newFlinkSql); + if (appParam.isFlinkSqlJob()) { + FlinkSql newFlinkSql = flinkSqlService.getCandidate(appParam.getId(), CandidateType.NEW); + if (!appParam.isNeedRollback() && newFlinkSql != null) { + backUpService.backup(appParam, newFlinkSql); } } // If the current task is not running, or the task has just been added, // directly set the candidate version to the official version - FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), false); - if (!application.isRunning() || flinkSql == null) { - this.toEffective(application); + FlinkSql flinkSql = flinkSqlService.getEffective(appParam.getId(), false); + if (!appParam.isRunning() || flinkSql == null) { + this.toEffective(appParam); } } return build; diff --git a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceITest.java b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceITest.java index d32cd009de..759fc9dac6 100644 --- a/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceITest.java +++ b/streampark-console/streampark-console-service/src/test/java/org/apache/streampark/console/core/service/ApplicationManageServiceITest.java @@ -49,7 +49,7 @@ /** * Integration test for {@link - * org.apache.streampark.console.core.service.impl.ApplicationServiceImpl}. + * org.apache.streampark.console.core.service.application.ApplicationManageService}. 
*/ class ApplicationManageServiceITest extends SpringIntegrationTestBase { @@ -57,6 +57,7 @@ class ApplicationManageServiceITest extends SpringIntegrationTestBase { FlinkStandaloneSessionCluster.builder().slotsNumPerTm(4).slf4jLogConsumer(null).build(); @Autowired private ApplicationManageService applicationManageService; + @Autowired private ApplicationActionService applicationActionService; @Autowired private FlinkClusterService clusterService;