diff --git a/pom.xml b/pom.xml
index ff61935d05..a0f80bc80b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -63,7 +63,7 @@
1.8
UTF-8
UTF-8
- com.streamxhub.streamx.shaded
+ com.streamxhub.streamx.shaded
1.14
1.14.0
diff --git a/streamx-common/pom.xml b/streamx-common/pom.xml
index 9605cfb65d..7911e18429 100644
--- a/streamx-common/pom.xml
+++ b/streamx-common/pom.xml
@@ -242,7 +242,7 @@
com.fasterxml.jackson
- ${streamx.shaded.packageName}.com.fasterxml.jackson
+ ${streamx.shaded.package}.com.fasterxml.jackson
diff --git a/streamx-common/src/main/scala/com/streamxhub/streamx/common/util/Utils.scala b/streamx-common/src/main/scala/com/streamxhub/streamx/common/util/Utils.scala
index d1ba2a328c..a63ad12a4b 100644
--- a/streamx-common/src/main/scala/com/streamxhub/streamx/common/util/Utils.scala
+++ b/streamx-common/src/main/scala/com/streamxhub/streamx/common/util/Utils.scala
@@ -117,4 +117,12 @@ object Utils {
})
}
+  /**
+   * Calculate num1 / num2 as a percentage, rounded to one decimal place
+   * (e.g. 3/8 -> 37.5). Returns 0.0 when either argument is 0; the result
+   * may exceed 100 when num1 > num2.
+   */
+  def calPercent(num1: Long, num2: Long): Double =
+    if (num1 == 0 || num2 == 0) 0.0
+    // locale-independent rounding: "%.1f".formatted + toDouble breaks in comma-decimal locales
+    else math.round(num1.toDouble / num2.toDouble * 1000) / 10.0
}
diff --git a/streamx-console/streamx-console-service/src/assembly/bin/streamx.sh b/streamx-console/streamx-console-service/src/assembly/bin/streamx.sh
index b510755f8e..1cd319eca0 100755
--- a/streamx-console/streamx-console-service/src/assembly/bin/streamx.sh
+++ b/streamx-console/streamx-console-service/src/assembly/bin/streamx.sh
@@ -326,7 +326,7 @@ print_logo() {
printf ' • WebSite: %s http://www.streamxhub.com%s\n' $BLUE $RESET
printf ' • GitHub : %s http://github.com/streamxhub/streamx%s\n' $BLUE $RESET
printf ' • Gitee : %s http://gitee.com/streamxhub/streamx%s\n' $BLUE $RESET
- printf ' %s ──────── Make stream processing easier ô‿ô!%s\n\n' $GREEN $RESET
+ printf ' %s ──────── Make stream processing easier ô~ô!%s\n\n' $GREEN $RESET
}
# shellcheck disable=SC2120
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/StreamXConsole.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/StreamXConsole.java
index cf7a73904d..635a36db32 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/StreamXConsole.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/StreamXConsole.java
@@ -46,7 +46,7 @@
* GitHub : https://github.com/streamxhub/streamx
* Gitee : https://gitee.com/streamxhub/streamx
*
- * [StreamX] Make stream processing easier ô‿ô!
+ * [StreamX] Make stream processing easier ô~ô!
*
* 十步杀一人 千里不留行 事了拂衣去 深藏身与名
*
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/controller/ApplicationBuildPipelineController.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/controller/ApplicationBuildPipelineController.java
index c3508c5d14..d4ea157806 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/controller/ApplicationBuildPipelineController.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/controller/ApplicationBuildPipelineController.java
@@ -81,10 +81,8 @@ public RestResponse buildApplication(Long appId, boolean forceBuild) {
}
//回滚任务.
- if (app.isNeedRollback()) {
- if (app.isFlinkSqlJob()) {
- flinkSqlService.rollback(app);
- }
+ if (app.isNeedRollback() && app.isFlinkSqlJob()) {
+ flinkSqlService.rollback(app);
}
boolean actionResult = appBuildPipeService.buildApplication(app);
@@ -103,7 +101,7 @@ public RestResponse buildApplication(Long appId, boolean forceBuild) {
@PostMapping("/detail")
@RequiresPermissions("app:view")
public RestResponse getBuildProgressDetail(Long appId) {
- Map details = new HashMap<>();
+ Map details = new HashMap<>(0);
Optional pipeline = appBuildPipeService.getCurrentBuildPipeline(appId);
details.put("pipeline", pipeline.map(AppBuildPipeline::toView).orElse(null));
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/entity/AppBuildPipeline.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/entity/AppBuildPipeline.java
index 0f94e699a8..1dafc79102 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/entity/AppBuildPipeline.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/entity/AppBuildPipeline.java
@@ -27,13 +27,13 @@
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.collect.Maps;
+import com.streamxhub.streamx.common.util.Utils;
import com.streamxhub.streamx.console.base.util.JacksonUtils;
import com.streamxhub.streamx.flink.packer.pipeline.PipeError;
import com.streamxhub.streamx.flink.packer.pipeline.PipelineStatus;
import com.streamxhub.streamx.flink.packer.pipeline.PipelineStepStatus;
import com.streamxhub.streamx.flink.packer.pipeline.PipelineType;
import com.streamxhub.streamx.flink.packer.pipeline.BuildResult;
-import com.streamxhub.streamx.flink.packer.pipeline.BuildPipelineHelper;
import com.streamxhub.streamx.flink.packer.pipeline.BuildPipeline;
import com.streamxhub.streamx.flink.packer.pipeline.PipeSnapshot;
import lombok.AllArgsConstructor;
@@ -143,7 +143,7 @@ public AppBuildPipeline setStepStatus(@Nonnull Map
this.stepStatusJson = JacksonUtils.write(stepStatus);
} catch (JsonProcessingException e) {
log.error("json parse error on ApplicationBuildPipeline, stepStatusMap=({})",
- stepStatus.entrySet().stream().map(et -> et.getKey() + "->" + et.getValue()).collect(Collectors.joining(",")), e);
+ stepStatus.entrySet().stream().map(et -> et.getKey() + "->" + et.getValue()).collect(Collectors.joining(",")), e);
}
return this;
}
@@ -169,7 +169,7 @@ public AppBuildPipeline setStepStatusTimestamp(@Nonnull Map stepS
this.stepStatusTimestampJson = JacksonUtils.write(stepStatusSt);
} catch (JsonProcessingException e) {
log.error("json parse error on ApplicationBuildPipeline, stepStatusSt=({})",
- stepStatusSt.entrySet().stream().map(et -> et.getKey() + "->" + et.getValue()).collect(Collectors.joining(",")), e);
+ stepStatusSt.entrySet().stream().map(et -> et.getKey() + "->" + et.getValue()).collect(Collectors.joining(",")), e);
}
return this;
}
@@ -228,7 +228,7 @@ public long calCostSecond() {
@JsonIgnore
public R getBuildResult() {
PipelineType pipeType = getPipeType();
- if (pipeType.isUnknown()) {
+ if (pipeType.isUnknown() || buildResultJson == null) {
return null;
}
try {
@@ -251,14 +251,14 @@ public static AppBuildPipeline initFromPipeline(@Nonnull BuildPipeline pipeline)
*/
public static AppBuildPipeline fromPipeSnapshot(@Nonnull PipeSnapshot snapshot) {
return new AppBuildPipeline()
- .setPipeType(snapshot.pipeType())
- .setPipeStatus(snapshot.pipeStatus())
- .setTotalStep(snapshot.allSteps())
- .setCurStep(snapshot.curStep())
- .setStepStatus(snapshot.pureStepStatusAsJava())
- .setStepStatusTimestamp(snapshot.stepStatusTimestampAsJava())
- .setError(snapshot.error())
- .setUpdateTime(new Date(snapshot.emitTime()));
+ .setPipeType(snapshot.pipeType())
+ .setPipeStatus(snapshot.pipeStatus())
+ .setTotalStep(snapshot.allSteps())
+ .setCurStep(snapshot.curStep())
+ .setStepStatus(snapshot.pureStepStatusAsJava())
+ .setStepStatusTimestamp(snapshot.stepStatusTimestampAsJava())
+ .setError(snapshot.error())
+ .setUpdateTime(new Date(snapshot.emitTime()));
}
/**
@@ -269,7 +269,6 @@ public View toView() {
}
-
/**
* View object of AppBuildPipeline
*/
@@ -285,9 +284,9 @@ public static class View {
private Double percent;
private Long costSec;
private List steps;
- private Boolean isErr;
- private String errSummary;
- private String errStack;
+ private Boolean hasError;
+ private String errorSummary;
+ private String errorStack;
private Date updateTime;
public static View of(@Nonnull AppBuildPipeline pipe) {
@@ -298,28 +297,32 @@ public static View of(@Nonnull AppBuildPipeline pipe) {
List steps = new ArrayList<>(stepDesc.size());
for (int i = 1; i <= pipe.getPipeType().getSteps().size(); i++) {
Step step = new Step()
- .setSeq(i)
- .setDesc(stepDesc.getOrDefault(i, "unknown step"))
- .setStatus(stepStatus.getOrDefault(i, PipelineStepStatus.unknown).getCode());
+ .setSeq(i)
+ .setDesc(stepDesc.getOrDefault(i, "unknown step"))
+ .setStatus(stepStatus.getOrDefault(i, PipelineStepStatus.unknown).getCode());
Long st = stepTs.get(i);
if (st != null) {
step.setTs(new Date(st));
}
steps.add(step);
}
+
return new View()
- .setAppId(pipe.getAppId())
- .setPipeType(pipe.getPipeTypeCode())
- .setPipeStatus(pipe.getPipeStatusCode())
- .setCurStep(pipe.getCurStep())
- .setTotalStep(pipe.getTotalStep())
- .setPercent(BuildPipelineHelper.calPercent(pipe.getCurStep(), pipe.getTotalStep()))
- .setCostSec(pipe.calCostSecond())
- .setSteps(steps)
- .setIsErr(pipe.getError().nonEmpty())
- .setErrSummary(pipe.getError().summary())
- .setErrStack(pipe.getError().exceptionStack())
- .setUpdateTime(pipe.getUpdateTime());
+ .setAppId(pipe.getAppId())
+ .setPipeType(pipe.getPipeTypeCode())
+ .setPipeStatus(pipe.getPipeStatusCode())
+ .setCurStep(pipe.getCurStep())
+ .setTotalStep(pipe.getTotalStep())
+ .setPercent(Utils.calPercent(
+ pipe.getBuildResult() == null ? pipe.getCurStep() - 1 : pipe.getCurStep(),
+ pipe.getTotalStep())
+ )
+ .setCostSec(pipe.calCostSecond())
+ .setSteps(steps)
+ .setHasError(pipe.getError().nonEmpty())
+ .setErrorSummary(pipe.getError().summary())
+ .setErrorStack(pipe.getError().exceptionStack())
+ .setUpdateTime(pipe.getUpdateTime());
}
}
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/entity/Application.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/entity/Application.java
index 96d9b10f4b..6b5d0e7e69 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/entity/Application.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/entity/Application.java
@@ -312,6 +312,7 @@ public void setState(Integer state) {
*/
public static Integer shouldTracking(@Nonnull FlinkAppState state) {
switch (state) {
+ case ADDED:
case CREATED:
case FINISHED:
case FAILED:
@@ -459,42 +460,58 @@ public AppInfo httpYarnAppInfo() throws Exception {
}
@JsonIgnore
- public JobsOverview httpJobsOverview(FlinkEnv env, FlinkCluster flinkCluster) throws Exception {
- final String flinkUrl = "jobs/overview";
+ public Overview httpOverview(FlinkEnv env, FlinkCluster flinkCluster) throws IOException {
+ final String flinkUrl = "overview";
if (appId != null) {
- if (ExecutionMode.isYarnMode(executionMode)) {
+ if (getExecutionModeEnum().equals(ExecutionMode.YARN_APPLICATION) ||
+ getExecutionModeEnum().equals(ExecutionMode.YARN_PER_JOB)) {
String format = "%s/proxy/%s/" + flinkUrl;
try {
String url = String.format(format, HadoopUtils.getRMWebAppURL(false), appId);
- return httpGetDoResult(url, JobsOverview.class);
+ return httpGetDoResult(url, Overview.class);
} catch (IOException e) {
String url = String.format(format, HadoopUtils.getRMWebAppURL(true), appId);
- return httpGetDoResult(url, JobsOverview.class);
+ return httpGetDoResult(url, Overview.class);
}
- } else if (ExecutionMode.isRemoteMode(executionMode)) {
- String remoteUrl = getFlinkClusterRestUrl(flinkCluster, flinkUrl);
- return httpGetDoResult(remoteUrl, JobsOverview.class);
+ // TODO: yarn-session
+ //String remoteUrl = getFlinkClusterRestUrl(flinkCluster, flinkUrl);
+ //return httpGetDoResult(remoteUrl, Overview.class);
}
}
return null;
}
@JsonIgnore
- public Overview httpOverview(FlinkEnv env, FlinkCluster flinkCluster) throws IOException {
- final String flinkUrl = "overview";
- if (appId != null) {
- if (ExecutionMode.isYarnMode(executionMode)) {
+ public JobsOverview httpJobsOverview(FlinkEnv env, FlinkCluster flinkCluster) throws Exception {
+ final String flinkUrl = "jobs/overview";
+ if (ExecutionMode.isYarnMode(executionMode)) {
+ if (appId != null) {
String format = "%s/proxy/%s/" + flinkUrl;
+ JobsOverview jobsOverview;
try {
String url = String.format(format, HadoopUtils.getRMWebAppURL(false), appId);
- return httpGetDoResult(url, Overview.class);
+ jobsOverview = httpGetDoResult(url, JobsOverview.class);
} catch (IOException e) {
String url = String.format(format, HadoopUtils.getRMWebAppURL(true), appId);
- return httpGetDoResult(url, Overview.class);
+ jobsOverview = httpGetDoResult(url, JobsOverview.class);
+ }
+ if (jobsOverview != null && ExecutionMode.YARN_SESSION.equals(getExecutionModeEnum())) {
+ // keep only the current job (match on this application's jobId)
+ List jobs = jobsOverview.getJobs().stream().filter(x -> x.getId().equals(jobId)).collect(Collectors.toList());
+ jobsOverview.setJobs(jobs);
}
- } else if (ExecutionMode.isRemoteMode(executionMode)) {
+ return jobsOverview;
+ }
+ } else if (ExecutionMode.isRemoteMode(executionMode)) {
+ if (jobId != null) {
String remoteUrl = getFlinkClusterRestUrl(flinkCluster, flinkUrl);
- return httpGetDoResult(remoteUrl, Overview.class);
+ JobsOverview jobsOverview = httpGetDoResult(remoteUrl, JobsOverview.class);
+ if (jobsOverview != null) {
+ // keep only the current job (match on this application's jobId)
+ List jobs = jobsOverview.getJobs().stream().filter(x -> x.getId().equals(jobId)).collect(Collectors.toList());
+ jobsOverview.setJobs(jobs);
+ }
+ return jobsOverview;
}
}
return null;
@@ -503,8 +520,8 @@ public Overview httpOverview(FlinkEnv env, FlinkCluster flinkCluster) throws IOE
@JsonIgnore
public CheckPoints httpCheckpoints(FlinkEnv env, FlinkCluster flinkCluster) throws IOException {
final String flinkUrl = "jobs/%s/checkpoints";
- if (appId != null) {
- if (ExecutionMode.isYarnMode(executionMode)) {
+ if (ExecutionMode.isYarnMode(executionMode)) {
+ if (appId != null) {
String format = "%s/proxy/%s/" + flinkUrl;
try {
String url = String.format(format, HadoopUtils.getRMWebAppURL(false), appId, jobId);
@@ -513,7 +530,9 @@ public CheckPoints httpCheckpoints(FlinkEnv env, FlinkCluster flinkCluster) thro
String url = String.format(format, HadoopUtils.getRMWebAppURL(true), appId, jobId);
return httpGetDoResult(url, CheckPoints.class);
}
- } else if (ExecutionMode.isRemoteMode(executionMode)) {
+ }
+ } else if (ExecutionMode.isRemoteMode(executionMode)) {
+ if (jobId != null) {
String remoteUrl = getFlinkClusterRestUrl(flinkCluster, String.format(flinkUrl, jobId));
return httpGetDoResult(remoteUrl, CheckPoints.class);
}
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/AppBuildPipeService.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/AppBuildPipeService.java
index 3189cbef48..fd4af4f374 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/AppBuildPipeService.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/AppBuildPipeService.java
@@ -64,4 +64,9 @@ public interface AppBuildPipeService extends IService {
*/
Map listPipelineStatus(List appIds);
+ /**
+ * Delete the AppBuildPipeline record belonging to the given application.
+ * @param appId id of the application whose build-pipeline record should be removed
+ */
+ void removeApp(Long appId);
}
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/ApplicationBackUpService.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/ApplicationBackUpService.java
index 7855f31526..3003a68b99 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/ApplicationBackUpService.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/ApplicationBackUpService.java
@@ -34,7 +34,7 @@ public interface ApplicationBackUpService extends IService {
Boolean delete(Long id) throws ServiceException;
- void backup(Application app);
+ void backup(Application application, FlinkSql flinkSql);
IPage page(ApplicationBackUp backUp, RestRequest request);
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/FlinkSqlService.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/FlinkSqlService.java
index da75588e11..4df1a637d6 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/FlinkSqlService.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/FlinkSqlService.java
@@ -32,13 +32,11 @@
*/
public interface FlinkSqlService extends IService {
- //TODO 所有的历史记录和版本相关功能,需要重构,重新讨论实现
-
/**
* @param flinkSql
* @param latest 是否latest
*/
- void create(FlinkSql flinkSql, CandidateType type);
+ void create(FlinkSql flinkSql);
/**
* @param latest true 表示设置新增的的记录为 "latest"
@@ -46,7 +44,7 @@ public interface FlinkSqlService extends IService {
* @param sqlId
* @param appId
*/
- void setCandidateOrEffective(CandidateType candidateType, Long appId, Long sqlId);
+ void setCandidate(CandidateType candidateType, Long appId, Long sqlId);
/**
* @param appId
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplBuildPipeServiceImpl.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplBuildPipeServiceImpl.java
index 54a9a9da5c..2b23d7cb09 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplBuildPipeServiceImpl.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplBuildPipeServiceImpl.java
@@ -26,6 +26,8 @@
import com.google.common.collect.Maps;
import com.streamxhub.streamx.common.conf.ConfigConst;
import com.streamxhub.streamx.common.conf.Workspace;
+import com.streamxhub.streamx.common.enums.ApplicationType;
+import com.streamxhub.streamx.common.enums.DevelopmentMode;
import com.streamxhub.streamx.common.enums.ExecutionMode;
import com.streamxhub.streamx.common.fs.FsOperator;
import com.streamxhub.streamx.common.util.ExceptionUtils;
@@ -36,12 +38,13 @@
import com.streamxhub.streamx.console.core.entity.Application;
import com.streamxhub.streamx.console.core.entity.FlinkEnv;
import com.streamxhub.streamx.console.core.entity.FlinkSql;
+import com.streamxhub.streamx.console.core.entity.Message;
import com.streamxhub.streamx.console.core.enums.CandidateType;
import com.streamxhub.streamx.console.core.enums.LaunchState;
-import com.streamxhub.streamx.console.core.entity.Message;
import com.streamxhub.streamx.console.core.enums.NoticeType;
import com.streamxhub.streamx.console.core.enums.OptionState;
import com.streamxhub.streamx.console.core.service.ApplicationBackUpService;
+import com.streamxhub.streamx.console.core.service.ApplicationConfigService;
import com.streamxhub.streamx.console.core.service.AppBuildPipeService;
import com.streamxhub.streamx.console.core.service.ApplicationService;
import com.streamxhub.streamx.console.core.service.CommonService;
@@ -59,7 +62,7 @@
import com.streamxhub.streamx.flink.packer.pipeline.DockerResolvedSnapshot;
import com.streamxhub.streamx.flink.packer.pipeline.FlinkK8sApplicationBuildRequest;
import com.streamxhub.streamx.flink.packer.pipeline.FlinkK8sSessionBuildRequest;
-import com.streamxhub.streamx.flink.packer.pipeline.FlinkRemoteBuildRequest;
+import com.streamxhub.streamx.flink.packer.pipeline.FlinkRemotePerJobBuildRequest;
import com.streamxhub.streamx.flink.packer.pipeline.FlinkYarnApplicationBuildRequest;
import com.streamxhub.streamx.flink.packer.pipeline.PipeSnapshot;
import com.streamxhub.streamx.flink.packer.pipeline.PipelineStatus;
@@ -122,6 +125,9 @@ public class ApplBuildPipeServiceImpl
@Autowired
private ApplicationService applicationService;
+ @Autowired
+ private ApplicationConfigService applicationConfigService;
+
private final ExecutorService executorService = new ThreadPoolExecutor(
Runtime.getRuntime().availableProcessors() * 2,
300,
@@ -144,38 +150,35 @@ public class ApplBuildPipeServiceImpl
@Override
public boolean buildApplication(@Nonnull Application app) throws Exception {
- AppBuildPipeline appBuildPipeline = getById(app.getAppId());
-
// 1) flink sql setDependency
+ FlinkSql newFlinkSql = flinkSqlService.getCandidate(app.getId(), CandidateType.NEW);
+ FlinkSql effectiveFlinkSql = flinkSqlService.getEffective(app.getId(), false);
if (app.isFlinkSqlJob()) {
- FlinkSql flinkSql = flinkSqlService.getCandidate(app.getId(), CandidateType.NEW);
- if (flinkSql == null) {
- flinkSql = flinkSqlService.getEffective(app.getId(), false);
- }
+ FlinkSql flinkSql = newFlinkSql == null ? effectiveFlinkSql : newFlinkSql;
assert flinkSql != null;
app.setDependency(flinkSql.getDependency());
}
- // 2) create pipeline instance
+ // create pipeline instance
BuildPipeline pipeline = createPipelineInstance(app);
+ // clear history
+ removeApp(app.getId());
// register pipeline progress event watcher.
// save snapshot of pipeline to db when status of pipeline was changed.
pipeline.registerWatcher(new PipeWatcher() {
@Override
public void onStart(PipeSnapshot snapshot) throws Exception {
+ AppBuildPipeline buildPipeline = AppBuildPipeline.fromPipeSnapshot(snapshot).setAppId(app.getId());
+ saveEntity(buildPipeline);
+
app.setLaunch(LaunchState.LAUNCHING.get());
applicationService.updateLaunch(app);
// 1) checkEnv
applicationService.checkEnv(app);
- // 2) backup.
- if (appBuildPipeline != null) {
- backUpService.backup(app);
- }
-
- // 3) some preparatory work
+ // 2) some preparatory work
String appUploads = app.getWorkspace().APP_UPLOADS();
if (app.isCustomCodeJob()) {
@@ -199,7 +202,7 @@ public void onStart(PipeSnapshot snapshot) throws Exception {
break;
default:
throw new IllegalArgumentException("[StreamX] unsupported ApplicationType of custom code: "
- + app.getApplicationType());
+ + app.getApplicationType());
}
} else {
fsOperator.upload(app.getDistHome(), appHome);
@@ -216,9 +219,6 @@ public void onStart(PipeSnapshot snapshot) throws Exception {
}
}
}
-
- AppBuildPipeline buildPipeline = AppBuildPipeline.fromPipeSnapshot(snapshot).setAppId(app.getId());
- saveEntity(buildPipeline);
}
@Override
@@ -238,12 +238,27 @@ public void onFinish(PipeSnapshot snapshot, BuildResult result) {
app.setOptionState(OptionState.NONE.getValue());
app.setLaunch(LaunchState.DONE.get());
}
+
//如果当前任务未运行,或者刚刚新增的任务,则直接将候选版本的设置为正式版本
- FlinkSql flinkSql = flinkSqlService.getEffective(app.getId(), false);
- if (!app.isRunning() || flinkSql == null) {
- applicationService.toEffective(app);
+ if (!app.isRunning()) {
+ if (app.isFlinkSqlJob()) {
+ applicationService.toEffective(app);
+ } else {
+ applicationConfigService.toEffective(app.getId(), app.getConfigId());
+ }
+ }
+
+ // backup.
+ if (!app.isNeedRollback()) {
+ if (app.isFlinkSqlJob() && newFlinkSql != null) {
+ backUpService.backup(app, newFlinkSql);
+ } else {
+ backUpService.backup(app, null);
+ }
}
+
app.setBuild(false);
+
} else {
Message message = new Message(
commonService.getCurrentUser().getUserId(),
@@ -301,11 +316,18 @@ private BuildPipeline createPipelineInstance(@Nonnull Application app) {
String mainClass = ConfigConst.STREAMX_FLINKSQL_CLIENT_CLASS();
switch (executionMode) {
case YARN_APPLICATION:
+ String yarnProvidedPath = app.getAppLib();
+ String localWorkspace = app.getLocalAppHome().concat("/lib");
+ if (app.getDevelopmentMode().equals(DevelopmentMode.CUSTOMCODE)
+ && app.getApplicationType().equals(ApplicationType.APACHE_FLINK)) {
+ yarnProvidedPath = app.getAppHome();
+ localWorkspace = app.getLocalAppHome();
+ }
FlinkYarnApplicationBuildRequest yarnAppRequest = new FlinkYarnApplicationBuildRequest(
app.getJobName(),
mainClass,
- app.getLocalAppHome().concat("/lib"),
- app.getAppLib(),
+ localWorkspace,
+ yarnProvidedPath,
app.getDevelopmentMode(),
app.getDependencyInfo()
);
@@ -314,20 +336,23 @@ private BuildPipeline createPipelineInstance(@Nonnull Application app) {
case YARN_PER_JOB:
case YARN_SESSION:
case REMOTE:
- FlinkRemoteBuildRequest remoteBuildRequest = new FlinkRemoteBuildRequest(
+ FlinkRemotePerJobBuildRequest buildRequest = new FlinkRemotePerJobBuildRequest(
app.getJobName(),
+ app.getLocalAppHome(),
mainClass,
flinkUserJar,
+ app.isCustomCodeJob(),
app.getExecutionModeEnum(),
app.getDevelopmentMode(),
flinkEnv.getFlinkVersion(),
app.getDependencyInfo()
);
- log.info("Submit params to building pipeline : {}", remoteBuildRequest);
- return FlinkRemoteBuildPipeline.of(remoteBuildRequest);
+ log.info("Submit params to building pipeline : {}", buildRequest);
+ return FlinkRemoteBuildPipeline.of(buildRequest);
case KUBERNETES_NATIVE_SESSION:
FlinkK8sSessionBuildRequest k8sSessionBuildRequest = new FlinkK8sSessionBuildRequest(
app.getJobName(),
+ app.getLocalAppHome(),
mainClass,
flinkUserJar,
app.getExecutionModeEnum(),
@@ -341,6 +366,7 @@ private BuildPipeline createPipelineInstance(@Nonnull Application app) {
case KUBERNETES_NATIVE_APPLICATION:
FlinkK8sApplicationBuildRequest k8sApplicationBuildRequest = new FlinkK8sApplicationBuildRequest(
app.getJobName(),
+ app.getLocalAppHome(),
mainClass,
flinkUserJar,
app.getExecutionModeEnum(),
@@ -429,6 +455,11 @@ public Map listPipelineStatus(List appIds) {
e -> PipelineStatus.of((Integer) e.get("pipe_status"))));
}
+ @Override
+ public void removeApp(Long appId) {
+ baseMapper.delete(new QueryWrapper().lambda().eq(AppBuildPipeline::getAppId, appId));
+ }
+
public boolean saveEntity(AppBuildPipeline pipe) {
AppBuildPipeline old = getById(pipe.getAppId());
if (old == null) {
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplicationBackUpServiceImpl.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplicationBackUpServiceImpl.java
index 2d0e352759..85de2367dd 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplicationBackUpServiceImpl.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplicationBackUpServiceImpl.java
@@ -114,7 +114,12 @@ public void rollback(ApplicationBackUp backParam) {
// 1) 在回滚时判断当前生效的项目是否需要备份.如需要则先执行备份...
if (backParam.isBackup()) {
application.setBackUpDescription(backParam.getDescription());
- backup(application);
+ if (application.isFlinkSqlJob()) {
+ FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), false);
+ backup(application, flinkSql);
+ } else {
+ backup(application, null);
+ }
}
//2) 恢复 配置和SQL
@@ -232,7 +237,7 @@ public Boolean delete(Long id) throws ServiceException {
@Override
@Transactional(rollbackFor = {Exception.class})
- public void backup(Application application) {
+ public void backup(Application application, FlinkSql flinkSql) {
//1) 基础的配置文件备份
String appHome = (application.isCustomCodeJob() && application.isCICDJob()) ? application.getDistHome() : application.getAppHome();
FsOperator fsOperator = application.getFsOperator();
@@ -244,9 +249,7 @@ public void backup(Application application) {
}
//2) FlinkSQL任务需要备份sql和依赖.
int version = 1;
- if (application.isFlinkSqlJob()) {
- FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), false);
- assert flinkSql != null;
+ if (flinkSql != null) {
application.setSqlId(flinkSql.getId());
version = flinkSql.getVersion();
} else if (config != null) {
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplicationServiceImpl.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplicationServiceImpl.java
index 6faf0942e8..51d4edff37 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplicationServiceImpl.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/ApplicationServiceImpl.java
@@ -32,6 +32,9 @@
import com.streamxhub.streamx.common.enums.DevelopmentMode;
import com.streamxhub.streamx.common.enums.ExecutionMode;
import com.streamxhub.streamx.common.enums.ResolveOrder;
+import com.streamxhub.streamx.common.enums.StorageType;
+import com.streamxhub.streamx.common.fs.HdfsOperator;
+import com.streamxhub.streamx.common.util.DeflaterUtils;
import com.streamxhub.streamx.common.util.ExceptionUtils;
import com.streamxhub.streamx.common.util.ThreadUtils;
import com.streamxhub.streamx.common.util.Utils;
@@ -80,7 +83,6 @@
import com.streamxhub.streamx.flink.kubernetes.K8sFlinkTrkMonitor;
import com.streamxhub.streamx.flink.kubernetes.model.FlinkMetricCV;
import com.streamxhub.streamx.flink.kubernetes.model.TrkId;
-import com.streamxhub.streamx.flink.packer.pipeline.PipelineStatus;
import com.streamxhub.streamx.flink.submit.FlinkSubmitter;
import com.streamxhub.streamx.flink.submit.bean.KubernetesSubmitParam;
import com.streamxhub.streamx.flink.submit.bean.StopRequest;
@@ -91,6 +93,7 @@
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.runtime.jobgraph.SavepointConfigOptions;
import org.springframework.beans.factory.annotation.Autowired;
@@ -112,6 +115,7 @@
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
@@ -129,20 +133,20 @@
@Service
@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
public class ApplicationServiceImpl extends ServiceImpl
- implements ApplicationService {
+ implements ApplicationService {
private final Map tailOutMap = new ConcurrentHashMap<>();
private final Map tailBeginning = new ConcurrentHashMap<>();
private final ExecutorService executorService = new ThreadPoolExecutor(
- Runtime.getRuntime().availableProcessors() * 2,
- 200,
- 60L,
- TimeUnit.SECONDS,
- new LinkedBlockingQueue<>(1024),
- ThreadUtils.threadFactory("streamx-deploy-executor"),
- new ThreadPoolExecutor.AbortPolicy()
+ Runtime.getRuntime().availableProcessors() * 2,
+ 200,
+ 60L,
+ TimeUnit.SECONDS,
+ new LinkedBlockingQueue<>(1024),
+ ThreadUtils.threadFactory("streamx-deploy-executor"),
+ new ThreadPoolExecutor.AbortPolicy()
);
private static final Pattern JOB_NAME_PATTERN = Pattern.compile("^[.\\x{4e00}-\\x{9fa5}A-Za-z0-9_\\-\\s]+$");
@@ -156,19 +160,19 @@ public class ApplicationServiceImpl extends ServiceImpl().lambda()
- .eq(Application::getJobName, appParam.getJobName())) > 0;
+ new QueryWrapper().lambda()
+ .eq(Application::getJobName, appParam.getJobName())) > 0;
if (appParam.getId() != null) {
Application app = getById(appParam.getId());
@@ -488,19 +501,19 @@ public AppExistsState checkExists(Application appParam) {
FlinkAppState state = FlinkAppState.of(app.getState());
//当前任务已停止的状态
if (state.equals(FlinkAppState.ADDED) ||
- state.equals(FlinkAppState.CREATED) ||
- state.equals(FlinkAppState.FAILED) ||
- state.equals(FlinkAppState.CANCELED) ||
- state.equals(FlinkAppState.LOST) ||
- state.equals(FlinkAppState.KILLED)) {
+ state.equals(FlinkAppState.CREATED) ||
+ state.equals(FlinkAppState.FAILED) ||
+ state.equals(FlinkAppState.CANCELED) ||
+ state.equals(FlinkAppState.LOST) ||
+ state.equals(FlinkAppState.KILLED)) {
// check whether jobName exists on yarn
if (ExecutionMode.isYarnMode(appParam.getExecutionMode())
- && YarnUtils.isContains(appParam.getJobName())) {
+ && YarnUtils.isContains(appParam.getJobName())) {
return AppExistsState.IN_YARN;
}
// check whether clusterId, namespace, jobId on kubernetes
else if (ExecutionMode.isKubernetesMode(appParam.getExecutionMode())
- && k8sFlinkTrkMonitor.checkIsInRemoteCluster(toTrkId(appParam))) {
+ && k8sFlinkTrkMonitor.checkIsInRemoteCluster(toTrkId(appParam))) {
return AppExistsState.IN_KUBERNETES;
}
}
@@ -511,12 +524,12 @@ else if (ExecutionMode.isKubernetesMode(appParam.getExecutionMode())
// check whether jobName exists on yarn
if (ExecutionMode.isYarnMode(appParam.getExecutionMode())
- && YarnUtils.isContains(appParam.getJobName())) {
+ && YarnUtils.isContains(appParam.getJobName())) {
return AppExistsState.IN_YARN;
}
// check whether clusterId, namespace, jobId on kubernetes
else if (ExecutionMode.isKubernetesMode(appParam.getExecutionMode())
- && k8sFlinkTrkMonitor.checkIsInRemoteCluster(toTrkId(appParam))) {
+ && k8sFlinkTrkMonitor.checkIsInRemoteCluster(toTrkId(appParam))) {
return AppExistsState.IN_KUBERNETES;
}
}
@@ -542,7 +555,7 @@ public boolean create(Application appParam) {
if (saved) {
if (appParam.isFlinkSqlJob()) {
FlinkSql flinkSql = new FlinkSql(appParam);
- flinkSqlService.create(flinkSql, CandidateType.NEW);
+ flinkSqlService.create(flinkSql);
}
if (appParam.getConfig() != null) {
configService.create(appParam, true);
@@ -580,7 +593,7 @@ public boolean update(Application appParam) {
//部署模式发生了变化.
if (!application.getExecutionMode().equals(appParam.getExecutionMode())) {
if (appParam.getExecutionModeEnum().equals(ExecutionMode.YARN_APPLICATION) ||
- application.getExecutionModeEnum().equals(ExecutionMode.YARN_APPLICATION)) {
+ application.getExecutionModeEnum().equals(ExecutionMode.YARN_APPLICATION)) {
application.setBuild(true);
}
}
@@ -644,20 +657,12 @@ public boolean update(Application appParam) {
* @param appParam
*/
private void updateFlinkSqlJob(Application application, Application appParam) {
- AppBuildPipeline buildPipeline = appBuildPipeService.getById(application.getId());
- //从来没有上线过的新任务.直接保存
- if (buildPipeline == null) {
- FlinkSql newFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW);
- flinkSqlService.removeById(newFlinkSql.getId());
+ FlinkSql effectiveFlinkSql = flinkSqlService.getEffective(application.getId(), true);
+ if (effectiveFlinkSql == null) {
+ effectiveFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW);
+ flinkSqlService.removeById(effectiveFlinkSql.getId());
FlinkSql sql = new FlinkSql(appParam);
- flinkSqlService.create(sql, CandidateType.NEW);
- flinkSqlService.toEffective(application.getId(), sql.getId());
- } else if (buildPipeline.getPipeStatus().equals(PipelineStatus.failure)) {
- FlinkSql newFlinkSql = flinkSqlService.getEffective(application.getId(), false);
- flinkSqlService.removeById(newFlinkSql.getId());
- FlinkSql sql = new FlinkSql(appParam);
- flinkSqlService.create(sql, CandidateType.NEW);
- flinkSqlService.toEffective(application.getId(), sql.getId());
+ flinkSqlService.create(sql);
application.setBuild(true);
} else {
//1) 获取copy的源FlinkSql
@@ -688,22 +693,18 @@ private void updateFlinkSqlJob(Application application, Application appParam) {
flinkSqlService.cleanCandidate(historyFlinkSql.getId());
}
FlinkSql sql = new FlinkSql(appParam);
- CandidateType type = (changedType.isDependencyChanged() || application.isRunning()) ? CandidateType.NEW : CandidateType.NONE;
- flinkSqlService.create(sql, type);
-
+ flinkSqlService.create(sql);
if (changedType.isDependencyChanged()) {
application.setBuild(true);
}
} else {
// 2) 判断版本是否发生变化
//获取正式版本的flinkSql
- FlinkSql effectiveFlinkSql = flinkSqlService.getEffective(application.getId(), true);
- assert effectiveFlinkSql != null;
boolean versionChanged = !effectiveFlinkSql.getId().equals(appParam.getSqlId());
if (versionChanged) {
//sql和依赖未发生变更,但是版本号发生了变化,说明是回滚到某个版本了
CandidateType type = CandidateType.HISTORY;
- flinkSqlService.setCandidateOrEffective(type, appParam.getId(), appParam.getSqlId());
+ flinkSqlService.setCandidate(type, appParam.getId(), appParam.getSqlId());
//直接回滚到某个历史版本(rollback)
application.setLaunch(LaunchState.NEED_ROLLBACK.get());
application.setBuild(true);
@@ -746,11 +747,21 @@ public boolean checkBuildAndUpdate(Application application) {
updateWrapper.set(Application::getOptionState, OptionState.NONE.getValue());
}
baseMapper.update(application, updateWrapper);
+
+ // backup.
+ if (application.isFlinkSqlJob()) {
+ FlinkSql newFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW);
+ if (!application.isNeedRollback() && newFlinkSql != null) {
+ backUpService.backup(application, newFlinkSql);
+ }
+ }
+
//如果当前任务未运行,或者刚刚新增的任务,则直接将候选版本的设置为正式版本
FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), false);
if (!application.isRunning() || flinkSql == null) {
this.toEffective(application);
}
+
}
return build;
}
@@ -779,6 +790,10 @@ public Application getApp(Application appParam) {
}
if (application.isFlinkSqlJob()) {
FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), true);
+ if (flinkSql == null) {
+ flinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW);
+ flinkSql.setSql(DeflaterUtils.unzipString(flinkSql.getSql()));
+ }
flinkSql.setToApplication(application);
} else {
if (application.isCICDJob()) {
@@ -805,6 +820,7 @@ public Application getApp(Application appParam) {
}
}
}
+
if (ExecutionMode.YARN_SESSION.equals(application.getExecutionModeEnum())) {
if (!application.getHotParamsMap().isEmpty()) {
if (application.getHotParamsMap().containsKey(ConfigConst.KEY_YARN_APP_ID())) {
@@ -868,13 +884,15 @@ public void cancel(Application appParam) {
String customSavepoint = "";
if (isKubernetesApp(application)) {
customSavepoint = StringUtils.isNotBlank(appParam.getSavePoint()) ? appParam.getSavePoint() :
- FlinkSubmitter
- .extractDynamicOptionAsJava(application.getDynamicOptions())
- .getOrDefault(ConfigConst.KEY_FLINK_SAVEPOINT_PATH(), "");
+ FlinkSubmitter
+ .extractDynamicOptionAsJava(application.getDynamicOptions())
+ .getOrDefault(ConfigConst.KEY_FLINK_SAVEPOINT_PATH(), "");
}
Map extraParameter = new HashMap<>();
+ Map optionMap = application.getOptionMap();
+
if (ExecutionMode.isRemoteMode(application.getExecutionModeEnum())) {
FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId());
assert cluster != null;
@@ -882,8 +900,12 @@ public void cancel(Application appParam) {
extraParameter.put(RestOptions.ADDRESS.key(), activeAddress.getHost());
extraParameter.put(RestOptions.PORT.key(), activeAddress.getPort());
}
- if (ExecutionMode.isYarnSessionMode(application.getExecutionModeEnum())) {
- if (!application.getHotParamsMap().isEmpty()) {
+
+ if (ExecutionMode.isYarnMode(application.getExecutionModeEnum())) {
+ String yarnQueue = (String) application.getHotParamsMap().get(ConfigConst.KEY_YARN_APP_QUEUE());
+ optionMap.put(ConfigConst.KEY_YARN_APP_QUEUE(), yarnQueue);
+
+ if (ExecutionMode.YARN_SESSION.equals(application.getExecutionModeEnum())) {
String yarnSessionClusterId = (String) application.getHotParamsMap().get(ConfigConst.KEY_YARN_APP_ID());
assert yarnSessionClusterId != null;
extraParameter.put(ConfigConst.KEY_YARN_APP_ID(), yarnSessionClusterId);
@@ -891,16 +913,16 @@ public void cancel(Application appParam) {
}
StopRequest stopInfo = new StopRequest(
- flinkEnv.getFlinkVersion(),
- ExecutionMode.of(application.getExecutionMode()),
- application.getAppId(),
- application.getJobId(),
- appParam.getSavePointed(),
- appParam.getDrain(),
- customSavepoint,
- application.getK8sNamespace(),
- application.getDynamicOptions(),
- extraParameter
+ flinkEnv.getFlinkVersion(),
+ ExecutionMode.of(application.getExecutionMode()),
+ application.getAppId(),
+ application.getJobId(),
+ appParam.getSavePointed(),
+ appParam.getDrain(),
+ customSavepoint,
+ application.getK8sNamespace(),
+ application.getDynamicOptions(),
+ extraParameter
);
StopResponse stopResponse = FlinkSubmitter.stop(stopInfo);
@@ -982,7 +1004,7 @@ public boolean start(Application appParam, boolean auto) throws Exception {
//1) 真正执行启动相关的操作..
String appConf;
- String flinkUserJar;
+ String flinkUserJar = null;
ApplicationLog applicationLog = new ApplicationLog();
applicationLog.setAppId(application.getId());
applicationLog.setStartTime(new Date());
@@ -998,32 +1020,32 @@ public boolean start(Application appParam, boolean auto) throws Exception {
assert executionMode != null;
if (application.isUploadJob()) {
appConf = String.format("json://{\"%s\":\"%s\"}",
- ConfigConst.KEY_FLINK_APPLICATION_MAIN_CLASS(),
- application.getMainClass()
+ ConfigConst.KEY_FLINK_APPLICATION_MAIN_CLASS(),
+ application.getMainClass()
);
- if (ExecutionMode.isKubernetesSessionMode(application.getExecutionMode())) {
- flinkUserJar = String.format("%s/%s", application.getAppHome(), application.getJar());
- } else {
- flinkUserJar = String.format("%s/%s", application.getAppLib(), application.getJar());
- }
} else {
switch (application.getApplicationType()) {
case STREAMX_FLINK:
String format = applicationConfig.getFormat() == 1 ? "yaml" : "prop";
appConf = String.format("%s://%s", format, applicationConfig.getContent());
- flinkUserJar = String.format("%s/%s", application.getAppLib(), application.getModule().concat(".jar"));
break;
case APACHE_FLINK:
- appConf = String.format("json://{\"%s\":\"%s\"}",
- ConfigConst.KEY_FLINK_APPLICATION_MAIN_CLASS(),
- application.getMainClass()
- );
- flinkUserJar = String.format("%s/%s", application.getAppHome(), application.getJar());
+ appConf = String.format("json://{\"%s\":\"%s\"}", ConfigConst.KEY_FLINK_APPLICATION_MAIN_CLASS(), application.getMainClass());
break;
default:
throw new IllegalArgumentException("[StreamX] ApplicationType must be (StreamX flink | Apache flink)... ");
}
}
+ switch (application.getApplicationType()) {
+ case STREAMX_FLINK:
+ flinkUserJar = String.format("%s/%s", application.getAppLib(), application.getModule().concat(".jar"));
+ break;
+ case APACHE_FLINK:
+ flinkUserJar = String.format("%s/%s", application.getAppHome(), application.getJar());
+ break;
+ default:
+ throw new IllegalArgumentException("[StreamX] ApplicationType must be (StreamX flink | Apache flink)... ");
+ }
} else if (application.isFlinkSqlJob()) {
FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), false);
assert flinkSql != null;
@@ -1033,35 +1055,19 @@ public boolean start(Application appParam, boolean auto) throws Exception {
appConf = applicationConfig == null ? null : String.format("yaml://%s", applicationConfig.getContent());
assert executionMode != null;
//3) client
- switch (executionMode) {
- case REMOTE:
- case YARN_PER_JOB:
- case YARN_SESSION:
- case KUBERNETES_NATIVE_SESSION:
- case KUBERNETES_NATIVE_APPLICATION:
- flinkUserJar = Workspace.local().APP_CLIENT().concat("/").concat(sqlDistJar);
- break;
- case YARN_APPLICATION:
- String clientPath = Workspace.remote().APP_CLIENT();
- flinkUserJar = String.format("%s/%s", clientPath, sqlDistJar);
- break;
- default:
- throw new UnsupportedOperationException("Unsupported..." + executionMode);
+ if (executionMode.equals(ExecutionMode.YARN_APPLICATION)) {
+ String clientPath = Workspace.remote().APP_CLIENT();
+ flinkUserJar = String.format("%s/%s", clientPath, sqlDistJar);
}
} else {
throw new UnsupportedOperationException("Unsupported...");
}
- Map optionMap = application.getOptionMap();
- if (ExecutionMode.YARN_APPLICATION.equals(application.getExecutionModeEnum())
- || ExecutionMode.YARN_SESSION.equals(application.getExecutionModeEnum())) {
- optionMap.putAll(application.getHotParamsMap());
- }
-
String[] dynamicOption = CommonUtils.notEmpty(application.getDynamicOptions()) ? application.getDynamicOptions().split("\\s+") : new String[0];
- Map extraParameter = new HashMap<>();
+ Map extraParameter = new HashMap<>(0);
extraParameter.put(ConfigConst.KEY_JOB_ID(), application.getId());
+ extraParameter.put(PipelineOptions.JARS.key(), flinkUserJar);
if (appParam.getAllowNonRestored()) {
extraParameter.put(SavepointConfigOptions.SAVEPOINT_IGNORE_UNCLAIMED_STATE.key(), true);
@@ -1075,6 +1081,12 @@ public boolean start(Application appParam, boolean auto) throws Exception {
extraParameter.put(RestOptions.PORT.key(), activeAddress.getPort());
}
+ if (ExecutionMode.YARN_SESSION.equals(application.getExecutionModeEnum())) {
+ String yarnSessionClusterId = (String) application.getHotParamsMap().get(ConfigConst.KEY_YARN_APP_ID());
+ assert yarnSessionClusterId != null;
+ extraParameter.put(ConfigConst.KEY_YARN_APP_ID(), yarnSessionClusterId);
+ }
+
if (application.isFlinkSqlJob()) {
FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), false);
extraParameter.put(ConfigConst.KEY_FLINK_SQL(null), flinkSql.getSql());
@@ -1083,9 +1095,9 @@ public boolean start(Application appParam, boolean auto) throws Exception {
ResolveOrder resolveOrder = ResolveOrder.of(application.getResolveOrder());
KubernetesSubmitParam kubernetesSubmitParam = new KubernetesSubmitParam(
- application.getClusterId(),
- application.getK8sNamespace(),
- application.getK8sRestExposedTypeEnum());
+ application.getClusterId(),
+ application.getK8sNamespace(),
+ application.getK8sRestExposedTypeEnum());
FlinkEnv flinkEnv = flinkEnvService.getByIdOrDefault(application.getVersionId());
if (flinkEnv == null) {
@@ -1095,26 +1107,27 @@ public boolean start(Application appParam, boolean auto) throws Exception {
AppBuildPipeline buildPipeline = appBuildPipeService.getById(application.getId());
SubmitRequest submitRequest = new SubmitRequest(
- flinkEnv.getFlinkVersion(),
- flinkEnv.getFlinkConf(),
- flinkUserJar,
- DevelopmentMode.of(application.getJobType()),
- ExecutionMode.of(application.getExecutionMode()),
- resolveOrder,
- application.getJobName(),
- appConf,
- application.getApplicationType(),
- getSavePointed(appParam),
- appParam.getFlameGraph() ? getFlameGraph(application) : null,
- optionMap,
- dynamicOption,
- application.getArgs(),
- buildPipeline == null ? null : buildPipeline.getBuildResult(),
- kubernetesSubmitParam,
- extraParameter
+ flinkEnv.getFlinkVersion(),
+ flinkEnv.getFlinkConf(),
+ DevelopmentMode.of(application.getJobType()),
+ ExecutionMode.of(application.getExecutionMode()),
+ resolveOrder,
+ application.getJobName(),
+ appConf,
+ application.getApplicationType(),
+ getSavePointed(appParam),
+ appParam.getFlameGraph() ? getFlameGraph(application) : null,
+ application.getOptionMap(),
+ dynamicOption,
+ application.getArgs(),
+ buildPipeline.getBuildResult(),
+ kubernetesSubmitParam,
+ extraParameter
);
- SubmitResponse submitResponse = FlinkSubmitter.submit(submitRequest);
+ Future future = executorService.submit(() -> FlinkSubmitter.submit(submitRequest));
+
+ SubmitResponse submitResponse = future.get(60, TimeUnit.SECONDS);
assert submitResponse != null;
@@ -1142,12 +1155,10 @@ public boolean start(Application appParam, boolean auto) throws Exception {
updateById(application);
//2) 启动完成将任务加入到监控中...
- // 更改操作状态...x
if (isKubernetesApp(application)) {
k8sFlinkTrkMonitor.trackingJob(toTrkId(application));
} else {
FlinkTrackingTask.setOptionState(appParam.getId(), OptionState.STARTING);
- // 加入到跟踪监控中...
FlinkTrackingTask.addTracking(application);
}
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/FlinkSqlServiceImpl.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/FlinkSqlServiceImpl.java
index 24210ffa20..c4b5f901ad 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/FlinkSqlServiceImpl.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/service/impl/FlinkSqlServiceImpl.java
@@ -73,7 +73,7 @@ public class FlinkSqlServiceImpl extends ServiceImpl i
@Override
public FlinkSql getEffective(Long appId, boolean decode) {
FlinkSql flinkSql = baseMapper.getEffective(appId);
- if (decode) {
+ if (flinkSql != null && decode) {
flinkSql.setSql(DeflaterUtils.unzipString(flinkSql.getSql()));
}
return flinkSql;
@@ -81,34 +81,25 @@ public FlinkSql getEffective(Long appId, boolean decode) {
@Override
@Transactional(rollbackFor = {Exception.class})
- public void create(FlinkSql flinkSql, CandidateType type) {
+ public void create(FlinkSql flinkSql) {
Integer version = this.baseMapper.getLastVersion(flinkSql.getAppId());
flinkSql.setVersion(version == null ? 1 : version + 1);
String sql = DeflaterUtils.zipString(flinkSql.getSql());
flinkSql.setSql(sql);
this.save(flinkSql);
- this.setCandidateOrEffective(type, flinkSql.getAppId(), flinkSql.getId());
+ this.setCandidate(CandidateType.NEW, flinkSql.getAppId(), flinkSql.getId());
}
@Override
- public void setCandidateOrEffective(CandidateType candidateType, Long appId, Long sqlId) {
- if (CandidateType.NONE.equals(candidateType)) {
- this.toEffective(appId, sqlId);
- } else {
- this.setCandidate(appId, sqlId, candidateType);
- }
- }
-
- @Transactional(rollbackFor = {Exception.class})
- public void setCandidate(Long appId, Long sqlId, CandidateType candidateType) {
+ public void setCandidate(CandidateType candidateType, Long appId, Long sqlId) {
LambdaUpdateWrapper updateWrapper = new UpdateWrapper().lambda();
updateWrapper.set(FlinkSql::getCandidate, 0)
- .eq(FlinkSql::getAppId, appId);
+ .eq(FlinkSql::getAppId, appId);
this.update(updateWrapper);
updateWrapper = new UpdateWrapper().lambda();
updateWrapper.set(FlinkSql::getCandidate, candidateType.get())
- .eq(FlinkSql::getId, sqlId);
+ .eq(FlinkSql::getId, sqlId);
this.update(updateWrapper);
}
@@ -120,14 +111,14 @@ public List history(Application application) {
List sqlList = this.baseMapper.selectList(wrapper);
FlinkSql effective = getEffective(application.getId(), false);
-
- for (FlinkSql sql : sqlList) {
- if (sql.getId().equals(effective.getId())) {
- sql.setEffective(true);
- break;
+ if (effective != null && !sqlList.isEmpty()) {
+ for (FlinkSql sql : sqlList) {
+ if (sql.getId().equals(effective.getId())) {
+ sql.setEffective(true);
+ break;
+ }
}
}
-
return sqlList;
}
@@ -164,12 +155,6 @@ public void rollback(Application application) {
//检查并备份当前的任务.
FlinkSql effectiveSql = getEffective(application.getId(), false);
assert effectiveSql != null;
- if (!isFlinkSqlBacked(effectiveSql)) {
- log.info("current job version:{}, Backing up...", sql.getVersion());
- backUpService.backup(application);
- } else {
- log.info("current job version:{}, already backed", sql.getVersion());
- }
//回滚历史版本的任务
backUpService.rollbackFlinkSql(application, sql);
} catch (Exception e) {
diff --git a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/task/FlinkTrackingTask.java b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/task/FlinkTrackingTask.java
index be1d412179..7a861df85b 100644
--- a/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/task/FlinkTrackingTask.java
+++ b/streamx-console/streamx-console-service/src/main/java/com/streamxhub/streamx/console/core/task/FlinkTrackingTask.java
@@ -141,14 +141,10 @@ public class FlinkTrackingTask {
private static final Map OPTIONING = new ConcurrentHashMap<>();
- private static final Long STARTING_INTERVAL = 1000L * 30;
-
private Long lastTrackTime = 0L;
private Long lastOptionTime = 0L;
- private static Long optioningTime = 0L;
-
private static final Byte DEFAULT_FLAG_BYTE = Byte.valueOf("0");
private static final ExecutorService EXECUTOR = new ThreadPoolExecutor(
@@ -200,8 +196,7 @@ public void execute() {
}
private void tracking() {
- Long now = System.currentTimeMillis();
- lastTrackTime = now;
+ lastTrackTime = System.currentTimeMillis();
TRACKING_MAP.entrySet().stream()
.filter(trkElement -> !isKubernetesMode(trkElement.getValue().getExecutionMode()))
.forEach(trkElement -> EXECUTOR.execute(() -> {
@@ -222,11 +217,8 @@ private void tracking() {
* 3) 从flink的restAPI和yarn的restAPI都查询失败
* 此时需要根据管理端正在操作的状态来决定是否返回最终状态,需满足:
* 1: 操作状态为为取消和正常的状态跟踪(操作状态不为STARTING)
- * 2: 如果操作状态为STARTING,则需要判断操作间隔是否在30秒之内(启动可能需要时间,这里给足够多的时间去完成启动)
*/
- if (optionState == null
- || !optionState.equals(OptionState.STARTING)
- || now - optioningTime >= STARTING_INTERVAL) {
+ if (optionState == null || !optionState.equals(OptionState.STARTING)) {
//非正在手动映射appId
if (application.getState() != FlinkAppState.MAPPING.getValue()) {
log.error("flinkTrackingTask getFromFlinkRestApi and getFromYarnRestApi error,job failed,savePoint obsoleted!");
@@ -338,7 +330,7 @@ private void handleJobOverview(Application application, JobsOverview.Job jobOver
FlinkEnv flinkEnv = getFlinkEnvCache(application);
FlinkCluster flinkCluster = getFlinkClusterCache(application);
Overview override = application.httpOverview(flinkEnv, flinkCluster);
- if (override.getSlotsTotal() > 0) {
+ if (override != null && override.getSlotsTotal() > 0) {
STARTING_CACHE.invalidate(application.getId());
application.setTotalTM(override.getTaskmanagers());
application.setTotalSlot(override.getSlotsTotal());
@@ -524,7 +516,9 @@ private void getFromYarnRestApi(Application application, StopFrom stopFrom) thro
// 2)到yarn的restApi中查询状态
AppInfo appInfo = application.httpYarnAppInfo();
if (appInfo == null) {
- throw new RuntimeException("flinkTrackingTask getFromYarnRestApi failed ");
+ if (!ExecutionMode.REMOTE.equals(application.getExecutionModeEnum())) {
+ throw new RuntimeException("flinkTrackingTask getFromYarnRestApi failed ");
+ }
} else {
try {
String state = appInfo.getApp().getFinalStatus();
@@ -556,7 +550,9 @@ private void getFromYarnRestApi(Application application, StopFrom stopFrom) thro
}
}
} catch (Exception e) {
- throw new RuntimeException("flinkTrackingTask getFromYarnRestApi error,", e);
+ if (!ExecutionMode.REMOTE.equals(application.getExecutionModeEnum())) {
+ throw new RuntimeException("flinkTrackingTask getFromYarnRestApi error,", e);
+ }
}
}
}
@@ -613,7 +609,7 @@ public static void setOptionState(Long appId, OptionState state) {
return;
}
log.info("flinkTrackingTask setOptioning");
- optioningTime = System.currentTimeMillis();
+ Long optioningTime = System.currentTimeMillis();
OPTIONING.put(appId, state);
//从streamx停止
if (state.equals(OptionState.CANCELLING)) {
diff --git a/streamx-console/streamx-console-service/src/main/resources/email.html b/streamx-console/streamx-console-service/src/main/resources/email.html
index e01afbd139..66a28a9ca6 100644
--- a/streamx-console/streamx-console-service/src/main/resources/email.html
+++ b/streamx-console/streamx-console-service/src/main/resources/email.html
@@ -1,7 +1,7 @@
- Push Email
+ StreamX Email