[Optimization-2655][core] Optimize data preview in data studio
aiwenmo committed Dec 15, 2023
1 parent eb0b571 commit 2663f3b
Showing 10 changed files with 15 additions and 55 deletions.
@@ -33,7 +33,6 @@
import org.dinky.data.result.Result;
import org.dinky.data.result.SelectResult;
import org.dinky.explainer.lineage.LineageResult;
import org.dinky.metadata.result.JdbcSelectResult;
import org.dinky.service.StudioService;

import java.util.List;
@@ -90,19 +89,6 @@ public Result<SelectResult> getJobData(@RequestParam String jobId) {
return Result.succeed(studioService.getJobData(jobId));
}

/** Get data by jobId */
@GetMapping("/getCommonSqlData")
@ApiOperation("Get Common Sql Data")
@ApiImplicitParam(
name = "taskId",
value = "Get Common Sql Data",
required = true,
dataType = "Integer",
paramType = "query")
public Result<JdbcSelectResult> getJobData(@RequestParam Integer taskId) {
return Result.succeed(studioService.getCommonSqlData(taskId));
}

/** Get the lineage analysis of a single task instance */
@PostMapping("/getLineage")
@ApiOperation("Get Job Lineage")
@@ -28,7 +28,6 @@
import org.dinky.data.result.IResult;
import org.dinky.data.result.SelectResult;
import org.dinky.explainer.lineage.LineageResult;
import org.dinky.metadata.result.JdbcSelectResult;

import java.util.List;

@@ -43,8 +42,6 @@ public interface StudioService {

IResult executeDDL(StudioDDLDTO studioDDLDTO);

JdbcSelectResult getCommonSqlData(Integer taskId);

SelectResult getJobData(String jobId);

LineageResult getLineage(StudioLineageDTO studioCADTO);
@@ -113,8 +113,7 @@ private String buildRemoteEnvironmentAddress(Integer id) {
}

private String buildLocalEnvironmentAddress(int port) {
String host = IpUtils.getHostIp();
return host + ":" + port;
return "0.0.0.0:" + port;
}

@Override
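Note on the hunk above: the local environment address no longer resolves the machine's outbound IP through IpUtils.getHostIp(); it simply advertises 0.0.0.0 with the chosen port. IpUtils itself is not part of this diff; the sketch below shows the kind of host-IP lookup such a utility typically performs, so the before/after behaviour is clear. The class and method names here are illustrative only, not Dinky code.

import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.Collections;

// Rough sketch of a typical host-IP lookup: walk the non-loopback interfaces
// and return the first site-local IPv4 address that is found.
public class HostIpSketch {

    public static String getHostIp() throws SocketException {
        for (NetworkInterface nic : Collections.list(NetworkInterface.getNetworkInterfaces())) {
            if (nic.isLoopback() || !nic.isUp()) {
                continue;
            }
            for (InetAddress addr : Collections.list(nic.getInetAddresses())) {
                // Skip IPv6 addresses (they contain ':') and keep private IPv4 ones.
                if (addr.isSiteLocalAddress() && addr.getHostAddress().indexOf(':') < 0) {
                    return addr.getHostAddress();
                }
            }
        }
        return "127.0.0.1"; // fall back to loopback if nothing better is found
    }

    public static void main(String[] args) throws SocketException {
        System.out.println(getHostIp() + " vs. the new fixed bind address 0.0.0.0");
    }
}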
@@ -33,7 +33,6 @@
import org.dinky.data.model.Table;
import org.dinky.data.result.DDLResult;
import org.dinky.data.result.IResult;
import org.dinky.data.result.ResultPool;
import org.dinky.data.result.SelectResult;
import org.dinky.executor.CustomTableEnvironment;
import org.dinky.explainer.lineage.LineageBuilder;
@@ -42,7 +41,6 @@
import org.dinky.job.JobConfig;
import org.dinky.job.JobManager;
import org.dinky.metadata.driver.Driver;
import org.dinky.metadata.result.JdbcSelectResult;
import org.dinky.service.ClusterInstanceService;
import org.dinky.service.DataBaseService;
import org.dinky.service.StudioService;
@@ -87,11 +85,6 @@ private IResult executeMSFlinkSql(StudioMetaStoreDTO studioMetaStoreDTO) {
return jobResult;
}

@Override
public JdbcSelectResult getCommonSqlData(Integer taskId) {
return (JdbcSelectResult) ResultPool.getCommonSqlCache(taskId);
}

@Override
public IResult executeDDL(StudioDDLDTO studioDDLDTO) {
JobConfig config = studioDDLDTO.getJobConfig();
@@ -201,6 +201,7 @@ public JobResult executeJob(TaskDTO task) throws Exception {
return jobResult;
}

// Submit and export task
@ProcessStep(type = ProcessStepType.SUBMIT_BUILD_CONFIG)
public JobConfig buildJobSubmitConfig(TaskDTO task) {
task.setStatement(buildEnvSql(task) + task.getStatement());
@@ -234,6 +235,7 @@ public JobConfig buildJobSubmitConfig(TaskDTO task) {
return config;
}

// Savepoint and cancel task
@ProcessStep(type = ProcessStepType.SUBMIT_BUILD_CONFIG)
public JobConfig buildJobConfig(TaskDTO task) {
JobConfig config = task.getJobConfig();
@@ -23,7 +23,6 @@
import org.dinky.data.annotations.SupportDialect;
import org.dinky.data.dto.SqlDTO;
import org.dinky.data.dto.TaskDTO;
import org.dinky.data.result.ResultPool;
import org.dinky.data.result.SqlExplainResult;
import org.dinky.job.JobResult;
import org.dinky.service.DataBaseService;
@@ -65,7 +64,6 @@ public JobResult execute() {
SqlDTO sqlDTO = SqlDTO.build(task.getStatement(), task.getDatabaseId(), null);
DataBaseService dataBaseService = SpringUtil.getBean(DataBaseService.class);
JobResult jobResult = dataBaseService.executeCommonSql(sqlDTO);
ResultPool.putCommonSqlCache(task.getId(), jobResult.getResult());
return jobResult;
}

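The removed cache write above is the heart of this optimization: executeCommonSql runs synchronously and already returns its rows on the JobResult, so copying them into ResultPool for a later getCommonSqlData lookup was redundant. A toy sketch of that idea, with made-up names rather than Dinky APIs:

import java.util.Arrays;
import java.util.List;

// Toy illustration: a synchronous SQL call hands its rows straight back to the
// caller, so there is no need to park them in a shared cache and fetch them
// again through a second endpoint.
public class SyncPreviewSketch {

    static List<String> executeCommonSql(String statement) {
        // Stand-in for the real JDBC execution.
        return Arrays.asList("row-1", "row-2");
    }

    public static void main(String[] args) {
        List<String> preview = executeCommonSql("SELECT 1");
        // The caller already holds the preview data; no cache round-trip is needed.
        preview.forEach(System.out::println);
    }
}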
10 changes: 0 additions & 10 deletions dinky-core/src/main/java/org/dinky/data/result/ResultPool.java
@@ -33,8 +33,6 @@ public final class ResultPool {

private ResultPool() {}

private static final Cache<Integer, IResult> COMMON_SQL_SEARCH_CACHE =
new TimedCache<>(TimeUnit.MINUTES.toMillis(10));
private static final Cache<String, SelectResult> results = new TimedCache<>(TimeUnit.MINUTES.toMillis(10));

public static boolean containsKey(String key) {
@@ -45,14 +43,6 @@ public static void put(SelectResult result) {
results.put(result.getJobId(), result);
}

public static void putCommonSqlCache(Integer taskId, IResult result) {
COMMON_SQL_SEARCH_CACHE.put(taskId, result);
}

public static IResult getCommonSqlCache(Integer taskId) {
return COMMON_SQL_SEARCH_CACHE.get(taskId);
}

public static SelectResult get(String key) {
if (containsKey(key)) {
return results.get(key);
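For context, both the surviving results cache in ResultPool and the COMMON_SQL_SEARCH_CACHE deleted above use Hutool's TimedCache, whose entries expire a fixed time after being written. A minimal, self-contained sketch of that pattern, assuming Hutool is on the classpath; the class name and key/value types are illustrative, not part of the commit:

import java.util.concurrent.TimeUnit;

import cn.hutool.cache.impl.TimedCache;

// Minimal sketch of the TimedCache pattern used by ResultPool: entries expire
// ten minutes after they are written, so abandoned previews disappear without
// any explicit eviction call.
public class PreviewCacheSketch {

    private static final TimedCache<String, String> CACHE =
            new TimedCache<>(TimeUnit.MINUTES.toMillis(10));

    public static void main(String[] args) {
        CACHE.put("job-1", "rows...");
        System.out.println(CACHE.get("job-1"));        // "rows..." while still fresh
        System.out.println(CACHE.containsKey("job-2")); // false, never written
    }
}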
10 changes: 5 additions & 5 deletions dinky-core/src/main/java/org/dinky/job/Job.java
@@ -57,11 +57,11 @@ public class Job {

@Getter
public enum JobStatus {
INITIALIZE(1),
RUNNING(2),
SUCCESS(3),
FAILED(4),
CANCEL(5);
INITIALIZE(0),
RUNNING(1),
SUCCESS(2),
FAILED(3),
CANCEL(4);
final int code;

JobStatus(int code) {
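The JobStatus codes shift from 1–5 to 0–4, which makes each value's code equal to its ordinal; note that integers stored under the old numbering no longer line up with the new codes. The sketch below mirrors the new enum and adds a hypothetical fromCode helper (not part of the commit) to show why the alignment is convenient:

// Illustrative sketch, not the committed code verbatim.
public enum JobStatusSketch {
    INITIALIZE(0),
    RUNNING(1),
    SUCCESS(2),
    FAILED(3),
    CANCEL(4);

    private final int code;

    JobStatusSketch(int code) {
        this.code = code;
    }

    public int getCode() {
        return code;
    }

    public static JobStatusSketch fromCode(int code) {
        // Valid only because code == ordinal under the new 0-based numbering.
        return values()[code];
    }
}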
13 changes: 2 additions & 11 deletions dinky-web/src/pages/DataStudio/BottomContainer/Result/index.tsx
@@ -130,24 +130,15 @@ const Result = (props: any) => {
if (consoleData.result && !isRefresh) {
setData(consoleData.result);
} else {
if (isSql(current.dialect)) {
// common sql
const res = await handleGetOption('api/studio/getCommonSqlData', l('global.getdata.tips'), {
taskId: params.taskId
});
if (res.data) {
consoleData.result = res.data;
setData(res.data);
}
} else {
if (!isSql(current.dialect)) {
// flink sql
// to do: get job data by history id list, not flink jid
if (current.id) {
const res = await handleGetOptionWithoutMsg(API_CONSTANTS.GET_LATEST_HISTORY_BY_ID, {
id: current.id
});
const historyData = res.data;
if (historyData && '2' == historyData.status) {
if (historyData) {
const historyId = historyData.id;
const tableData = await handleGetOption(
'api/studio/getJobData',
6 changes: 5 additions & 1 deletion dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx
@@ -33,7 +33,7 @@ import {
changeTaskLife,
debugTask,
executeSql,
getJobPlan
getJobPlan, isSql
} from '@/pages/DataStudio/HeaderContainer/service';
import { DataStudioTabsItemType, StateType, TabsPageType, VIEW } from '@/pages/DataStudio/model';
import { JOB_LIFE_CYCLE, JOB_STATUS } from '@/pages/DevOps/constants';
@@ -177,6 +177,10 @@ const HeaderContainer = (props: any) => {
});
await SuccessMessageAsync(l('pages.datastudio.editor.exec.success'));
currentData.status = JOB_STATUS.RUNNING;
// Common sql task is synchronized, so it needs to automatically update the status to finished.
if(isSql(currentData.dialect)){
currentData.status = JOB_STATUS.FINISHED;
}
if (currentTab) currentTab.console.result = res.data.result;
saveTabs({ ...props.tabs });
};