diff --git a/.idea/icon.svg b/.idea/icon.svg
deleted file mode 100644
index b4d5df8343..0000000000
--- a/.idea/icon.svg
+++ /dev/null
@@ -1,1400 +0,0 @@
-[1,400 lines of SVG markup deleted; the element content is not recoverable from this extraction]
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
index a6b68d70dc..35eb1ddfbb 100644
--- a/.idea/vcs.xml
+++ b/.idea/vcs.xml
@@ -1,17 +1,6 @@
-[17 lines of XML removed; markup not recoverable from this extraction]
+[6 lines of XML added; markup not recoverable from this extraction]
\ No newline at end of file
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 100644
index 0000000000..07efc4682f
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1,87 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Project Overview
+
+Dinky is a real-time data development platform based on Apache Flink, enabling agile data development, deployment and operation. It's a full-stack application with a Java/Spring Boot backend and React/TypeScript frontend.
+
+## Build and Development Commands
+
+### Backend (Java/Maven)
+- **Build**: `./mvnw clean package -Dmaven.test.skip=true -P aliyun,prod,web,flink-1.14`
+- **Build script**: `./build.sh` (uses the Flink 1.14 profile by default)
+- **Test**: `./mvnw test`
+- **Format code**: `./mvnw spotless:apply`
+- **Check code style**: `./mvnw spotless:check`
+
+### Frontend (React/TypeScript)
+Navigate to the `dinky-web/` directory:
+- **Development server**: `npm run dev` or `npm run start:dev`
+- **Build**: `npm run build`
+- **Lint**: `npm run lint`
+- **Format**: `npm run prettier`
+- **Type check**: `npm run tsc`
+
+### Profiles and Configurations
+- **Flink versions**: Supports Flink 1.14-1.19 via Maven profiles (`flink-1.14`, `flink-1.15`, etc.)
+- **Environment**: Use `-P dev` for development (compile scope) or `-P prod` for production (provided scope)
+- **Repositories**: Default uses the Aliyun mirror (`-P aliyun`); can switch to Maven Central (`-P maven-central`)
+
+## Code Architecture
+
+### Backend Structure
+- **dinky-admin**: Main Spring Boot application entry point (`org.dinky.Dinky`)
+- **dinky-core**: Core execution engine and Flink integration
+- **dinky-flink**: Flink version-specific implementations (1.14-1.19)
+- **dinky-gateway**: Gateway and cluster management
+- **dinky-metadata**: Database metadata providers (MySQL, PostgreSQL, ClickHouse, etc.)
+- **dinky-cdc**: Change Data Capture functionality
+- **dinky-alert**: Alert system with multiple providers (DingTalk, WeChat, Email, etc.)
+- **dinky-web**: React frontend application
+
+### Frontend Structure
+- **DataStudio**: Main FlinkSQL development interface with editor, console, and results
+- **DevOps**: Job monitoring, metrics, and operations
+- **RegCenter**: Registration center for clusters, datasources, UDFs, etc.
+- **AuthCenter**: User management, roles, and permissions
+- **SettingCenter**: System configuration and settings
+
+### Key Components
+- **Executor Framework**: Abstracts different Flink execution modes (Local, Standalone, Yarn, Kubernetes)
+- **Multi-version Support**: Supports multiple Flink versions through modular architecture
+- **SQL Enhancement**: Extends FlinkSQL with custom statements (CDC, variables, etc.)
+- **Catalog Integration**: Supports various data catalogs and metadata discovery
+
+## Development Notes
+
+### Multi-module Maven Project
+- Root POM manages all module dependencies and versions
+- Each Flink version has its own module for compatibility
+- Uses dependency management for consistent versioning across modules
+
+### Frontend Technology Stack
+- **Framework**: React 18 + TypeScript + UMI 4
+- **UI Library**: Ant Design + Pro Components
+- **Editor**: Monaco Editor for SQL development
+- **Charts**: ECharts, G2, Ant Design Charts
+- **State Management**: Built-in UMI models
+
+### Code Quality
+- **Java**: Uses Spotless with Palantir Java format
+- **Frontend**: ESLint + Prettier configuration
+- **License**: All files must include the Apache 2.0 license header
+
+## Testing
+
+- **Backend**: JUnit 5 + Mockito for unit tests
+- **Integration**: TestContainers for database testing
+- **Frontend**: Jest configuration available
+
+## Database Support
+
+Supports multiple databases with dedicated metadata modules:
+- MySQL, PostgreSQL (primary)
+- ClickHouse, Doris, StarRocks
+- Oracle, SQL Server
+- H2 (for testing/development)
\ No newline at end of file
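A note on the overall shape of the changes below: admin-side code stops driving Flink execution in-process and instead goes through the `JobManager` facade added in `dinky-admin/src/main/java/org/dinky/job/JobManager.java`, which delegates every call to a remote `ServerExecutorService` over RMI. A minimal sketch of the resulting call pattern, assuming a default `JobConfig` is acceptable for the operation; the `main` scaffolding is illustrative only, not part of this PR:

```java
import org.dinky.job.JobConfig;
import org.dinky.job.JobManager;
import org.dinky.job.JobResult;

public class SubmitSketch {
    public static void main(String[] args) throws Exception {
        // build() performs the RMI lookup and initializes the remote session
        JobManager manager = JobManager.build(new JobConfig());
        try {
            JobResult result = manager.executeSql("SELECT 1"); // any FlinkSQL statement
            System.out.println(result);
        } finally {
            manager.close(); // releases the remote session
        }
    }
}
```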
diff --git a/dinky-admin/pom.xml b/dinky-admin/pom.xml
index e5868759ea..99e603df93 100644
--- a/dinky-admin/pom.xml
+++ b/dinky-admin/pom.xml
@@ -36,6 +36,10 @@
+        [4 added lines; XML markup not recoverable from this extraction]
         <dependency>
             <groupId>org.xerial</groupId>
             <artifactId>sqlite-jdbc</artifactId>
         </dependency>
@@ -183,28 +187,28 @@
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-actuator</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.dinky</groupId>
-            <artifactId>dinky-gateway</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.dinky</groupId>
-            <artifactId>dinky-core</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>cn.hutool</groupId>
-            <artifactId>hutool-crypto</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>cn.hutool</groupId>
-            <artifactId>hutool-http</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>cn.hutool</groupId>
-            <artifactId>hutool-json</artifactId>
-        </dependency>
+        [26 added lines; XML markup not recoverable from this extraction]
         <dependency>
             <groupId>org.dinky</groupId>
             <artifactId>dinky-daemon</artifactId>
         </dependency>
@@ -225,10 +229,10 @@
         <dependency>
             <groupId>org.dinky</groupId>
             <artifactId>dinky-alert-base</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.dinky</groupId>
-            <artifactId>dinky-client-base</artifactId>
-        </dependency>
+        [4 added lines; XML markup not recoverable from this extraction]
         <dependency>
             <groupId>org.dinky</groupId>
             <artifactId>dinky-client-hadoop</artifactId>
         </dependency>
diff --git a/dinky-admin/src/main/java/org/dinky/context/SqlGatewayWsContext.java b/dinky-admin/src/main/java/org/dinky/context/SqlGatewayWsContext.java
index fe1d142a53..c0639f754d 100644
--- a/dinky-admin/src/main/java/org/dinky/context/SqlGatewayWsContext.java
+++ b/dinky-admin/src/main/java/org/dinky/context/SqlGatewayWsContext.java
@@ -18,242 +18,242 @@
  */
 package org.dinky.context;
-
-import org.dinky.crypto.CryptoComponent;
-import org.dinky.data.model.FragmentVariable;
-import org.dinky.executor.VariableManager;
-import org.dinky.gateway.SqlCliMode;
-import org.dinky.gateway.SqlClientOptions;
-import org.dinky.gateway.sqlgateway.cli.SqlClientAdapter;
-import org.dinky.utils.CloseUtil;
-import org.dinky.utils.FragmentVariableUtils;
-import org.dinky.utils.JsonUtils;
-import org.dinky.utils.LogUtil;
-import org.dinky.utils.SqlUtil;
-
-import java.io.IOException;
-import java.io.PipedInputStream;
-import java.io.PipedOutputStream;
-import java.io.UnsupportedEncodingException;
-import java.net.URLDecoder;
-import java.nio.ByteBuffer;
-import java.sql.SQLException;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-
-import javax.websocket.OnClose;
-import javax.websocket.OnMessage;
-import javax.websocket.OnOpen;
-import javax.websocket.Session;
-import javax.websocket.server.ServerEndpoint;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.stereotype.Component;
-
-import cn.hutool.core.thread.ThreadUtil;
-import cn.hutool.db.Db;
-import cn.hutool.db.Entity;
-import cn.hutool.db.ds.simple.SimpleDataSource;
-import lombok.extern.slf4j.Slf4j;
-
-@Component
-@Slf4j
-@ServerEndpoint("/ws/sql-gateway/")
-public class SqlGatewayWsContext {
-
-    private Session session;
-
-    private SqlClientAdapter client;
-
-    private PipedInputStream in2web;
-
-    private long lastHeartTime = System.currentTimeMillis();
-    private volatile boolean isRunning = true;
-
-    private static String url;
-    private static String username;
-    private static String password;
-    private static Db db;
-
-    private static CryptoComponent cryptoComponent;
-
-    /**
-     * Keep thread usage to a minimum: the pool keeps zero core threads, has no upper bound,
-     * does not keep idle threads alive, and hands each task straight to a thread.
-     * */
-    private static final ExecutorService executor = ThreadUtil.newExecutor();
-
-    private void startClient(SqlClientOptions options) {
-        try {
-            PipedInputStream in2client = new PipedInputStream();
-
-            in2web = new PipedInputStream();
-            PipedOutputStream clientWrite2web = new PipedOutputStream(in2web);
-            clientWrite2web.write("Dinky Sql Client\n".getBytes());
-
-            client = new SqlClientAdapter(in2client, clientWrite2web, options);
-
-            executor.execute(() -> {
-                try {
-                    log.info("Sql Client Start : " + options.getConnectAddress());
-                    client.startClient();
-                } catch (Exception e) {
-                    sendError(e);
-                }
-            });
-            executor.execute(() -> {
-                while (isRunning) {
-                    try {
-                        int data;
-                        byte[] bytes = new byte[1024];
-                        while ((data = in2web.read(bytes)) != -1) {
-                            session.getBasicRemote().sendBinary(ByteBuffer.wrap(bytes, 0, data));
-                        }
-                        log.info("Sql Client Read Terminal Thread Closed :" + options.getConnectAddress());
-                        onClose();
-                    } catch (IOException e) {
-                        log.error("sql client receive error", e);
-                        try {
-                            Thread.sleep(1000);
-                        } catch (InterruptedException interruptedException) {
-                            log.error("Sql Client Thread Interrupted Error: ", e);
-                        }
-                    }
-                }
-            });
-        } catch (Exception e) {
-            sendError(e);
-        }
-        log.info("Sql Client Start Success : " + options.getConnectAddress());
-    }
-
-    private void sendError(Throwable err) {
-        try {
-            log.error("send error to client", err);
-            ByteBuffer byteBuffer = ByteBuffer.wrap(LogUtil.getError(err).getBytes());
-            session.getBasicRemote().sendBinary(byteBuffer);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @OnOpen
-    public void onOpen(Session session) throws UnsupportedEncodingException, SQLException {
-        this.session = session;
-
-        String cols = getParameter("cols", true);
-        String rows = getParameter("rows", true);
-        SqlClientOptions.TerminalSize size =
-                new SqlClientOptions.TerminalSize(Integer.parseInt(cols), Integer.parseInt(rows));
-
-        if (db == null) {
-            db = Db.use(new SimpleDataSource(url, username, password));
-        }
-        Entity option = Entity.create("dinky_fragment").set("enabled", true);
-        List<FragmentVariable> entities = db.find(option, FragmentVariable.class);
-        Map<String, String> variableMap = new LinkedHashMap<>();
-        if (entities != null) {
-            for (FragmentVariable variable : entities) {
-                if (FragmentVariableUtils.isSensitive(variable.getName()) && variable.getFragmentValue() != null) {
-                    variableMap.put(variable.getName(), cryptoComponent.decryptText(variable.getFragmentValue()));
-                } else {
-                    variableMap.put(variable.getName(), variable.getFragmentValue());
-                }
-            }
-        }
-        VariableManager variableManager = new VariableManager();
-        variableManager.registerVariable(variableMap);
-        String initSql = URLDecoder.decode(getParameter("initSql"), "UTF-8");
-        initSql = SqlUtil.removeNote(initSql);
-        initSql = variableManager.parseVariable(initSql);
-
-        SqlClientOptions options = SqlClientOptions.builder()
-                .mode(SqlCliMode.fromString(getParameter("mode", true)))
-                .sessionId(getParameter("sessionId"))
-                .connectAddress(getParameter("connectAddress", true))
-                .initSql(initSql)
-                .historyFilePath("./tmp/flink-sql-history/history")
-                .terminalSize(size)
-                .build();
-
-        startClient(options);
-
-        executor.execute(() -> {
-            while (isRunning) {
-                try {
-                    Thread.sleep(1000);
-                    if (System.currentTimeMillis() - lastHeartTime > 1000 * 60) {
-                        onClose();
-                    }
-                } catch (Exception e) {
-                    log.error("Sql Client Heart Thread Error: ", e);
-                }
-            }
-            log.info("Sql Client Heart Thread Closed :");
-        });
-    }
-
-    @OnClose
-    public void onClose() {
-        isRunning = false;
-        CloseUtil.closeNoErrorPrint(client, in2web, session);
-    }
-
-    @OnMessage
-    public void onMessage(String messages) {
-        SqlClientAdapter.WsEvent wsEvent = JsonUtils.parseObject(messages, SqlClientAdapter.WsEvent.class);
-        if (wsEvent == null) {
-            throw new RuntimeException("parse wsEvent error");
-        } else {
-            SqlClientAdapter.WsEvent.EventType eventType =
-                    SqlClientAdapter.WsEvent.EventType.getEventType(wsEvent.getType());
-            if (eventType == SqlClientAdapter.WsEvent.EventType.TERM_HEART_EVENT) {
-                lastHeartTime = System.currentTimeMillis();
-            } else {
-                try {
-                    client.onMessage(wsEvent, eventType);
-                } catch (IOException e) {
-                    throw new RuntimeException(e);
-                }
-            }
-        }
-    }
-
-    private String getParameter(String key) {
-        return getParameter(key, false);
-    }
-
-    private String getParameter(String key, boolean required) {
-        List<String> list = session.getRequestParameterMap().get(key);
-        if (list == null || list.size() == 0) {
-            if (required) {
-                throw new RuntimeException("parameter " + key + " is required");
-            } else {
-                return "";
-            }
-        }
-        return list.get(0);
-    }
-
-    @Autowired
-    public void setCryptoComponent(CryptoComponent cryptoComponent) {
-        SqlGatewayWsContext.cryptoComponent = cryptoComponent;
-    }
-
-    @Value("${spring.datasource.url}")
-    public void setUrl(String url) {
-        SqlGatewayWsContext.url = url;
-    }
-
-    @Value("${spring.datasource.username}")
-    public void setUsername(String username) {
-        SqlGatewayWsContext.username = username;
-    }
-
-    @Value("${spring.datasource.password}")
-    public void setPassword(String password) {
-        SqlGatewayWsContext.password = password;
-    }
-}
+// [The ~240 added lines are the deleted class above re-added verbatim as the new file
+// content, with every line prefixed by "//" to comment the whole file out; they are
+// collapsed here as an exact duplicate.]
diff --git a/dinky-admin/src/main/java/org/dinky/controller/DownloadController.java b/dinky-admin/src/main/java/org/dinky/controller/DownloadController.java
index 65794a7fb3..023bf04daa 100644
--- a/dinky-admin/src/main/java/org/dinky/controller/DownloadController.java
+++ b/dinky-admin/src/main/java/org/dinky/controller/DownloadController.java
@@ -25,7 +25,6 @@
 import org.dinky.data.model.FlinkUdfManifest;
 import org.dinky.function.constant.PathConstant;
 import org.dinky.function.util.ZipWriter;
-import org.dinky.resource.BaseResourceManager;
 
 import java.io.File;
 import java.io.InputStream;
@@ -118,7 +117,8 @@ public void downloadAppJar(@PathVariable String version, HttpServletResponse res
     @GetMapping("downloadFromRs")
     @ApiOperation("Download From Resource")
     public void downloadJavaUDF(String path, HttpServletResponse resp) {
-        InputStream inputStream = BaseResourceManager.getInstance().readFile(path);
-        ServletUtil.write(resp, inputStream);
+        // TODO: 2024/3/31
+        // InputStream inputStream = BaseResourceManager.getInstance().readFile(path);
+        // ServletUtil.write(resp, inputStream);
     }
 }
diff --git a/dinky-admin/src/main/java/org/dinky/controller/FlinkController.java b/dinky-admin/src/main/java/org/dinky/controller/FlinkController.java
index f50e2178f2..cfcdd96d35 100644
--- a/dinky-admin/src/main/java/org/dinky/controller/FlinkController.java
+++ b/dinky-admin/src/main/java/org/dinky/controller/FlinkController.java
@@ -22,7 +22,8 @@
 import org.dinky.data.model.CheckPointReadTable;
 import org.dinky.data.result.Result;
 import org.dinky.data.vo.CascaderVO;
-import org.dinky.flink.checkpoint.CheckpointRead;
+import org.dinky.job.JobConfig;
+import org.dinky.job.JobManager;
 import org.dinky.service.FlinkService;
 
 import java.util.List;
@@ -46,13 +47,12 @@
 @RequiredArgsConstructor
 public class FlinkController {
 
-    protected static final CheckpointRead INSTANCE = new CheckpointRead();
     private final FlinkService flinkService;
 
     @GetMapping("/readCheckPoint")
     @ApiOperation("Read Checkpoint")
     public Result<Map<String, Map<String, CheckPointReadTable>>> readCheckPoint(String path, String operatorId) {
-        return Result.data(INSTANCE.readCheckpoint(path, operatorId));
+        return Result.data(JobManager.build(new JobConfig()).readCheckpoint(path, operatorId));
    }
 
     @GetMapping("/configOptions")
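The same delegation idiom recurs in the controllers below: build a throwaway `JobManager` around an empty `JobConfig` and forward to the remote server. A hedged sketch of the relocated checkpoint-reading path; the checkpoint path and operator id are placeholders, and in `FlinkController` above they come from the HTTP request:

```java
import org.dinky.data.model.CheckPointReadTable;
import org.dinky.job.JobConfig;
import org.dinky.job.JobManager;

import java.util.Map;

public class ReadCheckpointSketch {
    public static void main(String[] args) {
        // Placeholder arguments for illustration only
        Map<String, Map<String, CheckPointReadTable>> tables = JobManager.build(new JobConfig())
                .readCheckpoint("/tmp/flink/checkpoints/chk-42", "operator-1");
        tables.forEach((key, byTable) -> System.out.println(key + " -> " + byTable.keySet()));
    }
}
```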
diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java b/dinky-admin/src/main/java/org/dinky/controller/JarController.java
index 0a41af36d7..c088f35a68 100644
--- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java
+++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java
@@ -23,7 +23,8 @@
 import org.dinky.data.result.Result;
 import org.dinky.function.constant.PathConstant;
 import org.dinky.function.data.model.UDF;
-import org.dinky.function.util.UDFUtil;
+import org.dinky.job.JobConfig;
+import org.dinky.job.JobManager;
 import org.dinky.service.TaskService;
 
 import org.apache.flink.table.catalog.FunctionLanguage;
@@ -70,7 +71,7 @@ public Result<Map<String, List<String>>> generateJar() {
                         FunctionLanguage.valueOf(task.getDialect().toUpperCase()))
                 .build())
                 .collect(Collectors.toList());
-        Map<String, List<String>> resultMap = UDFUtil.buildJar(udfCodes);
+        Map<String, List<String>> resultMap = JobManager.build(new JobConfig()).buildJar(udfCodes);
         String msg = StrUtil.format(
                 "The UDF jar was generated successfully; the jar file is at {}.\nSuccessful classes: {}.\nFailed classes: {}",
                 PathConstant.UDF_JAR_TMP_PATH,
diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java
index cfb419983d..c4d275f37e 100644
--- a/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java
+++ b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java
@@ -21,6 +21,7 @@
 import org.dinky.data.annotations.ProcessId;
 import org.dinky.data.annotations.TaskId;
+import org.dinky.data.model.SystemConfiguration;
 import org.dinky.data.model.Task;
 import org.dinky.data.model.alert.AlertGroup;
 import org.dinky.data.model.ext.TaskExtConfig;
@@ -249,7 +250,7 @@ public JobConfig getJobConfig() {
         jobConfig.setUdfRefer(udfRefers);
         jobConfig.setTaskId(id);
         jobConfig.setJobName(name);
-
+        jobConfig.setSystemConfiguration(SystemConfiguration.getInstances());
         return jobConfig;
     }
diff --git a/dinky-admin/src/main/java/org/dinky/init/SystemInit.java b/dinky-admin/src/main/java/org/dinky/init/SystemInit.java
index 68c9936b1e..c1d55a4afd 100644
--- a/dinky-admin/src/main/java/org/dinky/init/SystemInit.java
+++ b/dinky-admin/src/main/java/org/dinky/init/SystemInit.java
@@ -33,10 +33,10 @@
 import org.dinky.data.model.job.JobInstance;
 import org.dinky.data.model.rbac.Tenant;
 import org.dinky.function.constant.PathConstant;
-import org.dinky.function.pool.UdfCodePool;
 import org.dinky.job.ClearJobHistoryTask;
 import org.dinky.job.FlinkJobTask;
-import org.dinky.resource.BaseResourceManager;
+import org.dinky.job.JobConfig;
+import org.dinky.job.JobManager;
 import org.dinky.scheduler.client.ProjectClient;
 import org.dinky.scheduler.exception.SchedulerException;
 import org.dinky.scheduler.model.Project;
@@ -132,7 +132,7 @@ private void initResources() {
         if (Boolean.TRUE.equals(
                 systemConfiguration.getResourcesEnable().getValue())) {
             try {
-                BaseResourceManager.initResourceManager();
+                JobManager.build(new JobConfig()).initResourceManager(systemConfiguration);
             } catch (Exception e) {
                 log.error("Init resource error: ", e);
             }
@@ -205,9 +205,10 @@ public static Project getProject() {
     public void registerUDF() {
         List<Task> allUDF = taskService.getReleaseUDF();
         if (CollUtil.isNotEmpty(allUDF)) {
-            UdfCodePool.registerPool(allUDF.stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()));
+            JobManager.build(new JobConfig())
+                    .registerPool(allUDF.stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()));
         }
-        UdfCodePool.updateGitPool(gitProjectService.getGitPool());
+        JobManager.build(new JobConfig()).updateGitPool(gitProjectService.getGitPool());
     }
 
     public void updateGitBuildState() {
diff --git a/dinky-admin/src/main/java/org/dinky/interceptor/TenantInterceptor.java b/dinky-admin/src/main/java/org/dinky/interceptor/TenantInterceptor.java
index 330509e29f..5a7ec9a8cd 100644
--- a/dinky-admin/src/main/java/org/dinky/interceptor/TenantInterceptor.java
+++ b/dinky-admin/src/main/java/org/dinky/interceptor/TenantInterceptor.java
@@ -34,8 +34,8 @@
 import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import javax.validation.constraints.NotNull;
 
-import org.jetbrains.annotations.NotNull;
 import org.springframework.web.servlet.AsyncHandlerInterceptor;
 
 import cn.dev33.satoken.SaManager;
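`JobManager.registerRemote()` in the new file below resolves the executor service by name from a local RMI registry. The server-side binding is not part of this diff, so the bootstrap half of this sketch is an assumption for illustration; only the lookup half mirrors the actual code:

```java
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;

public class RmiWiringSketch {
    public static void main(String[] args) throws Exception {
        // Server process (hypothetical bootstrap, not shown in this PR):
        //   Registry registry = LocateRegistry.createRegistry(Registry.REGISTRY_PORT);
        //   registry.rebind("Dinky", exportedServerExecutorService);

        // Admin process, mirroring JobManager.registerRemote() below:
        Registry registry = LocateRegistry.getRegistry("localhost");
        Object stub = registry.lookup("Dinky"); // cast to ServerExecutorService in JobManager
        System.out.println("Resolved remote executor stub: " + stub);
    }
}
```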
diff --git a/dinky-admin/src/main/java/org/dinky/job/JobManager.java b/dinky-admin/src/main/java/org/dinky/job/JobManager.java
new file mode 100644
index 0000000000..c8d723125e
--- /dev/null
+++ b/dinky-admin/src/main/java/org/dinky/job/JobManager.java
@@ -0,0 +1,460 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.job;
+
+import org.dinky.cluster.FlinkClusterInfo;
+import org.dinky.data.annotations.ProcessStep;
+import org.dinky.data.enums.JobStatus;
+import org.dinky.data.enums.ProcessStepType;
+import org.dinky.data.model.Catalog;
+import org.dinky.data.model.CheckPointReadTable;
+import org.dinky.data.model.Column;
+import org.dinky.data.model.ResourcesVO;
+import org.dinky.data.model.Schema;
+import org.dinky.data.model.SystemConfiguration;
+import org.dinky.data.model.Table;
+import org.dinky.data.result.ExplainResult;
+import org.dinky.data.result.IResult;
+import org.dinky.data.result.ResultPool;
+import org.dinky.data.result.SelectResult;
+import org.dinky.explainer.lineage.LineageResult;
+import org.dinky.function.data.model.UDF;
+import org.dinky.function.data.model.UDFPath;
+import org.dinky.gateway.config.GatewayConfig;
+import org.dinky.gateway.enums.SavePointType;
+import org.dinky.gateway.result.GatewayResult;
+import org.dinky.gateway.result.SavePointResult;
+import org.dinky.metadata.config.DriverConfig;
+import org.dinky.remote.ServerExecutorService;
+
+import java.rmi.RemoteException;
+import java.rmi.registry.LocateRegistry;
+import java.rmi.registry.Registry;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Client-side facade over the remote {@link ServerExecutorService}: each method below
+ * forwards the call over RMI and rethrows {@link RemoteException} unchecked.
+ */
+@Slf4j
+public class JobManager {
+    private static ServerExecutorService serverExecutorService;
+    JobHandler handler;
+
+    static {
+        registerRemote();
+    }
+
+    private JobManager(JobConfig config, boolean isPlanMode) {
+        try {
+            serverExecutorService.init(config, isPlanMode);
+            if (isPlanMode) {
+                handler = JobHandler.build();
+            }
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private static void registerRemote() {
+        try {
+            Registry registry = LocateRegistry.getRegistry("localhost");
+
+            // Retrieve the stub/proxy for the remote object from the registry
+            serverExecutorService = (ServerExecutorService) registry.lookup("Dinky");
+        } catch (Exception exception) {
+            throw new RuntimeException(exception);
+        }
+    }
+
+    public static JobManager build(JobConfig config) {
+        return build(config, false);
+    }
+
+    public static JobManager build(JobConfig config, boolean isPlanMode) {
+        return new JobManager(config, isPlanMode);
+    }
+
+    public boolean close() {
+        try {
+            return serverExecutorService.close();
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public ObjectNode getJarStreamGraphJson(String 
statement) { + try { + return serverExecutorService.getJarStreamGraphJson(statement); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public void prepare(String statement) { + try { + serverExecutorService.prepare(statement); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + @ProcessStep(type = ProcessStepType.SUBMIT_EXECUTE) + public JobResult executeJarSql(String statement) throws Exception { + return serverExecutorService.executeJarSql(statement); + } + + @ProcessStep(type = ProcessStepType.SUBMIT_EXECUTE) + public JobResult executeSql(String statement) throws Exception { + return serverExecutorService.executeSql(statement); + } + + public IResult executeDDL(String statement) { + try { + return serverExecutorService.executeDDL(statement); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public static SelectResult getJobData(String jobId) { + return ResultPool.get(jobId); + } + + public ExplainResult explainSql(String statement) { + try { + return serverExecutorService.explainSql(statement); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public ObjectNode getStreamGraph(String statement) { + try { + return serverExecutorService.getStreamGraph(statement); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public String getJobPlanJson(String statement) { + try { + return serverExecutorService.getJobPlanJson(statement); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public boolean cancelNormal(String jobId) { + try { + return serverExecutorService.cancelNormal(jobId); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public SavePointResult savepoint( + String jobId, SavePointType savePointType, String savePoint, boolean isUseRestAPI) { + try { + return serverExecutorService.savepoint(jobId, savePointType, savePoint, isUseRestAPI); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public String exportSql(String sql) { + try { + return serverExecutorService.exportSql(sql); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public Job getJob() { + try { + return serverExecutorService.getJob(); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public List getPythonUdfList(String udfFile) { + try { + return serverExecutorService.getPythonUdfList(udfFile); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public JobStatus getJobStatus(GatewayConfig gatewayConfig, String appId) { + try { + return serverExecutorService.getJobStatus(gatewayConfig, appId); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public void onJobGatewayFinishCallback(JobConfig jobConfig, String status) { + try { + serverExecutorService.onJobGatewayFinishCallback(jobConfig, status); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public List getUdfClassNameByJarPath(String path) { + try { + return serverExecutorService.getUdfClassNameByJarPath(path); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public void putFile(String fullName, byte[] context) { + try { + serverExecutorService.putFile(fullName, context); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public List getFullDirectoryStructure(int rootId) { + try { + return serverExecutorService.getFullDirectoryStructure(rootId); + } catch (RemoteException e) { + 
throw new RuntimeException(e);
+        }
+    }
+
+    public void rename(String path, String newPath) {
+        try {
+            serverExecutorService.rename(path, newPath);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public String getFileContent(String path) {
+        try {
+            return serverExecutorService.getFileContent(path);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void updateGitPool(Map newPool) {
+        try {
+            serverExecutorService.updateGitPool(newPool);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public UDFPath initUDF(List<UDF> udfClassList, Integer missionId) {
+        try {
+            return serverExecutorService.initUDF(udfClassList, missionId);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public LineageResult getColumnLineageByLogicalPlan(String statement) {
+        try {
+            return serverExecutorService.getColumnLineageByLogicalPlan(statement);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public LineageResult getSqlLineage(String statement, String mysql, DriverConfig<Map<String, Object>> driverConfig) {
+        try {
+            return serverExecutorService.getSqlLineage(statement, mysql, driverConfig);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public List<Catalog> getCatalog() {
+        try {
+            return serverExecutorService.getCatalog();
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void setSchemaInfo(String catalogName, String databaseName, Schema schema, List<Table> tables) {
+        try {
+            serverExecutorService.setSchemaInfo(catalogName, databaseName, schema, tables);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public List<Column> getColumnList(String catalogName, String databaseName, String tableName) {
+        try {
+            return serverExecutorService.getColumnList(catalogName, databaseName, tableName);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public Map<String, Map<String, CheckPointReadTable>> readCheckpoint(String path, String operatorId) {
+        try {
+            return serverExecutorService.readCheckpoint(path, operatorId);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public byte[] readFile(String path) {
+        try {
+            return serverExecutorService.readFile(path);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public Map<String, List<String>> buildJar(List<UDF> udfCodes) {
+        try {
+            return serverExecutorService.buildJar(udfCodes);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void buildRowPermission(ConcurrentHashMap permission) {
+        try {
+            serverExecutorService.buildRowPermission(permission);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public List<String> getPrintTable(String statement) {
+        try {
+            return serverExecutorService.getPrintTables(statement);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public FlinkClusterInfo testFlinkJobManagerIP(String hosts, String host) {
+        try {
+            return serverExecutorService.testFlinkJobManagerIP(hosts, host);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void killCluster(GatewayConfig gatewayConfig) {
+        try {
+            serverExecutorService.killCluster(gatewayConfig);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public GatewayResult deployCluster(GatewayConfig gatewayConfig) {
+        try {
+            return serverExecutorService.deployCluster(gatewayConfig);
+        } catch (RemoteException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void 
addOrUpdateUdfCodePool(UDF udf) { + try { + serverExecutorService.addOrUpdate(udf); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public void removeUdfCodePool(String className) { + try { + serverExecutorService.removeUdfCodePool(className); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public String templateParse(String dialect, String templateCode, String className) { + try { + return serverExecutorService.templateParse(dialect, templateCode, className); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public void registerPool(List collect) { + try { + serverExecutorService.registerPool(collect); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public void initResourceManager(SystemConfiguration systemConfiguration) { + try { + serverExecutorService.initResourceManager(systemConfiguration); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public String getPyUDFAttr(String statement) { + try { + return serverExecutorService.getPyUDFAttr(statement); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public String getScalaFullClassName(String statement) { + try { + return serverExecutorService.getScalaFullClassName(statement); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public String getLatestJobManageHost(String appId, String oldJobManagerHost, GatewayConfig gatewayConfig) { + try { + return serverExecutorService.getLatestJobManageHost(appId, oldJobManagerHost, gatewayConfig); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public List getCustomStaticUdfs() { + try { + return serverExecutorService.getCustomStaticUdfs(); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + } + + public JobHandler getHandler() { + return handler; + } +} diff --git a/dinky-admin/src/main/java/org/dinky/job/ServerExecutorService.java b/dinky-admin/src/main/java/org/dinky/job/ServerExecutorService.java new file mode 100644 index 0000000000..d84f55ea1c --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/job/ServerExecutorService.java @@ -0,0 +1,60 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.job; + +import org.dinky.data.result.ExplainResult; +import org.dinky.data.result.IResult; +import org.dinky.gateway.enums.SavePointType; +import org.dinky.gateway.result.SavePointResult; + +import java.rmi.Remote; +import java.rmi.RemoteException; + +import com.fasterxml.jackson.databind.node.ObjectNode; + +public interface ServerExecutorService extends Remote { + void init(JobConfig config, boolean isPlanMode) throws RemoteException; + + boolean close() throws RemoteException; + + ObjectNode getJarStreamGraphJson(String statement) throws RemoteException; + + JobResult executeJarSql(String statement) throws RemoteException; + + JobResult executeSql(String statement) throws RemoteException; + + IResult executeDDL(String statement) throws RemoteException; + + ExplainResult explainSql(String statement) throws RemoteException; + + ObjectNode getStreamGraph(String statement) throws RemoteException; + + String getJobPlanJson(String statement) throws RemoteException; + + boolean cancelNormal(String jobId) throws RemoteException; + + SavePointResult savepoint(String jobId, SavePointType savePointType, String savePoint) throws RemoteException; + + String exportSql(String sql) throws RemoteException; + + Job getJob() throws RemoteException; + + void prepare(String statement) throws RemoteException; +} diff --git a/dinky-admin/src/main/java/org/dinky/job/handler/AbsJobHandler.java b/dinky-admin/src/main/java/org/dinky/job/handler/AbsJobHandler.java index 55fa6ab630..126a7890ba 100644 --- a/dinky-admin/src/main/java/org/dinky/job/handler/AbsJobHandler.java +++ b/dinky-admin/src/main/java/org/dinky/job/handler/AbsJobHandler.java @@ -19,9 +19,6 @@ package org.dinky.job.handler; -import org.dinky.job.Job; import org.dinky.job.JobHandler; -public abstract class AbsJobHandler implements JobHandler { - protected Job job; -} +public abstract class AbsJobHandler implements JobHandler {} diff --git a/dinky-admin/src/main/java/org/dinky/job/handler/Job2MysqlHandler.java b/dinky-admin/src/main/java/org/dinky/job/handler/Job2MysqlHandler.java index a52f906d43..6330864879 100644 --- a/dinky-admin/src/main/java/org/dinky/job/handler/Job2MysqlHandler.java +++ b/dinky-admin/src/main/java/org/dinky/job/handler/Job2MysqlHandler.java @@ -86,7 +86,6 @@ public class Job2MysqlHandler extends AbsJobHandler { @Override public boolean init(Job job) { - this.job = job; History history = new History(); history.setType(job.getType().getLongValue()); if (job.isUseGateway()) { @@ -108,18 +107,13 @@ public boolean init(Job job) { return true; } - @Override - public boolean ready() { - return true; - } - @Override public boolean running() { return true; } @Override - public boolean success() { + public boolean success(Job job) { Integer taskId = job.getJobConfig().getTaskId(); History history = new History(); @@ -218,7 +212,7 @@ public boolean success() { } @Override - public boolean failed() { + public boolean failed(Job job) { History history = new History(); history.setBatchModel(job.getJobConfig().isBatchModel()); history.setId(job.getId()); @@ -231,11 +225,6 @@ public boolean failed() { return true; } - @Override - public boolean callback() { - return true; - } - @Override public boolean close() { return true; diff --git a/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java b/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java index 36f15358c5..589d863039 100644 --- a/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java +++ 
b/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java @@ -38,11 +38,10 @@ import org.dinky.data.model.ClusterInstance; import org.dinky.data.model.ext.JobInfoDetail; import org.dinky.data.model.job.JobInstance; -import org.dinky.gateway.Gateway; import org.dinky.gateway.config.GatewayConfig; -import org.dinky.gateway.exception.NotSupportGetStatusException; import org.dinky.gateway.model.FlinkClusterConfig; import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import org.dinky.service.ClusterInstanceService; import org.dinky.service.HistoryService; import org.dinky.service.JobHistoryService; @@ -289,9 +288,8 @@ private static Optional getJobStatus(JobInfoDetail jobInfoDetail) { .getFlinkConfig() .setJobName(jobInfoDetail.getInstance().getName()); - Gateway gateway = Gateway.build(gatewayConfig); - return Optional.of(gateway.getJobStatusById(appId)); - } catch (NotSupportGetStatusException ignored) { + return Optional.of(JobManager.build(new JobConfig()).getJobStatus(gatewayConfig, appId)); + } catch (Exception ignored) { // if the gateway does not support get status, then use the api to get job status // ignore to do something here } @@ -315,7 +313,7 @@ private static void handleJobDone(JobInfoDetail jobInfoDetail) { jobConfig.buildGatewayConfig(configJson); jobConfig.getGatewayConfig().setType(GatewayType.get(clusterType)); jobConfig.getGatewayConfig().getFlinkConfig().setJobName(jobInstance.getName()); - Gateway.build(jobConfig.getGatewayConfig()).onJobFinishCallback(jobInstance.getStatus()); + JobManager.build(new JobConfig()).onJobGatewayFinishCallback(jobConfig, jobInstance.getStatus()); } } @@ -347,9 +345,8 @@ private static void checkAndRefreshCluster(JobInfoDetail jobInfoDetail) { .getFlinkConfig() .setJobName(jobInfoDetail.getInstance().getName()); - Gateway gateway = Gateway.build(gatewayConfig); - String latestJobManageHost = gateway.getLatestJobManageHost(appId, clusterInstance.getJobManagerHost()); - + String latestJobManageHost = JobManager.build(new JobConfig()) + .getLatestJobManageHost(appId, clusterInstance.getJobManagerHost(), gatewayConfig); if (Asserts.isNotNull(latestJobManageHost)) { clusterInstance.setHosts(latestJobManageHost); clusterInstance.setJobManagerHost(latestJobManageHost); diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/ClusterConfigurationServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/ClusterConfigurationServiceImpl.java index c814bb44f7..6a7c87f0e5 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/ClusterConfigurationServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/ClusterConfigurationServiceImpl.java @@ -29,7 +29,6 @@ import org.dinky.gateway.config.GatewayConfig; import org.dinky.gateway.model.FlinkClusterConfig; import org.dinky.gateway.result.TestResult; -import org.dinky.job.JobManager; import org.dinky.mapper.ClusterConfigurationMapper; import org.dinky.mybatis.service.impl.SuperServiceImpl; import org.dinky.service.ClusterConfigurationService; @@ -82,7 +81,13 @@ public FlinkClusterConfig getFlinkClusterCfg(Integer id) { @Override public TestResult testGateway(ClusterConfigurationDTO config) { config.getConfig().setType(GatewayType.get(config.getType())); - return JobManager.testGateway(GatewayConfig.build(config.getConfig())); + return testGateway(GatewayConfig.build(config.getConfig())); + } + + public static TestResult testGateway(GatewayConfig gatewayConfig) { + // TODO: 2024/3/31 + // return Gateway.build(gatewayConfig).test(); + return null; } 
/** diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/ClusterInstanceServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/ClusterInstanceServiceImpl.java index c6d87b512d..bf9a332da5 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/ClusterInstanceServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/ClusterInstanceServiceImpl.java @@ -21,7 +21,6 @@ import org.dinky.assertion.Asserts; import org.dinky.assertion.DinkyAssert; -import org.dinky.cluster.FlinkCluster; import org.dinky.cluster.FlinkClusterInfo; import org.dinky.data.dto.ClusterInstanceDTO; import org.dinky.data.enums.GatewayType; @@ -32,7 +31,6 @@ import org.dinky.data.model.ClusterInstance; import org.dinky.data.model.Task; import org.dinky.gateway.config.GatewayConfig; -import org.dinky.gateway.exception.GatewayException; import org.dinky.gateway.model.FlinkClusterConfig; import org.dinky.gateway.result.GatewayResult; import org.dinky.job.JobConfig; @@ -82,15 +80,15 @@ public class ClusterInstanceServiceImpl extends SuperServiceImpl UdfCodePool.updateGitPool(getGitPool())); + ThreadUtil.execAsync(() -> JobManager.build(new JobConfig()).updateGitPool(getGitPool())); } /** @param gitProjectDTOList */ diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java index fcb2f2fcc2..fbaadd38b6 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java @@ -41,9 +41,10 @@ import org.dinky.data.model.mapping.ClusterInstanceMapping; import org.dinky.data.result.ProTableResult; import org.dinky.data.vo.task.JobInstanceVo; -import org.dinky.explainer.lineage.LineageBuilder; import org.dinky.explainer.lineage.LineageResult; import org.dinky.job.FlinkJobTask; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import org.dinky.mapper.JobInstanceMapper; import org.dinky.mybatis.service.impl.SuperServiceImpl; import org.dinky.mybatis.util.ProTableUtil; @@ -258,7 +259,7 @@ public void refreshJobByTaskIds(Integer... 
taskIds) { @Override public LineageResult getLineage(Integer id) { History history = getJobInfoDetail(id).getHistory(); - return LineageBuilder.getColumnLineageByLogicalPlan(history.getStatement()); + return JobManager.build(new JobConfig()).getColumnLineageByLogicalPlan(history.getStatement()); } @Override diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/PrintTableServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/PrintTableServiceImpl.java index 1ecc09d816..4fd7e60a1c 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/PrintTableServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/PrintTableServiceImpl.java @@ -22,18 +22,15 @@ import org.dinky.context.SseSessionContextHolder; import org.dinky.data.enums.SseTopic; import org.dinky.data.vo.PrintTableVo; -import org.dinky.explainer.print_table.PrintStatementExplainer; -import org.dinky.parser.SqlType; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import org.dinky.service.PrintTableService; -import org.dinky.trans.Operations; -import org.dinky.utils.SqlUtil; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.SocketException; import java.net.UnknownHostException; -import java.util.Arrays; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -61,12 +58,7 @@ public PrintTableServiceImpl() { @Override public List getPrintTables(String statement) { // TODO: 2023/4/7 this function not support variable sql, because, JobManager and executor - // couple function - // and status and task execute. - final String[] statements = SqlUtil.getStatements(SqlUtil.removeNote(statement)); - return Arrays.stream(statements) - .filter(t -> SqlType.PRINT.equals(Operations.getOperationType(t))) - .flatMap(t -> Arrays.stream(PrintStatementExplainer.splitTableNames(t))) + return JobManager.build(new JobConfig()).getPrintTable(statement).stream() .map(t -> new PrintTableVo(t, getFullTableName(t))) .collect(Collectors.toList()); } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java index e87e03ed73..a40be719ce 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/StudioServiceImpl.java @@ -31,13 +31,9 @@ import org.dinky.data.model.DataBase; import org.dinky.data.model.Schema; import org.dinky.data.model.Table; -import org.dinky.data.result.DDLResult; import org.dinky.data.result.IResult; import org.dinky.data.result.SelectResult; -import org.dinky.executor.CustomTableEnvironment; -import org.dinky.explainer.lineage.LineageBuilder; import org.dinky.explainer.lineage.LineageResult; -import org.dinky.explainer.sqllineage.SQLLineageBuilder; import org.dinky.job.JobConfig; import org.dinky.job.JobManager; import org.dinky.metadata.driver.Driver; @@ -45,12 +41,9 @@ import org.dinky.service.DataBaseService; import org.dinky.service.StudioService; import org.dinky.service.TaskService; -import org.dinky.utils.FlinkTableMetadataUtil; -import org.dinky.utils.RunTimeUtil; import java.util.ArrayList; import java.util.List; -import java.util.Map; import org.springframework.stereotype.Service; @@ -75,16 +68,6 @@ public class StudioServiceImpl implements StudioService { private final Cache jobManagerCache = CacheUtil.newTimedCache(1000 * 60 * 2); private final String DEFAULT_CATALOG = "default_catalog"; 
-    private IResult executeMSFlinkSql(StudioMetaStoreDTO studioMetaStoreDTO) {
-        String envSql = taskService.buildEnvSql(studioMetaStoreDTO);
-        studioMetaStoreDTO.setStatement(studioMetaStoreDTO.getStatement() + envSql);
-        JobConfig config = studioMetaStoreDTO.getJobConfig();
-        JobManager jobManager = JobManager.build(config);
-        IResult jobResult = jobManager.executeDDL(studioMetaStoreDTO.getStatement());
-        RunTimeUtil.recovery(jobManager);
-        return jobResult;
-    }
-
     @Override
     public IResult executeDDL(StudioDDLDTO studioDDLDTO) {
         JobConfig config = studioDDLDTO.getJobConfig();
@@ -100,6 +83,7 @@ public SelectResult getJobData(String jobId) {
     @Override
     public LineageResult getLineage(StudioLineageDTO studioCADTO) {
         // TODO: add ProcessStep
+        JobManager jobManager = JobManager.build(new JobConfig());
         if (Asserts.isNotNullString(studioCADTO.getDialect())
                 && !Dialect.FLINK_SQL.isDialect(studioCADTO.getDialect())) {
             if (Asserts.isNull(studioCADTO.getDatabaseId())) {
@@ -112,15 +96,15 @@ public LineageResult getLineage(StudioLineageDTO studioCADTO) {
                 return null;
             }
             if (Dialect.DORIS.isDialect(studioCADTO.getDialect())) {
-                return SQLLineageBuilder.getSqlLineage(studioCADTO.getStatement(), "mysql", dataBase.getDriverConfig());
+                return jobManager.getSqlLineage(studioCADTO.getStatement(), "mysql", dataBase.getDriverConfig());
             } else {
-                return SQLLineageBuilder.getSqlLineage(
+                return jobManager.getSqlLineage(
                         studioCADTO.getStatement(), studioCADTO.getDialect().toLowerCase(), dataBase.getDriverConfig());
             }
         } else {
             String envSql = taskService.buildEnvSql(studioCADTO);
             studioCADTO.setStatement(studioCADTO.getStatement() + envSql);
-            return LineageBuilder.getColumnLineageByLogicalPlan(studioCADTO.getStatement());
+            return JobManager.build(new JobConfig()).getColumnLineageByLogicalPlan(studioCADTO.getStatement());
         }
     }
@@ -150,9 +134,7 @@ public List<Catalog> getMSCatalogs(StudioMetaStoreDTO studioMetaStoreDTO) {
         } else {
             String envSql = taskService.buildEnvSql(studioMetaStoreDTO);
             JobManager jobManager = getJobManager(studioMetaStoreDTO, envSql);
-            CustomTableEnvironment customTableEnvironment =
-                    jobManager.getExecutor().getCustomTableEnvironment();
-            catalogs.addAll(FlinkTableMetadataUtil.getCatalog(customTableEnvironment));
+            catalogs.addAll(jobManager.getCatalog());
         }
         return catalogs;
     }
@@ -171,10 +153,7 @@ public Schema getMSSchemaInfo(StudioMetaStoreDTO studioMetaStoreDTO) {
         } else {
             String envSql = taskService.buildEnvSql(studioMetaStoreDTO);
             JobManager jobManager = getJobManager(studioMetaStoreDTO, envSql);
-            CustomTableEnvironment customTableEnvironment =
-                    jobManager.getExecutor().getCustomTableEnvironment();
-            FlinkTableMetadataUtil.setSchemaInfo(
-                    customTableEnvironment, studioMetaStoreDTO.getCatalog(), database, schema, tables);
+            jobManager.setSchemaInfo(studioMetaStoreDTO.getCatalog(), database, schema, tables);
         }
         schema.setTables(tables);
         return schema;
@@ -193,13 +172,10 @@ public List<Column> getMSColumns(StudioMetaStoreDTO studioMetaStoreDTO) {
                 columns.addAll(driver.listColumns(database, tableName));
             }
         } else {
             String envSql = taskService.buildEnvSql(studioMetaStoreDTO);
             JobManager jobManager = getJobManager(studioMetaStoreDTO, envSql);
-            CustomTableEnvironment customTableEnvironment =
-                    jobManager.getExecutor().getCustomTableEnvironment();
-            columns.addAll(
-                    FlinkTableMetadataUtil.getColumnList(customTableEnvironment, catalogName, database, tableName));
+            columns.addAll(jobManager.getColumnList(catalogName, database, tableName));
         }
         return columns;
     }
@@ -213,19 +188,4 @@ private JobManager 
getJobManager(StudioMetaStoreDTO studioMetaStoreDTO, String e }); return jobManager; } - - private List showInfo(StudioMetaStoreDTO studioMetaStoreDTO, String baseStatement, String statement) { - List infos = new ArrayList<>(); - studioMetaStoreDTO.setStatement(baseStatement + statement); - IResult result = executeMSFlinkSql(studioMetaStoreDTO); - if (result instanceof DDLResult) { - DDLResult ddlResult = (DDLResult) result; - ddlResult.getColumns().stream().findFirst().ifPresent(key -> { - for (Map item : ddlResult.getRowData()) { - infos.add(item.get(key).toString()); - } - }); - } - return infos; - } } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java index d2a85c6b8f..2ad00df1f1 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java @@ -57,14 +57,9 @@ import org.dinky.data.model.udf.UDFTemplate; import org.dinky.data.result.Result; import org.dinky.data.result.SqlExplainResult; -import org.dinky.explainer.lineage.LineageBuilder; import org.dinky.explainer.lineage.LineageResult; -import org.dinky.explainer.sqllineage.SQLLineageBuilder; -import org.dinky.function.FunctionFactory; import org.dinky.function.compiler.CustomStringJavaCompiler; import org.dinky.function.data.model.UDF; -import org.dinky.function.pool.UdfCodePool; -import org.dinky.function.util.UDFUtil; import org.dinky.gateway.enums.SavePointStrategy; import org.dinky.gateway.enums.SavePointType; import org.dinky.gateway.model.FlinkClusterConfig; @@ -472,7 +467,8 @@ public SavePointResult savepointTaskJob(TaskDTO task, SavePointType savePointTyp JobManager jobManager = JobManager.build(buildJobConfig(task)); String jobId = jobInstance.getJid(); - SavePointResult savePointResult = jobManager.savepoint(jobId, savePointType, null); + SavePointResult savePointResult = jobManager.savepoint( + jobId, savePointType, null, SystemConfiguration.getInstances().isUseRestAPI()); Assert.notNull(savePointResult.getJobInfos()); for (JobInfo item : savePointResult.getJobInfos()) { if (Asserts.isEqualsIgnoreCase(jobId, item.getJobId()) && Asserts.isNotNull(jobInstance.getTaskId())) { @@ -502,7 +498,7 @@ public ObjectNode getJobPlan(TaskDTO task) { @Override public ObjectNode getStreamGraph(TaskDTO taskDTO) { JobConfig config = taskDTO.getJobConfig(); - JobManager jobManager = JobManager.buildPlanMode(config); + JobManager jobManager = JobManager.build(config, true); ObjectNode streamGraph = jobManager.getStreamGraph(taskDTO.getStatement()); RunTimeUtil.recovery(jobManager); return streamGraph; @@ -577,20 +573,16 @@ public boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle) thro if (Dialect.isUDF(task.getDialect())) { // compile udf class UDF udf = UDFUtils.taskToUDF(task.buildTask()); - try { - FunctionFactory.initUDF(Collections.singletonList(udf), task.getId()); - } catch (Throwable e) { - throw new BusException( - "UDF compilation failed and cannot be published. 
The error message is as follows:" - + e.getMessage()); - } - UdfCodePool.addOrUpdate(udf); + JobManager jobManager = JobManager.build(new JobConfig()); + jobManager.initUDF(Collections.singletonList(udf), task.getId()); + jobManager.addOrUpdateUdfCodePool(UDFUtils.taskToUDF(task.buildTask())); } } else { if (Dialect.isUDF(task.getDialect()) && Asserts.isNotNull(task.getConfigJson()) && Asserts.isNotNull(task.getConfigJson().getUdfConfig())) { - UdfCodePool.remove(task.getConfigJson().getUdfConfig().getClassName()); + JobManager.build(new JobConfig()) + .removeUdfCodePool(task.getConfigJson().getUdfConfig().getClassName()); } } boolean saved = saveOrUpdate(task.buildTask()); @@ -612,6 +604,7 @@ public boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle) thro @Override @Transactional(rollbackFor = Exception.class) public boolean saveOrUpdateTask(Task task) { + JobManager jobManager = JobManager.build(new JobConfig()); Task byId = getById(task.getId()); if (byId != null && JobLifeCycle.PUBLISH.equalsValue(byId.getStep())) { throw new BusException(Status.TASK_IS_ONLINE.getMessage()); @@ -628,7 +621,7 @@ public boolean saveOrUpdateTask(Task task) { UDFTemplate template = udfTemplateService.getById(taskConfigJson.getUdfConfig().getTemplateId()); if (template != null) { - String code = UDFUtil.templateParse( + String code = jobManager.templateParse( task.getDialect(), template.getTemplateCode(), taskConfigJson.getUdfConfig().getClassName()); @@ -643,18 +636,18 @@ public boolean saveOrUpdateTask(Task task) { CustomStringJavaCompiler compiler = new CustomStringJavaCompiler(task.getStatement()); className = compiler.getFullClassName(); } else if (Dialect.PYTHON.isDialect(task.getDialect())) { - className = task.getName() + "." + UDFUtil.getPyUDFAttr(task.getStatement()); + className = task.getName() + "." 
+ jobManager.getPyUDFAttr(task.getStatement()); } else if (Dialect.SCALA.isDialect(task.getDialect())) { - className = UDFUtil.getScalaFullClassName(task.getStatement()); + className = jobManager.getScalaFullClassName(task.getStatement()); } if (!task.getConfigJson().getUdfConfig().getClassName().equals(className)) { - UdfCodePool.remove(task.getConfigJson().getUdfConfig().getClassName()); + jobManager.removeUdfCodePool(task.getConfigJson().getUdfConfig().getClassName()); } task.getConfigJson().getUdfConfig().setClassName(className); if (task.getStep().equals(JobLifeCycle.PUBLISH.getValue())) { - UdfCodePool.addOrUpdate(UDFUtils.taskToUDF(task)); + jobManager.addOrUpdateUdfCodePool(UDFUtils.taskToUDF(task)); } else { - UdfCodePool.remove(task.getConfigJson().getUdfConfig().getClassName()); + jobManager.removeUdfCodePool(task.getConfigJson().getUdfConfig().getClassName()); } } @@ -975,6 +968,7 @@ public Result> queryAllCatalogue() { @Override public LineageResult getTaskLineage(Integer id) { + JobManager jobManager = JobManager.build(new JobConfig()); TaskDTO task = getTaskInfoById(id); if (!Dialect.isCommonSql(task.getDialect())) { if (Asserts.isNull(task.getDatabaseId())) { @@ -985,13 +979,13 @@ public LineageResult getTaskLineage(Integer id) { return null; } if (task.getDialect().equalsIgnoreCase("doris") || task.getDialect().equalsIgnoreCase("starrocks")) { - return SQLLineageBuilder.getSqlLineage(task.getStatement(), "mysql", dataBase.getDriverConfig()); + return jobManager.getSqlLineage(task.getStatement(), "mysql", dataBase.getDriverConfig()); } else { - return SQLLineageBuilder.getSqlLineage( + return jobManager.getSqlLineage( task.getStatement(), task.getDialect().toLowerCase(), dataBase.getDriverConfig()); } } else { - return LineageBuilder.getColumnLineageByLogicalPlan(buildEnvSql(task)); + return jobManager.getColumnLineageByLogicalPlan(buildEnvSql(task)); } } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java index 4819893d68..73b2ae94d9 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java @@ -25,10 +25,11 @@ import org.dinky.data.vo.CascaderVO; import org.dinky.data.vo.UDFManageVO; import org.dinky.function.data.model.UDF; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import org.dinky.mapper.UDFManageMapper; import org.dinky.service.UDFService; import org.dinky.service.resource.ResourcesService; -import org.dinky.trans.Operations; import org.dinky.utils.UDFUtils; import org.apache.flink.table.catalog.FunctionLanguage; @@ -117,24 +118,25 @@ public void addOrUpdateByResourceId(List resourceIds) { resourceIds.stream().filter(x -> !udfManageIdList.contains(x)).collect(Collectors.toList()); if (CollUtil.isNotEmpty(needAddList)) { List resources = resourcesService.listByIds(needAddList); + JobManager jm = JobManager.build(new JobConfig()); List manageList = resources.stream() .flatMap(x -> { String suffix = FileUtil.getSuffix(x.getFileName()); if ("jar".equals(suffix)) { File file = resourcesService.getFile(x.getId()); - List> classes = UDFUtils.getUdfClassByJar(file); + List classes = jm.getUdfClassNameByJarPath(file.getPath()); return classes.stream().map(clazz -> { UDFManage udfManage = UDFManage.builder() - .className(clazz.getName()) + .className(clazz) .language(FunctionLanguage.JAVA.name()) .resourcesId(x.getId()) .build(); - 
udfManage.setName(StrUtil.toUnderlineCase(getSimpleClassName(clazz.getName()))); + udfManage.setName(StrUtil.toUnderlineCase(getSimpleClassName(clazz))); return udfManage; }); } else if ("py".equals(suffix) || "zip".equals(suffix)) { File file = resourcesService.getFile(x.getId()); - List pythonUdfList = UDFUtils.getPythonUdfList(file.getAbsolutePath()); + List pythonUdfList = jm.getPythonUdfList(file.getAbsolutePath()); return pythonUdfList.stream().map(className -> { UDFManage udfManage = UDFManage.builder() .className(className) @@ -182,7 +184,7 @@ public List getUDFFromUdfManage() { @Override public List getAllUdfsToCascader(List userDefinedReleaseUdfs) { // Get all UDFs of static UDFs and dynamic UDFs - List staticUdfs = Operations.getCustomStaticUdfs(); + List staticUdfs = JobManager.build(new JobConfig()).getCustomStaticUdfs(); // get all UDFs of UDFManage table List udfManageDynamic = getUDFFromUdfManage().stream() diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java index e280b9f510..45ae3c00df 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java @@ -20,7 +20,6 @@ package org.dinky.service.impl; import org.dinky.assertion.Asserts; -import org.dinky.context.RowLevelPermissionsContext; import org.dinky.context.TenantContextHolder; import org.dinky.context.UserInfoContextHolder; import org.dinky.data.dto.AssignRoleDTO; @@ -44,6 +43,8 @@ import org.dinky.data.model.rbac.UserTenant; import org.dinky.data.result.Result; import org.dinky.data.vo.UserVo; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import org.dinky.mapper.TokenMapper; import org.dinky.mapper.UserMapper; import org.dinky.mybatis.service.impl.SuperServiceImpl; @@ -437,7 +438,8 @@ public void buildRowPermission() { permission.put(roleSelectPermissions.getTableName(), roleSelectPermissions.getExpression()); } } - RowLevelPermissionsContext.set(permission); + + JobManager.build(new JobConfig()).buildRowPermission(permission); } } diff --git a/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java index 38c73da3ea..923c5fa016 100644 --- a/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java @@ -25,8 +25,9 @@ import org.dinky.data.exception.BusException; import org.dinky.data.model.Resources; import org.dinky.data.result.Result; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import org.dinky.mapper.ResourcesMapper; -import org.dinky.resource.BaseResourceManager; import org.dinky.service.resource.ResourcesService; import org.dinky.utils.URLUtils; @@ -50,6 +51,7 @@ import cn.hutool.cache.impl.TimedCache; import cn.hutool.core.collection.CollUtil; import cn.hutool.core.collection.CollectionUtil; +import cn.hutool.core.io.FileUtil; import cn.hutool.core.lang.Assert; import cn.hutool.core.lang.Opt; import cn.hutool.core.util.StrUtil; @@ -58,6 +60,7 @@ public class ResourceServiceImpl extends ServiceImpl implements ResourcesService { private static final TimedCache RESOURCES_CACHE = new TimedCache<>(30 * 1000); private static final long ALLOW_MAX_CAT_CONTENT_SIZE = 10 * 1024 * 1024; + private static final JobManager jobManager = JobManager.build(new 
JobConfig()); @Override @Transactional(rollbackFor = Exception.class) @@ -70,21 +73,20 @@ public boolean syncRemoteDirectoryStructure() { Map<Integer, Resources> localMap = local.stream().collect(Collectors.toMap(Resources::getId, Function.identity())); - List<Resources> resourcesList = - getBaseResourceManager().getFullDirectoryStructure(rootResource.getId()).stream() - .filter(x -> x.getPid() != -1) - .map(Resources::of) - .peek(x -> { - // Restore the existing information. If the remote map is not available, - // it means that the configuration is out of sync and no processing will be done. - Resources resources = localMap.get(x.getFileName().hashCode()); - if (resources != null) { - x.setDescription(resources.getDescription()); - x.setType(resources.getType()); - x.setUserId(resources.getUserId()); - } - }) - .collect(Collectors.toList()); + List<Resources> resourcesList = jobManager.getFullDirectoryStructure(rootResource.getId()).stream() + .filter(x -> x.getPid() != -1) + .map(Resources::of) + .peek(x -> { + // Restore the existing information. If the remote map is not available, + // it means that the configuration is out of sync and no processing will be done. + Resources resources = localMap.get(x.getFileName().hashCode()); + if (resources != null) { + x.setDescription(resources.getDescription()); + x.setType(resources.getType()); + x.setUserId(resources.getUserId()); + } + }) + .collect(Collectors.toList()); // not delete root directory this.remove(new LambdaQueryWrapper<Resources>().ne(Resources::getPid, -1)); this.saveBatch(resourcesList); @@ -172,7 +174,7 @@ public void rename(Integer id, String fileName, String desc) { } } if (isRunStorageMove) { - getBaseResourceManager().rename(sourceFullName, fullName); + jobManager.rename(sourceFullName, fullName); } } @@ -221,7 +223,7 @@ public String getContentByResourceId(Integer id) { Resources resources = getById(id); DinkyAssert.checkNull(resources, Status.RESOURCE_DIR_OR_FILE_NOT_EXIST); Assert.isFalse(resources.getSize() > ALLOW_MAX_CAT_CONTENT_SIZE, () -> new BusException("file is too large!")); - return getBaseResourceManager().getFileContent(resources.getFullName()); + return jobManager.getFileContent(resources.getFullName()); } @Override @@ -243,7 +245,9 @@ public void uploadFile(Integer pid, String desc, File file) { } long size = file.length(); String fileName = file.getName(); - upload(pid, desc, (fullName) -> getBaseResourceManager().putFile(fullName, file), fileName, pResource, size); + // read the file content into memory so it can be handed to the job manager + byte[] content = FileUtil.readBytes(file); + upload(pid, desc, (fullName) -> jobManager.putFile(fullName, content), fileName, pResource, size); } /** @@ -302,7 +306,8 @@ public void uploadFile(Integer pid, String desc, MultipartFile file) { desc, (fullName) -> { try { - getBaseResourceManager().putFile(fullName, file.getInputStream()); + byte[] content = file.getBytes(); + jobManager.putFile(fullName, content); } catch (IOException e) { throw new RuntimeException(e); } @@ -484,8 +489,4 @@ private List<Resources> getChildList(List<Resources> list, Resources resources) private boolean hasChild(List<Resources> resourcesList, Resources resources) { return !getChildList(resourcesList, resources).isEmpty(); } - - private BaseResourceManager getBaseResourceManager() { - return BaseResourceManager.getInstance(); - } } diff --git a/dinky-admin/src/main/java/org/dinky/service/task/FlinkJarSqlTask.java b/dinky-admin/src/main/java/org/dinky/service/task/FlinkJarSqlTask.java index de2716d3fb..3f28bfea74 100644 --- a/dinky-admin/src/main/java/org/dinky/service/task/FlinkJarSqlTask.java +++ 
b/dinky-admin/src/main/java/org/dinky/service/task/FlinkJarSqlTask.java @@ -23,6 +23,8 @@ import org.dinky.data.annotations.SupportDialect; import org.dinky.data.dto.TaskDTO; import org.dinky.data.result.SqlExplainResult; +import org.dinky.job.Job; +import org.dinky.job.JobHandler; import org.dinky.job.JobResult; import java.util.List; @@ -42,8 +44,17 @@ public List<SqlExplainResult> explain() { @Override public JobResult execute() throws Exception { - - return jobManager.executeJarSql(task.getStatement()); + JobHandler handler = JobHandler.build(); + jobManager.prepare(task.getStatement()); + handler.init(jobManager.getJob()); + JobResult result = jobManager.executeJarSql(task.getStatement()); + Job afterJob = jobManager.getJob(); + if (result.isSuccess()) { + handler.success(afterJob); + } else { + handler.failed(afterJob); + } + return result; } @Override diff --git a/dinky-admin/src/main/java/org/dinky/service/task/FlinkSqlTask.java b/dinky-admin/src/main/java/org/dinky/service/task/FlinkSqlTask.java index 481f25fe41..b23bf7cdf0 100644 --- a/dinky-admin/src/main/java/org/dinky/service/task/FlinkSqlTask.java +++ b/dinky-admin/src/main/java/org/dinky/service/task/FlinkSqlTask.java @@ -25,6 +25,8 @@ import org.dinky.data.dto.TaskDTO; import org.dinky.data.enums.GatewayType; import org.dinky.data.result.SqlExplainResult; +import org.dinky.job.Job; +import org.dinky.job.JobHandler; import org.dinky.job.JobManager; import org.dinky.job.JobResult; import org.dinky.service.TaskService; @@ -64,8 +66,18 @@ public ObjectNode getJobPlan() { @Override public JobResult execute() throws Exception { - log.info("Initializing Flink job config..."); - return jobManager.executeSql(task.getStatement()); + JobHandler handler = JobHandler.build(); + jobManager.prepare(task.getStatement()); + handler.init(jobManager.getJob()); + JobResult result = jobManager.executeSql(task.getStatement()); + // the Job instance may have been changed on the remote server, so read it back + Job afterJob = jobManager.getJob(); + if (result.isSuccess()) { + handler.success(afterJob); + } else { + handler.failed(afterJob); + } + return result; } protected JobManager getJobManager() { diff --git a/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java b/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java index 094670fdc2..e3d9b8d8ad 100644 --- a/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java +++ b/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java @@ -23,9 +23,10 @@ import org.dinky.data.annotations.SupportDialect; import org.dinky.data.dto.TaskDTO; import org.dinky.data.model.Task; -import org.dinky.function.FunctionFactory; import org.dinky.function.data.model.UDF; import org.dinky.job.Job; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import org.dinky.job.JobResult; import org.dinky.utils.UDFUtils; @@ -47,7 +48,7 @@ public JobResult execute() throws Exception { jobResult.setStatus(Job.JobStatus.SUCCESS); try { UDF udf = UDFUtils.taskToUDF(BeanUtil.toBean(task, Task.class)); - FunctionFactory.initUDF(Collections.singletonList(udf), task.getId()); + JobManager.build(new JobConfig()).initUDF(Collections.singletonList(udf), task.getId()); } catch (Exception e) { jobResult.setSuccess(false); jobResult.setError(ExceptionUtil.getRootCauseMessage(e)); diff --git a/dinky-admin/src/main/java/org/dinky/sse/DoneStepSse.java b/dinky-admin/src/main/java/org/dinky/sse/DoneStepSse.java index 6271c89718..f1567ac270 100644 --- a/dinky-admin/src/main/java/org/dinky/sse/DoneStepSse.java +++ 
b/dinky-admin/src/main/java/org/dinky/sse/DoneStepSse.java @@ -19,7 +19,8 @@ package org.dinky.sse; -import org.dinky.function.pool.UdfCodePool; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import org.dinky.service.GitProjectService; import java.util.List; @@ -50,7 +51,7 @@ public DoneStepSse( public void exec() { addFileMsgCusLog("Updating UDF pool"); GitProjectService gitProjectService = SpringUtil.getBean(GitProjectService.class); - UdfCodePool.updateGitPool(gitProjectService.getGitPool()); + JobManager.build(new JobConfig()).updateGitPool(gitProjectService.getGitPool()); addFileMsgCusLog("The UDF pool has been updated"); } } diff --git a/dinky-admin/src/main/java/org/dinky/sse/git/AnalysisUdfClassStepSse.java b/dinky-admin/src/main/java/org/dinky/sse/git/AnalysisUdfClassStepSse.java index 83802aca04..8adb286015 100644 --- a/dinky-admin/src/main/java/org/dinky/sse/git/AnalysisUdfClassStepSse.java +++ b/dinky-admin/src/main/java/org/dinky/sse/git/AnalysisUdfClassStepSse.java @@ -22,7 +22,8 @@ import org.dinky.data.dto.GitAnalysisJarDTO; import org.dinky.data.exception.DinkyException; import org.dinky.data.model.GitProject; -import org.dinky.function.util.UDFUtil; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import org.dinky.sse.StepSse; import org.dinky.utils.URLUtils; @@ -34,7 +35,6 @@ import java.util.TreeMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.Collectors; import org.springframework.web.servlet.mvc.method.annotation.SseEmitter; @@ -63,14 +63,15 @@ public void exec() { List pathList = (List) params.get("jarPath"); List dataList = new ArrayList<>(); - Map>> udfMap = new TreeMap<>(); + Map> udfMap = new TreeMap<>(); try { Thread.currentThread().getContextClassLoader().loadClass("org.apache.flink.table.api.ValidationException"); } catch (ClassNotFoundException e) { throw new DinkyException("flink dependency not found"); } pathList.parallelStream().forEach(jar -> { - List> udfClassByJar = UDFUtil.getUdfClassByJar(URLUtils.toFile(jar)); + List udfClassByJar = JobManager.build(new JobConfig()) + .getUdfClassNameByJarPath(URLUtils.toFile(jar).getPath()); udfMap.put(jar, udfClassByJar); sendMsg(Dict.create().set(jar, udfClassByJar)); }); @@ -79,7 +80,7 @@ public void exec() { udfMap.forEach((k, v) -> { GitAnalysisJarDTO gitAnalysisJarDTO = new GitAnalysisJarDTO(); gitAnalysisJarDTO.setJarPath(k); - gitAnalysisJarDTO.setClassList(v.stream().map(Class::getName).collect(Collectors.toList())); + gitAnalysisJarDTO.setClassList(new ArrayList<>(v)); gitAnalysisJarDTO.setOrderLine(index.get()); index.getAndIncrement(); dataList.add(gitAnalysisJarDTO); diff --git a/dinky-admin/src/main/java/org/dinky/sse/git/AnalysisUdfPythonStepSse.java b/dinky-admin/src/main/java/org/dinky/sse/git/AnalysisUdfPythonStepSse.java index 18abff2a4b..52804bf6be 100644 --- a/dinky-admin/src/main/java/org/dinky/sse/git/AnalysisUdfPythonStepSse.java +++ b/dinky-admin/src/main/java/org/dinky/sse/git/AnalysisUdfPythonStepSse.java @@ -22,8 +22,8 @@ import org.dinky.data.dto.GitAnalysisJarDTO; import org.dinky.data.exception.DinkyException; import org.dinky.data.model.GitProject; -import org.dinky.data.model.SystemConfiguration; -import org.dinky.function.util.UDFUtil; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import org.dinky.sse.StepSse; import java.io.File; @@ -59,8 +59,7 @@ public void exec() { } catch (ClassNotFoundException e) { throw new DinkyException("flink dependency 
not found"); } - List pythonUdfList = - UDFUtil.getPythonUdfList(SystemConfiguration.getInstances().getPythonHome(), zipFile.getAbsolutePath()); + List pythonUdfList = JobManager.build(new JobConfig()).getPythonUdfList(zipFile.getAbsolutePath()); GitAnalysisJarDTO gitAnalysisJarDTO = new GitAnalysisJarDTO(); gitAnalysisJarDTO.setJarPath(zipFilePath); gitAnalysisJarDTO.setClassList(pythonUdfList); diff --git a/dinky-admin/src/main/java/org/dinky/url/RsURLConnection.java b/dinky-admin/src/main/java/org/dinky/url/RsURLConnection.java index 007a5b0f73..a225aa07ad 100644 --- a/dinky-admin/src/main/java/org/dinky/url/RsURLConnection.java +++ b/dinky-admin/src/main/java/org/dinky/url/RsURLConnection.java @@ -19,29 +19,27 @@ package org.dinky.url; -import org.dinky.data.exception.BusException; -import org.dinky.resource.BaseResourceManager; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManager; import java.io.InputStream; import java.net.URL; import java.net.URLConnection; +import cn.hutool.core.io.IoUtil; + public class RsURLConnection extends URLConnection { - private InputStream inputStream; + private byte[] context; @Override public void connect() { - BaseResourceManager instance = BaseResourceManager.getInstance(); - if (instance == null) { - throw BusException.valueOf("ResourceManager is disabled"); - } - inputStream = instance.readFile(getURL().getPath()); + context = JobManager.build(new JobConfig()).readFIle(getURL().getPath()); } @Override public InputStream getInputStream() { connect(); - return inputStream; + return IoUtil.toStream(context); } public RsURLConnection(URL url) { diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index 5345f81968..f6ac66f592 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -24,11 +24,10 @@ import org.dinky.data.model.Task; import org.dinky.data.model.udf.UDFManage; import org.dinky.function.data.model.UDF; -import org.dinky.function.util.UDFUtil; import org.apache.flink.table.catalog.FunctionLanguage; -public class UDFUtils extends UDFUtil { +public class UDFUtils { public static UDF taskToUDF(Task task) { if (Asserts.isNotNull(task.getConfigJson()) diff --git a/dinky-admin/src/main/resources/application-mysql.yml b/dinky-admin/src/main/resources/application-mysql.yml index 6c71564216..9e59bddd99 100644 --- a/dinky-admin/src/main/resources/application-mysql.yml +++ b/dinky-admin/src/main/resources/application-mysql.yml @@ -18,6 +18,6 @@ spring: datasource: url: jdbc:mysql://${MYSQL_ADDR:127.0.0.1:3306}/${MYSQL_DATABASE:dinky}?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&useSSL=false&zeroDateTimeBehavior=convertToNull&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true - username: ${MYSQL_USERNAME:dinky} - password: ${MYSQL_PASSWORD:dinky} + username: ${MYSQL_USERNAME:root} + password: ${MYSQL_PASSWORD:123456} driver-class-name: com.mysql.cj.jdbc.Driver diff --git a/dinky-core/src/test/java/org/dinky/core/JobManagerTest.java b/dinky-admin/src/test/java/org/dinky/job/JobManagerTest.java similarity index 95% rename from dinky-core/src/test/java/org/dinky/core/JobManagerTest.java rename to dinky-admin/src/test/java/org/dinky/job/JobManagerTest.java index 978a92024b..5cc0cd2c32 100644 --- a/dinky-core/src/test/java/org/dinky/core/JobManagerTest.java +++ b/dinky-admin/src/test/java/org/dinky/job/JobManagerTest.java @@ -17,14 +17,11 @@ * */ -package org.dinky.core; 
+package org.dinky.job; import org.dinky.data.enums.GatewayType; import org.dinky.data.result.ResultPool; import org.dinky.data.result.SelectResult; -import org.dinky.job.JobConfig; -import org.dinky.job.JobManager; -import org.dinky.job.JobResult; import org.junit.Ignore; import org.junit.Test; diff --git a/dinky-app/dinky-app-base/src/main/java/org/dinky/app/flinksql/Submitter.java b/dinky-app/dinky-app-base/src/main/java/org/dinky/app/flinksql/Submitter.java index 5a5c599c1e..213f95bd95 100644 --- a/dinky-app/dinky-app-base/src/main/java/org/dinky/app/flinksql/Submitter.java +++ b/dinky-app/dinky-app-base/src/main/java/org/dinky/app/flinksql/Submitter.java @@ -63,7 +63,6 @@ import java.io.File; import java.io.IOException; -import java.lang.ref.WeakReference; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URL; @@ -107,7 +106,7 @@ private static void initSystemConfiguration() throws SQLException { public static void submit(AppParamConfig config) throws SQLException { initSystemConfiguration(); - BaseResourceManager.initResourceManager(); + BaseResourceManager.initResourceManager(SystemConfiguration.getInstances()); URL.setURLStreamHandlerFactory(new RsURLStreamHandlerFactory()); log.info("{} Start Submit Job:{}", LocalDateTime.now(), config.getTaskId()); @@ -126,8 +125,7 @@ public static void submit(AppParamConfig config) throws SQLException { // .config(JsonUtils.toMap(appTask.getConfigJson())) .build(); - executor = ExecutorFactory.buildAppStreamExecutor( - executorConfig, new WeakReference<>(DinkyClassLoader.build()).get()); + executor = ExecutorFactory.buildAppStreamExecutor(executorConfig); // 加载第三方jar //TODO 这里有问题,需要修一修 loadDep(appTask.getType(), config.getTaskId(), executorConfig); diff --git a/dinky-app/dinky-app-base/src/main/java/org/dinky/resource/impl/LocalResourceManager.java b/dinky-app/dinky-app-base/src/main/java/org/dinky/resource/impl/LocalResourceManager.java index a762dd2b7c..33d446b0a6 100644 --- a/dinky-app/dinky-app-base/src/main/java/org/dinky/resource/impl/LocalResourceManager.java +++ b/dinky-app/dinky-app-base/src/main/java/org/dinky/resource/impl/LocalResourceManager.java @@ -21,7 +21,6 @@ import org.dinky.data.exception.BusException; import org.dinky.data.model.ResourcesVO; -import org.dinky.data.model.SystemConfiguration; import org.dinky.resource.BaseResourceManager; import java.io.BufferedInputStream; @@ -46,7 +45,6 @@ @Slf4j public class LocalResourceManager implements BaseResourceManager { - SystemConfiguration systemConfiguration = SystemConfiguration.getInstances(); @Override public void remove(String path) { @@ -130,8 +128,7 @@ public List getFullDirectoryStructure(int rootId) { @Override public InputStream readFile(String path) { try (HttpResponse exec = HttpUtil.createGet( - systemConfiguration.getDinkyAddr().getValue() + "/download/downloadFromRs?path=" - + URLUtil.encode(path)) + instances.getDinkyAddr().getValue() + "/download/downloadFromRs?path=" + URLUtil.encode(path)) .execute()) { return exec.bodyStream(); } catch (Exception e) { diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/BaseResourceManager.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/BaseResourceManager.java index de6c733a3a..026590b043 100644 --- a/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/BaseResourceManager.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/BaseResourceManager.java @@ -48,6 +48,10 @@ public interface 
BaseResourceManager { void rename(String path, String newPath); + default void putFile(String path, byte[] fileContent) { + putFile(path, IoUtil.toStream(fileContent)); + } + void putFile(String path, InputStream fileStream); void putFile(String path, File file); @@ -58,8 +62,12 @@ public interface BaseResourceManager { InputStream readFile(String path); + default byte[] readFileContext(String path) { + return IoUtil.readBytes(readFile(path)); + } + static BaseResourceManager getInstance() { - switch (SystemConfiguration.getInstances().getResourcesModel().getValue()) { + switch (instances.getResourcesModel().getValue()) { case HDFS: return Singleton.get(HdfsResourceManager.class); case OSS: @@ -71,7 +79,9 @@ static BaseResourceManager getInstance() { } } - static void initResourceManager() { + static void initResourceManager(SystemConfiguration other) { + // the executor may run outside the admin server, so copy the admin configuration over first + other.copyTo(BaseResourceManager.instances); switch (instances.getResourcesModel().getValue()) { case LOCAL: Singleton.get(LocalResourceManager.class); diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/impl/HdfsResourceManager.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/impl/HdfsResourceManager.java index b413b55f26..6c1d1d4920 100644 --- a/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/impl/HdfsResourceManager.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/impl/HdfsResourceManager.java @@ -140,6 +140,11 @@ public InputStream readFile(String path) { } } + @Override + public byte[] readFileContext(String path) { + return IoUtil.readBytes(readFile(path)); + } + public FileSystem getHdfs() { if (hdfs == null && instances.getResourcesEnable().getValue()) { throw new BusException(Status.RESOURCE_HDFS_CONFIGURATION_ERROR); } diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/impl/OssResourceManager.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/impl/OssResourceManager.java index 6639b53051..2740905490 100644 --- a/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/impl/OssResourceManager.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/resource/impl/OssResourceManager.java @@ -134,6 +134,11 @@ public InputStream readFile(String path) { .getObjectContent(); } + @Override + public byte[] readFileContext(String path) { + return IoUtil.readBytes(readFile(path)); + } + public OssTemplate getOssTemplate() { if (ossTemplate == null && instances.getResourcesEnable().getValue()) { throw new BusException(Status.RESOURCE_OSS_CONFIGURATION_ERROR); } diff --git a/dinky-function/src/main/java/org/apache/flink/table/catalog/FunctionLanguage.java b/dinky-common/src/main/java/org/apache/flink/table/catalog/FunctionLanguage.java similarity index 92% rename from dinky-function/src/main/java/org/apache/flink/table/catalog/FunctionLanguage.java rename to dinky-common/src/main/java/org/apache/flink/table/catalog/FunctionLanguage.java index 22aadb01c1..e91bcaa0d9 100644 --- a/dinky-function/src/main/java/org/apache/flink/table/catalog/FunctionLanguage.java +++ b/dinky-common/src/main/java/org/apache/flink/table/catalog/FunctionLanguage.java @@ -19,7 +19,6 @@ package org.apache.flink.table.catalog; -/** Categorizes the language semantics of a {@link CatalogFunction}.
*/ public enum FunctionLanguage { JAVA, diff --git a/dinky-core/src/main/java/org/dinky/api/FlinkAPI.java b/dinky-common/src/main/java/org/dinky/api/FlinkAPI.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/api/FlinkAPI.java rename to dinky-common/src/main/java/org/dinky/api/FlinkAPI.java diff --git a/dinky-core/src/main/java/org/dinky/cluster/FlinkClusterInfo.java b/dinky-common/src/main/java/org/dinky/cluster/FlinkClusterInfo.java similarity index 95% rename from dinky-core/src/main/java/org/dinky/cluster/FlinkClusterInfo.java rename to dinky-common/src/main/java/org/dinky/cluster/FlinkClusterInfo.java index 9e24abe2bd..084be8cfa8 100644 --- a/dinky-core/src/main/java/org/dinky/cluster/FlinkClusterInfo.java +++ b/dinky-common/src/main/java/org/dinky/cluster/FlinkClusterInfo.java @@ -19,6 +19,8 @@ package org.dinky.cluster; +import java.io.Serializable; + import lombok.Getter; import lombok.Setter; @@ -29,7 +31,7 @@ */ @Getter @Setter -public class FlinkClusterInfo { +public class FlinkClusterInfo implements Serializable { private boolean isEffective; private String jobManagerAddress; diff --git a/dinky-core/src/main/java/org/dinky/constant/FlinkSQLConstant.java b/dinky-common/src/main/java/org/dinky/constant/FlinkSQLConstant.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/constant/FlinkSQLConstant.java rename to dinky-common/src/main/java/org/dinky/constant/FlinkSQLConstant.java diff --git a/dinky-core/src/main/java/org/dinky/data/constant/FlinkHistoryConstant.java b/dinky-common/src/main/java/org/dinky/data/constant/FlinkHistoryConstant.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/data/constant/FlinkHistoryConstant.java rename to dinky-common/src/main/java/org/dinky/data/constant/FlinkHistoryConstant.java diff --git a/dinky-core/src/main/java/org/dinky/data/constant/FlinkRestAPIConstant.java b/dinky-common/src/main/java/org/dinky/data/constant/FlinkRestAPIConstant.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/data/constant/FlinkRestAPIConstant.java rename to dinky-common/src/main/java/org/dinky/data/constant/FlinkRestAPIConstant.java diff --git a/dinky-core/src/main/java/org/dinky/data/constant/FlinkRestResultConstant.java b/dinky-common/src/main/java/org/dinky/data/constant/FlinkRestResultConstant.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/data/constant/FlinkRestResultConstant.java rename to dinky-common/src/main/java/org/dinky/data/constant/FlinkRestResultConstant.java diff --git a/dinky-common/src/main/java/org/dinky/data/model/Configuration.java b/dinky-common/src/main/java/org/dinky/data/model/Configuration.java index fe3892fb28..89a238f10b 100644 --- a/dinky-common/src/main/java/org/dinky/data/model/Configuration.java +++ b/dinky-common/src/main/java/org/dinky/data/model/Configuration.java @@ -77,11 +77,15 @@ public Configuration note(Status status) { } public void setValue(Object value) { - if (getType() == Enum.class) { - this.value = (T) EnumUtil.fromString((Class) type, (String) value); - return; + try { + if (getType() == Enum.class) { + this.value = (T) EnumUtil.fromString((Class) type, (String) value); + return; + } + this.value = type.isInstance(value) ? (T) value : Convert.convert(getType(), value); + } catch (Exception e) { + System.out.println("Configuration.setValue failed, config: " + this + ", value: " + value + ", error: " + e.getMessage()); + } - this.value = type.isInstance(value) ?
(T) value : Convert.convert(getType(), value); } public Configuration desensitizedHandler(Function desensitizedHandler) { @@ -125,7 +129,7 @@ public static OptionBuilder key(String key) { return new OptionBuilder(key); } - public static class OptionBuilder { + public static class OptionBuilder implements Serializable { private final String key; diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/FunctionResult.java b/dinky-common/src/main/java/org/dinky/data/model/FunctionResult.java similarity index 100% rename from dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/FunctionResult.java rename to dinky-common/src/main/java/org/dinky/data/model/FunctionResult.java diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/LineageRel.java b/dinky-common/src/main/java/org/dinky/data/model/LineageRel.java similarity index 100% rename from dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/LineageRel.java rename to dinky-common/src/main/java/org/dinky/data/model/LineageRel.java diff --git a/dinky-common/src/main/java/org/dinky/data/model/ResourcesVO.java b/dinky-common/src/main/java/org/dinky/data/model/ResourcesVO.java index e7644f600f..ed99b32e51 100644 --- a/dinky-common/src/main/java/org/dinky/data/model/ResourcesVO.java +++ b/dinky-common/src/main/java/org/dinky/data/model/ResourcesVO.java @@ -19,6 +19,7 @@ package org.dinky.data.model; +import java.io.Serializable; import java.time.LocalDateTime; import com.fasterxml.jackson.annotation.JsonFormat; @@ -35,7 +36,7 @@ @Builder @NoArgsConstructor @AllArgsConstructor -public class ResourcesVO { +public class ResourcesVO implements Serializable { @ApiModelProperty(value = "ID", dataType = "Integer", example = "1", notes = "Unique identifier for the resource") private Integer id; diff --git a/dinky-common/src/main/java/org/dinky/data/model/SystemConfiguration.java b/dinky-common/src/main/java/org/dinky/data/model/SystemConfiguration.java index 62a1af8f17..3ebe757435 100644 --- a/dinky-common/src/main/java/org/dinky/data/model/SystemConfiguration.java +++ b/dinky-common/src/main/java/org/dinky/data/model/SystemConfiguration.java @@ -26,8 +26,10 @@ import org.dinky.data.enums.TaskOwnerLockStrategyEnum; import org.dinky.data.properties.OssProperties; +import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; @@ -47,7 +49,7 @@ * @since 2021/11/18 */ @Getter -public class SystemConfiguration { +public class SystemConfiguration implements Serializable { private static final SystemConfiguration systemConfiguration = new SystemConfiguration(); @@ -61,10 +63,19 @@ public static Configuration.OptionBuilder key(Status status) { return new Configuration.OptionBuilder(status.getKey()); } - private static final List> CONFIGURATION_LIST = Arrays.stream( - ReflectUtil.getFields(SystemConfiguration.class, f -> f.getType() == Configuration.class)) - .map(f -> (Configuration) ReflectUtil.getFieldValue(systemConfiguration, f)) - .collect(Collectors.toList()); + private static final List> CONFIGURATION_LIST = getConfigurationList(); + + private static List> getConfigurationList() { + return Arrays.stream(ReflectUtil.getFields(SystemConfiguration.class, f -> f.getType() == Configuration.class)) + .map(f -> (Configuration) ReflectUtil.getFieldValue(systemConfiguration, f)) + .collect(Collectors.toList()); + } + + private List> getThisConfigurationList() { + return 
Arrays.stream(ReflectUtil.getFields(SystemConfiguration.class, f -> f.getType() == Configuration.class)) + .map(f -> (Configuration) ReflectUtil.getFieldValue(this, f)) + .collect(Collectors.toList()); + } private final Configuration useRestAPI = key(Status.SYS_FLINK_SETTINGS_USERESTAPI) .booleanType() @@ -337,6 +348,14 @@ public void initSetConfiguration(Map configMap) { CONFIGURATION_LIST.stream().peek(Configuration::runParameterCheck).forEach(Configuration::runChangeEvent); } + public void copyTo(SystemConfiguration other) { + Map configMap = new HashMap<>(); + for (Configuration config : getThisConfigurationList()) { + configMap.put(config.getKey(), String.valueOf(config.getValue())); + } + other.initSetConfiguration(configMap); + } + public void initExpressionVariableList(Map configMap) { CONFIGURATION_LIST.forEach(item -> { if (item.getKey().equals(expressionVariable.getKey())) { diff --git a/dinky-core/src/main/java/org/dinky/data/result/DDLResult.java b/dinky-common/src/main/java/org/dinky/data/result/DDLResult.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/data/result/DDLResult.java rename to dinky-common/src/main/java/org/dinky/data/result/DDLResult.java diff --git a/dinky-core/src/main/java/org/dinky/data/result/ResultPool.java b/dinky-common/src/main/java/org/dinky/data/result/ResultPool.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/data/result/ResultPool.java rename to dinky-common/src/main/java/org/dinky/data/result/ResultPool.java diff --git a/dinky-core/src/main/java/org/dinky/data/result/SelectResult.java b/dinky-common/src/main/java/org/dinky/data/result/SelectResult.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/data/result/SelectResult.java rename to dinky-common/src/main/java/org/dinky/data/result/SelectResult.java diff --git a/dinky-core/src/main/java/org/dinky/executor/ExecutorConfig.java b/dinky-common/src/main/java/org/dinky/executor/ExecutorConfig.java similarity index 99% rename from dinky-core/src/main/java/org/dinky/executor/ExecutorConfig.java rename to dinky-common/src/main/java/org/dinky/executor/ExecutorConfig.java index 64471d44e8..c3ae51f8bc 100644 --- a/dinky-core/src/main/java/org/dinky/executor/ExecutorConfig.java +++ b/dinky-common/src/main/java/org/dinky/executor/ExecutorConfig.java @@ -22,6 +22,7 @@ import org.dinky.assertion.Asserts; import org.dinky.data.enums.GatewayType; +import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -50,7 +51,7 @@ @Builder @AllArgsConstructor @ApiModel(value = "ExecutorConfig", description = "Executor config for a job") -public class ExecutorConfig { +public class ExecutorConfig implements Serializable { private static final Logger log = LoggerFactory.getLogger(ExecutorConfig.class); private static final ObjectMapper mapper = new ObjectMapper(); diff --git a/dinky-core/src/main/java/org/dinky/explainer/lineage/LineageColumn.java b/dinky-common/src/main/java/org/dinky/explainer/lineage/LineageColumn.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/explainer/lineage/LineageColumn.java rename to dinky-common/src/main/java/org/dinky/explainer/lineage/LineageColumn.java diff --git a/dinky-core/src/main/java/org/dinky/explainer/lineage/LineageRelation.java b/dinky-common/src/main/java/org/dinky/explainer/lineage/LineageRelation.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/explainer/lineage/LineageRelation.java rename to 
dinky-common/src/main/java/org/dinky/explainer/lineage/LineageRelation.java diff --git a/dinky-core/src/main/java/org/dinky/explainer/lineage/LineageResult.java b/dinky-common/src/main/java/org/dinky/explainer/lineage/LineageResult.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/explainer/lineage/LineageResult.java rename to dinky-common/src/main/java/org/dinky/explainer/lineage/LineageResult.java diff --git a/dinky-core/src/main/java/org/dinky/explainer/lineage/LineageTable.java b/dinky-common/src/main/java/org/dinky/explainer/lineage/LineageTable.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/explainer/lineage/LineageTable.java rename to dinky-common/src/main/java/org/dinky/explainer/lineage/LineageTable.java diff --git a/dinky-function/src/main/java/org/dinky/function/compiler/CustomStringJavaCompiler.java b/dinky-common/src/main/java/org/dinky/function/compiler/CustomStringJavaCompiler.java similarity index 100% rename from dinky-function/src/main/java/org/dinky/function/compiler/CustomStringJavaCompiler.java rename to dinky-common/src/main/java/org/dinky/function/compiler/CustomStringJavaCompiler.java diff --git a/dinky-function/src/main/java/org/dinky/function/constant/PathConstant.java b/dinky-common/src/main/java/org/dinky/function/constant/PathConstant.java similarity index 100% rename from dinky-function/src/main/java/org/dinky/function/constant/PathConstant.java rename to dinky-common/src/main/java/org/dinky/function/constant/PathConstant.java diff --git a/dinky-function/src/main/java/org/dinky/function/data/model/UDF.java b/dinky-common/src/main/java/org/dinky/function/data/model/UDF.java similarity index 100% rename from dinky-function/src/main/java/org/dinky/function/data/model/UDF.java rename to dinky-common/src/main/java/org/dinky/function/data/model/UDF.java diff --git a/dinky-function/src/main/java/org/dinky/function/data/model/UDFPath.java b/dinky-common/src/main/java/org/dinky/function/data/model/UDFPath.java similarity index 100% rename from dinky-function/src/main/java/org/dinky/function/data/model/UDFPath.java rename to dinky-common/src/main/java/org/dinky/function/data/model/UDFPath.java diff --git a/dinky-function/src/main/java/org/dinky/function/util/ZipWriter.java b/dinky-common/src/main/java/org/dinky/function/util/ZipWriter.java similarity index 100% rename from dinky-function/src/main/java/org/dinky/function/util/ZipWriter.java rename to dinky-common/src/main/java/org/dinky/function/util/ZipWriter.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/config/AppConfig.java b/dinky-common/src/main/java/org/dinky/gateway/config/AppConfig.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/config/AppConfig.java rename to dinky-common/src/main/java/org/dinky/gateway/config/AppConfig.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/config/ClusterConfig.java b/dinky-common/src/main/java/org/dinky/gateway/config/ClusterConfig.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/config/ClusterConfig.java rename to dinky-common/src/main/java/org/dinky/gateway/config/ClusterConfig.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/config/FlinkConfig.java b/dinky-common/src/main/java/org/dinky/gateway/config/FlinkConfig.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/config/FlinkConfig.java rename to dinky-common/src/main/java/org/dinky/gateway/config/FlinkConfig.java diff --git 
a/dinky-gateway/src/main/java/org/dinky/gateway/config/GatewayConfig.java b/dinky-common/src/main/java/org/dinky/gateway/config/GatewayConfig.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/config/GatewayConfig.java rename to dinky-common/src/main/java/org/dinky/gateway/config/GatewayConfig.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/config/K8sConfig.java b/dinky-common/src/main/java/org/dinky/gateway/config/K8sConfig.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/config/K8sConfig.java rename to dinky-common/src/main/java/org/dinky/gateway/config/K8sConfig.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/enums/ActionType.java b/dinky-common/src/main/java/org/dinky/gateway/enums/ActionType.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/enums/ActionType.java rename to dinky-common/src/main/java/org/dinky/gateway/enums/ActionType.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/enums/SavePointStrategy.java b/dinky-common/src/main/java/org/dinky/gateway/enums/SavePointStrategy.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/enums/SavePointStrategy.java rename to dinky-common/src/main/java/org/dinky/gateway/enums/SavePointStrategy.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/enums/SavePointType.java b/dinky-common/src/main/java/org/dinky/gateway/enums/SavePointType.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/enums/SavePointType.java rename to dinky-common/src/main/java/org/dinky/gateway/enums/SavePointType.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/model/FlinkClusterConfig.java b/dinky-common/src/main/java/org/dinky/gateway/model/FlinkClusterConfig.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/model/FlinkClusterConfig.java rename to dinky-common/src/main/java/org/dinky/gateway/model/FlinkClusterConfig.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/model/JobInfo.java b/dinky-common/src/main/java/org/dinky/gateway/model/JobInfo.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/model/JobInfo.java rename to dinky-common/src/main/java/org/dinky/gateway/model/JobInfo.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/result/AbstractGatewayResult.java b/dinky-common/src/main/java/org/dinky/gateway/result/AbstractGatewayResult.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/result/AbstractGatewayResult.java rename to dinky-common/src/main/java/org/dinky/gateway/result/AbstractGatewayResult.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/result/GatewayResult.java b/dinky-common/src/main/java/org/dinky/gateway/result/GatewayResult.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/result/GatewayResult.java rename to dinky-common/src/main/java/org/dinky/gateway/result/GatewayResult.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/result/SavePointResult.java b/dinky-common/src/main/java/org/dinky/gateway/result/SavePointResult.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/result/SavePointResult.java rename to dinky-common/src/main/java/org/dinky/gateway/result/SavePointResult.java diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/result/TestResult.java 
b/dinky-common/src/main/java/org/dinky/gateway/result/TestResult.java similarity index 100% rename from dinky-gateway/src/main/java/org/dinky/gateway/result/TestResult.java rename to dinky-common/src/main/java/org/dinky/gateway/result/TestResult.java diff --git a/dinky-core/src/main/java/org/dinky/job/Job.java b/dinky-common/src/main/java/org/dinky/job/Job.java similarity index 93% rename from dinky-core/src/main/java/org/dinky/job/Job.java rename to dinky-common/src/main/java/org/dinky/job/Job.java index e352e6d8cc..455dbe06a1 100644 --- a/dinky-core/src/main/java/org/dinky/job/Job.java +++ b/dinky-common/src/main/java/org/dinky/job/Job.java @@ -21,9 +21,9 @@ import org.dinky.data.enums.GatewayType; import org.dinky.data.result.IResult; -import org.dinky.executor.Executor; import org.dinky.executor.ExecutorConfig; +import java.io.Serializable; import java.time.LocalDateTime; import java.util.List; @@ -35,9 +35,9 @@ * * @since 2021/6/26 23:39 */ -@Getter @Setter -public class Job { +@Getter +public class Job implements Serializable { private Integer id; private Integer jobInstanceId; private JobConfig jobConfig; @@ -51,10 +51,11 @@ public class Job { private ExecutorConfig executorConfig; private LocalDateTime startTime; private LocalDateTime endTime; - private Executor executor; private boolean useGateway; private List jids; + public Job() {} + @Getter public enum JobStatus { INITIALIZE(0), @@ -75,7 +76,6 @@ public Job( JobStatus status, String statement, ExecutorConfig executorConfig, - Executor executor, boolean useGateway) { this.jobConfig = jobConfig; this.type = type; @@ -83,7 +83,6 @@ public Job( this.statement = statement; this.executorConfig = executorConfig; this.startTime = LocalDateTime.now(); - this.executor = executor; this.useGateway = useGateway; } @@ -91,10 +90,9 @@ public static Job build( GatewayType type, JobConfig jobConfig, ExecutorConfig executorConfig, - Executor executor, String statement, boolean useGateway) { - Job job = new Job(jobConfig, type, JobStatus.INITIALIZE, statement, executorConfig, executor, useGateway); + Job job = new Job(jobConfig, type, JobStatus.INITIALIZE, statement, executorConfig, useGateway); if (!useGateway) { job.setJobManagerAddress(executorConfig.getJobManagerAddress()); } diff --git a/dinky-core/src/main/java/org/dinky/job/JobConfig.java b/dinky-common/src/main/java/org/dinky/job/JobConfig.java similarity index 91% rename from dinky-core/src/main/java/org/dinky/job/JobConfig.java rename to dinky-common/src/main/java/org/dinky/job/JobConfig.java index 54dd8ff735..3c0489a0bd 100644 --- a/dinky-core/src/main/java/org/dinky/job/JobConfig.java +++ b/dinky-common/src/main/java/org/dinky/job/JobConfig.java @@ -23,16 +23,14 @@ import org.dinky.data.constant.NetConstant; import org.dinky.data.enums.GatewayType; import org.dinky.data.model.CustomConfig; +import org.dinky.data.model.SystemConfiguration; import org.dinky.executor.ExecutorConfig; import org.dinky.gateway.config.FlinkConfig; import org.dinky.gateway.config.GatewayConfig; import org.dinky.gateway.enums.SavePointStrategy; import org.dinky.gateway.model.FlinkClusterConfig; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.configuration.CoreOptions; -import org.apache.flink.configuration.RestOptions; - +import java.io.Serializable; import java.util.HashMap; import java.util.Map; @@ -52,7 +50,10 @@ @Builder @AllArgsConstructor @ApiModel(value = "JobConfig", description = "Configuration details of a job") -public class JobConfig { +public class JobConfig 
implements Serializable { + + private static final String REST_PORT = "rest.port"; + private static final String DEFAULT_PARALLELISM = "parallelism.default"; @ApiModelProperty( value = "Flink run mode", @@ -193,26 +194,26 @@ public class JobConfig { notes = "Map of variables") private Map variables; + private SystemConfiguration systemConfiguration; + public JobConfig() { this.configJson = new HashMap<>(); } public void setAddress(String address) { - if (GatewayType.LOCAL.equalsValue(type) - && Asserts.isNotNull(configJson) - && configJson.containsKey(RestOptions.PORT.key())) { + if (GatewayType.LOCAL.equalsValue(type) && Asserts.isNotNull(configJson) && configJson.containsKey(REST_PORT)) { int colonIndex = address.indexOf(':'); if (colonIndex == -1) { - this.address = address + NetConstant.COLON + configJson.get(RestOptions.PORT.key()); + this.address = address + NetConstant.COLON + configJson.get(REST_PORT); } else { - this.address = address.replaceAll("(?<=:)\\d{0,6}$", configJson.get(RestOptions.PORT.key())); + this.address = address.replaceAll("(?<=:)\\d{0,6}$", configJson.get(REST_PORT)); } } else { this.address = address; } } - public ExecutorConfig getExecutorSetting() { + public ExecutorConfig createExecutorSetting() { Map config = new HashMap<>(32); if (GatewayType.isDeployCluster(type) && gatewayConfig != null && gatewayConfig.getFlinkConfig() != null) { config.putAll(gatewayConfig.getFlinkConfig().getConfiguration()); @@ -243,7 +244,7 @@ public void buildGatewayConfig(FlinkClusterConfig config) { } // Load job configuration content afterwards flinkConfig.getConfiguration().putAll(getConfigJson()); - flinkConfig.getConfiguration().put(CoreOptions.DEFAULT_PARALLELISM.key(), String.valueOf(parallelism)); + flinkConfig.getConfiguration().put(DEFAULT_PARALLELISM, String.valueOf(parallelism)); flinkConfig.setJobName(getJobName()); gatewayConfig = GatewayConfig.build(config); @@ -260,13 +261,6 @@ public void addGatewayConfig(Map config) { } } - public void addGatewayConfig(Configuration config) { - if (Asserts.isNull(gatewayConfig)) { - gatewayConfig = new GatewayConfig(); - } - gatewayConfig.getFlinkConfig().getConfiguration().putAll(config.toMap()); - } - public boolean isUseRemote() { return useRemote || !GatewayType.LOCAL.equalsValue(type); } diff --git a/dinky-core/src/main/java/org/dinky/job/JobHandler.java b/dinky-common/src/main/java/org/dinky/job/JobHandler.java similarity index 94% rename from dinky-core/src/main/java/org/dinky/job/JobHandler.java rename to dinky-common/src/main/java/org/dinky/job/JobHandler.java index 46726ee6f6..200350ae22 100644 --- a/dinky-core/src/main/java/org/dinky/job/JobHandler.java +++ b/dinky-common/src/main/java/org/dinky/job/JobHandler.java @@ -33,15 +33,11 @@ public interface JobHandler { boolean init(Job job); - boolean ready(); - boolean running(); - boolean success(); - - boolean failed(); + boolean success(Job job); - boolean callback(); + boolean failed(Job job); boolean close(); diff --git a/dinky-core/src/main/java/org/dinky/job/JobReadHandler.java b/dinky-common/src/main/java/org/dinky/job/JobReadHandler.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/job/JobReadHandler.java rename to dinky-common/src/main/java/org/dinky/job/JobReadHandler.java diff --git a/dinky-core/src/main/java/org/dinky/job/JobResult.java b/dinky-common/src/main/java/org/dinky/job/JobResult.java similarity index 95% rename from dinky-core/src/main/java/org/dinky/job/JobResult.java rename to
diff --git a/dinky-core/src/main/java/org/dinky/job/JobHandler.java b/dinky-common/src/main/java/org/dinky/job/JobHandler.java similarity index 94% rename from dinky-core/src/main/java/org/dinky/job/JobHandler.java rename to dinky-common/src/main/java/org/dinky/job/JobHandler.java index 46726ee6f6..200350ae22 100644
--- a/dinky-core/src/main/java/org/dinky/job/JobHandler.java
+++ b/dinky-common/src/main/java/org/dinky/job/JobHandler.java
@@ -33,15 +33,11 @@ public interface JobHandler { boolean init(Job job); - boolean ready(); - boolean running(); - boolean success(); - - boolean failed(); + boolean success(Job job); - boolean callback(); + boolean failed(Job job); boolean close();
diff --git a/dinky-core/src/main/java/org/dinky/job/JobReadHandler.java b/dinky-common/src/main/java/org/dinky/job/JobReadHandler.java similarity index 100% rename from dinky-core/src/main/java/org/dinky/job/JobReadHandler.java rename to dinky-common/src/main/java/org/dinky/job/JobReadHandler.java
diff --git a/dinky-core/src/main/java/org/dinky/job/JobResult.java b/dinky-common/src/main/java/org/dinky/job/JobResult.java similarity index 95% rename from dinky-core/src/main/java/org/dinky/job/JobResult.java rename to dinky-common/src/main/java/org/dinky/job/JobResult.java index 6f127a6c5d..45f44d1875 100644
--- a/dinky-core/src/main/java/org/dinky/job/JobResult.java
+++ b/dinky-common/src/main/java/org/dinky/job/JobResult.java
@@ -22,6 +22,7 @@ import org.dinky.data.result.IResult; import org.dinky.metadata.result.JdbcSelectResult; +import java.io.Serializable; import java.time.LocalDateTime; import java.util.List;
@@ -38,7 +39,7 @@ @Getter @Setter @ApiModel(value = "JobResult", description = "Result of a job execution") -public class JobResult { +public class JobResult implements Serializable { @ApiModelProperty(value = "Unique identifier for the job result", dataType = "Integer", example = "123") private Integer id;
@@ -141,12 +142,4 @@ public JobResult( this.startTime = startTime; this.endTime = endTime; } - - public void setStartTimeNow() { - this.setStartTime(LocalDateTime.now()); - } - - public void setEndTimeNow() { - this.setEndTime(LocalDateTime.now()); - } }
diff --git a/dinky-metadata/dinky-metadata-base/src/main/java/org/dinky/metadata/config/DriverConfig.java b/dinky-common/src/main/java/org/dinky/metadata/config/DriverConfig.java similarity index 100% rename from dinky-metadata/dinky-metadata-base/src/main/java/org/dinky/metadata/config/DriverConfig.java rename to dinky-common/src/main/java/org/dinky/metadata/config/DriverConfig.java
diff --git a/dinky-metadata/dinky-metadata-base/src/main/java/org/dinky/metadata/config/IConnectConfig.java b/dinky-common/src/main/java/org/dinky/metadata/config/IConnectConfig.java similarity index 100% rename from dinky-metadata/dinky-metadata-base/src/main/java/org/dinky/metadata/config/IConnectConfig.java rename to dinky-common/src/main/java/org/dinky/metadata/config/IConnectConfig.java
diff --git a/dinky-metadata/dinky-metadata-base/src/main/java/org/dinky/metadata/result/JdbcSelectResult.java b/dinky-common/src/main/java/org/dinky/metadata/result/JdbcSelectResult.java similarity index 100% rename from dinky-metadata/dinky-metadata-base/src/main/java/org/dinky/metadata/result/JdbcSelectResult.java rename to dinky-common/src/main/java/org/dinky/metadata/result/JdbcSelectResult.java
diff --git a/dinky-common/src/main/java/org/dinky/remote/ServerExecutorService.java b/dinky-common/src/main/java/org/dinky/remote/ServerExecutorService.java new file mode 100644 index 0000000000..069d0ed47d
--- /dev/null
+++ b/dinky-common/src/main/java/org/dinky/remote/ServerExecutorService.java
@@ -0,0 +1,150 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * + */ + +package org.dinky.remote; + +import org.dinky.cluster.FlinkClusterInfo; +import org.dinky.data.enums.JobStatus; +import org.dinky.data.model.Catalog; +import org.dinky.data.model.CheckPointReadTable; +import org.dinky.data.model.Column; +import org.dinky.data.model.ResourcesVO; +import org.dinky.data.model.Schema; +import org.dinky.data.model.SystemConfiguration; +import org.dinky.data.model.Table; +import org.dinky.data.result.ExplainResult; +import org.dinky.data.result.IResult; +import org.dinky.explainer.lineage.LineageResult; +import org.dinky.function.data.model.UDF; +import org.dinky.function.data.model.UDFPath; +import org.dinky.gateway.config.GatewayConfig; +import org.dinky.gateway.enums.SavePointType; +import org.dinky.gateway.result.GatewayResult; +import org.dinky.gateway.result.SavePointResult; +import org.dinky.job.Job; +import org.dinky.job.JobConfig; +import org.dinky.job.JobResult; +import org.dinky.metadata.config.DriverConfig; + +import java.rmi.Remote; +import java.rmi.RemoteException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import com.fasterxml.jackson.databind.node.ObjectNode; + +public interface ServerExecutorService extends Remote { + void init(JobConfig config, boolean isPlanMode) throws RemoteException; + + boolean close() throws RemoteException; + + ObjectNode getJarStreamGraphJson(String statement) throws RemoteException; + + JobResult executeJarSql(String statement) throws RemoteException; + + JobResult executeSql(String statement) throws RemoteException; + + IResult executeDDL(String statement) throws RemoteException; + + ExplainResult explainSql(String statement) throws RemoteException; + + ObjectNode getStreamGraph(String statement) throws RemoteException; + + String getJobPlanJson(String statement) throws RemoteException; + + boolean cancelNormal(String jobId) throws RemoteException; + + SavePointResult savepoint(String jobId, SavePointType savePointType, String savePoint, boolean isUseRestAPI) + throws RemoteException; + + String exportSql(String sql) throws RemoteException; + + Job getJob() throws RemoteException; + + void prepare(String statement) throws RemoteException; + + List getPythonUdfList(String udfFile) throws RemoteException; + + JobStatus getJobStatus(GatewayConfig gatewayConfig, String appId) throws RemoteException; + + void onJobGatewayFinishCallback(JobConfig jobConfig, String status) throws RemoteException; + + List getUdfClassNameByJarPath(String path) throws RemoteException; + + void putFile(String fullName, byte[] context) throws RemoteException; + + List getFullDirectoryStructure(int rootId) throws RemoteException; + + void rename(String path, String newPath) throws RemoteException; + + String getFileContent(String path) throws RemoteException; + + void updateGitPool(Map newPool) throws RemoteException; + + UDFPath initUDF(List udfClassList, Integer missionId) throws RemoteException; + + LineageResult getColumnLineageByLogicalPlan(String statement) throws RemoteException; + + LineageResult getSqlLineageByOne(String statement, String type) throws RemoteException; + + LineageResult getSqlLineage(String statement, String mysql, DriverConfig> driverConfig) + throws RemoteException; + + List getCatalog() throws RemoteException; + + void setSchemaInfo(String catalogName, String database, Schema schema, List
tables) throws RemoteException;
+ List getColumnList(String catalogName, String database, String tableName) throws RemoteException;
+ Map> readCheckpoint(String path, String operatorId) throws RemoteException;
+ byte[] readFile(String path) throws RemoteException;
+ Map> buildJar(List udfCodes) throws RemoteException;
+ void buildRowPermission(ConcurrentHashMap permission) throws RemoteException;
+ List getPrintTables(String statement) throws RemoteException;
+ FlinkClusterInfo testFlinkJobManagerIP(String hosts, String host) throws RemoteException;
+ void killCluster(GatewayConfig gatewayConfig) throws RemoteException;
+ GatewayResult deployCluster(GatewayConfig gatewayConfig) throws RemoteException;
+ void addOrUpdate(UDF udf) throws RemoteException;
+ void removeUdfCodePool(String className) throws RemoteException;
+ String templateParse(String dialect, String templateCode, String className) throws RemoteException;
+ void registerPool(List collect) throws RemoteException;
+ void initResourceManager(SystemConfiguration systemConfiguration) throws RemoteException;
+ String getPyUDFAttr(String statement) throws RemoteException;
+ String getScalaFullClassName(String statement) throws RemoteException;
+ String getLatestJobManageHost(String appId, String oldJobManagerHost, GatewayConfig gatewayConfig) + throws RemoteException;
+ List getCustomStaticUdfs() throws RemoteException;
+}
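ServerExecutorService extends java.rmi.Remote and every method throws RemoteException, which is why Job, JobConfig and JobResult become Serializable above: they travel between the admin process and a worker as RMI arguments and return values. A rough sketch of both ends, with the implementation class, registry port and binding name all assumed for illustration (none of them appear in this diff) and exception handling omitted:

    import java.rmi.registry.LocateRegistry;
    import java.rmi.registry.Registry;
    import java.rmi.server.UnicastRemoteObject;

    // Worker side: export an implementation and publish it in an RMI registry.
    ServerExecutorService impl = new ServerExecutorServiceImpl(); // hypothetical implementation class
    ServerExecutorService stub = (ServerExecutorService) UnicastRemoteObject.exportObject(impl, 0);
    Registry registry = LocateRegistry.createRegistry(1099);      // assumed port
    registry.rebind("DinkyServerExecutor", stub);                 // assumed binding name

    // Admin side: look up the stub and drive a job remotely.
    ServerExecutorService remote = (ServerExecutorService)
            LocateRegistry.getRegistry("localhost", 1099).lookup("DinkyServerExecutor");
    remote.init(jobConfig, false);                    // any Serializable JobConfig, e.g. built as above
    JobResult result = remote.executeSql("SELECT 1"); // illustrative statement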
diff --git a/dinky-core/pom.xml b/dinky-core/pom.xml index 819603f2d5..15b90c8501 100644
--- a/dinky-core/pom.xml
+++ b/dinky-core/pom.xml
@@ -37,6 +37,15 @@ groovy 3.0.9 -->
+ <dependency>
+     <groupId>org.reflections</groupId>
+     <artifactId>reflections</artifactId>
+ </dependency>
+ <dependency>
+     <groupId>org.apache.commons</groupId>
+     <artifactId>commons-math3</artifactId>
+     <version>3.6</version>
+ </dependency>
org.dinky dinky-common
@@ -203,10 +212,6 @@ junit-jupiter test
- <dependency>
-     <groupId>org.reflections</groupId>
-     <artifactId>reflections</artifactId>
- </dependency>
org.slf4j slf4j-log4j12
diff --git a/dinky-core/src/main/java/org/dinky/executor/AbstractExecutor.java b/dinky-core/src/main/java/org/dinky/executor/AbstractExecutor.java new file mode 100644 index 0000000000..13374b9feb
--- /dev/null
+++ b/dinky-core/src/main/java/org/dinky/executor/AbstractExecutor.java
@@ -0,0 +1,399 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.dinky.executor;
+import org.dinky.assertion.Asserts; +import org.dinky.classloader.DinkyClassLoader; +import org.dinky.context.CustomTableEnvironmentContext; +import org.dinky.context.FlinkUdfPathContextHolder; +import org.dinky.data.model.LineageRel; +import org.dinky.data.result.SqlExplainResult; +import org.dinky.interceptor.FlinkInterceptor; +import org.dinky.interceptor.FlinkInterceptorResult; +import org.dinky.job.JobParam; +import org.dinky.job.StatementParam; +import org.dinky.trans.dml.ExecuteJarOperation; +import org.dinky.utils.FlinkStreamEnvironmentUtil; +import org.dinky.utils.KerberosUtil; +import org.dinky.utils.URLUtils;
+import org.apache.flink.api.common.ExecutionConfig; +import org.apache.flink.api.common.JobExecutionResult; +import org.apache.flink.api.dag.Pipeline; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.PipelineOptions; +import org.apache.flink.core.execution.JobClient; +import org.apache.flink.core.fs.FileSystem; +import org.apache.flink.python.PythonOptions; +import org.apache.flink.runtime.jobgraph.JobGraph; +import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator; +import org.apache.flink.runtime.rest.messages.JobPlanInfo; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.streaming.api.graph.JSONGenerator; +import org.apache.flink.streaming.api.graph.StreamGraph; +import org.apache.flink.table.api.ExplainDetail; +import org.apache.flink.table.api.StatementSet; +import org.apache.flink.table.api.TableConfig; +import org.apache.flink.table.api.TableResult;
+import java.io.File; +import java.net.URL; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors;
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory;
+import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode;
+import cn.hutool.core.collection.CollUtil; +import cn.hutool.core.util.ReflectUtil; +import cn.hutool.core.util.URLUtil; +import lombok.extern.slf4j.Slf4j;
+/** + * Executor + * + * @since 2021/11/17 + */
+@Slf4j
+public abstract class AbstractExecutor implements Executor {
+ private static final Logger logger = LoggerFactory.getLogger(AbstractExecutor.class);
+ // Flink stream execution environment; batch mode also uses it.
+ protected StreamExecutionEnvironment environment;
+ // Dinky table environment.
+ protected CustomTableEnvironment tableEnvironment;
+ // The config of Dinky executor.
+ protected ExecutorConfig executorConfig; + + protected DinkyClassLoader dinkyClassLoader = DinkyClassLoader.build(); + + // Flink configuration, such as set rest.port = 8086 + protected Map setConfig = new HashMap<>(); + + // Dinky variable manager + protected VariableManager variableManager = new VariableManager(); + + // return dinkyClassLoader + @Override + public DinkyClassLoader getDinkyClassLoader() { + return dinkyClassLoader; + } + + @Override + public VariableManager getVariableManager() { + return variableManager; + } + + @Override + public boolean isUseSqlFragment() { + return executorConfig.isUseSqlFragment(); + } + + @Override + public ExecutionConfig getExecutionConfig() { + return environment.getConfig(); + } + + @Override + public StreamExecutionEnvironment getStreamExecutionEnvironment() { + return environment; + } + + @Override + public void setStreamExecutionEnvironment(StreamExecutionEnvironment environment) { + this.environment = environment; + } + + @Override + public CustomTableEnvironment getCustomTableEnvironment() { + return tableEnvironment; + } + + @Override + public ExecutorConfig getExecutorConfig() { + return executorConfig; + } + + @Override + public Map getSetConfig() { + return setConfig; + } + + @Override + public TableConfig getTableConfig() { + return tableEnvironment.getConfig(); + } + + @Override + public String getTimeZone() { + return getTableConfig().getLocalTimeZone().getId(); + } + + private void initClassloader(DinkyClassLoader classLoader) { + if (classLoader != null) { + try { + StreamExecutionEnvironment env = this.environment; + // Fix the Classloader in the env above to appClassLoader, causing ckp to fail to compile + ReflectUtil.setFieldValue(env, "userClassloader", classLoader); + env.configure(env.getConfiguration(), classLoader); + } catch (Throwable e) { + log.warn( + "The version of flink does not have a Classloader field and the classloader cannot be set.", e); + } + } + } + + protected void init() { + initClassloader(getDinkyClassLoader()); + if (executorConfig.isValidParallelism()) { + environment.setParallelism(executorConfig.getParallelism()); + } + + tableEnvironment = createCustomTableEnvironment(getDinkyClassLoader()); + CustomTableEnvironmentContext.set(tableEnvironment); + + Configuration configuration = tableEnvironment.getConfig().getConfiguration(); + if (executorConfig.isValidJobName()) { + configuration.setString(PipelineOptions.NAME.key(), executorConfig.getJobName()); + setConfig.put(PipelineOptions.NAME.key(), executorConfig.getJobName()); + } + if (executorConfig.isValidConfig()) { + for (Map.Entry entry : executorConfig.getConfig().entrySet()) { + configuration.setString(entry.getKey(), entry.getValue()); + } + } + if (executorConfig.isValidVariables()) { + variableManager.registerVariable(executorConfig.getVariables()); + } + } + + abstract CustomTableEnvironment createCustomTableEnvironment(ClassLoader classLoader); + + @Override + public String pretreatStatement(String statement) { + return FlinkInterceptor.pretreatStatement(this, statement); + } + + private FlinkInterceptorResult pretreatExecute(String statement) { + return FlinkInterceptor.build(this, statement); + } + + @Override + public JobExecutionResult execute(String jobName) throws Exception { + return environment.execute(jobName); + } + + @Override + public JobClient executeAsync(String jobName) throws Exception { + return environment.executeAsync(jobName); + } + + @Override + public void initUDF(String... 
udfFilePath) { + List jarFiles = DinkyClassLoader.getJarFiles(udfFilePath, null); + getDinkyClassLoader().addURLs(jarFiles); + } + + @Override + public void initPyUDF(String executable, String... udfPyFilePath) { + if (udfPyFilePath == null || udfPyFilePath.length == 0) { + return; + } + + Configuration configuration = tableEnvironment.getConfig().getConfiguration(); + configuration.setString(PythonOptions.PYTHON_FILES, String.join(",", udfPyFilePath)); + configuration.setString(PythonOptions.PYTHON_CLIENT_EXECUTABLE, executable); + } + + private void addJar(String... jarPath) { + Configuration configuration = tableEnvironment.getRootConfiguration(); + List jars = configuration.get(PipelineOptions.JARS); + if (jars == null) { + tableEnvironment.addConfiguration(PipelineOptions.JARS, CollUtil.newArrayList(jarPath)); + } else { + CollUtil.addAll(jars, jarPath); + } + } + + @Override + public void addJar(File... jarPath) { + addJar(Arrays.stream(jarPath).map(URLUtil::getURL).map(URL::toString).toArray(String[]::new)); + } + + @Override + public TableResult executeSql(String statement) { + statement = pretreatStatement(statement); + FlinkInterceptorResult flinkInterceptorResult = pretreatExecute(statement); + if (Asserts.isNotNull(flinkInterceptorResult.getTableResult())) { + return flinkInterceptorResult.getTableResult(); + } + + if (flinkInterceptorResult.isNoExecute()) { + return CustomTableResultImpl.TABLE_RESULT_OK; + } + + KerberosUtil.authenticate(setConfig); + return tableEnvironment.executeSql(statement); + } + + @Override + public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) { + statement = pretreatStatement(statement); + if (Asserts.isNotNullString(statement) && !pretreatExecute(statement).isNoExecute()) { + return tableEnvironment.explainSqlRecord(statement, extraDetails); + } + return null; + } + + @Override + public String getJarStreamingPlanStringJson(String parameter) { + List allFileByAdd = getAllFileSet(); + Pipeline streamGraph = new ExecuteJarOperation(parameter).explain(getCustomTableEnvironment(), allFileByAdd); + return FlinkStreamEnvironmentUtil.getStreamingPlanAsJSON(streamGraph); + } + + @Override + public ObjectNode getStreamGraph(List statements) { + StreamGraph streamGraph = tableEnvironment.getStreamGraphFromInserts(statements); + return getStreamGraphJsonNode(streamGraph); + } + + @Override + public List getAllFileSet() { + return CollUtil.isEmpty(getUdfPathContextHolder().getAllFileSet()) + ? 
Collections.emptyList() : Arrays.asList(URLUtils.getURLs( getUdfPathContextHolder().getAllFileSet().toArray(new File[0]))); + }
+ @Override + public FlinkUdfPathContextHolder getUdfPathContextHolder() { + return getDinkyClassLoader().getUdfPathContextHolder(); + }
+ private ObjectNode getStreamGraphJsonNode(StreamGraph streamGraph) { + JSONGenerator jsonGenerator = new JSONGenerator(streamGraph); + String json = jsonGenerator.getJSON(); + ObjectMapper mapper = new ObjectMapper(); + ObjectNode objectNode = mapper.createObjectNode(); + try { + objectNode = (ObjectNode) mapper.readTree(json); + } catch (JsonProcessingException e) { + logger.error("Get stream graph json node error.", e); + } + + return objectNode; + }
+ @Override + public StreamGraph getStreamGraph() { + return environment.getStreamGraph(); + }
+ @Override + public ObjectNode getStreamGraphFromDataStream(List statements) { + statements.forEach(this::executeSql); + return getStreamGraphJsonNode(getStreamGraph()); + }
+ @Override + public JobPlanInfo getJobPlanInfo(List statements) { + return tableEnvironment.getJobPlanInfo(statements); + }
+ @Override + public JobPlanInfo getJobPlanInfoFromDataStream(List statements) { + statements.forEach(this::executeSql); + StreamGraph streamGraph = getStreamGraph(); + return new JobPlanInfo(JsonPlanGenerator.generatePlan(streamGraph.getJobGraph())); + }
+ @Override + public JobGraph getJobGraphFromInserts(List statements) { + return tableEnvironment.getJobGraphFromInserts(statements); + }
+ @Override + public TableResult executeStatementSet(List statements) { + StatementSet statementSet = tableEnvironment.createStatementSet(); + statements.forEach(statementSet::addInsertSql); + return statementSet.execute(); + }
+ @Override + public String explainStatementSet(List statements) { + StatementSet statementSet = tableEnvironment.createStatementSet(); + statements.forEach(statementSet::addInsertSql); + return statementSet.explain(); + }
+ @Override + public JobPlanInfo getJobPlanInfo(JobParam jobParam) { + jobParam.getDdl().forEach(statementParam -> executeSql(statementParam.getValue())); + + if (!jobParam.getTrans().isEmpty()) { + return getJobPlanInfo(jobParam.getTransStatement()); + } + + if (!jobParam.getExecute().isEmpty()) { + List dataStreamPlans = + jobParam.getExecute().stream().map(StatementParam::getValue).collect(Collectors.toList()); + return getJobPlanInfoFromDataStream(dataStreamPlans); + } + throw new RuntimeException("Creating job plan fails because this job doesn't contain an insert statement."); + }
+ @Override + public String getJobPlanJson(JobParam jobParam) { + return getJobPlanInfo(jobParam).getJsonPlan(); + }
+ @Override + public List getLineage(String statement) { + return tableEnvironment.getLineage(statement); + }
+ @Override + public void initializeFileSystem() { + Configuration combinationConfig = getCombinationConfig(); + FileSystem.initialize(combinationConfig, null); + }
+ private Configuration getCombinationConfig() { + CustomTableEnvironment cte = getCustomTableEnvironment(); + Configuration rootConfig = cte.getRootConfiguration(); + Configuration config = cte.getConfig().getConfiguration(); + Configuration combinationConfig = new Configuration(); + combinationConfig.addAll(rootConfig); + combinationConfig.addAll(config); + return combinationConfig; + }
+}
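AbstractExecutor is a template: a subclass only supplies the StreamExecutionEnvironment and the version-specific table environment, then calls init() to wire the classloader, parallelism, job name, config entries and variables. The concrete executors below all follow the same shape; a minimal sketch (the stubbed createCustomTableEnvironment stands in for the real Flink-version-specific factory):

    package org.dinky.executor; // same package: createCustomTableEnvironment is package-private

    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class MyLocalExecutor extends AbstractExecutor {

        public MyLocalExecutor(ExecutorConfig executorConfig) {
            this.executorConfig = executorConfig;
            this.environment = StreamExecutionEnvironment.createLocalEnvironment();
            init(); // wires classloader, parallelism, job name, config and variables
        }

        @Override
        CustomTableEnvironment createCustomTableEnvironment(ClassLoader classLoader) {
            // Each dinky-flink module supplies its own factory here; stubbed for the sketch.
            throw new UnsupportedOperationException("supply a version-specific CustomTableEnvironment");
        }
    }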
diff --git a/dinky-core/src/main/java/org/dinky/executor/AppBatchExecutor.java b/dinky-core/src/main/java/org/dinky/executor/AppBatchExecutor.java index 414b4fd961..ea32c78134 100644
--- a/dinky-core/src/main/java/org/dinky/executor/AppBatchExecutor.java
+++ b/dinky-core/src/main/java/org/dinky/executor/AppBatchExecutor.java
@@ -19,8 +19,6 @@ package org.dinky.executor; -import org.dinky.classloader.DinkyClassLoader; - import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
@@ -29,9 +27,9 @@ * * @since 2022/2/7 22:14 */ -public class AppBatchExecutor extends Executor { +public class AppBatchExecutor extends AbstractExecutor { - public AppBatchExecutor(ExecutorConfig executorConfig, DinkyClassLoader classLoader) { + public AppBatchExecutor(ExecutorConfig executorConfig) { this.executorConfig = executorConfig; if (executorConfig.isValidConfig()) { Configuration configuration = Configuration.fromMap(executorConfig.getConfig()); @@ -39,7 +37,7 @@ public AppBatchExecutor(ExecutorConfig executorConfig, DinkyClassLoa } else { this.environment = StreamExecutionEnvironment.getExecutionEnvironment(); } - init(classLoader); + init(); } @Override
diff --git a/dinky-core/src/main/java/org/dinky/executor/AppStreamExecutor.java b/dinky-core/src/main/java/org/dinky/executor/AppStreamExecutor.java index d7843e61db..573f7eb4fe 100644
--- a/dinky-core/src/main/java/org/dinky/executor/AppStreamExecutor.java
+++ b/dinky-core/src/main/java/org/dinky/executor/AppStreamExecutor.java
@@ -19,8 +19,6 @@ package org.dinky.executor; -import org.dinky.classloader.DinkyClassLoader; - import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
@@ -29,9 +27,9 @@ * * @since 2021/11/18 */ -public class AppStreamExecutor extends Executor { +public class AppStreamExecutor extends AbstractExecutor { - public AppStreamExecutor(ExecutorConfig executorConfig, DinkyClassLoader classLoader) { + public AppStreamExecutor(ExecutorConfig executorConfig) { this.executorConfig = executorConfig; if (executorConfig.isValidConfig()) { Configuration configuration = Configuration.fromMap(executorConfig.getConfig()); @@ -39,7 +37,7 @@ public AppStreamExecutor(ExecutorConfig executorConfig, DinkyClassLo } else { this.environment = StreamExecutionEnvironment.getExecutionEnvironment(); } - init(classLoader); + init(); } @Override
diff --git a/dinky-core/src/main/java/org/dinky/executor/Executor.java b/dinky-core/src/main/java/org/dinky/executor/Executor.java index e4febf0782..d376a8b803 100644
--- a/dinky-core/src/main/java/org/dinky/executor/Executor.java
+++ b/dinky-core/src/main/java/org/dinky/executor/Executor.java
@@ -19,288 +19,97 @@ package org.dinky.executor; -import org.dinky.assertion.Asserts; import org.dinky.classloader.DinkyClassLoader; -import org.dinky.context.CustomTableEnvironmentContext; +import org.dinky.context.FlinkUdfPathContextHolder; import org.dinky.data.model.LineageRel; import org.dinky.data.result.SqlExplainResult; -import org.dinky.interceptor.FlinkInterceptor; -import org.dinky.interceptor.FlinkInterceptorResult; -import org.dinky.utils.KerberosUtil; +import org.dinky.job.JobParam; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.JobExecutionResult; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.configuration.PipelineOptions; import org.apache.flink.core.execution.JobClient; -import org.apache.flink.python.PythonOptions; import org.apache.flink.runtime.jobgraph.JobGraph; -import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator; import
org.apache.flink.runtime.rest.messages.JobPlanInfo; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.streaming.api.graph.JSONGenerator; import org.apache.flink.streaming.api.graph.StreamGraph; import org.apache.flink.table.api.ExplainDetail; -import org.apache.flink.table.api.StatementSet; import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.TableResult; import java.io.File; import java.net.URL; -import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; -import cn.hutool.core.collection.CollUtil; -import cn.hutool.core.util.ReflectUtil; -import cn.hutool.core.util.URLUtil; -import lombok.extern.slf4j.Slf4j; +public interface Executor { + // return dinkyClassLoader + DinkyClassLoader getDinkyClassLoader(); -/** - * Executor - * - * @since 2021/11/17 - */ -@Slf4j -public abstract class Executor { + VariableManager getVariableManager(); - private static final Logger logger = LoggerFactory.getLogger(Executor.class); + boolean isUseSqlFragment(); - // Flink stream execution environment, batch model also use it. - protected StreamExecutionEnvironment environment; + ExecutionConfig getExecutionConfig(); - // Dinky table environment. - protected CustomTableEnvironment tableEnvironment; + StreamExecutionEnvironment getStreamExecutionEnvironment(); - // The config of Dinky executor. - protected ExecutorConfig executorConfig; + void setStreamExecutionEnvironment(StreamExecutionEnvironment environment); - protected DinkyClassLoader dinkyClassLoader; + CustomTableEnvironment getCustomTableEnvironment(); - // Flink configuration, such as set rest.port = 8086 - protected Map setConfig = new HashMap<>(); + ExecutorConfig getExecutorConfig(); - // Dinky variable manager - protected VariableManager variableManager = new VariableManager(); + Map getSetConfig(); - // return dinkyClassLoader - public DinkyClassLoader getDinkyClassLoader() { - return dinkyClassLoader; - } - - public VariableManager getVariableManager() { - return variableManager; - } - - public boolean isUseSqlFragment() { - return executorConfig.isUseSqlFragment(); - } - - public ExecutionConfig getExecutionConfig() { - return environment.getConfig(); - } - - public StreamExecutionEnvironment getStreamExecutionEnvironment() { - return environment; - } - - public void setStreamExecutionEnvironment(StreamExecutionEnvironment environment) { - this.environment = environment; - } - - public CustomTableEnvironment getCustomTableEnvironment() { - return tableEnvironment; - } - - public ExecutorConfig getExecutorConfig() { - return executorConfig; - } - - public Map getSetConfig() { - return setConfig; - } - - public TableConfig getTableConfig() { - return tableEnvironment.getConfig(); - } - - public String getTimeZone() { - return getTableConfig().getLocalTimeZone().getId(); - } - - private void initClassloader(DinkyClassLoader classLoader) { - if (classLoader != null) { - try { - StreamExecutionEnvironment env = this.environment; - // Fix the Classloader in the env above to appClassLoader, causing ckp to fail to compile - ReflectUtil.setFieldValue(env, "userClassloader", classLoader); - env.configure(env.getConfiguration(), classLoader); - } catch (Throwable e) { - log.warn( - "The version of flink does 
not have a Classloader field and the classloader cannot be set.", e); - } - } - } - - protected void init(DinkyClassLoader classLoader) { - initClassloader(classLoader); - this.dinkyClassLoader = classLoader; - Thread.currentThread().setContextClassLoader(classLoader); - if (executorConfig.isValidParallelism()) { - environment.setParallelism(executorConfig.getParallelism()); - } - - tableEnvironment = createCustomTableEnvironment(classLoader); - CustomTableEnvironmentContext.set(tableEnvironment); - - Configuration configuration = tableEnvironment.getConfig().getConfiguration(); - if (executorConfig.isValidJobName()) { - configuration.setString(PipelineOptions.NAME.key(), executorConfig.getJobName()); - setConfig.put(PipelineOptions.NAME.key(), executorConfig.getJobName()); - } - if (executorConfig.isValidConfig()) { - for (Map.Entry entry : executorConfig.getConfig().entrySet()) { - configuration.setString(entry.getKey(), entry.getValue()); - } - } - if (executorConfig.isValidVariables()) { - variableManager.registerVariable(executorConfig.getVariables()); - } - } - - abstract CustomTableEnvironment createCustomTableEnvironment(ClassLoader classLoader); - - public String pretreatStatement(String statement) { - return FlinkInterceptor.pretreatStatement(this, statement); - } - - private FlinkInterceptorResult pretreatExecute(String statement) { - return FlinkInterceptor.build(this, statement); - } - - public JobExecutionResult execute(String jobName) throws Exception { - return environment.execute(jobName); - } - - public JobClient executeAsync(String jobName) throws Exception { - return environment.executeAsync(jobName); - } - - public TableResult executeSql(String statement) { - statement = pretreatStatement(statement); - FlinkInterceptorResult flinkInterceptorResult = pretreatExecute(statement); - if (Asserts.isNotNull(flinkInterceptorResult.getTableResult())) { - return flinkInterceptorResult.getTableResult(); - } - - if (flinkInterceptorResult.isNoExecute()) { - return CustomTableResultImpl.TABLE_RESULT_OK; - } - - KerberosUtil.authenticate(setConfig); - return tableEnvironment.executeSql(statement); - } - - public void initUDF(String... udfFilePath) { - List jarFiles = DinkyClassLoader.getJarFiles(udfFilePath, null); - dinkyClassLoader.addURLs(jarFiles); - } - - public void initPyUDF(String executable, String... udfPyFilePath) { - if (udfPyFilePath == null || udfPyFilePath.length == 0) { - return; - } - - Configuration configuration = tableEnvironment.getConfig().getConfiguration(); - configuration.setString(PythonOptions.PYTHON_FILES, String.join(",", udfPyFilePath)); - configuration.setString(PythonOptions.PYTHON_CLIENT_EXECUTABLE, executable); - } - - private void addJar(String... jarPath) { - Configuration configuration = tableEnvironment.getRootConfiguration(); - List jars = configuration.get(PipelineOptions.JARS); - if (jars == null) { - tableEnvironment.addConfiguration(PipelineOptions.JARS, CollUtil.newArrayList(jarPath)); - } else { - CollUtil.addAll(jars, jarPath); - } - } - - public void addJar(File... jarPath) { - addJar(Arrays.stream(jarPath).map(URLUtil::getURL).map(URL::toString).toArray(String[]::new)); - } - - public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... 
extraDetails) { - statement = pretreatStatement(statement); - if (Asserts.isNotNullString(statement) && !pretreatExecute(statement).isNoExecute()) { - return tableEnvironment.explainSqlRecord(statement, extraDetails); - } - return null; - } - - public ObjectNode getStreamGraph(List statements) { - StreamGraph streamGraph = tableEnvironment.getStreamGraphFromInserts(statements); - return getStreamGraphJsonNode(streamGraph); - } - - private ObjectNode getStreamGraphJsonNode(StreamGraph streamGraph) { - JSONGenerator jsonGenerator = new JSONGenerator(streamGraph); - String json = jsonGenerator.getJSON(); - ObjectMapper mapper = new ObjectMapper(); - ObjectNode objectNode = mapper.createObjectNode(); - try { - objectNode = (ObjectNode) mapper.readTree(json); - } catch (JsonProcessingException e) { - logger.error("Get stream graph json node error.", e); - } - - return objectNode; - } - - public StreamGraph getStreamGraph() { - return environment.getStreamGraph(); - } - - public ObjectNode getStreamGraphFromDataStream(List statements) { - statements.forEach(this::executeSql); - return getStreamGraphJsonNode(getStreamGraph()); - } - - public JobPlanInfo getJobPlanInfo(List statements) { - return tableEnvironment.getJobPlanInfo(statements); - } - - public JobPlanInfo getJobPlanInfoFromDataStream(List statements) { - statements.forEach(this::executeSql); - StreamGraph streamGraph = getStreamGraph(); - return new JobPlanInfo(JsonPlanGenerator.generatePlan(streamGraph.getJobGraph())); - } - - public JobGraph getJobGraphFromInserts(List statements) { - return tableEnvironment.getJobGraphFromInserts(statements); - } - - public TableResult executeStatementSet(List statements) { - StatementSet statementSet = tableEnvironment.createStatementSet(); - statements.forEach(statementSet::addInsertSql); - return statementSet.execute(); - } - - public String explainStatementSet(List statements) { - StatementSet statementSet = tableEnvironment.createStatementSet(); - statements.forEach(statementSet::addInsertSql); - return statementSet.explain(); - } - - public List getLineage(String statement) { - return tableEnvironment.getLineage(statement); - } + TableConfig getTableConfig(); + + String getTimeZone(); + + String pretreatStatement(String statement); + + JobExecutionResult execute(String jobName) throws Exception; + + JobClient executeAsync(String jobName) throws Exception; + + TableResult executeSql(String statement); + + void initUDF(String... udfFilePath); + + void initPyUDF(String executable, String... udfPyFilePath); + + void addJar(File... jarPath); + + SqlExplainResult explainSqlRecord(String statement, ExplainDetail... 
extraDetails); + + String getJarStreamingPlanStringJson(String parameter); + + ObjectNode getStreamGraph(List statements); + + List getAllFileSet(); + + FlinkUdfPathContextHolder getUdfPathContextHolder(); + + StreamGraph getStreamGraph(); + + ObjectNode getStreamGraphFromDataStream(List statements); + + JobPlanInfo getJobPlanInfo(List statements); + + JobPlanInfo getJobPlanInfoFromDataStream(List statements); + + JobGraph getJobGraphFromInserts(List statements); + + TableResult executeStatementSet(List statements); + + String explainStatementSet(List statements); + + JobPlanInfo getJobPlanInfo(JobParam jobParam); + + String getJobPlanJson(JobParam jobParam); + + void initializeFileSystem(); + + List getLineage(String statement); } diff --git a/dinky-core/src/main/java/org/dinky/executor/ExecutorContext.java b/dinky-core/src/main/java/org/dinky/executor/ExecutorContext.java new file mode 100644 index 0000000000..00682fe64c --- /dev/null +++ b/dinky-core/src/main/java/org/dinky/executor/ExecutorContext.java @@ -0,0 +1,24 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *
+ */
+package org.dinky.executor;
+public class ExecutorContext {
+ Executor executor;
+}
diff --git a/dinky-core/src/main/java/org/dinky/executor/ExecutorFactory.java b/dinky-core/src/main/java/org/dinky/executor/ExecutorFactory.java index 237b462996..a6d58d85fd 100644
--- a/dinky-core/src/main/java/org/dinky/executor/ExecutorFactory.java
+++ b/dinky-core/src/main/java/org/dinky/executor/ExecutorFactory.java
@@ -19,10 +19,6 @@ package org.dinky.executor; -import org.dinky.classloader.DinkyClassLoader; - -import java.lang.ref.WeakReference; -
@@ -34,38 +30,38 @@ public final class ExecutorFactory { private ExecutorFactory() {}
public static Executor getDefaultExecutor() { - return new LocalStreamExecutor(ExecutorConfig.DEFAULT, new WeakReference<>(DinkyClassLoader.build()).get()); + return new LocalStreamExecutor(ExecutorConfig.DEFAULT); }
- public static Executor buildExecutor(ExecutorConfig executorConfig, DinkyClassLoader classLoader) { + public static Executor buildExecutor(ExecutorConfig executorConfig) { if (executorConfig.isRemote()) { - return buildRemoteExecutor(executorConfig, classLoader); + return buildRemoteExecutor(executorConfig); } else { - return buildLocalExecutor(executorConfig, classLoader); + return buildLocalExecutor(executorConfig); } }
- public static Executor buildLocalExecutor(ExecutorConfig executorConfig, DinkyClassLoader classLoader) { + public static Executor buildLocalExecutor(ExecutorConfig executorConfig) { if (executorConfig.isUseBatchModel()) { - return new LocalBatchExecutor(executorConfig, classLoader); + return new LocalBatchExecutor(executorConfig); } else { - return new LocalStreamExecutor(executorConfig, classLoader); + return new LocalStreamExecutor(executorConfig); } }
- public static Executor buildAppStreamExecutor(ExecutorConfig executorConfig, DinkyClassLoader classLoader) { + public static Executor buildAppStreamExecutor(ExecutorConfig executorConfig) { if (executorConfig.isUseBatchModel()) { - return new AppBatchExecutor(executorConfig, classLoader); + return new AppBatchExecutor(executorConfig); } else { - return new AppStreamExecutor(executorConfig, classLoader); + return new AppStreamExecutor(executorConfig); } }
- public static Executor buildRemoteExecutor(ExecutorConfig executorConfig, DinkyClassLoader classLoader) { + public static Executor buildRemoteExecutor(ExecutorConfig executorConfig) { if (executorConfig.isUseBatchModel()) { - return new RemoteBatchExecutor(executorConfig, classLoader); + return new RemoteBatchExecutor(executorConfig); } else { - return new RemoteStreamExecutor(executorConfig, classLoader); + return new RemoteStreamExecutor(executorConfig); } } }
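Call sites shrink accordingly: the DinkyClassLoader argument disappears everywhere because AbstractExecutor now builds its own. For example:

    Executor local = ExecutorFactory.buildLocalExecutor(ExecutorConfig.DEFAULT);
    // or let the factory choose local vs. remote from the config:
    Executor chosen = ExecutorFactory.buildExecutor(ExecutorConfig.DEFAULT);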
diff --git a/dinky-core/src/main/java/org/dinky/executor/LocalBatchExecutor.java b/dinky-core/src/main/java/org/dinky/executor/LocalBatchExecutor.java index 3a56e8c413..ed658fb80d 100644
--- a/dinky-core/src/main/java/org/dinky/executor/LocalBatchExecutor.java
+++ b/dinky-core/src/main/java/org/dinky/executor/LocalBatchExecutor.java
@@ -19,8 +19,6 @@ package org.dinky.executor; -import org.dinky.classloader.DinkyClassLoader; - import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.PipelineOptions; import org.apache.flink.configuration.RestOptions;
@@ -36,9 +34,9 @@ * * @since 2022/2/4 0:04 */ -public class LocalBatchExecutor extends Executor { +public class LocalBatchExecutor extends AbstractExecutor { - public LocalBatchExecutor(ExecutorConfig executorConfig, DinkyClassLoader classLoader) { + public LocalBatchExecutor(ExecutorConfig executorConfig) { this.executorConfig = executorConfig; if (executorConfig.isValidJarFiles()) { executorConfig @@ -58,7 +56,7 @@ public LocalBatchExecutor(ExecutorConfig executorConfig, DinkyClassL } else { this.environment = StreamExecutionEnvironment.createLocalEnvironment(); } - init(classLoader); + init(); } @Override
diff --git a/dinky-core/src/main/java/org/dinky/executor/LocalStreamExecutor.java b/dinky-core/src/main/java/org/dinky/executor/LocalStreamExecutor.java index 6948ac944a..edc1c2eb0c 100644
--- a/dinky-core/src/main/java/org/dinky/executor/LocalStreamExecutor.java
+++ b/dinky-core/src/main/java/org/dinky/executor/LocalStreamExecutor.java
@@ -20,7 +20,6 @@ package org.dinky.executor; import org.dinky.assertion.Asserts; -import org.dinky.classloader.DinkyClassLoader; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.PipelineOptions;
@@ -39,9 +38,9 @@ * * @since 2021/5/25 13:48 */ -public class LocalStreamExecutor extends Executor { +public class LocalStreamExecutor extends AbstractExecutor { - public LocalStreamExecutor(ExecutorConfig executorConfig, DinkyClassLoader classLoader) { + public LocalStreamExecutor(ExecutorConfig executorConfig) { this.executorConfig = executorConfig; if (executorConfig.isValidJarFiles()) { executorConfig @@ -64,7 +63,7 @@ public LocalStreamExecutor(ExecutorConfig executorConfig, DinkyClassLoader class } else { this.environment = StreamExecutionEnvironment.createLocalEnvironment(); } - init(classLoader); + init(); } @Override
diff --git a/dinky-core/src/main/java/org/dinky/executor/RemoteBatchExecutor.java b/dinky-core/src/main/java/org/dinky/executor/RemoteBatchExecutor.java index 2562932846..300c53ab8c 100644
--- a/dinky-core/src/main/java/org/dinky/executor/RemoteBatchExecutor.java
+++ b/dinky-core/src/main/java/org/dinky/executor/RemoteBatchExecutor.java
@@ -19,8 +19,6 @@ package org.dinky.executor; -import org.dinky.classloader.DinkyClassLoader; - import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
@@ -29,9 +27,9 @@ * * @since 2022/2/7 22:10 */ -public class RemoteBatchExecutor extends Executor { +public class RemoteBatchExecutor extends AbstractExecutor { - public RemoteBatchExecutor(ExecutorConfig executorConfig, DinkyClassLoader classLoader) { + public RemoteBatchExecutor(ExecutorConfig executorConfig) { this.executorConfig = executorConfig; if (executorConfig.isValidConfig()) { Configuration configuration = Configuration.fromMap(executorConfig.getConfig()); @@ -41,7 +39,7 @@ public RemoteBatchExecutor(ExecutorConfig executorConfig, DinkyClass this.environment = StreamExecutionEnvironment.createRemoteEnvironment( executorConfig.getHost(), executorConfig.getPort(), executorConfig.getJarFiles()); } - init(classLoader); + init(); } @Override
diff --git a/dinky-core/src/main/java/org/dinky/executor/RemoteStreamExecutor.java b/dinky-core/src/main/java/org/dinky/executor/RemoteStreamExecutor.java index 0a298af9b9..7edc6e205f 100644
--- a/dinky-core/src/main/java/org/dinky/executor/RemoteStreamExecutor.java
+++ b/dinky-core/src/main/java/org/dinky/executor/RemoteStreamExecutor.java
@@ -19,8 +19,6 @@ package org.dinky.executor; -import org.dinky.classloader.DinkyClassLoader; - import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
@@ -29,9 +27,9 @@ * * @since 2021/5/25 14:05 */ -public class
RemoteStreamExecutor extends Executor { +public class RemoteStreamExecutor extends AbstractExecutor { - public RemoteStreamExecutor(ExecutorConfig executorConfig, DinkyClassLoader classLoader) { + public RemoteStreamExecutor(ExecutorConfig executorConfig) { this.executorConfig = executorConfig; if (executorConfig.isValidConfig()) { Configuration configuration = Configuration.fromMap(executorConfig.getConfig()); @@ -41,7 +39,7 @@ public RemoteStreamExecutor(ExecutorConfig executorConfig, DinkyClassLoader clas this.environment = StreamExecutionEnvironment.createRemoteEnvironment( executorConfig.getHost(), executorConfig.getPort(), executorConfig.getJarFiles()); } - init(classLoader); + init(); } @Override diff --git a/dinky-core/src/main/java/org/dinky/explainer/Explainer.java b/dinky-core/src/main/java/org/dinky/explainer/Explainer.java index 18065b84a2..19ecf95eab 100644 --- a/dinky-core/src/main/java/org/dinky/explainer/Explainer.java +++ b/dinky-core/src/main/java/org/dinky/explainer/Explainer.java @@ -20,42 +20,32 @@ package org.dinky.explainer; import org.dinky.assertion.Asserts; -import org.dinky.data.enums.GatewayType; import org.dinky.data.model.LineageRel; import org.dinky.data.result.ExplainResult; import org.dinky.data.result.SqlExplainResult; -import org.dinky.executor.CustomTableEnvironment; import org.dinky.executor.Executor; import org.dinky.explainer.print_table.PrintStatementExplainer; import org.dinky.function.data.model.UDF; import org.dinky.function.util.UDFUtil; import org.dinky.interceptor.FlinkInterceptor; import org.dinky.job.JobConfig; -import org.dinky.job.JobManager; +import org.dinky.job.JobManagerHandler; import org.dinky.job.JobParam; import org.dinky.job.StatementParam; import org.dinky.job.builder.JobUDFBuilder; import org.dinky.parser.SqlType; import org.dinky.trans.Operations; import org.dinky.trans.ddl.CustomSetOperation; -import org.dinky.trans.dml.ExecuteJarOperation; import org.dinky.trans.parse.AddFileSqlParseStrategy; import org.dinky.trans.parse.AddJarSqlParseStrategy; import org.dinky.trans.parse.ExecuteJarParseStrategy; import org.dinky.trans.parse.SetSqlParseStrategy; import org.dinky.utils.DinkyClassLoaderUtil; -import org.dinky.utils.FlinkStreamEnvironmentUtil; import org.dinky.utils.IpUtil; import org.dinky.utils.LogUtil; import org.dinky.utils.SqlUtil; import org.dinky.utils.URLUtils; -import org.apache.flink.api.dag.Pipeline; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.core.fs.FileSystem; -import org.apache.flink.runtime.rest.messages.JobPlanInfo; - -import java.net.URL; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; @@ -86,23 +76,23 @@ public class Explainer { private Executor executor; private boolean useStatementSet; private ObjectMapper mapper = new ObjectMapper(); - private JobManager jobManager; + private JobManagerHandler jobManager; - public Explainer(Executor executor, boolean useStatementSet, JobManager jobManager) { + public Explainer(Executor executor, boolean useStatementSet, JobManagerHandler jobManager) { this.executor = executor; this.useStatementSet = useStatementSet; this.jobManager = jobManager; } - public static Explainer build(Executor executor, boolean useStatementSet, JobManager jobManager) { + public static Explainer build(Executor executor, boolean useStatementSet, JobManagerHandler jobManager) { return new Explainer(executor, useStatementSet, jobManager); } public Explainer initialize(JobConfig config, String statement) { - 
DinkyClassLoaderUtil.initClassLoader(config, jobManager.getDinkyClassLoader()); + DinkyClassLoaderUtil.initClassLoader(config, executor.getDinkyClassLoader()); String[] statements = SqlUtil.getStatements(SqlUtil.removeNote(statement)); - List udfs = parseUDFFromStatements(statements); - jobManager.setJobParam(new JobParam(udfs)); + JobParam jobParam = pretreatStatements(statements); + jobManager.setJobParam(jobParam); try { JobUDFBuilder.build(jobManager).run(); } catch (Exception e) { @@ -140,19 +130,18 @@ public JobParam pretreatStatements(String[] statements) { customSetOperation.execute(this.executor.getCustomTableEnvironment()); } else if (operationType.equals(SqlType.ADD)) { AddJarSqlParseStrategy.getAllFilePath(statement) - .forEach(t -> jobManager.getUdfPathContextHolder().addOtherPlugins(t)); + .forEach(t -> executor.getUdfPathContextHolder().addOtherPlugins(t)); (executor.getDinkyClassLoader()) .addURLs(URLUtils.getURLs( - jobManager.getUdfPathContextHolder().getOtherPluginsFiles())); + executor.getUdfPathContextHolder().getOtherPluginsFiles())); } else if (operationType.equals(SqlType.ADD_FILE)) { AddFileSqlParseStrategy.getAllFilePath(statement) - .forEach(t -> jobManager.getUdfPathContextHolder().addFile(t)); + .forEach(t -> executor.getUdfPathContextHolder().addFile(t)); (executor.getDinkyClassLoader()) .addURLs(URLUtils.getURLs( - jobManager.getUdfPathContextHolder().getFiles())); + executor.getUdfPathContextHolder().getFiles())); } else if (operationType.equals(SqlType.ADD_JAR)) { - Configuration combinationConfig = getCombinationConfig(); - FileSystem.initialize(combinationConfig, null); + executor.initializeFileSystem(); ddl.add(new StatementParam(statement, operationType)); statementList.add(statement); } else if (transSqlTypeSet.contains(operationType)) { @@ -173,7 +162,7 @@ public JobParam pretreatStatements(String[] statements) { PrintStatementExplainer.getCreateStatement(tableName, host, port), SqlType.CTAS)); } } else { - UDF udf = UDFUtil.toUDF(statement, jobManager.getDinkyClassLoader()); + UDF udf = UDFUtil.toUDF(statement, executor.getDinkyClassLoader()); if (Asserts.isNotNull(udf)) { udfList.add(udf); } @@ -184,30 +173,6 @@ public JobParam pretreatStatements(String[] statements) { return new JobParam(statementList, ddl, trans, execute, CollUtil.removeNull(udfList), parsedSql.toString()); } - private Configuration getCombinationConfig() { - CustomTableEnvironment cte = executor.getCustomTableEnvironment(); - Configuration rootConfig = cte.getRootConfiguration(); - Configuration config = cte.getConfig().getConfiguration(); - Configuration combinationConfig = new Configuration(); - combinationConfig.addAll(rootConfig); - combinationConfig.addAll(config); - return combinationConfig; - } - - public List parseUDFFromStatements(String[] statements) { - List udfList = new ArrayList<>(); - for (String statement : statements) { - if (statement.isEmpty()) { - continue; - } - UDF udf = UDFUtil.toUDF(statement, jobManager.getDinkyClassLoader()); - if (Asserts.isNotNull(udf)) { - udfList.add(udf); - } - } - return udfList; - } - public ExplainResult explainSql(String statement) { log.info("Start explain FlinkSQL..."); JobParam jobParam; @@ -311,6 +276,7 @@ public ExplainResult explainSql(String statement) { } } } + for (StatementParam item : jobParam.getExecute()) { SqlExplainResult.Builder resultBuilder = SqlExplainResult.Builder.newBuilder(); @@ -319,11 +285,7 @@ public ExplainResult explainSql(String statement) { if (Asserts.isNull(sqlExplainResult)) { sqlExplainResult = 
new SqlExplainResult(); } else if (ExecuteJarParseStrategy.INSTANCE.match(item.getValue())) { - - List allFileByAdd = jobManager.getAllFileSet(); - Pipeline pipeline = new ExecuteJarOperation(item.getValue()) - .explain(executor.getCustomTableEnvironment(), allFileByAdd); - sqlExplainResult.setExplain(FlinkStreamEnvironmentUtil.getStreamingPlanAsJSON(pipeline)); + sqlExplainResult.setExplain(executor.getJarStreamingPlanStringJson(item.getValue())); } else { executor.executeSql(item.getValue()); } @@ -372,34 +334,8 @@ public ObjectNode getStreamGraph(String statement) { return mapper.createObjectNode(); } - public JobPlanInfo getJobPlanInfo(String statement) { - JobParam jobParam = pretreatStatements(SqlUtil.getStatements(statement)); - jobParam.getDdl().forEach(statementParam -> executor.executeSql(statementParam.getValue())); - - if (!jobParam.getTrans().isEmpty()) { - return executor.getJobPlanInfo(jobParam.getTransStatement()); - } - - if (!jobParam.getExecute().isEmpty()) { - List dataStreamPlans = - jobParam.getExecute().stream().map(StatementParam::getValue).collect(Collectors.toList()); - return executor.getJobPlanInfoFromDataStream(dataStreamPlans); - } - throw new RuntimeException("Creating job plan fails because this job doesn't contain an insert statement."); - } - public List getLineage(String statement) { - JobConfig jobConfig = JobConfig.builder() - .type(GatewayType.LOCAL.getLongValue()) - .useRemote(false) - .fragment(true) - .statementSet(useStatementSet) - .parallelism(1) - .configJson(executor.getTableConfig().getConfiguration().toMap()) - .build(); - jobManager.setConfig(jobConfig); - jobManager.setExecutor(executor); - this.initialize(jobConfig, statement); + initialize(jobManager.getConfig(), statement); List lineageRelList = new ArrayList<>(); for (String item : SqlUtil.getStatements(statement)) { diff --git a/dinky-core/src/main/java/org/dinky/explainer/lineage/LineageBuilder.java b/dinky-core/src/main/java/org/dinky/explainer/lineage/LineageBuilder.java index d856a2c1ea..6d124ac11d 100644 --- a/dinky-core/src/main/java/org/dinky/explainer/lineage/LineageBuilder.java +++ b/dinky-core/src/main/java/org/dinky/explainer/lineage/LineageBuilder.java @@ -19,10 +19,11 @@ package org.dinky.explainer.lineage; +import org.dinky.data.enums.GatewayType; import org.dinky.data.model.LineageRel; -import org.dinky.executor.ExecutorFactory; import org.dinky.explainer.Explainer; -import org.dinky.job.JobManager; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManagerHandler; import java.util.ArrayList; import java.util.HashMap; @@ -37,7 +38,15 @@ public class LineageBuilder { public static LineageResult getColumnLineageByLogicalPlan(String statement) { - Explainer explainer = new Explainer(ExecutorFactory.getDefaultExecutor(), false, new JobManager()); + JobConfig jobConfig = JobConfig.builder() + .type(GatewayType.LOCAL.getLongValue()) + .useRemote(false) + .fragment(true) + .statementSet(false) + .parallelism(1) + .build(); + JobManagerHandler jobManagerHandler = JobManagerHandler.build(jobConfig, false); + Explainer explainer = new Explainer(jobManagerHandler.getExecutor(), false, jobManagerHandler); List lineageRelList = explainer.getLineage(statement); List relations = new ArrayList<>(); Map tableMap = new HashMap<>(); diff --git a/dinky-core/src/main/java/org/dinky/flink/DinkyExecutor.java b/dinky-core/src/main/java/org/dinky/flink/DinkyExecutor.java new file mode 100644 index 0000000000..2252bda04d --- /dev/null +++ 
b/dinky-core/src/main/java/org/dinky/flink/DinkyExecutor.java @@ -0,0 +1,24 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.flink; + +import java.rmi.Remote; + +public interface DinkyExecutor extends Remote {} diff --git a/dinky-core/src/main/java/org/dinky/interceptor/FlinkInterceptor.java b/dinky-core/src/main/java/org/dinky/interceptor/FlinkInterceptor.java index 5d8ddb7e74..03dc320b76 100644 --- a/dinky-core/src/main/java/org/dinky/interceptor/FlinkInterceptor.java +++ b/dinky-core/src/main/java/org/dinky/interceptor/FlinkInterceptor.java @@ -25,8 +25,6 @@ import org.dinky.trans.Operations; import org.dinky.utils.SqlUtil; -import org.apache.flink.table.api.TableResult; - /** * FlinkInterceptor * @@ -46,13 +44,10 @@ public static String pretreatStatement(Executor executor, String statement) { // return false to continue with executeSql public static FlinkInterceptorResult build(Executor executor, String statement) { - boolean noExecute = false; - TableResult tableResult = null; Operation operation = Operations.buildOperation(statement); if (Asserts.isNotNull(operation)) { - tableResult = operation.execute(executor); - noExecute = operation.noExecute(); + return FlinkInterceptorResult.build(operation.noExecute(), operation.execute(executor)); } - return FlinkInterceptorResult.build(noExecute, tableResult); + return FlinkInterceptorResult.build(false, null); } } diff --git a/dinky-core/src/main/java/org/dinky/job/ExecuteSqlException.java b/dinky-core/src/main/java/org/dinky/job/ExecuteSqlException.java new file mode 100644 index 0000000000..58b111e334 --- /dev/null +++ b/dinky-core/src/main/java/org/dinky/job/ExecuteSqlException.java @@ -0,0 +1,26 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.job; + +public class ExecuteSqlException extends Exception { + public ExecuteSqlException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/dinky-core/src/main/java/org/dinky/job/IJobManager.java b/dinky-core/src/main/java/org/dinky/job/IJobManager.java new file mode 100644 index 0000000000..cd66cf8ee1 --- /dev/null +++ b/dinky-core/src/main/java/org/dinky/job/IJobManager.java @@ -0,0 +1,55 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.job; + +import org.dinky.data.result.ExplainResult; +import org.dinky.data.result.IResult; +import org.dinky.gateway.enums.SavePointType; +import org.dinky.gateway.result.SavePointResult; + +import com.fasterxml.jackson.databind.node.ObjectNode; + +public interface IJobManager { + void init(JobConfig config, boolean isPlanMode); + + void prepare(String statement); + + boolean close(); + + ObjectNode getJarStreamGraphJson(String statement); + + JobResult executeJarSql(String statement) throws Exception; + + JobResult executeSql(String statement) throws Exception; + + IResult executeDDL(String statement); + + ExplainResult explainSql(String statement); + + ObjectNode getStreamGraph(String statement); + + String getJobPlanJson(String statement); + + boolean cancelNormal(String jobId); + + SavePointResult savepoint(String jobId, SavePointType savePointType, String savePoint, boolean isUseRestAPI); + + String exportSql(String sql); +} diff --git a/dinky-core/src/main/java/org/dinky/job/JobBuilder.java b/dinky-core/src/main/java/org/dinky/job/JobBuilder.java index c31f33bd55..864b5c8b6e 100644 --- a/dinky-core/src/main/java/org/dinky/job/JobBuilder.java +++ b/dinky-core/src/main/java/org/dinky/job/JobBuilder.java @@ -19,30 +19,7 @@ package org.dinky.job; -import org.dinky.data.enums.GatewayType; -import org.dinky.executor.Executor; +public interface JobBuilder { -public abstract class JobBuilder { - - protected JobManager jobManager; - protected JobConfig config; - protected JobParam jobParam; - protected GatewayType runMode; - protected Executor executor; - protected boolean useStatementSet; - protected boolean useGateway; - protected Job job; - - public JobBuilder(JobManager jobManager) { - this.jobManager = jobManager; - this.config = jobManager.getConfig(); - this.jobParam = jobManager.getJobParam(); - this.runMode = jobManager.getRunMode(); - this.executor = jobManager.getExecutor(); - this.useStatementSet = jobManager.isUseStatementSet(); - this.useGateway = jobManager.isUseGateway(); - this.job = jobManager.getJob(); - } - - public abstract void run() throws Exception; + void run() throws Exception; } diff --git a/dinky-core/src/main/java/org/dinky/job/JobContext.java 
b/dinky-core/src/main/java/org/dinky/job/JobContext.java new file mode 100644 index 0000000000..a66bf88176 --- /dev/null +++ b/dinky-core/src/main/java/org/dinky/job/JobContext.java @@ -0,0 +1,42 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.job; + +import org.dinky.data.enums.GatewayType; +import org.dinky.executor.Executor; +import org.dinky.executor.ExecutorConfig; + +import lombok.Data; + +@Data +public class JobContext { + private JobHandler handler; + private ExecutorConfig executorConfig; + private JobConfig config; + private Executor executor; + private boolean useGateway = false; + private boolean isPlanMode = false; + private boolean useStatementSet = false; + private boolean useRestAPI = false; + private GatewayType runMode = GatewayType.LOCAL; + + private JobParam jobParam = null; + private Job job; +} diff --git a/dinky-core/src/main/java/org/dinky/job/JobManager.java b/dinky-core/src/main/java/org/dinky/job/JobManagerHandler.java similarity index 69% rename from dinky-core/src/main/java/org/dinky/job/JobManager.java rename to dinky-core/src/main/java/org/dinky/job/JobManagerHandler.java index b49a2a0203..946b73f579 100644 --- a/dinky-core/src/main/java/org/dinky/job/JobManager.java +++ b/dinky-core/src/main/java/org/dinky/job/JobManagerHandler.java @@ -21,27 +21,22 @@ import org.dinky.api.FlinkAPI; import org.dinky.assertion.Asserts; -import org.dinky.classloader.DinkyClassLoader; import org.dinky.context.CustomTableEnvironmentContext; -import org.dinky.context.FlinkUdfPathContextHolder; import org.dinky.context.RowLevelPermissionsContext; import org.dinky.data.annotations.ProcessStep; import org.dinky.data.enums.GatewayType; import org.dinky.data.enums.ProcessStepType; import org.dinky.data.enums.Status; import org.dinky.data.exception.BusException; -import org.dinky.data.model.SystemConfiguration; import org.dinky.data.result.ErrorResult; import org.dinky.data.result.ExplainResult; import org.dinky.data.result.IResult; +import org.dinky.data.result.InsertResult; import org.dinky.data.result.ResultBuilder; -import org.dinky.data.result.ResultPool; -import org.dinky.data.result.SelectResult; import org.dinky.executor.Executor; import org.dinky.executor.ExecutorConfig; import org.dinky.executor.ExecutorFactory; import org.dinky.explainer.Explainer; -import org.dinky.function.util.UDFUtil; import org.dinky.gateway.Gateway; import org.dinky.gateway.config.FlinkConfig; import org.dinky.gateway.config.GatewayConfig; @@ -49,7 +44,6 @@ import org.dinky.gateway.enums.SavePointType; import org.dinky.gateway.result.GatewayResult; import org.dinky.gateway.result.SavePointResult; -import org.dinky.gateway.result.TestResult; import org.dinky.job.builder.JobDDLBuilder; import 
org.dinky.job.builder.JobExecuteBuilder; import org.dinky.job.builder.JobJarStreamGraphBuilder; @@ -66,22 +60,24 @@ import org.dinky.utils.SqlUtil; import org.dinky.utils.URLUtils; +import org.apache.flink.api.common.Plan; import org.apache.flink.api.dag.Pipeline; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.CoreOptions; import org.apache.flink.configuration.DeploymentOptions; import org.apache.flink.configuration.PipelineOptions; +import org.apache.flink.core.execution.JobClient; import org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.runtime.jobgraph.SavepointConfigOptions; +import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings; import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator; import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions; +import org.apache.flink.streaming.api.graph.StreamGraph; import org.apache.flink.table.api.TableResult; import org.apache.flink.yarn.configuration.YarnConfigOptions; import java.io.File; import java.io.IOException; -import java.lang.ref.WeakReference; -import java.net.URL; import java.time.LocalDateTime; import java.util.Arrays; import java.util.Collections; @@ -91,165 +87,73 @@ import java.util.Set; import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.fasterxml.jackson.databind.node.ObjectNode; -import cn.hutool.core.collection.CollUtil; import cn.hutool.core.text.StrFormatter; -import lombok.Data; -import lombok.extern.slf4j.Slf4j; -@Slf4j -@Data -public class JobManager { - private JobHandler handler; +public class JobManagerHandler implements IJobManager { + Logger log = LoggerFactory.getLogger(JobManagerHandler.class); private ExecutorConfig executorConfig; private JobConfig config; + private Executor executor; private boolean useGateway = false; - private boolean isPlanMode = false; private boolean useStatementSet = false; - private boolean useRestAPI = false; private GatewayType runMode = GatewayType.LOCAL; private JobParam jobParam = null; - private String currentSql = ""; - private final WeakReference dinkyClassLoader = new WeakReference<>(DinkyClassLoader.build()); - private Job job; - - public JobManager() {} - - public JobParam getJobParam() { - return jobParam; - } - - public void setJobParam(JobParam jobParam) { - this.jobParam = jobParam; - } - - public JobConfig getConfig() { - return config; - } - - public void setConfig(JobConfig config) { - this.config = config; - } - - public GatewayType getRunMode() { - return runMode; - } - - public void setCurrentSql(String currentSql) { - this.currentSql = currentSql; - } - - public Executor getExecutor() { - return executor; - } - - public void setExecutor(Executor executor) { - this.executor = executor; - } - - public void setPlanMode(boolean planMode) { - isPlanMode = planMode; - } - public boolean isPlanMode() { - return isPlanMode; - } - - public boolean isUseStatementSet() { - return useStatementSet; - } - - public boolean isUseRestAPI() { - return useRestAPI; - } - - public boolean isUseGateway() { - return useGateway; - } - - // return dinkyclassloader - public DinkyClassLoader getDinkyClassLoader() { - return dinkyClassLoader.get(); - } - - // return udfPathContextHolder - public FlinkUdfPathContextHolder getUdfPathContextHolder() { - return getDinkyClassLoader().getUdfPathContextHolder(); - } - - // return job - public Job getJob() { - return job; - } - - // set job - public void setJob(Job job) { - this.job = job; - } 
+ private Job job; - private JobManager(JobConfig config) { + private JobManagerHandler(JobConfig config, boolean isPlanMode) { this.config = config; - } - - public static JobManager build(JobConfig config) { - JobManager manager = new JobManager(config); - manager.init(); - return manager; - } - public static JobManager buildPlanMode(JobConfig config) { - JobManager manager = new JobManager(config); - manager.setPlanMode(true); - manager.init(); - log.info("Build Flink plan mode success."); - return manager; - } - - public void init() { if (!isPlanMode) { runMode = GatewayType.get(config.getType()); useGateway = GatewayType.isDeployCluster(config.getType()); - handler = JobHandler.build(); } + useStatementSet = config.isStatementSet(); - useRestAPI = SystemConfiguration.getInstances().isUseRestAPI(); - executorConfig = config.getExecutorSetting(); + executorConfig = config.createExecutorSetting(); executorConfig.setPlan(isPlanMode); - executor = ExecutorFactory.buildExecutor(executorConfig, getDinkyClassLoader()); + executor = ExecutorFactory.buildExecutor(executorConfig); } - private boolean ready() { - return handler.init(job); - } + @Override + public void init(JobConfig config, boolean isPlanMode) {} - private boolean success() { - return handler.success(); + public static JobManagerHandler build(JobConfig config, boolean isPlanMode) { + return new JobManagerHandler(config, isPlanMode); } - private boolean failed() { - return handler.failed(); + @Override + public void prepare(String statement) { + job = Job.build(runMode, config, executorConfig, statement, useGateway); } + @Override public boolean close() { CustomTableEnvironmentContext.clear(); RowLevelPermissionsContext.clear(); try { - getExecutor().getDinkyClassLoader().close(); + executor.getDinkyClassLoader().close(); } catch (IOException e) { throw new RuntimeException(e); } return true; } + @Override public ObjectNode getJarStreamGraphJson(String statement) { - Pipeline pipeline = JobJarStreamGraphBuilder.build(this).getJarStreamGraph(statement, getDinkyClassLoader()); + Pipeline pipeline = JobJarStreamGraphBuilder.build(this).getJarStreamGraph(statement); Configuration configuration = Configuration.fromMap(getExecutorConfig().getConfig()); JobGraph jobGraph = FlinkStreamEnvironmentUtil.getJobGraph(pipeline, configuration); return JsonUtils.parseObject(JsonPlanGenerator.generatePlan(jobGraph)); } + @Override @ProcessStep(type = ProcessStepType.SUBMIT_EXECUTE) public JobResult executeJarSql(String statement) throws Exception { List statements = Arrays.stream(SqlUtil.getStatements(statement)) @@ -258,15 +162,68 @@ public JobResult executeJarSql(String statement) throws Exception { statement = String.join(";\n", statements); jobParam = Explainer.build(executor, useStatementSet, this).pretreatStatements(SqlUtil.getStatements(statement)); - job = Job.build(runMode, config, executorConfig, executor, statement, useGateway); - ready(); + job = Job.build(runMode, config, executorConfig, statement, useGateway); + JobJarStreamGraphBuilder jobJarStreamGraphBuilder = JobJarStreamGraphBuilder.build(this); + Pipeline pipeline = jobJarStreamGraphBuilder.getJarStreamGraph(statement); + Configuration configuration = + executor.getCustomTableEnvironment().getConfig().getConfiguration(); + if (pipeline instanceof StreamGraph) { + if (Asserts.isNotNullString(config.getSavePointPath())) { + ((StreamGraph) pipeline) + .setSavepointRestoreSettings(SavepointRestoreSettings.forPath( + config.getSavePointPath(), + 
configuration.get(SavepointConfigOptions.SAVEPOINT_IGNORE_UNCLAIMED_STATE))); + } + } try { - JobJarStreamGraphBuilder.build(this).run(); - if (job.isFailed()) { - failed(); + if (!useGateway) { + JobClient jobClient = + FlinkStreamEnvironmentUtil.executeAsync(pipeline, executor.getStreamExecutionEnvironment()); + if (Asserts.isNotNull(jobClient)) { + job.setJobId(jobClient.getJobID().toHexString()); + job.setJids(Collections.singletonList(job.getJobId())); + job.setStatus(Job.JobStatus.SUCCESS); + } else { + job.setStatus(Job.JobStatus.FAILED); + } } else { - job.setStatus(Job.JobStatus.SUCCESS); - success(); + GatewayResult gatewayResult; + config.addGatewayConfig(executor.getCustomTableEnvironment() + .getConfig() + .getConfiguration() + .toMap()); + if (runMode.isApplicationMode()) { + config.getGatewayConfig().setSql(statement); + gatewayResult = + Gateway.build(config.getGatewayConfig()).submitJar(executor.getUdfPathContextHolder()); + } else { + if (pipeline instanceof StreamGraph) { + ((StreamGraph) pipeline).setJobName(config.getJobName()); + } else if (pipeline instanceof Plan) { + ((Plan) pipeline).setJobName(config.getJobName()); + } + JobGraph jobGraph = FlinkStreamEnvironmentUtil.getJobGraph(pipeline, configuration); + GatewayConfig gatewayConfig = config.getGatewayConfig(); + List uriList = jobJarStreamGraphBuilder.getUris(statement); + String[] jarPaths = uriList.stream() + .map(URLUtils::toFile) + .map(File::getAbsolutePath) + .toArray(String[]::new); + gatewayConfig.setJarPaths(jarPaths); + gatewayResult = Gateway.build(gatewayConfig).submitJobGraph(jobGraph); + } + job.setResult(InsertResult.success(gatewayResult.getId())); + job.setJobId(gatewayResult.getId()); + job.setJids(gatewayResult.getJids()); + job.setJobManagerAddress(URLUtils.formatAddress(gatewayResult.getWebURL())); + + if (gatewayResult.isSuccess()) { + job.setStatus(Job.JobStatus.SUCCESS); + } else { + job.setStatus(Job.JobStatus.FAILED); + job.setError(gatewayResult.getError()); + log.error(gatewayResult.getError()); + } } } catch (Exception e) { String error = @@ -274,7 +231,6 @@ public JobResult executeJarSql(String statement) throws Exception { job.setEndTime(LocalDateTime.now()); job.setStatus(Job.JobStatus.FAILED); job.setError(error); - failed(); throw new Exception(error, e); } finally { close(); @@ -282,12 +238,11 @@ public JobResult executeJarSql(String statement) throws Exception { return job.getJobResult(); } + @Override @ProcessStep(type = ProcessStepType.SUBMIT_EXECUTE) public JobResult executeSql(String statement) throws Exception { - job = Job.build(runMode, config, executorConfig, executor, statement, useGateway); - ready(); - - DinkyClassLoaderUtil.initClassLoader(config, getDinkyClassLoader()); + Objects.requireNonNull(job, "job is null, prepare() first"); + DinkyClassLoaderUtil.initClassLoader(config, executor.getDinkyClassLoader()); jobParam = Explainer.build(executor, useStatementSet, this).pretreatStatements(SqlUtil.getStatements(statement)); try { @@ -301,11 +256,8 @@ public JobResult executeSql(String statement) throws Exception { JobExecuteBuilder.build(this).run(); // finished job.setEndTime(LocalDateTime.now()); - if (job.isFailed()) { - failed(); - } else { + if (!job.isFailed()) { job.setStatus(Job.JobStatus.SUCCESS); - success(); } } catch (Exception e) { String errorMessage = e.getMessage(); @@ -313,11 +265,10 @@ public JobResult executeSql(String statement) throws Exception { throw new BusException(Status.OPERATE_NOT_SUPPORT_QUERY.getMessage()); } String error = 
StrFormatter.format(
-                            "Exception in executing FlinkSQL:\n{}\n{}", SqlUtil.addLineNumber(currentSql), errorMessage);
+                            "Exception in executing FlinkSQL:\n{}\n{}", SqlUtil.addLineNumber(statement), errorMessage);
             job.setEndTime(LocalDateTime.now());
             job.setStatus(Job.JobStatus.FAILED);
             job.setError(error);
-            failed();
             throw new Exception(error, e);
         } finally {
             close();
@@ -325,6 +276,7 @@
         return job.getJobResult();
     }

+    @Override
     public IResult executeDDL(String statement) {
         String[] statements = SqlUtil.getStatements(statement);
         try {
@@ -337,13 +289,16 @@ public IResult executeDDL(String statement) {
                 SqlType operationType = Operations.getOperationType(newStatement);
                 if (SqlType.INSERT == operationType || SqlType.SELECT == operationType) {
                     continue;
-                } else if (operationType.equals(SqlType.ADD) || operationType.equals(SqlType.ADD_JAR)) {
+                }
+
+                if (operationType.equals(SqlType.ADD) || operationType.equals(SqlType.ADD_JAR)) {
                     Set allFilePath = AddJarSqlParseStrategy.getAllFilePath(item);
-                    getExecutor().getDinkyClassLoader().addURLs(allFilePath);
+                    executor.getDinkyClassLoader().addURLs(allFilePath);
                 } else if (operationType.equals(SqlType.ADD_FILE)) {
                     Set allFilePath = AddFileSqlParseStrategy.getAllFilePath(item);
-                    getExecutor().getDinkyClassLoader().addURLs(allFilePath);
+                    executor.getDinkyClassLoader().addURLs(allFilePath);
                 }
+
                 LocalDateTime startTime = LocalDateTime.now();
                 TableResult tableResult = executor.executeSql(newStatement);
                 result = ResultBuilder.build(
@@ -358,34 +313,28 @@ public IResult executeDDL(String statement) {
         return new ErrorResult();
     }

-    public static SelectResult getJobData(String jobId) {
-        SelectResult selectResult = ResultPool.get(jobId);
-        if (Objects.isNull(selectResult) || selectResult.isDestroyed()) {
-            JobReadHandler readHandler = JobHandler.build().getReadHandler();
-            return readHandler.readResultDataFromStorage(Integer.parseInt(jobId));
-        }
-        return selectResult;
-    }
-
+    @Override
     public ExplainResult explainSql(String statement) {
         return Explainer.build(executor, useStatementSet, this)
                 .initialize(config, statement)
                 .explainSql(statement);
     }

+    @Override
     public ObjectNode getStreamGraph(String statement) {
         return Explainer.build(executor, useStatementSet, this)
                 .initialize(config, statement)
                 .getStreamGraph(statement);
     }

+    @Override
     public String getJobPlanJson(String statement) {
-        return Explainer.build(executor, useStatementSet, this)
-                .initialize(config, statement)
-                .getJobPlanInfo(statement)
-                .getJsonPlan();
+        Explainer explainer = Explainer.build(executor, useStatementSet, this).initialize(config, statement);
+        JobParam jobParam = explainer.pretreatStatements(SqlUtil.getStatements(statement));
+        return executor.getJobPlanJson(jobParam);
     }

+    @Override
     public boolean cancelNormal(String jobId) {
         try {
             return FlinkAPI.build(config.getAddress()).stop(jobId);
@@ -395,8 +344,10 @@
         }
     }

-    public SavePointResult savepoint(String jobId, SavePointType savePointType, String savePoint) {
-        if (useGateway && !useRestAPI) {
+    @Override
+    public SavePointResult savepoint(
+            String jobId, SavePointType savePointType, String savePoint, boolean isUseRestAPI) {
+        if (useGateway && !isUseRestAPI) {
             config.getGatewayConfig()
                     .setFlinkConfig(
                             FlinkConfig.build(jobId, ActionType.SAVEPOINT.getValue(), savePointType.getValue(), null));
@@ -406,19 +357,7 @@ public SavePointResult savepoint(String jobId, SavePointType savePointType, Stri
         }
     }

-    public static 
void killCluster(GatewayConfig gatewayConfig, String appId) { - gatewayConfig.getClusterConfig().setAppId(appId); - Gateway.build(gatewayConfig).killCluster(); - } - - public static GatewayResult deploySessionCluster(GatewayConfig gatewayConfig) { - return Gateway.build(gatewayConfig).deployCluster(UDFUtil.createFlinkUdfPathContextHolder()); - } - - public static TestResult testGateway(GatewayConfig gatewayConfig) { - return Gateway.build(gatewayConfig).test(); - } - + @Override public String exportSql(String sql) { String statement = executor.pretreatStatement(sql); StringBuilder sb = new StringBuilder(); @@ -480,10 +419,67 @@ public String exportSql(String sql) { return sb.toString(); } - public List getAllFileSet() { - return CollUtil.isEmpty(getUdfPathContextHolder().getAllFileSet()) - ? Collections.emptyList() - : Arrays.asList(URLUtils.getURLs( - getUdfPathContextHolder().getAllFileSet().toArray(new File[0]))); + public Logger getLog() { + return log; + } + + public void setLog(Logger log) { + this.log = log; + } + + public ExecutorConfig getExecutorConfig() { + return executorConfig; + } + + public void setExecutorConfig(ExecutorConfig executorConfig) { + this.executorConfig = executorConfig; + } + + public JobConfig getConfig() { + return config; + } + + public void setConfig(JobConfig config) { + this.config = config; + } + + public Executor getExecutor() { + return executor; + } + + public void setExecutor(Executor executor) { + this.executor = executor; + } + + public boolean isUseGateway() { + return useGateway; + } + + public boolean isUseStatementSet() { + return useStatementSet; + } + + public void setUseStatementSet(boolean useStatementSet) { + this.useStatementSet = useStatementSet; + } + + public GatewayType getRunMode() { + return runMode; + } + + public JobParam getJobParam() { + return jobParam; + } + + public void setJobParam(JobParam jobParam) { + this.jobParam = jobParam; + } + + public Job getJob() { + return job; + } + + public void setJob(Job job) { + this.job = job; } } diff --git a/dinky-core/src/main/java/org/dinky/job/JobParam.java b/dinky-core/src/main/java/org/dinky/job/JobParam.java index fb2f7b29ff..043d536f78 100644 --- a/dinky-core/src/main/java/org/dinky/job/JobParam.java +++ b/dinky-core/src/main/java/org/dinky/job/JobParam.java @@ -45,22 +45,6 @@ public JobParam(List udfList) { this.udfList = udfList; } - public JobParam(List ddl, List trans) { - this.ddl = ddl; - this.trans = trans; - } - - public JobParam( - List statements, - List ddl, - List trans, - List execute) { - this.statements = statements; - this.ddl = ddl; - this.trans = trans; - this.execute = execute; - } - public JobParam( List statements, List ddl, diff --git a/dinky-core/src/main/java/org/dinky/job/builder/JobDDLBuilder.java b/dinky-core/src/main/java/org/dinky/job/builder/JobDDLBuilder.java index 64869061ec..1c36e51da5 100644 --- a/dinky-core/src/main/java/org/dinky/job/builder/JobDDLBuilder.java +++ b/dinky-core/src/main/java/org/dinky/job/builder/JobDDLBuilder.java @@ -19,8 +19,11 @@ package org.dinky.job.builder; +import org.dinky.executor.Executor; +import org.dinky.job.ExecuteSqlException; import org.dinky.job.JobBuilder; -import org.dinky.job.JobManager; +import org.dinky.job.JobManagerHandler; +import org.dinky.job.JobParam; import org.dinky.job.StatementParam; import lombok.extern.slf4j.Slf4j; @@ -30,21 +33,28 @@ * */ @Slf4j -public class JobDDLBuilder extends JobBuilder { +public class JobDDLBuilder implements JobBuilder { - public JobDDLBuilder(JobManager jobManager) { - 
super(jobManager); + private final JobParam jobParam; + private final Executor executor; + + public JobDDLBuilder(JobParam jobParam, Executor executor) { + this.jobParam = jobParam; + this.executor = executor; } - public static JobDDLBuilder build(JobManager jobManager) { - return new JobDDLBuilder(jobManager); + public static JobDDLBuilder build(JobManagerHandler jobManager) { + return new JobDDLBuilder(jobManager.getJobParam(), jobManager.getExecutor()); } @Override public void run() throws Exception { for (StatementParam item : jobParam.getDdl()) { - jobManager.setCurrentSql(item.getValue()); - executor.executeSql(item.getValue()); + try { + executor.executeSql(item.getValue()); + } catch (Exception ex) { + throw new ExecuteSqlException(item.getValue(), ex); + } } } } diff --git a/dinky-core/src/main/java/org/dinky/job/builder/JobExecuteBuilder.java b/dinky-core/src/main/java/org/dinky/job/builder/JobExecuteBuilder.java index 6fb835742d..754cbf3fd4 100644 --- a/dinky-core/src/main/java/org/dinky/job/builder/JobExecuteBuilder.java +++ b/dinky-core/src/main/java/org/dinky/job/builder/JobExecuteBuilder.java @@ -20,14 +20,18 @@ package org.dinky.job.builder; import org.dinky.assertion.Asserts; +import org.dinky.data.enums.GatewayType; import org.dinky.data.result.IResult; import org.dinky.data.result.InsertResult; import org.dinky.data.result.ResultBuilder; +import org.dinky.executor.Executor; import org.dinky.gateway.Gateway; import org.dinky.gateway.result.GatewayResult; import org.dinky.job.Job; import org.dinky.job.JobBuilder; -import org.dinky.job.JobManager; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManagerHandler; +import org.dinky.job.JobParam; import org.dinky.job.StatementParam; import org.dinky.parser.SqlType; import org.dinky.utils.URLUtils; @@ -37,20 +41,47 @@ import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings; import org.apache.flink.streaming.api.graph.StreamGraph; -import java.util.ArrayList; +import java.util.Collections; /** * JobExecuteBuilder - * */ -public class JobExecuteBuilder extends JobBuilder { +public class JobExecuteBuilder implements JobBuilder { + + private final JobParam jobParam; + private final boolean useGateway; + private final Executor executor; + private final boolean useStatementSet; + private final JobConfig config; + private final GatewayType runMode; + private final Job job; - public JobExecuteBuilder(JobManager jobManager) { - super(jobManager); + public JobExecuteBuilder( + JobParam jobParam, + boolean useGateway, + Executor executor, + boolean useStatementSet, + JobConfig config, + GatewayType runMode, + Job job) { + this.jobParam = jobParam; + this.useGateway = useGateway; + this.executor = executor; + this.useStatementSet = useStatementSet; + this.config = config; + this.runMode = runMode; + this.job = job; } - public static JobExecuteBuilder build(JobManager jobManager) { - return new JobExecuteBuilder(jobManager); + public static JobExecuteBuilder build(JobManagerHandler jobManager) { + return new JobExecuteBuilder( + jobManager.getJobParam(), + jobManager.isUseGateway(), + jobManager.getExecutor(), + jobManager.isUseStatementSet(), + jobManager.getConfig(), + jobManager.getRunMode(), + jobManager.getJob()); } @Override @@ -98,16 +129,13 @@ public void run() throws Exception { break; } } + JobClient jobClient = executor.executeAsync(config.getJobName()); if (Asserts.isNotNull(jobClient)) { job.setJobId(jobClient.getJobID().toHexString()); - job.setJids(new ArrayList() { - - { - add(job.getJobId()); - } - }); + 
job.setJids(Collections.singletonList(job.getJobId())); } + if (config.isUseResult()) { IResult result = ResultBuilder.build( SqlType.EXECUTE, diff --git a/dinky-core/src/main/java/org/dinky/job/builder/JobJarStreamGraphBuilder.java b/dinky-core/src/main/java/org/dinky/job/builder/JobJarStreamGraphBuilder.java index 42fea3de21..a0011bd043 100644 --- a/dinky-core/src/main/java/org/dinky/job/builder/JobJarStreamGraphBuilder.java +++ b/dinky-core/src/main/java/org/dinky/job/builder/JobJarStreamGraphBuilder.java @@ -19,16 +19,11 @@ package org.dinky.job.builder; -import org.dinky.assertion.Asserts; -import org.dinky.classloader.DinkyClassLoader; import org.dinky.data.exception.DinkyException; -import org.dinky.data.result.InsertResult; -import org.dinky.gateway.Gateway; -import org.dinky.gateway.config.GatewayConfig; -import org.dinky.gateway.result.GatewayResult; -import org.dinky.job.Job; +import org.dinky.executor.Executor; import org.dinky.job.JobBuilder; -import org.dinky.job.JobManager; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManagerHandler; import org.dinky.parser.SqlType; import org.dinky.trans.Operations; import org.dinky.trans.ddl.CustomSetOperation; @@ -38,18 +33,9 @@ import org.dinky.trans.parse.ExecuteJarParseStrategy; import org.dinky.trans.parse.SetSqlParseStrategy; import org.dinky.utils.DinkyClassLoaderUtil; -import org.dinky.utils.FlinkStreamEnvironmentUtil; import org.dinky.utils.SqlUtil; -import org.dinky.utils.URLUtils; -import org.apache.flink.api.common.Plan; import org.apache.flink.api.dag.Pipeline; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.core.execution.JobClient; -import org.apache.flink.runtime.jobgraph.JobGraph; -import org.apache.flink.runtime.jobgraph.SavepointConfigOptions; -import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings; -import org.apache.flink.streaming.api.graph.StreamGraph; import java.io.File; import java.net.URL; @@ -58,106 +44,29 @@ import java.util.Set; import cn.hutool.core.lang.Assert; -import lombok.extern.slf4j.Slf4j; /** * JobJarStreamGraphBuilder */ -@Slf4j -public class JobJarStreamGraphBuilder extends JobBuilder { +public class JobJarStreamGraphBuilder implements JobBuilder { - private final Configuration configuration; + private final JobConfig config; + private final Executor executor; - public JobJarStreamGraphBuilder(JobManager jobManager) { - super(jobManager); - configuration = executor.getCustomTableEnvironment().getConfig().getConfiguration(); + public JobJarStreamGraphBuilder(JobConfig config, Executor executor) { + this.config = config; + this.executor = executor; } - public static JobJarStreamGraphBuilder build(JobManager jobManager) { - return new JobJarStreamGraphBuilder(jobManager); - } - - private Pipeline getPipeline() { - Pipeline pipeline = getJarStreamGraph(job.getStatement(), jobManager.getDinkyClassLoader()); - if (pipeline instanceof StreamGraph) { - if (Asserts.isNotNullString(config.getSavePointPath())) { - ((StreamGraph) pipeline) - .setSavepointRestoreSettings(SavepointRestoreSettings.forPath( - config.getSavePointPath(), - configuration.get(SavepointConfigOptions.SAVEPOINT_IGNORE_UNCLAIMED_STATE))); - } - } - return pipeline; + public static JobJarStreamGraphBuilder build(JobManagerHandler jobManager) { + return new JobJarStreamGraphBuilder(jobManager.getConfig(), jobManager.getExecutor()); } @Override - public void run() throws Exception { - if (!useGateway) { - submitNormal(); - } else { - GatewayResult gatewayResult; - if 
(runMode.isApplicationMode()) { - gatewayResult = submitGateway(); - } else { - gatewayResult = submitNormalWithGateway(); - } - job.setResult(InsertResult.success(gatewayResult.getId())); - job.setJobId(gatewayResult.getId()); - job.setJids(gatewayResult.getJids()); - job.setJobManagerAddress(URLUtils.formatAddress(gatewayResult.getWebURL())); - - if (gatewayResult.isSuccess()) { - job.setStatus(Job.JobStatus.SUCCESS); - } else { - job.setStatus(Job.JobStatus.FAILED); - job.setError(gatewayResult.getError()); - log.error(gatewayResult.getError()); - } - } - } + public void run() throws Exception {} - private GatewayResult submitGateway() throws Exception { - config.addGatewayConfig(configuration); - config.getGatewayConfig().setSql(job.getStatement()); - return Gateway.build(config.getGatewayConfig()).submitJar(jobManager.getUdfPathContextHolder()); - } - - private GatewayResult submitNormalWithGateway() { - Pipeline pipeline = getPipeline(); - if (pipeline instanceof StreamGraph) { - ((StreamGraph) pipeline).setJobName(config.getJobName()); - } else if (pipeline instanceof Plan) { - ((Plan) pipeline).setJobName(config.getJobName()); - } - JobGraph jobGraph = FlinkStreamEnvironmentUtil.getJobGraph(pipeline, configuration); - GatewayConfig gatewayConfig = config.getGatewayConfig(); - List uriList = getUris(job.getStatement()); - String[] jarPaths = uriList.stream() - .map(URLUtils::toFile) - .map(File::getAbsolutePath) - .toArray(String[]::new); - gatewayConfig.setJarPaths(jarPaths); - return Gateway.build(gatewayConfig).submitJobGraph(jobGraph); - } - - private void submitNormal() throws Exception { - JobClient jobClient = - FlinkStreamEnvironmentUtil.executeAsync(getPipeline(), executor.getStreamExecutionEnvironment()); - if (Asserts.isNotNull(jobClient)) { - job.setJobId(jobClient.getJobID().toHexString()); - job.setJids(new ArrayList() { - { - add(job.getJobId()); - } - }); - job.setStatus(Job.JobStatus.SUCCESS); - } else { - job.setStatus(Job.JobStatus.FAILED); - } - } - - public Pipeline getJarStreamGraph(String statement, DinkyClassLoader dinkyClassLoader) { - DinkyClassLoaderUtil.initClassLoader(config, dinkyClassLoader); + public Pipeline getJarStreamGraph(String statement) { + DinkyClassLoaderUtil.initClassLoader(config, executor.getDinkyClassLoader()); String[] statements = SqlUtil.getStatements(statement); ExecuteJarOperation executeJarOperation = null; for (String sql : statements) { @@ -172,16 +81,16 @@ public Pipeline getJarStreamGraph(String statement, DinkyClassLoader dinkyClassL customSetOperation.execute(this.executor.getCustomTableEnvironment()); } else if (operationType.equals(SqlType.ADD)) { Set files = AddJarSqlParseStrategy.getAllFilePath(sqlStatement); - files.forEach(executor::addJar); - files.forEach(jobManager.getUdfPathContextHolder()::addOtherPlugins); + executor.addJar(files.toArray(new File[0])); + files.forEach(executor.getUdfPathContextHolder()::addOtherPlugins); } else if (operationType.equals(SqlType.ADD_FILE)) { Set files = AddFileSqlParseStrategy.getAllFilePath(sqlStatement); - files.forEach(executor::addJar); - files.forEach(jobManager.getUdfPathContextHolder()::addFile); + executor.addJar(files.toArray(new File[0])); + files.forEach(executor.getUdfPathContextHolder()::addFile); } } Assert.notNull(executeJarOperation, () -> new DinkyException("Not found execute jar operation.")); - List urLs = jobManager.getAllFileSet(); + List urLs = executor.getAllFileSet(); return executeJarOperation.explain(executor.getCustomTableEnvironment(), urLs); } @@ -195,6 
+104,7 @@ public List getUris(String statement) { break; } } + return uriList; } } diff --git a/dinky-core/src/main/java/org/dinky/job/builder/JobTransBuilder.java b/dinky-core/src/main/java/org/dinky/job/builder/JobTransBuilder.java index e3fb5a0ce8..818e7799dc 100644 --- a/dinky-core/src/main/java/org/dinky/job/builder/JobTransBuilder.java +++ b/dinky-core/src/main/java/org/dinky/job/builder/JobTransBuilder.java @@ -32,10 +32,12 @@ import org.dinky.gateway.result.GatewayResult; import org.dinky.interceptor.FlinkInterceptor; import org.dinky.interceptor.FlinkInterceptorResult; +import org.dinky.job.ExecuteSqlException; import org.dinky.job.Job; import org.dinky.job.JobBuilder; import org.dinky.job.JobConfig; -import org.dinky.job.JobManager; +import org.dinky.job.JobManagerHandler; +import org.dinky.job.JobParam; import org.dinky.job.StatementParam; import org.dinky.parser.SqlType; import org.dinky.utils.URLUtils; @@ -53,35 +55,73 @@ /** * JobTransBuilder - * */ -public class JobTransBuilder extends JobBuilder { +public class JobTransBuilder implements JobBuilder { + + private String currentSql; + private final JobParam jobParam; + private final boolean useStatementSet; + private final boolean useGateway; + private final JobConfig config; + private final Executor executor; + private final GatewayType runMode; + private final Job job; + private JobManagerHandler jobManagerHandler; + + public JobTransBuilder(JobManagerHandler jobManagerHandler) { + this( + jobManagerHandler.getJobParam(), + jobManagerHandler.isUseStatementSet(), + jobManagerHandler.isUseGateway(), + jobManagerHandler.getConfig(), + jobManagerHandler.getExecutor(), + jobManagerHandler.getRunMode(), + jobManagerHandler.getJob()); + this.jobManagerHandler = jobManagerHandler; + } - public JobTransBuilder(JobManager jobManager) { - super(jobManager); + public JobTransBuilder( + JobParam jobParam, + boolean useStatementSet, + boolean useGateway, + JobConfig config, + Executor executor, + GatewayType runMode, + Job job) { + this.jobParam = jobParam; + this.useStatementSet = useStatementSet; + this.useGateway = useGateway; + this.config = config; + this.executor = executor; + this.runMode = runMode; + this.job = job; } - public static JobTransBuilder build(JobManager jobManager) { + public static JobTransBuilder build(JobManagerHandler jobManager) { return new JobTransBuilder(jobManager); } @Override public void run() throws Exception { - if (jobParam.getTrans().isEmpty()) { + try { + if (jobParam.getTrans().isEmpty()) { String transSqlTypes = SqlType.getTransSqlTypes().stream().map(SqlType::getType).collect(Collectors.joining(",")); throw new BusException(MessageFormat.format(Status.TASK_SQL_NO_EXECUTABLE.getMessage(), transSqlTypes)); - } + } - if (useStatementSet) { - handleStatementSet(); - return; - } + if (useStatementSet) { + handleStatementSet(); + return; + } - handleNonStatementSet(); + handleNonStatementSet(); + } catch (Exception ex) { + throw new ExecuteSqlException(currentSql, ex); + } } - private void handleStatementSet() throws Exception { + private void handleStatementSet() { List inserts = collectInserts(); if (useGateway) { @@ -91,7 +131,7 @@ private void handleStatementSet() throws Exception { processWithoutGateway(inserts); } - private void handleNonStatementSet() throws Exception { + private void handleNonStatementSet() { if (useGateway) { processSingleInsertWithGateway(); return; @@ -111,36 +151,36 @@ private List collectInserts() { return inserts; } - private void processWithGateway(List inserts) throws 
Exception { - jobManager.setCurrentSql(String.join(FlinkSQLConstant.SEPARATOR, inserts)); + private void processWithGateway(List inserts) { + currentSql = String.join(FlinkSQLConstant.SEPARATOR, inserts); GatewayResult gatewayResult = submitByGateway(inserts); setJobResultFromGatewayResult(gatewayResult); } - private void processWithoutGateway(List inserts) throws Exception { + private void processWithoutGateway(List inserts) { if (!inserts.isEmpty()) { - jobManager.setCurrentSql(String.join(FlinkSQLConstant.SEPARATOR, inserts)); + currentSql = String.join(FlinkSQLConstant.SEPARATOR, inserts); TableResult tableResult = executor.executeStatementSet(inserts); updateJobWithTableResult(tableResult); } } - private void processSingleInsertWithGateway() throws Exception { + private void processSingleInsertWithGateway() { List singleInsert = collectInserts(); processWithGateway(singleInsert); } - private void processFirstStatement() throws Exception { + private void processFirstStatement() { if (jobParam.getTrans().isEmpty()) { return; } // Only process the first statement when not using statement set StatementParam item = jobParam.getTrans().get(0); - jobManager.setCurrentSql(item.getValue()); + currentSql = item.getValue(); processSingleStatement(item); } - private void processSingleStatement(StatementParam item) throws Exception { + private void processSingleStatement(StatementParam item) { FlinkInterceptorResult flinkInterceptorResult = FlinkInterceptor.build(executor, item.getValue()); if (Asserts.isNotNull(flinkInterceptorResult.getTableResult())) { updateJobWithTableResult(flinkInterceptorResult.getTableResult(), item.getType()); @@ -182,20 +222,21 @@ private void updateJobWithTableResult(TableResult tableResult, SqlType sqlType) config.isUseChangeLog(), config.isUseAutoCancel(), executor.getTimeZone()) - .getResultWithPersistence(tableResult, jobManager.getHandler()); + .getResult(tableResult); + // TODO: 2024/7/15 persist result should execute at dinky server by network. + // .getResultWithPersistence(tableResult, jobManagerHandler.getHandler()); job.setResult(result); } } private GatewayResult submitByGateway(List inserts) { - JobConfig config = jobManager.getConfig(); - GatewayType runMode = jobManager.getRunMode(); - Executor executor = jobManager.getExecutor(); - GatewayResult gatewayResult = null; // Use gateway need to build gateway config, include flink configuration. - config.addGatewayConfig(executor.getCustomTableEnvironment().getConfig().getConfiguration()); + config.addGatewayConfig(executor.getCustomTableEnvironment() + .getConfig() + .getConfiguration() + .toMap()); config.getGatewayConfig().setSql(jobParam.getParsedSql()); if (runMode.isApplicationMode()) { // Application mode need to submit dinky-app.jar that in the hdfs or image. 
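
Taken together, the builder refactors above replace the old JobManager field inheritance with plain constructor parameters, and JobManagerHandler drives them through an explicit build → prepare → execute lifecycle: executeSql() now fails fast if prepare() has not built the Job, and closes the handler in its own finally block. A minimal local-mode sketch of that lifecycle; the JobConfig builder settings are the ones the LineageBuilder hunk above uses, while the class name and the SQL statement are illustrative assumptions:

```java
import org.dinky.data.enums.GatewayType;
import org.dinky.job.JobConfig;
import org.dinky.job.JobManagerHandler;
import org.dinky.job.JobResult;

public class LocalExecuteSketch {
    public static void main(String[] args) throws Exception {
        // Same builder settings the LineageBuilder hunk uses for a local run.
        JobConfig config = JobConfig.builder()
                .type(GatewayType.LOCAL.getLongValue())
                .useRemote(false)
                .fragment(true)
                .statementSet(false)
                .parallelism(1)
                .build();
        JobManagerHandler handler = JobManagerHandler.build(config, false);

        String sql = "INSERT INTO sink SELECT * FROM source"; // illustrative statement
        handler.prepare(sql); // builds the Job; executeSql() requires it
        JobResult result = handler.executeSql(sql); // closes the handler in its finally block
    }
}
```

Because executeSql() invokes close() itself on both the success and failure paths, the sketch does not call close() a second time.
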
diff --git a/dinky-core/src/main/java/org/dinky/job/builder/JobUDFBuilder.java b/dinky-core/src/main/java/org/dinky/job/builder/JobUDFBuilder.java index c219c2337c..846937e2e1 100644 --- a/dinky-core/src/main/java/org/dinky/job/builder/JobUDFBuilder.java +++ b/dinky-core/src/main/java/org/dinky/job/builder/JobUDFBuilder.java @@ -24,11 +24,14 @@ import static org.dinky.function.util.UDFUtil.YARN; import org.dinky.assertion.Asserts; -import org.dinky.data.model.SystemConfiguration; +import org.dinky.data.enums.GatewayType; +import org.dinky.executor.Executor; import org.dinky.function.data.model.UDF; import org.dinky.function.util.UDFUtil; import org.dinky.job.JobBuilder; -import org.dinky.job.JobManager; +import org.dinky.job.JobConfig; +import org.dinky.job.JobManagerHandler; +import org.dinky.job.JobParam; import org.dinky.utils.URLUtils; import java.io.File; @@ -45,30 +48,37 @@ /** * JobUDFBuilder - * */ @Slf4j -public class JobUDFBuilder extends JobBuilder { - - public JobUDFBuilder(JobManager jobManager) { - super(jobManager); +public class JobUDFBuilder implements JobBuilder { + + private final JobParam jobParam; + private final Executor executor; + private final JobConfig config; + private final GatewayType runMode; + + public JobUDFBuilder(JobParam jobParam, Executor executor, JobConfig config, GatewayType runMode) { + this.jobParam = jobParam; + this.executor = executor; + this.config = config; + this.runMode = runMode; } - public static JobUDFBuilder build(JobManager jobManager) { - return new JobUDFBuilder(jobManager); + public static JobUDFBuilder build(JobManagerHandler jobManager) { + return new JobUDFBuilder( + jobManager.getJobParam(), jobManager.getExecutor(), jobManager.getConfig(), jobManager.getRunMode()); } @Override public void run() throws Exception { Asserts.checkNotNull(jobParam, "No executable statement."); - List udfList = jobManager.getJobParam().getUdfList(); + List udfList = jobParam.getUdfList(); Integer taskId = config.getTaskId(); if (taskId == null) { taskId = -RandomUtil.randomInt(0, 1000); } // 1. 
Obtain the path of the jar package and inject it into the remote environment - List jarFiles = - new ArrayList<>(jobManager.getUdfPathContextHolder().getAllFileSet()); + List jarFiles = new ArrayList<>(executor.getUdfPathContextHolder().getAllFileSet()); String[] userCustomUdfJarPath = UDFUtil.initJavaUDF(udfList, taskId); String[] jarPaths = CollUtil.removeNull(jarFiles).stream() @@ -89,7 +99,7 @@ public void run() throws Exception { for (String pyPath : pyPaths) { if (StrUtil.isNotBlank(pyPath)) { jarFiles.add(new File(pyPath)); - jobManager.getUdfPathContextHolder().addPyUdfPath(new File(pyPath)); + executor.getUdfPathContextHolder().addPyUdfPath(new File(pyPath)); } } } @@ -97,14 +107,14 @@ public void run() throws Exception { for (String jarPath : userCustomUdfJarPath) { if (StrUtil.isNotBlank(jarPath)) { jarFiles.add(new File(jarPath)); - jobManager.getUdfPathContextHolder().addUdfPath(new File(jarPath)); + executor.getUdfPathContextHolder().addUdfPath(new File(jarPath)); } } } - Set pyUdfFile = jobManager.getUdfPathContextHolder().getPyUdfFile(); + Set pyUdfFile = executor.getUdfPathContextHolder().getPyUdfFile(); executor.initPyUDF( - SystemConfiguration.getInstances().getPythonHome(), + config.getSystemConfiguration().getPythonHome(), pyUdfFile.stream().map(File::getAbsolutePath).toArray(String[]::new)); if (GATEWAY_TYPE_MAP.get(YARN).contains(runMode)) { config.getGatewayConfig().setJarPaths(ArrayUtil.append(jarPaths, pyPaths)); @@ -113,11 +123,9 @@ public void run() throws Exception { try { List jarList = CollUtil.newArrayList(URLUtils.getURLs(jarFiles)); // 3.Write the required files for UDF - UDFUtil.writeManifest(taskId, jarList, jobManager.getUdfPathContextHolder()); + UDFUtil.writeManifest(taskId, jarList, executor.getUdfPathContextHolder()); UDFUtil.addConfigurationClsAndJars( - jobManager.getExecutor().getCustomTableEnvironment(), - jarList, - CollUtil.newArrayList(URLUtils.getURLs(jarFiles))); + executor.getCustomTableEnvironment(), jarList, CollUtil.newArrayList(URLUtils.getURLs(jarFiles))); } catch (Exception e) { throw new RuntimeException("add configuration failed: ", e); } diff --git a/dinky-core/src/main/java/org/dinky/trans/Operations.java b/dinky-core/src/main/java/org/dinky/trans/Operations.java index f4984f416e..3945828ce0 100644 --- a/dinky-core/src/main/java/org/dinky/trans/Operations.java +++ b/dinky-core/src/main/java/org/dinky/trans/Operations.java @@ -52,9 +52,9 @@ public class Operations { private Operations() {} - private static final Operation[] ALL_OPERATIONS = getAllOperations(); + public static final Operation[] ALL_OPERATIONS = getAllOperations(); - private static final List JAVA_STATIC_UDF_LIST = getCustomStaticUdfs(); + public static final List JAVA_STATIC_UDF_LIST = getCustomStaticUdfs(); /** * get all {@link Operation} children ordinary class, * diff --git a/dinky-core/src/test/java/org/dinky/interceptor/CdcSourceTests.java b/dinky-core/src/test/java/org/dinky/interceptor/CdcSourceTests.java index 0a782c7192..4212440131 100644 --- a/dinky-core/src/test/java/org/dinky/interceptor/CdcSourceTests.java +++ b/dinky-core/src/test/java/org/dinky/interceptor/CdcSourceTests.java @@ -19,10 +19,9 @@ package org.dinky.interceptor; -import org.dinky.classloader.DinkyClassLoader; import org.dinky.executor.Executor; import org.dinky.executor.ExecutorConfig; -import org.dinky.executor.ExecutorFactory; +import org.dinky.executor.LocalStreamExecutor; import org.junit.Ignore; import org.junit.Test; @@ -50,7 +49,7 @@ public void printTest() throws Exception { 
.toString(); ExecutorConfig executorConfig = ExecutorConfig.DEFAULT; - Executor executor = ExecutorFactory.buildLocalExecutor(executorConfig, DinkyClassLoader.build()); + Executor executor = new LocalStreamExecutor(executorConfig); executor.executeSql(statement); executor.execute(""); } diff --git a/dinky-core/src/test/java/org/dinky/interceptor/FlinkCDCPipelineTest.java b/dinky-core/src/test/java/org/dinky/interceptor/FlinkCDCPipelineTest.java index 7e314b3a5b..8a474c8d5d 100644 --- a/dinky-core/src/test/java/org/dinky/interceptor/FlinkCDCPipelineTest.java +++ b/dinky-core/src/test/java/org/dinky/interceptor/FlinkCDCPipelineTest.java @@ -19,10 +19,9 @@ package org.dinky.interceptor; -import org.dinky.classloader.DinkyClassLoader; import org.dinky.executor.Executor; import org.dinky.executor.ExecutorConfig; -import org.dinky.executor.ExecutorFactory; +import org.dinky.executor.LocalStreamExecutor; import org.junit.Ignore; import org.junit.Test; @@ -60,7 +59,7 @@ public void mysqlTest() throws Exception { .toString(); ExecutorConfig executorConfig = ExecutorConfig.DEFAULT; - Executor executor = ExecutorFactory.buildLocalExecutor(executorConfig, DinkyClassLoader.build()); + Executor executor = new LocalStreamExecutor(executorConfig); executor.executeSql(statement); executor.execute(""); } diff --git a/dinky-executor-server/pom.xml b/dinky-executor-server/pom.xml new file mode 100644 index 0000000000..97ce288128 --- /dev/null +++ b/dinky-executor-server/pom.xml @@ -0,0 +1,21 @@ + + + 4.0.0 + + org.dinky + dinky + ${revision} + ../pom.xml + + + dinky-executor-server + + + + org.dinky + dinky-core + + + + diff --git a/dinky-executor-server/src/main/java/org/dinky/JobManagerServiceImpl.java b/dinky-executor-server/src/main/java/org/dinky/JobManagerServiceImpl.java new file mode 100644 index 0000000000..a1cd97274f --- /dev/null +++ b/dinky-executor-server/src/main/java/org/dinky/JobManagerServiceImpl.java @@ -0,0 +1,344 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *
+ */
+
+package org.dinky;
+
+import org.dinky.cluster.FlinkCluster;
+import org.dinky.cluster.FlinkClusterInfo;
+import org.dinky.context.RowLevelPermissionsContext;
+import org.dinky.data.enums.JobStatus;
+import org.dinky.data.model.Catalog;
+import org.dinky.data.model.CheckPointReadTable;
+import org.dinky.data.model.Column;
+import org.dinky.data.model.ResourcesVO;
+import org.dinky.data.model.Schema;
+import org.dinky.data.model.SystemConfiguration;
+import org.dinky.data.model.Table;
+import org.dinky.data.result.ExplainResult;
+import org.dinky.data.result.IResult;
+import org.dinky.explainer.lineage.LineageBuilder;
+import org.dinky.explainer.lineage.LineageResult;
+import org.dinky.explainer.print_table.PrintStatementExplainer;
+import org.dinky.explainer.sqllineage.SQLLineageBuilder;
+import org.dinky.flink.checkpoint.CheckpointRead;
+import org.dinky.function.FunctionFactory;
+import org.dinky.function.data.model.UDF;
+import org.dinky.function.data.model.UDFPath;
+import org.dinky.function.pool.UdfCodePool;
+import org.dinky.function.util.UDFUtil;
+import org.dinky.gateway.Gateway;
+import org.dinky.gateway.config.GatewayConfig;
+import org.dinky.gateway.enums.SavePointType;
+import org.dinky.gateway.result.GatewayResult;
+import org.dinky.gateway.result.SavePointResult;
+import org.dinky.job.Job;
+import org.dinky.job.JobConfig;
+import org.dinky.job.JobManagerHandler;
+import org.dinky.job.JobResult;
+import org.dinky.metadata.config.DriverConfig;
+import org.dinky.parser.SqlType;
+import org.dinky.remote.ServerExecutorService;
+import org.dinky.resource.BaseResourceManager;
+import org.dinky.trans.Operations;
+import org.dinky.utils.FlinkTableMetadataUtil;
+import org.dinky.utils.SqlUtil;
+
+import java.rmi.RemoteException;
+import java.rmi.server.UnicastRemoteObject;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Collectors;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class JobManagerServiceImpl extends UnicastRemoteObject implements ServerExecutorService {
+
+    JobManagerHandler jobManagerHandler;
+    protected static final CheckpointRead INSTANCE = new CheckpointRead();
+
+    public JobManagerServiceImpl() throws RemoteException {}
+
+    @Override
+    public void init(JobConfig config, boolean isPlanMode) throws RemoteException {
+        jobManagerHandler = JobManagerHandler.build(config, isPlanMode);
+    }
+
+    @Override
+    public boolean close() {
+        return jobManagerHandler.close();
+    }
+
+    @Override
+    public ObjectNode getJarStreamGraphJson(String statement) throws RemoteException {
+        return jobManagerHandler.getJarStreamGraphJson(statement);
+    }
+
+    @Override
+    public JobResult executeJarSql(String statement) throws RemoteException {
+        try {
+            return jobManagerHandler.executeJarSql(statement);
+        } catch (Exception ex) {
+            log.error("executeJarSql error", ex);
+            return null;
+        }
+    }
+
+    @Override
+    public JobResult executeSql(String statement) throws RemoteException {
+        try {
+            return jobManagerHandler.executeSql(statement);
+        } catch (Exception ex) {
+            log.error("executeSql error", ex);
+            return null;
+        }
+    }
+
+    @Override
+    public IResult executeDDL(String statement) throws RemoteException {
+        return jobManagerHandler.executeDDL(statement);
+    }
+
+    @Override
+    public ExplainResult explainSql(String statement) throws RemoteException {
+        return jobManagerHandler.explainSql(statement);
+    }
+
+    @Override
+    public ObjectNode getStreamGraph(String statement) throws RemoteException {
+        return jobManagerHandler.getStreamGraph(statement);
+    }
+
+    @Override
+    public String getJobPlanJson(String statement) throws RemoteException {
+        return jobManagerHandler.getJobPlanJson(statement);
+    }
+
+    @Override
+    public boolean cancelNormal(String jobId) throws RemoteException {
+        return jobManagerHandler.cancelNormal(jobId);
+    }
+
+    @Override
+    public SavePointResult savepoint(String jobId, SavePointType savePointType, String savePoint, boolean isUseRestAPI)
+            throws RemoteException {
+        return jobManagerHandler.savepoint(jobId, savePointType, savePoint, isUseRestAPI);
+    }
+
+    @Override
+    public String exportSql(String sql) {
+        return jobManagerHandler.exportSql(sql);
+    }
+
+    @Override
+    public Job getJob() throws RemoteException {
+        return jobManagerHandler.getJob();
+    }
+
+    @Override
+    public void prepare(String statement) throws RemoteException {
+        jobManagerHandler.prepare(statement);
+    }
+
+    // TODO: 2024/3/29 utils, could be split into an individual remote interface
+    @Override
+    public List getPythonUdfList(String udfFile) throws RemoteException {
+        return UDFUtil.getPythonUdfList(udfFile);
+    }
+
+    @Override
+    public JobStatus getJobStatus(GatewayConfig gatewayConfig, String appId) throws RemoteException {
+        Gateway gateway = Gateway.build(gatewayConfig);
+        return gateway.getJobStatusById(appId);
+    }
+
+    @Override
+    public void onJobGatewayFinishCallback(JobConfig jobConfig, String status) throws RemoteException {
+        Gateway.build(jobConfig.getGatewayConfig()).onJobFinishCallback(status);
+    }
+
+    @Override
+    public List getUdfClassNameByJarPath(String path) throws RemoteException {
+        return UDFUtil.getUdfClassNameByJarPath(path);
+    }
+
+    @Override
+    public Map> buildJar(List udfCodes) throws RemoteException {
+        return UDFUtil.buildJar(udfCodes);
+    }
+
+    @Override
+    public void buildRowPermission(ConcurrentHashMap permission) throws RemoteException {
+        RowLevelPermissionsContext.set(permission);
+    }
+
+    @Override
+    public void putFile(String fullName, byte[] context) throws RemoteException {
+        BaseResourceManager.getInstance().putFile(fullName, context);
+    }
+
+    @Override
+    public List getFullDirectoryStructure(int rootId) throws RemoteException {
+        return BaseResourceManager.getInstance().getFullDirectoryStructure(rootId);
+    }
+
+    @Override
+    public void rename(String path, String newPath) throws RemoteException {
+        BaseResourceManager.getInstance().rename(path, newPath);
+    }
+
+    @Override
+    public String getFileContent(String path) throws RemoteException {
+        return BaseResourceManager.getInstance().getFileContent(path);
+    }
+
+    @Override
+    public byte[] readFile(String path) throws RemoteException {
+        return BaseResourceManager.getInstance().readFileContext(path);
+    }
+
+    @Override
+    public void updateGitPool(Map newPool) throws RemoteException {
+        UdfCodePool.updateGitPool(newPool);
+    }
+
+    @Override
+    public UDFPath initUDF(List udfClassList, Integer missionId) throws RemoteException {
+        return FunctionFactory.initUDF(udfClassList, missionId);
+    }
+
+    @Override
+    public LineageResult getColumnLineageByLogicalPlan(String statement) throws RemoteException {
+        return LineageBuilder.getColumnLineageByLogicalPlan(statement);
+    }
+
+    @Override
+    public LineageResult getSqlLineageByOne(String statement, String type) throws RemoteException {
+        return SQLLineageBuilder.getSqlLineageByOne(statement, type);
+    }
+
+    @Override
+    public LineageResult getSqlLineage(String statement, String mysql, DriverConfig> driverConfig)
+            throws RemoteException {
{ + return SQLLineageBuilder.getSqlLineage(statement, mysql, driverConfig); + } + + @Override + public List getCatalog() throws RemoteException { + return FlinkTableMetadataUtil.getCatalog(jobManagerHandler.getExecutor().getCustomTableEnvironment()); + } + + @Override + public void setSchemaInfo(String catalogName, String database, Schema schema, List
+            throws RemoteException {
+        FlinkTableMetadataUtil.setSchemaInfo(
+                jobManagerHandler.getExecutor().getCustomTableEnvironment(), catalogName, database, schema, tables);
+    }
+
+    @Override
+    public List<Column> getColumnList(String catalogName, String database, String tableName) throws RemoteException {
+        return FlinkTableMetadataUtil.getColumnList(
+                jobManagerHandler.getExecutor().getCustomTableEnvironment(), catalogName, database, tableName);
+    }
+
+    @Override
+    public Map<String, Map<String, CheckPointReadTable>> readCheckpoint(String path, String operatorId)
+            throws RemoteException {
+        return INSTANCE.readCheckpoint(path, operatorId);
+    }
+
+    @Override
+    public List<String> getPrintTables(String statement) throws RemoteException {
+        // TODO: 2023/4/7 this function does not support variable SQL, because JobManager and the
+        // executor couple this function with task status and execution.
+        final String[] statements = SqlUtil.getStatements(SqlUtil.removeNote(statement));
+        return Arrays.stream(statements)
+                .filter(t -> SqlType.PRINT.equals(Operations.getOperationType(t)))
+                .flatMap(t -> Arrays.stream(PrintStatementExplainer.splitTableNames(t)))
+                .collect(Collectors.toList());
+    }
+
+    @Override
+    public FlinkClusterInfo testFlinkJobManagerIP(String hosts, String host) throws RemoteException {
+        return FlinkCluster.testFlinkJobManagerIP(hosts, host);
+    }
+
+    @Override
+    public void killCluster(GatewayConfig gatewayConfig) throws RemoteException {
+        Gateway.build(gatewayConfig).killCluster();
+    }
+
+    @Override
+    public GatewayResult deployCluster(GatewayConfig gatewayConfig) throws RemoteException {
+        return Gateway.build(gatewayConfig).deployCluster(UDFUtil.createFlinkUdfPathContextHolder());
+    }
+
+    @Override
+    public void addOrUpdate(UDF udf) throws RemoteException {
+        UdfCodePool.addOrUpdate(udf);
+    }
+
+    @Override
+    public void removeUdfCodePool(String className) throws RemoteException {
+        UdfCodePool.remove(className);
+    }
+
+    @Override
+    public String templateParse(String dialect, String templateCode, String className) throws RemoteException {
+        return UDFUtil.templateParse(dialect, templateCode, className);
+    }
+
+    @Override
+    public void registerPool(List<UDF> collect) throws RemoteException {
+        UdfCodePool.registerPool(collect);
+    }
+
+    @Override
+    public void initResourceManager(SystemConfiguration systemConfiguration) throws RemoteException {
+        BaseResourceManager.initResourceManager(systemConfiguration);
+    }
+
+    @Override
+    public String getPyUDFAttr(String statement) throws RemoteException {
+        return UDFUtil.getPyUDFAttr(statement);
+    }
+
+    @Override
+    public String getScalaFullClassName(String statement) throws RemoteException {
+        return UDFUtil.getScalaFullClassName(statement);
+    }
+
+    @Override
+    public String getLatestJobManageHost(String appId, String oldJobManagerHost, GatewayConfig gatewayConfig)
+            throws RemoteException {
+        Gateway gateway = Gateway.build(gatewayConfig);
+        return gateway.getLatestJobManageHost(appId, oldJobManagerHost);
+    }
+
+    @Override
+    public List<UDF> getCustomStaticUdfs() throws RemoteException {
+        List<UDF> staticUdfs = Operations.getCustomStaticUdfs();
+        return staticUdfs == null ? Collections.emptyList() : staticUdfs;
+    }
+}
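[Editor's note] Every method above follows the same RMI pattern: declare java.rmi.RemoteException and delegate to a local handler. A minimal, self-contained sketch of that contract, using illustrative names (EchoService is not a Dinky class) rather than the actual ServerExecutorService:

import java.rmi.Remote;
import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;

// The remote contract: every method must declare RemoteException.
interface EchoService extends Remote {
    String echo(String message) throws RemoteException;
}

// The implementation delegates to local logic, mirroring how the service
// above forwards each call to jobManagerHandler; extending
// UnicastRemoteObject exports the object to the RMI runtime on construction.
class EchoServiceImpl extends UnicastRemoteObject implements EchoService {
    EchoServiceImpl() throws RemoteException {
        super();
    }

    @Override
    public String echo(String message) throws RemoteException {
        return "echo: " + message;
    }
}

Binding an EchoServiceImpl instance in a registry, as RMIServer does below with the real service, is all that is needed to make echo callable across JVMs.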
diff --git a/dinky-executor-server/src/main/java/org/dinky/RMIServer.java b/dinky-executor-server/src/main/java/org/dinky/RMIServer.java
new file mode 100644
index 0000000000..ffc011e944
--- /dev/null
+++ b/dinky-executor-server/src/main/java/org/dinky/RMIServer.java
@@ -0,0 +1,43 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky;
+
+import org.dinky.remote.ServerExecutorService;
+
+import java.rmi.registry.LocateRegistry;
+import java.rmi.registry.Registry;
+
+public class RMIServer {
+
+    public static void main(String[] args) {
+
+        try {
+            ServerExecutorService jobManagerService = new JobManagerServiceImpl();
+            LocateRegistry.createRegistry(1099);
+            Registry registry = LocateRegistry.getRegistry();
+            registry.bind("Dinky", jobManagerService);
+            System.out.println("Dinky server ready");
+            // If this object should no longer accept remote calls, use the line below:
+            // UnicastRemoteObject.unexportObject(jobManagerService, false);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+}
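[Editor's note] The counterpart to this server is a client that resolves the stub from the same registry. A hedged sketch of such a caller (the host and the example method call are assumptions for illustration, not code from this patch):

import org.dinky.remote.ServerExecutorService;

import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;

public class RMIClientSketch {

    public static void main(String[] args) throws Exception {
        // Connect to the registry RMIServer created on the default RMI port 1099.
        Registry registry = LocateRegistry.getRegistry("localhost", 1099);
        // Look the stub up under the name bound by RMIServer.
        ServerExecutorService service = (ServerExecutorService) registry.lookup("Dinky");
        // Any ServerExecutorService method can now be invoked remotely, e.g.:
        service.prepare("SELECT 1"); // illustrative call only
    }
}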
diff --git a/dinky-function/pom.xml b/dinky-function/pom.xml
index 5d542e360f..f241131abd 100644
--- a/dinky-function/pom.xml
+++ b/dinky-function/pom.xml
@@ -31,6 +31,10 @@
     <name>Dinky : Function</name>

+        <dependency>
+            <groupId>org.dinky</groupId>
+            <artifactId>dinky-common</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.freemarker</groupId>
             <artifactId>freemarker</artifactId>
diff --git a/dinky-function/src/main/java/org/dinky/function/compiler/JavaCompiler.java b/dinky-function/src/main/java/org/dinky/function/compiler/JavaCompiler.java
index 0544badcb2..f12a092a9b 100644
--- a/dinky-function/src/main/java/org/dinky/function/compiler/JavaCompiler.java
+++ b/dinky-function/src/main/java/org/dinky/function/compiler/JavaCompiler.java
@@ -37,7 +37,6 @@ public class JavaCompiler implements FunctionCompiler {
      * Dynamic online compilation of function code.
      *
      * @param udf udf
-     * @param conf flink-conf
      * @param missionId mission id
      * @return whether compilation succeeded
      */
diff --git a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java
index 9c2decc98a..0a93262493 100644
--- a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java
+++ b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java
@@ -375,35 +375,6 @@ public static FlinkUdfPathContextHolder createFlinkUdfPathContextHolder() {
         return udfPathContextHolder;
     }

-    public static List<Class<?>> getUdfClassByJar(File jarPath) {
-        Assert.notNull(jarPath);
-
-        List<Class<?>> classList = new ArrayList<>();
-        try (JarClassLoader loader = new JarClassLoader()) {
-            loader.addJar(jarPath);
-
-            ClassScanner classScanner =
-                    new ClassScanner("", aClass -> ClassUtil.isAssignable(UserDefinedFunction.class, aClass));
-            classScanner.setClassLoader(loader);
-            ReflectUtil.invoke(classScanner, "scanJar", new JarFile(jarPath));
-            Set<Class<?>> classes =
-                    (Set<Class<?>>) ReflectUtil.getFieldValue(classScanner, "classes");
-            for (Class<?> aClass : classes) {
-                try {
-                    UserDefinedFunctionHelper.validateClass(aClass);
-                    classList.add(aClass);
-                } catch (Exception ex) {
-                    throw new DinkyException();
-                }
-            }
-        } catch (ValidationException e) {
-            throw e;
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-        return classList;
-    }
-
     public static List<String> getPythonUdfList(String udfFile) {
         return getPythonUdfList(SystemConfiguration.getInstances().getPythonHome(), udfFile);
     }
@@ -473,4 +444,38 @@ public static void writeManifest(
                 JSONUtil.toJsonStr(flinkUdfManifest),
                 PathConstant.getUdfPackagePath(taskId) + PathConstant.DEP_MANIFEST);
     }
+
+    public static List<String> getUdfClassNameByJarPath(String path) {
+        List<Class<?>> clazz = getUdfClassByJar(new File(path));
+        return clazz.stream().map(Class::getName).collect(Collectors.toList());
+    }
+
+    public static List<Class<?>> getUdfClassByJar(File jarPath) {
+        Assert.notNull(jarPath);
+
+        List<Class<?>> classList = new ArrayList<>();
+        try (JarClassLoader loader = new JarClassLoader()) {
+            loader.addJar(jarPath);
+
+            ClassScanner classScanner =
+                    new ClassScanner("", aClass -> ClassUtil.isAssignable(UserDefinedFunction.class, aClass));
+            classScanner.setClassLoader(loader);
+            ReflectUtil.invoke(classScanner, "scanJar", new JarFile(jarPath));
+            Set<Class<?>> classes =
+                    (Set<Class<?>>) ReflectUtil.getFieldValue(classScanner, "classes");
+            for (Class<?> aClass : classes) {
+                try {
+                    UserDefinedFunctionHelper.validateClass(aClass);
+                    classList.add(aClass);
+                } catch (Exception ex) {
+                    throw new DinkyException();
+                }
+            }
+        } catch (ValidationException e) {
+            throw e;
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        return classList;
+    }
 }
diff --git a/pom.xml b/pom.xml
index 0618a8d974..b3e05dbbcf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,6 +46,7 @@
         <module>dinky-app</module>
         <module>dinky-admin</module>
         <module>dinky-assembly</module>
+        <module>dinky-executor-server</module>
@@ -643,6 +644,11 @@
                 <artifactId>dinky-cdc-plus</artifactId>
                 <version>${project.version}</version>
             </dependency>
+            <dependency>
+                <groupId>org.dinky</groupId>
+                <artifactId>dinky-executor-server</artifactId>
+                <version>${project.version}</version>
+            </dependency>
             <dependency>
                 <groupId>org.apache.httpcomponents</groupId>
                 <artifactId>httpclient</artifactId>
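[Editor's note] Because dinky-executor-server is now registered under the root pom's dependencyManagement with ${project.version}, sibling modules can depend on it without pinning a version. A sketch of what a consuming module's pom.xml could declare (which module actually consumes it is not shown in this patch):

<!-- In a sibling module's pom.xml; the version is inherited from the
     root dependencyManagement entry added above. -->
<dependency>
    <groupId>org.dinky</groupId>
    <artifactId>dinky-executor-server</artifactId>
</dependency>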