Commit 47268b9
1. Run savepoints concurrently on multiple threads. 2. Fix the checkpoint feature having no effect in the catalog version. 3. Improve the SQL pre-validation feature. 4. Refactor and optimize parts of the code. 5. Improve the network check feature.

zhp8341 committed Mar 28, 2021
1 parent 38d1027 commit 47268b9
Showing 31 changed files with 520 additions and 249 deletions.

SystemConstant.java

@@ -14,13 +14,18 @@ public class SystemConstant {
 
     public final static String SEMICOLON = ";";
 
-    public final static String LINE_FEED= "\n";
+    public final static String LINE_FEED = "\n";
 
-    public final static String SPACE= "";
+    public final static String SPACE = "";
 
     public static final int DEFAULT_PATTERN_FLAGS = Pattern.CASE_INSENSITIVE | Pattern.DOTALL;
 
 
     public final static String JARVERSION = "lib/flink-streaming-core-1.2.0.RELEASE.jar";
 
 
+    public static final String QUERY_JOBID_KEY_WORD = "job-submitted-success:";
+
+    public static final String QUERY_JOBID_KEY_WORD_BACKUP = "Job has been submitted with JobID";
+
 }
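
The two new constants are markers that the web module can scan for in the captured client output to recover the JobID. A minimal sketch of such a scan, assuming the output is available line by line (the class and method names here are hypothetical, not part of the commit):

    import java.util.List;

    public class JobIdExtractor {

        // Scans captured client output for either marker and returns the JobID, or null.
        public static String extractJobId(List<String> logLines) {
            String primary = "job-submitted-success:";            // QUERY_JOBID_KEY_WORD
            String backup = "Job has been submitted with JobID";  // QUERY_JOBID_KEY_WORD_BACKUP
            for (String line : logLines) {
                int i = line.indexOf(primary);
                if (i >= 0) {
                    return line.substring(i + primary.length()).trim();
                }
                i = line.indexOf(backup);
                if (i >= 0) {
                    return line.substring(i + backup.length()).trim();
                }
            }
            return null;
        }
    }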

SqlCommand.java

@@ -70,9 +70,9 @@ public enum SqlCommand {
         "(ALTER\\s+FUNCTION.*)",
         (operands) -> Optional.of(new String[]{operands[0]})),
 
-    // SELECT(
-    //     "(WITH.*SELECT.*|SELECT.*)",
-    //     (operands) -> Optional.of(new String[]{operands[0]})),
+    SELECT(
+        "(WITH.*SELECT.*|SELECT.*)",
+        (operands) -> Optional.of(new String[]{operands[0]})),
 
     SHOW_CATALOGS(
         "SHOW\\s+CATALOGS",

@@ -117,7 +117,6 @@ public enum SqlCommand {
     public final Function<String[], Optional<String[]>> operandConverter;
 
 
-
     SqlCommand(String matchingRegex, Function<String[], Optional<String[]>> operandConverter) {
         this.pattern = Pattern.compile(matchingRegex, SystemConstant.DEFAULT_PATTERN_FLAGS);
         this.operandConverter = operandConverter;
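
To see what re-enabling the SELECT entry changes, the pattern can be exercised directly. This sketch compiles the same regex with the same flags as SystemConstant.DEFAULT_PATTERN_FLAGS; the sample statements are illustrative:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class SelectPatternDemo {
        public static void main(String[] args) {
            int flags = Pattern.CASE_INSENSITIVE | Pattern.DOTALL; // DEFAULT_PATTERN_FLAGS
            Pattern select = Pattern.compile("(WITH.*SELECT.*|SELECT.*)", flags);

            Matcher m = select.matcher("select user_id, count(*) from orders group by user_id");
            System.out.println(m.matches()); // true: the whole statement becomes operands[0]
            System.out.println(select.matcher("WITH t AS (SELECT 1) SELECT * FROM t").matches()); // true
        }
    }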

SqlFileParser.java

@@ -38,7 +38,7 @@ public static List<SqlCommandCall> fileToSql(List<String> lineList) {
                 trimStart(line).startsWith(SystemConstant.COMMENT_SYMBOL)) {
                 continue;
             }
-            stmt.append("\n").append(line);
+            stmt.append(SystemConstant.LINE_FEED).append(line);
             if (line.trim().endsWith(SystemConstant.SEMICOLON)) {
                 Optional<SqlCommandCall> optionalCall = parse(stmt.toString());
                 if (optionalCall.isPresent()) {
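
The surrounding loop accumulates lines into one statement until it sees a trailing semicolon; the change only swaps the hard-coded "\n" literal for the shared constant. A condensed, self-contained sketch of the loop's behavior, assuming "--" comments (SystemConstant.COMMENT_SYMBOL is not shown in this diff):

    import java.util.ArrayList;
    import java.util.List;

    public class SqlSplitSketch {
        public static List<String> split(List<String> lines) {
            List<String> statements = new ArrayList<>();
            StringBuilder stmt = new StringBuilder();
            for (String line : lines) {
                String trimmed = line.trim();
                if (trimmed.isEmpty() || trimmed.startsWith("--")) {
                    continue; // skip blanks and comment lines
                }
                stmt.append("\n").append(line); // SystemConstant.LINE_FEED in the real code
                if (trimmed.endsWith(";")) {
                    statements.add(stmt.toString().trim());
                    stmt.setLength(0); // start accumulating the next statement
                }
            }
            return statements;
        }
    }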

com.flink.streaming.core (main class)

@@ -1,11 +1,14 @@
 package com.flink.streaming.core;
 
 
+import com.flink.streaming.common.constant.SystemConstant;
 import com.flink.streaming.common.model.SqlCommandCall;
 import com.flink.streaming.common.sql.SqlFileParser;
 import com.flink.streaming.core.checkpoint.CheckPointParams;
+import com.flink.streaming.core.checkpoint.FsCheckPoint;
 import com.flink.streaming.core.execute.ExecuteSql;
 import com.flink.streaming.core.model.JobRunParam;
+import org.apache.flink.api.common.JobID;
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.calcite.shaded.com.google.common.base.Preconditions;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

@@ -49,6 +52,9 @@ public static void main(String[] args) {
 
         TableEnvironment tEnv = StreamTableEnvironment.create(env, settings);
 
+        // set up checkpointing
+        FsCheckPoint.setCheckpoint(env, jobRunParam.getCheckPointParam());
+
         List<String> sql = Files.readAllLines(Paths.get(jobRunParam.getSqlPath()));
 
         List<SqlCommandCall> sqlCommandCallList = SqlFileParser.fileToSql(sql);

@@ -57,13 +63,18 @@ public static void main(String[] args) {
 
         ExecuteSql.exeSql(sqlCommandCallList, tEnv, statementSet);
 
-
-
         TableResult tableResult = statementSet.execute();
         if (tableResult == null || tableResult.getJobClient().get() == null ||
                 tableResult.getJobClient().get().getJobID() == null) {
             throw new RuntimeException("Job run failed: no JobID was obtained");
         }
-        System.out.println("Job submitted successfully, jobId=" + tableResult.getJobClient().get().getJobID());
-        log.info("Job submitted successfully, jobId={}", tableResult.getJobClient().get().getJobID());
+        JobID jobID = tableResult.getJobClient().get().getJobID();
+
+        System.out.println(SystemConstant.QUERY_JOBID_KEY_WORD + jobID);
+
+        log.info(SystemConstant.QUERY_JOBID_KEY_WORD + "{}", jobID);
+
     } catch (Exception e) {
         System.err.println("Job execution failed: " + e.getMessage());
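
FsCheckPoint.setCheckpoint itself is not part of the shown diff; the fix is that it now runs before the SQL is executed. A plausible reading of what such a helper configures, using standard Flink 1.12 APIs (the interval, backend path, and retention policy here are illustrative assumptions, not the commit's actual values):

    import org.apache.flink.runtime.state.filesystem.FsStateBackend;
    import org.apache.flink.streaming.api.CheckpointingMode;
    import org.apache.flink.streaming.api.environment.CheckpointConfig;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class CheckpointSketch {
        public static void setCheckpoint(StreamExecutionEnvironment env) {
            env.enableCheckpointing(60_000); // interval, e.g. taken from CheckPointParam
            env.setStateBackend(new FsStateBackend("hdfs:///flink/checkpoints")); // assumed path
            CheckpointConfig conf = env.getCheckpointConfig();
            conf.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
            conf.enableExternalizedCheckpoints(
                    CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        }
    }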

ExecuteSql.java

@@ -38,6 +38,7 @@ public static void exeSql(List<SqlCommandCall> sqlCommandCallList, TableEnvironment tEnv, StatementSet statementSet) {
                     statementSet.addInsertSql(sqlCommandCall.operands[0]);
                     break;
                 // statements that display their result
+                case SELECT:
                 case SHOW_CATALOGS:
                 case SHOW_DATABASES:
                 case SHOW_MODULES:
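
Routing SELECT through the display branch keeps it out of the StatementSet, which only accepts INSERT statements. For contrast, a minimal sketch of the INSERT path's Flink API, which batches statements so they run as a single job (table definitions here are illustrative):

    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.StatementSet;
    import org.apache.flink.table.api.TableEnvironment;

    public class StatementSetSketch {
        public static void main(String[] args) {
            TableEnvironment tEnv = TableEnvironment.create(
                    EnvironmentSettings.newInstance().useBlinkPlanner().build());
            tEnv.executeSql("CREATE TABLE src (id INT) WITH ('connector' = 'datagen')");
            tEnv.executeSql("CREATE TABLE sink1 (id INT) WITH ('connector' = 'blackhole')");
            tEnv.executeSql("CREATE TABLE sink2 (id INT) WITH ('connector' = 'blackhole')");

            StatementSet set = tEnv.createStatementSet();
            // Each INSERT is buffered; nothing runs until execute().
            set.addInsertSql("INSERT INTO sink1 SELECT * FROM src");
            set.addInsertSql("INSERT INTO sink2 SELECT * FROM src");
            set.execute(); // submits one job containing both INSERTs
        }
    }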

LogPrint.java

@@ -1,5 +1,6 @@
 package com.flink.streaming.core.logs;
 
+import com.flink.streaming.common.enums.SqlCommand;
 import com.flink.streaming.common.model.SqlCommandCall;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.table.api.TableEnvironment;

@@ -43,7 +44,12 @@ public static void queryRestPrint(TableEnvironment tEnv, SqlCommandCall sqlCommandCall) {
         }
         LogPrint.logPrint(sqlCommandCall);
 
-        tEnv.executeSql(sqlCommandCall.operands[0]).print();
+
+        if (sqlCommandCall.getSqlCommand().name().equalsIgnoreCase(SqlCommand.SELECT.name())) {
+            throw new RuntimeException("the SELECT syntax is not supported yet");
+        } else {
+            tEnv.executeSql(sqlCommandCall.operands[0]).print();
+        }
 
         // if (sqlCommandCall.getSqlCommand().name().equalsIgnoreCase(SqlCommand.SELECT.name())) {
         //     Iterator<Row> it = tEnv.executeSql(sqlCommandCall.operands[0]).collect();

@@ -53,4 +59,5 @@ public static void queryRestPrint(TableEnvironment tEnv, SqlCommandCall sqlCommandCall) {
         //     }
         // }
     }
+
 }
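
The commented-out block hints at the alternative being weighed: pulling rows with collect() instead of calling the blocking print(). A sketch of that approach with an explicit row cap (the cap of 20 is an assumption):

    import org.apache.flink.table.api.TableEnvironment;
    import org.apache.flink.types.Row;
    import org.apache.flink.util.CloseableIterator;

    public class SelectPreviewSketch {
        public static void preview(TableEnvironment tEnv, String query) throws Exception {
            // collect() returns a lazy iterator over the result stream.
            try (CloseableIterator<Row> it = tEnv.executeSql(query).collect()) {
                int shown = 0;
                while (it.hasNext() && shown++ < 20) {
                    System.out.println(it.next()); // print at most 20 rows
                }
            } // closing the iterator cancels the underlying job
        }
    }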

SqlValidation.java

@@ -4,38 +4,35 @@
 import com.flink.streaming.common.model.SqlCommandCall;
 import com.flink.streaming.common.sql.SqlFileParser;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.calcite.config.Lex;
+import org.apache.calcite.sql.parser.SqlParser;
+import org.apache.calcite.sql.validate.SqlConformance;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.configuration.Configuration;
+import org.apache.flink.sql.parser.validate.FlinkSqlConformance;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.EnvironmentSettings;
-import org.apache.flink.table.api.SqlDialect;
-import org.apache.flink.table.api.StatementSet;
-import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.*;
 import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
 import org.apache.flink.table.api.config.TableConfigOptions;
+import org.apache.flink.table.planner.calcite.CalciteConfig;
+import org.apache.flink.table.planner.calcite.CalciteParser;
+import org.apache.flink.table.planner.delegation.FlinkSqlParserFactories;
+import org.apache.flink.table.planner.utils.JavaScalaConversionUtil;
+import org.apache.flink.table.planner.utils.TableConfigUtils;
 
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
 /**
  * @author zhuhuipei
  * @Description:
  * @date 2021/1/17
  * @time 10:51
  */
 @Slf4j
 public class SqlValidation {
 
     // TODO: no good solution has been found for this yet
 
     /**
      * SQL validation
      *
      * @author zhuhuipei
-     * @date 2021/1/21
-     * @time 22:24
+     * @date 2021/3/27
+     * @time 10:10
      */
     public static void preCheckSql(List<String> sql) {

@@ -49,9 +46,7 @@
         TableEnvironment tEnv = StreamTableEnvironment.create(env, settings);
 
         List<SqlCommandCall> sqlCommandCallList = SqlFileParser.fileToSql(sql);
-
-        StatementSet statementSet = tEnv.createStatementSet();
-
+        TableConfig config = tEnv.getConfig();
         String value = null;
 
         try {

@@ -60,46 +55,65 @@
                 value = sqlCommandCall.operands[0];
 
                 switch (sqlCommandCall.sqlCommand) {
+                    case USE_CATALOG:
+                    case CREATE_CATALOG:
+                        throw new RuntimeException("CATALOG-related syntax cannot be validated yet (please do not use the SQL pre-check button for it)");
                     // configuration
                     case SET:
                         String key = sqlCommandCall.operands[0];
                         String val = sqlCommandCall.operands[1];
+                        if (val.contains(SystemConstant.LINE_FEED)) {
+                            throw new RuntimeException("invalid value in SET statement: " + val);
+                        }
                         if (TableConfigOptions.TABLE_SQL_DIALECT.key().equalsIgnoreCase(key.trim())
                                 && SqlDialect.HIVE.name().equalsIgnoreCase(val.trim())) {
-                            throw new RuntimeException("Hive-related syntax cannot be validated yet (please do not use the SQL pre-check button for it)");
+                            config.setSqlDialect(SqlDialect.HIVE);
+                        } else {
+                            config.setSqlDialect(SqlDialect.DEFAULT);
+                        }
                         Configuration configuration = tEnv.getConfig().getConfiguration();
                         configuration.setString(key, val);
                         break;
                     // INSERT statements
                     case INSERT_INTO:
                     case INSERT_OVERWRITE:
-                        statementSet.addInsertSql(sqlCommandCall.operands[0]);
-
                         break;
                     // everything else
                     default:
-                        tEnv.executeSql(sqlCommandCall.operands[0]);
+                        CalciteParser parser = new CalciteParser(getSqlParserConfig(config));
+                        parser.parse(sqlCommandCall.operands[0]);
                         break;
                 }
             }
         } catch (Exception e) {
-            log.warn("syntax error: {}, cause: {}", value, e);
-            throw new RuntimeException("syntax error " + value + ", cause: " + e.getMessage());
+            log.warn("syntax error: sql={}, cause: {}", value, e);
+            throw new RuntimeException("syntax error, sql=" + value + ", cause: " + e.getMessage());
         }
 
     }
 
+    private static SqlParser.Config getSqlParserConfig(TableConfig tableConfig) {
+        return JavaScalaConversionUtil.toJava(getCalciteConfig(tableConfig).getSqlParserConfig()).orElseGet(
+                () -> {
+                    SqlConformance conformance = getSqlConformance(tableConfig.getSqlDialect());
+                    return SqlParser
+                            .config()
+                            .withParserFactory(FlinkSqlParserFactories.create(conformance))
+                            .withConformance(conformance)
+                            .withLex(Lex.JAVA)
+                            .withIdentifierMaxLength(256);
+                }
+        );
+    }
+
+    private static CalciteConfig getCalciteConfig(TableConfig tableConfig) {
+        return TableConfigUtils.getCalciteConfig(tableConfig);
+    }
+
+    private static FlinkSqlConformance getSqlConformance(SqlDialect sqlDialect) {
+        switch (sqlDialect) {
+            case HIVE:
+                return FlinkSqlConformance.HIVE;
+            case DEFAULT:
+                return FlinkSqlConformance.DEFAULT;
+            default:
+                throw new TableException("Unsupported SQL dialect: " + sqlDialect);
+        }
+    }
+
     /**
      * convert a string into a list of SQL statements
      *
      * @author zhuhuipei
     * @date 2021/1/22
      * @time 22:45
      */
     public static List<String> toSqlList(String sql) {
         if (StringUtils.isEmpty(sql)) {
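
With the switch from executing statements against a TableEnvironment to parsing them with Calcite, a malformed statement now fails fast at parse time. A hypothetical caller (the table definition and the deliberate stray comma are illustrative):

    import java.util.Arrays;
    import java.util.List;

    public class PreCheckDemo {
        public static void main(String[] args) {
            List<String> lines = Arrays.asList(
                    "SET table.sql-dialect=default;",
                    // deliberate syntax error: stray comma before the closing parenthesis
                    "CREATE TABLE t (id INT,) WITH ('connector' = 'datagen');");
            try {
                SqlValidation.preCheckSql(lines);
            } catch (RuntimeException e) {
                System.err.println(e.getMessage()); // "syntax error, sql=... cause: ..."
            }
        }
    }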

CommandAdapter.java

@@ -1,5 +1,7 @@
 package com.flink.streaming.web.adapter;
 
+import com.flink.streaming.web.enums.DeployModeEnum;
+
 /**
  * @author zhuhuipei
  * @Description:

@@ -8,24 +10,16 @@
  */
 public interface CommandAdapter {
 
-    /**
-     * start a job in yarn-per mode
-     *
-     * @author zhuhuipei
-     * @date 2020-09-18
-     * @time 20:18
-     */
-    void startForPerYarn(String command, StringBuilder localLog, Long jobRunLogId) throws Exception;
-
-
     /**
-     * start in local mode
+     * start the service
      *
      * @author zhuhuipei
-     * @date 2020/11/1
-     * @time 10:15
+     * @date 2021/3/26
+     * @time 17:31
      */
-    String startForLocal(String command, StringBuilder localLog, Long jobRunLogId) throws Exception;
+    String submitJob(String command, StringBuilder localLog, Long jobRunLogId, DeployModeEnum deployModeEnum) throws Exception;
 
 
     /**
      * run a savepoint in yarn-per mode
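
Merging startForPerYarn and startForLocal into a single submitJob means one code path runs the CLI command for every deploy mode, with the mode passed as a parameter. A self-contained sketch of the submit-and-scan flow (the class is hypothetical, ignores the project-specific types, and only the marker string comes from this commit):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;

    public class SubmitSketch {

        static final String MARKER = "job-submitted-success:"; // QUERY_JOBID_KEY_WORD

        // Runs the flink CLI command, mirrors its output into localLog, and returns
        // the JobID if the output contains the success marker.
        public static String submitJob(String command, StringBuilder localLog) throws Exception {
            Process process = Runtime.getRuntime().exec(command);
            String jobId = null;
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(process.getInputStream()))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    localLog.append(line).append('\n');
                    int i = line.indexOf(MARKER);
                    if (i >= 0) {
                        jobId = line.substring(i + MARKER.length()).trim();
                    }
                }
            }
            process.waitFor();
            return jobId;
        }
    }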
(The remaining 23 changed files are not shown.)
