Skip to content

Commit f3801df

Browse files
author
dapeng
committed
Merge branch '1.8_release_3.10.x' into feat_1.8_redisSink
2 parents eee1867 + e0a1043 commit f3801df

File tree

81 files changed

+3015
-194
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

81 files changed

+3015
-194
lines changed

ci/sonar_notify.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
#!/bin/bash
22
#参考钉钉文档 https://open-doc.dingtalk.com/microapp/serverapi2/qf2nxq
33
sonarreport=$(curl -s http://172.16.100.198:8082/?projectname=dt-insight-engine/flinkStreamSQL)
4-
curl -s "https://oapi.dingtalk.com/robot/send?access_token=71555061297a53d3ac922a6f4d94285d8e23bccdca0c00b4dc6df0a2d49da724" \
4+
curl -s "https://oapi.dingtalk.com/robot/send?access_token=58fd731d8bed3b17708d3aa27e49a7e2c41c7e6545f6c4be3170963a7bba7e2a" \
55
-H "Content-Type: application/json" \
66
-d "{
77
\"msgtype\": \"markdown\",

clickhouse/clickhouse-side/clickhouse-async-side/src/main/java/com/dtstack/flink/sql/side/clickhouse/ClickhouseAsyncReqRow.java

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,8 +67,6 @@ public void open(Configuration parameters) throws Exception {
6767
vo.setFileResolverCachingEnabled(false);
6868
Vertx vertx = Vertx.vertx(vo);
6969
setRdbSqlClient(JDBCClient.createNonShared(vertx, clickhouseClientConfig));
70-
setExecutor(new ThreadPoolExecutor(50, 50, 0, TimeUnit.MILLISECONDS,
71-
new LinkedBlockingQueue<>(10000), new DTThreadFactory("clickhouseAsyncExec"), new ThreadPoolExecutor.CallerRunsPolicy()));
7270
}
7371

7472
}

core/src/main/java/com/dtstack/flink/sql/exec/ExecuteProcessHelper.java

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -292,9 +292,7 @@ public static Set<URL> registerTable(SqlTree sqlTree, StreamExecutionEnvironment
292292

293293
RowTypeInfo typeInfo = new RowTypeInfo(adaptTable.getSchema().getFieldTypes(), adaptTable.getSchema().getFieldNames());
294294
DataStream adaptStream = tableEnv.toRetractStream(adaptTable, typeInfo)
295-
.map((Tuple2<Boolean, Row> f0) -> {
296-
return f0.f1;
297-
})
295+
.map((Tuple2<Boolean, Row> f0) -> f0.f1)
298296
.returns(typeInfo);
299297

300298
String fields = String.join(",", typeInfo.getFieldNames());

core/src/main/java/com/dtstack/flink/sql/option/OptionParser.java

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
import org.apache.commons.lang.StringUtils;
2727
import java.lang.reflect.InvocationTargetException;
2828
import java.lang.reflect.Field;
29+
import java.nio.charset.StandardCharsets;
2930
import java.util.List;
3031
import java.util.Map;
3132
import java.io.File;
@@ -102,8 +103,8 @@ public List<String> getProgramExeArgList() throws Exception {
102103
continue;
103104
} else if (OPTION_SQL.equalsIgnoreCase(key)) {
104105
File file = new File(value.toString());
105-
String content = FileUtils.readFile(file, "UTF-8");
106-
value = URLEncoder.encode(content, Charsets.UTF_8.name());
106+
String content = FileUtils.readFile(file, StandardCharsets.UTF_8.name());
107+
value = URLEncoder.encode(content, StandardCharsets.UTF_8.name());
107108
}
108109
args.add("-" + key);
109110
args.add(value.toString());

core/src/main/java/com/dtstack/flink/sql/option/Options.java

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,10 @@ public class Options {
7272
@OptionRequired(description = "log level")
7373
private String logLevel = "info";
7474

75+
@OptionRequired(description = "file add to ship file")
76+
private String addShipfile;
77+
78+
7579
public String getMode() {
7680
return mode;
7781
}
@@ -183,4 +187,13 @@ public String getLogLevel() {
183187
public void setLogLevel(String logLevel) {
184188
this.logLevel = logLevel;
185189
}
190+
191+
public String getAddShipfile() {
192+
return addShipfile;
193+
}
194+
195+
public void setAddShipfile(String addShipfile) {
196+
this.addShipfile = addShipfile;
197+
}
198+
186199
}

core/src/main/java/com/dtstack/flink/sql/parser/CreateTableParser.java

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,8 @@ public class CreateTableParser implements IParser {
4141

4242
private static final Pattern PATTERN = Pattern.compile(PATTERN_STR);
4343

44+
private static final Pattern PROP_PATTERN = Pattern.compile("^'\\s*(.+)\\s*'$");
45+
4446
public static CreateTableParser newInstance(){
4547
return new CreateTableParser();
4648
}
@@ -69,18 +71,27 @@ public void parseSql(String sql, SqlTree sqlTree) {
6971
}
7072

7173
private Map parseProp(String propsStr){
72-
String[] strs = propsStr.trim().split("'\\s*,");
74+
propsStr = propsStr.replaceAll("'\\s*,", "'|");
75+
String[] strs = propsStr.trim().split("\\|");
7376
Map<String, Object> propMap = Maps.newHashMap();
7477
for(int i=0; i<strs.length; i++){
7578
List<String> ss = DtStringUtil.splitIgnoreQuota(strs[i], '=');
7679
String key = ss.get(0).trim();
77-
String value = ss.get(1).trim().replaceAll("'", "").trim();
80+
String value = extractValue(ss.get(1).trim());
7881
propMap.put(key, value);
7982
}
8083

8184
return propMap;
8285
}
8386

87+
private String extractValue(String value) {
88+
Matcher matcher = PROP_PATTERN.matcher(value);
89+
if (matcher.find()) {
90+
return matcher.group(1);
91+
}
92+
throw new RuntimeException("[" + value + "] format is invalid");
93+
}
94+
8495
public static class SqlParserResult{
8596

8697
private String tableName;

core/src/main/java/com/dtstack/flink/sql/parser/InsertSqlParser.java

Lines changed: 57 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -16,21 +16,21 @@
1616
* limitations under the License.
1717
*/
1818

19-
2019

2120
package com.dtstack.flink.sql.parser;
2221

23-
import org.apache.calcite.config.Lex;
24-
import org.apache.calcite.sql.SqlBasicCall;
2522
import org.apache.calcite.sql.SqlInsert;
2623
import org.apache.calcite.sql.SqlJoin;
2724
import org.apache.calcite.sql.SqlKind;
28-
import org.apache.calcite.sql.SqlMatchRecognize;
2925
import org.apache.calcite.sql.SqlNode;
30-
import org.apache.calcite.sql.SqlOrderBy;
3126
import org.apache.calcite.sql.SqlSelect;
32-
import org.apache.calcite.sql.parser.SqlParseException;
33-
import org.apache.calcite.sql.parser.SqlParser;
27+
import org.apache.calcite.sql.SqlNodeList;
28+
import org.apache.calcite.sql.SqlBasicCall;
29+
import org.apache.calcite.sql.SqlMatchRecognize;
30+
import org.apache.calcite.sql.SqlOrderBy;
31+
import org.apache.calcite.sql.SqlIdentifier;
32+
import org.apache.calcite.sql.SqlAsOperator;
33+
import org.apache.calcite.sql.parser.SqlParserPos;
3434
import org.apache.commons.lang3.StringUtils;
3535
import com.google.common.collect.Lists;
3636
import org.apache.flink.table.calcite.FlinkPlannerImpl;
@@ -49,6 +49,9 @@
4949

5050
public class InsertSqlParser implements IParser {
5151

52+
// 用来标识当前解析节点的上一层节点是否为 insert 节点
53+
private static Boolean parentIsInsert = false;
54+
5255
@Override
5356
public boolean verify(String sql) {
5457
return StringUtils.isNotBlank(sql) && sql.trim().toLowerCase().startsWith("insert");
@@ -78,13 +81,19 @@ private static void parseNode(SqlNode sqlNode, SqlParseResult sqlParseResult){
7881
SqlNode sqlTarget = ((SqlInsert)sqlNode).getTargetTable();
7982
SqlNode sqlSource = ((SqlInsert)sqlNode).getSource();
8083
sqlParseResult.addTargetTable(sqlTarget.toString());
84+
parentIsInsert = true;
8185
parseNode(sqlSource, sqlParseResult);
8286
break;
8387
case SELECT:
84-
SqlNode sqlFrom = ((SqlSelect)sqlNode).getFrom();
85-
if(sqlFrom.getKind() == IDENTIFIER){
88+
SqlSelect sqlSelect = (SqlSelect) sqlNode;
89+
if (parentIsInsert) {
90+
rebuildSelectNode(sqlSelect.getSelectList(), sqlSelect);
91+
}
92+
SqlNode sqlFrom = ((SqlSelect) sqlNode).getFrom();
93+
if (sqlFrom.getKind() == IDENTIFIER) {
8694
sqlParseResult.addSourceTable(sqlFrom.toString());
87-
}else{
95+
} else {
96+
parentIsInsert = false;
8897
parseNode(sqlFrom, sqlParseResult);
8998
}
9099
break;
@@ -141,6 +150,44 @@ private static void parseNode(SqlNode sqlNode, SqlParseResult sqlParseResult){
141150
}
142151
}
143152

153+
/**
154+
* 将第一层 select 中的 sqlNode 转化为 AsNode,解决字段名冲突问题
155+
* 仅对 table.xx 这种类型的字段进行替换
156+
* @param selectList select Node 的 select 字段
157+
* @param sqlSelect 第一层解析出来的 selectNode
158+
*/
159+
private static void rebuildSelectNode(SqlNodeList selectList, SqlSelect sqlSelect) {
160+
SqlNodeList sqlNodes = new SqlNodeList(selectList.getParserPosition());
161+
162+
for (int index = 0; index < selectList.size(); index++) {
163+
if (selectList.get(index).getKind().equals(SqlKind.AS)
164+
|| ((SqlIdentifier) selectList.get(index)).names.size() == 1) {
165+
sqlNodes.add(selectList.get(index));
166+
continue;
167+
}
168+
sqlNodes.add(transformToAsNode(selectList.get(index)));
169+
}
170+
sqlSelect.setSelectList(sqlNodes);
171+
}
172+
173+
/**
174+
* 将 sqlNode 转化为 AsNode
175+
* @param sqlNode 需要转化的 sqlNode
176+
* @return 重新构造的 AsNode
177+
*/
178+
public static SqlBasicCall transformToAsNode(SqlNode sqlNode) {
179+
String asName = "";
180+
SqlParserPos pos = new SqlParserPos(sqlNode.getParserPosition().getLineNum(),
181+
sqlNode.getParserPosition().getEndColumnNum());
182+
if (sqlNode.getKind().equals(SqlKind.IDENTIFIER)) {
183+
asName = ((SqlIdentifier) sqlNode).names.get(1);
184+
}
185+
SqlNode[] operands = new SqlNode[2];
186+
operands[0] = sqlNode;
187+
operands[1] = new SqlIdentifier(asName, null, pos);
188+
return new SqlBasicCall(new SqlAsOperator(), operands, pos);
189+
}
190+
144191
public static class SqlParseResult {
145192

146193
private List<String> sourceTableList = Lists.newArrayList();

core/src/main/java/com/dtstack/flink/sql/parser/SqlParser.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ public static SqlTree parseSql(String sql) throws Exception {
6868
throw new RuntimeException("need to set local sql plugin root");
6969
}
7070

71-
sql = sql.replaceAll("--.*", "")
71+
sql = DtStringUtil.dealSqlComment(sql)
7272
.replaceAll("\r\n", " ")
7373
.replaceAll("\n", " ")
7474
.replace("\t", " ").trim();

core/src/main/java/com/dtstack/flink/sql/side/BaseAsyncReqRow.java

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222

2323
import com.dtstack.flink.sql.enums.ECacheContentType;
2424
import com.dtstack.flink.sql.enums.ECacheType;
25+
import com.dtstack.flink.sql.factory.DTThreadFactory;
2526
import com.dtstack.flink.sql.metric.MetricConstant;
2627
import com.dtstack.flink.sql.side.cache.AbstractSideCache;
2728
import com.dtstack.flink.sql.side.cache.CacheObj;
@@ -68,6 +69,7 @@ public abstract class BaseAsyncReqRow extends RichAsyncFunction<CRow, CRow> impl
6869
private int timeOutNum = 0;
6970
protected BaseSideInfo sideInfo;
7071
protected transient Counter parseErrorRecords;
72+
private transient ThreadPoolExecutor cancelExecutor;
7173

7274
public BaseAsyncReqRow(BaseSideInfo sideInfo){
7375
this.sideInfo = sideInfo;
@@ -82,6 +84,8 @@ public void open(Configuration parameters) throws Exception {
8284
super.open(parameters);
8385
initCache();
8486
initMetric();
87+
cancelExecutor = new ThreadPoolExecutor(1, 1, 0, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(100000),
88+
new DTThreadFactory("cancel-timer-executor"));
8589
LOG.info("async dim table config info: {} ", sideInfo.getSideTableInfo().toString());
8690
}
8791

@@ -248,12 +252,11 @@ public void onProcessingTime(long timestamp) throws Exception {
248252
}
249253

250254
protected void cancelTimerWhenComplete(ResultFuture<CRow> resultFuture, ScheduledFuture<?> timerFuture){
251-
ThreadPoolExecutor executors = new ThreadPoolExecutor(1, 1,0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());
252255
if(resultFuture instanceof StreamRecordQueueEntry){
253256
StreamRecordQueueEntry streamRecordBufferEntry = (StreamRecordQueueEntry) resultFuture;
254257
streamRecordBufferEntry.onComplete((Object value) -> {
255258
timerFuture.cancel(true);
256-
},executors);
259+
}, cancelExecutor);
257260
}
258261
}
259262

core/src/main/java/com/dtstack/flink/sql/side/SideSqlExec.java

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,11 @@ public void exec(String sql,
115115
SideSQLParser sideSQLParser = new SideSQLParser();
116116
sideSQLParser.setLocalTableCache(localTableCache);
117117
Queue<Object> exeQueue = sideSQLParser.getExeQueue(sql, sideTableMap.keySet(), scope);
118-
Object pollObj = null;
118+
Object pollObj;
119+
120+
//need clean
121+
boolean preIsSideJoin = false;
122+
List<FieldReplaceInfo> replaceInfoList = Lists.newArrayList();
119123

120124
while((pollObj = exeQueue.poll()) != null){
121125

Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Map;

/**
 * Utility methods for helping with security tasks — currently, rendering a
 * JAAS login configuration and materializing it as a temporary file.
 *
 * Date: 2019/12/28
 * Company: www.dtstack.com
 * @author maqi
 */
public class AuthUtil {

    private AuthUtil() {
        // Utility class — no instances.
    }

    /**
     * Writes {@code jaasConfig} to a temporary file in the current working
     * directory and returns its absolute path. The file is registered for
     * deletion on JVM exit.
     *
     * NOTE(review): the historical "creat" spelling is kept because callers
     * reference this method by name.
     *
     * @param prefix     temp-file name prefix (see {@link File#createTempFile})
     * @param suffix     temp-file name suffix, e.g. {@code ".conf"}
     * @param jaasConfig configuration to render via {@link JAASConfig#toString()}
     * @return absolute path of the created file
     * @throws IOException if the file cannot be created or written
     */
    public static String creatJaasFile(String prefix, String suffix, JAASConfig jaasConfig) throws IOException {
        File workDir = new File(System.getProperty("user.dir"));
        File temp = File.createTempFile(prefix, suffix, workDir);
        temp.deleteOnExit();
        // Write with an explicit UTF-8 charset. The previous
        // commons-io writeStringToFile(File, String) overload is deprecated and
        // uses the platform default charset, which varies between hosts.
        Files.write(temp.toPath(), jaasConfig.toString().getBytes(StandardCharsets.UTF_8));
        return temp.getAbsolutePath();
    }

    /**
     * Immutable-ish value holder describing one JAAS login entry:
     * entry name, login module class, control flag, and module options.
     * {@link #toString()} renders the standard JAAS file syntax:
     *
     * <pre>
     * EntryName {
     *     LoginModule flag
     *     key=value;
     * };
     * </pre>
     */
    public static class JAASConfig {
        private String entryName;
        private String loginModule;
        private String loginModuleFlag;
        private Map<String, String> loginModuleOptions;

        public JAASConfig(String entryName, String loginModule, String loginModuleFlag, Map<String, String> loginModuleOptions) {
            this.entryName = entryName;
            this.loginModule = loginModule;
            this.loginModuleFlag = loginModuleFlag;
            this.loginModuleOptions = loginModuleOptions;
        }

        public static Builder builder() {
            return new Builder();
        }

        /**
         * Renders the entry in JAAS configuration-file syntax. Option order
         * follows the backing map's iteration order.
         *
         * NOTE(review): with an empty options map no trailing ';' is emitted
         * before "};" — preserved as-is; confirm no caller passes an empty map.
         */
        @Override
        public String toString() {
            StringBuilder stringBuilder = new StringBuilder(entryName).append(" {\n\t")
                    .append(loginModule).append(" ").append(loginModuleFlag).append("\n\t");
            String[] keys = loginModuleOptions.keySet().toArray(new String[loginModuleOptions.size()]);
            for (int i = 0; i < keys.length; i++) {
                stringBuilder.append(keys[i]).append("=").append(loginModuleOptions.get(keys[i]));
                if (i != keys.length - 1) {
                    // More options follow: stay inside the entry body.
                    stringBuilder.append("\n\t");
                } else {
                    // Last option terminates the option list with ';'.
                    stringBuilder.append(";\n");
                }
            }
            stringBuilder.append("\n").append("};");
            return stringBuilder.toString();
        }

        /** Fluent builder for {@link JAASConfig}. */
        public static class Builder {
            private String entryName;
            private String loginModule;
            private String loginModuleFlag;
            private Map<String, String> loginModuleOptions;

            public Builder setEntryName(String entryName) {
                this.entryName = entryName;
                return this;
            }

            public Builder setLoginModule(String loginModule) {
                this.loginModule = loginModule;
                return this;
            }

            public Builder setLoginModuleFlag(String loginModuleFlag) {
                this.loginModuleFlag = loginModuleFlag;
                return this;
            }

            public Builder setLoginModuleOptions(Map<String, String> loginModuleOptions) {
                this.loginModuleOptions = loginModuleOptions;
                return this;
            }

            public JAASConfig build() {
                return new JAASConfig(
                        entryName, loginModule, loginModuleFlag, loginModuleOptions);
            }
        }
    }
}

0 commit comments

Comments
 (0)