@@ -18,21 +18,6 @@
 
 package com.dtstack.flink.sql.exec;
 
-import com.dtstack.flink.sql.parser.CreateFuncParser;
-import com.dtstack.flink.sql.parser.CreateTmpTableParser;
-import com.dtstack.flink.sql.parser.FlinkPlanner;
-import com.dtstack.flink.sql.parser.InsertSqlParser;
-import com.dtstack.flink.sql.parser.SqlParser;
-import com.dtstack.flink.sql.parser.SqlTree;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.java.typeutils.RowTypeInfo;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.*;
-import org.apache.flink.table.api.java.StreamTableEnvironment;
-import org.apache.flink.table.api.java.internal.StreamTableEnvironmentImpl;
-import org.apache.flink.table.sinks.TableSink;
-
 import com.dtstack.flink.sql.classloader.ClassLoaderManager;
 import com.dtstack.flink.sql.enums.ClusterMode;
 import com.dtstack.flink.sql.enums.ECacheType;
@@ -42,8 +27,14 @@
 import com.dtstack.flink.sql.function.FunctionManager;
 import com.dtstack.flink.sql.option.OptionParser;
 import com.dtstack.flink.sql.option.Options;
-import com.dtstack.flink.sql.side.SideSqlExec;
+import com.dtstack.flink.sql.parser.CreateFuncParser;
+import com.dtstack.flink.sql.parser.CreateTmpTableParser;
+import com.dtstack.flink.sql.parser.FlinkPlanner;
+import com.dtstack.flink.sql.parser.InsertSqlParser;
+import com.dtstack.flink.sql.parser.SqlParser;
+import com.dtstack.flink.sql.parser.SqlTree;
 import com.dtstack.flink.sql.side.AbstractSideTableInfo;
+import com.dtstack.flink.sql.side.SideSqlExec;
 import com.dtstack.flink.sql.sink.StreamSinkFactory;
 import com.dtstack.flink.sql.source.StreamSourceFactory;
 import com.dtstack.flink.sql.table.AbstractSourceTableInfo;
@@ -62,6 +53,17 @@
 import org.apache.calcite.sql.SqlNode;
 import org.apache.commons.io.Charsets;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.EnvironmentSettings;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.TableConfig;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.java.StreamTableEnvironment;
+import org.apache.flink.table.api.java.internal.StreamTableEnvironmentImpl;
+import org.apache.flink.table.sinks.TableSink;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -71,13 +73,13 @@
 import java.net.URLClassLoader;
 import java.net.URLDecoder;
 import java.time.ZoneId;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.TimeZone;
-import java.util.ArrayList;
 
 /**
  * Flow methods for task execution
@@ -158,7 +160,7 @@ public static StreamExecutionEnvironment getStreamExecution(ParamsInfo paramsInfo
         Map<String, Table> registerTableCache = Maps.newHashMap();
 
         //register udf
-        ExecuteProcessHelper.registerUserDefinedFunction(sqlTree, paramsInfo.getJarUrlList(), tableEnv);
+        ExecuteProcessHelper.registerUserDefinedFunction(sqlTree, paramsInfo.getJarUrlList(), tableEnv, paramsInfo.isGetPlan());
         //register table schema
         Set<URL> classPathSets = ExecuteProcessHelper.registerTable(sqlTree, env, tableEnv, paramsInfo.getLocalSqlPluginPath(),
                 paramsInfo.getRemoteSqlPluginPath(), paramsInfo.getPluginLoadMode(), sideTableMap, registerTableCache);
@@ -243,13 +245,19 @@ private static void sqlTranslation(String localSqlPluginPath,
         }
     }
 
-    public static void registerUserDefinedFunction(SqlTree sqlTree, List<URL> jarUrlList, TableEnvironment tableEnv)
+    public static void registerUserDefinedFunction(SqlTree sqlTree, List<URL> jarUrlList, TableEnvironment tableEnv, boolean getPlan)
             throws IllegalAccessException, InvocationTargetException {
         // the udf and the tableEnv must be loaded by the same class loader
         ClassLoader levelClassLoader = tableEnv.getClass().getClassLoader();
+        ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
         URLClassLoader classLoader = null;
         List<CreateFuncParser.SqlParserResult> funcList = sqlTree.getFunctionList();
         for (CreateFuncParser.SqlParserResult funcInfo : funcList) {
+            // when only building the plan, the udf and the tableEnv do not need to share a class loader
+            if (getPlan) {
+                classLoader = ClassLoaderManager.loadExtraJar(jarUrlList, (URLClassLoader) currentClassLoader);
+            }
+
             //classloader
             if (classLoader == null) {
                 classLoader = ClassLoaderManager.loadExtraJar(jarUrlList, (URLClassLoader) levelClassLoader);
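For readers following the class-loader change above: the new getPlan flag lets the UDF jars be resolved through the current thread's context class loader when the job is only asked to produce an execution plan, instead of forcing them through the loader that owns the TableEnvironment. The sketch below is a minimal, self-contained illustration of that selection using plain JDK URLClassLoader calls rather than the project's ClassLoaderManager; the class name, jar path, and UDF class name are hypothetical placeholders, not values taken from this repository.

import java.net.URL;
import java.net.URLClassLoader;
import java.util.List;

public class UdfClassLoaderSketch {

    // Choose the parent loader for the UDF jars: when only a plan is built,
    // the thread context class loader is sufficient; otherwise reuse the
    // loader that created the table environment so both sides see the same classes.
    static URLClassLoader buildUdfClassLoader(List<URL> jarUrls, boolean getPlan, ClassLoader tableEnvLoader) {
        ClassLoader parent = getPlan
                ? Thread.currentThread().getContextClassLoader()
                : tableEnvLoader;
        return new URLClassLoader(jarUrls.toArray(new URL[0]), parent);
    }

    public static void main(String[] args) throws Exception {
        // hypothetical UDF jar and class name, used only to exercise the sketch
        List<URL> jarUrls = List.of(new URL("file:///tmp/my-udf.jar"));
        try (URLClassLoader loader = buildUdfClassLoader(jarUrls, true, UdfClassLoaderSketch.class.getClassLoader())) {
            Class<?> udf = loader.loadClass("com.example.MyScalarFunction");
            System.out.println("Loaded UDF class: " + udf.getName());
        }
    }
}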