From 32b79d1cdbf3fd463048d5b98a0c4893dfd6ae28 Mon Sep 17 00:00:00 2001
From: Zhenghua Gao
Date: Fri, 15 May 2020 20:12:16 +0800
Subject: [PATCH] [FLINK-17748][table] Remove registration of
 TableSource/TableSink in table env

The old TableSource/TableSink interfaces will be replaced by FLIP-95 in the
future, so we choose a more lightweight solution: move the registration from
TableEnvironment to TableEnvironmentInternal, hiding these methods from users
while keeping them available to our codebase. After FLIP-95 is done, we should
continue to improve the table factory and the related descriptor API to make
it easy and intuitive for users to register user-defined tables (sources &
sinks).

This closes #12076
---
 .../cassandra/CassandraConnectorITCase.java | 3 +-
 .../hbase/source/HBaseTableSource.java | 2 +-
 .../connector/hbase/HBaseConnectorITCase.java | 15 +-
 .../flink/sql/tests/BatchSQLTestProgram.java | 7 +-
 .../flink/sql/tests/StreamSQLTestProgram.java | 5 +-
 .../flink/table/tpcds/TpcdsTestProgram.java | 3 +-
 .../org/apache/flink/orc/OrcTableSource.java | 2 +-
 .../flink/orc/OrcTableSourceITCase.java | 5 +-
 .../formats/parquet/ParquetTableSource.java | 2 +-
 .../parquet/ParquetTableSourceITCase.java | 5 +-
 .../pyflink/table/table_environment.py | 4 +-
 .../gateway/local/ExecutionContext.java | 5 +-
 .../client/gateway/local/LocalExecutor.java | 3 +-
 .../gateway/local/ExecutionContextTest.java | 2 +-
 .../flink/table/api/TableEnvironment.java | 52 +--------
 .../api/internal/TableEnvironmentImpl.java | 33 +-----
 .../internal/TableEnvironmentInternal.java | 31 +++++-
 .../descriptors/ConnectTableDescriptor.java | 2 +-
 .../table/api/TableEnvironmentITCase.scala | 7 +-
 .../flink/table/api/batch/ExplainTest.scala | 6 +-
 .../flink/table/api/stream/ExplainTest.scala | 16 +-
 .../plan/batch/sql/DagOptimizationTest.scala | 58 +++++-----
 .../plan/batch/sql/LegacySinkTest.scala | 6 +-
 .../plan/batch/sql/TableSourceTest.scala | 6 +-
 ...ectIntoLegacyTableSourceScanRuleTest.scala | 4 +-
 .../plan/stream/sql/DagOptimizationTest.scala | 101 ++++++++++++------
 .../plan/stream/sql/LegacySinkTest.scala | 29 +++--
 .../sql/MiniBatchIntervalInferTest.scala | 11 +-
 .../plan/stream/sql/TableSourceTest.scala | 25 ++---
 .../plan/stream/table/TableSourceTest.scala | 26 ++---
 .../LegacyTableSinkValidationTest.scala | 4 +-
 .../runtime/batch/sql/TableScanITCase.scala | 8 +-
 .../runtime/batch/sql/TableSourceITCase.scala | 8 +-
 .../batch/sql/agg/AggregateITCaseBase.scala | 2 +-
 .../runtime/batch/sql/join/JoinITCase.scala | 6 +-
 .../batch/table/LegacyTableSinkITCase.scala | 10 +-
 .../runtime/stream/sql/AggregateITCase.scala | 6 +-
 .../runtime/stream/sql/CalcITCase.scala | 4 +-
 .../runtime/stream/sql/CorrelateITCase.scala | 18 ++--
 .../stream/sql/Limit0RemoveITCase.scala | 18 ++--
 .../runtime/stream/sql/RankITCase.scala | 40 +++----
 .../runtime/stream/sql/TableScanITCase.scala | 10 +-
 .../stream/sql/TableSourceITCase.scala | 18 ++--
 .../runtime/stream/sql/UnnestITCase.scala | 4 +-
 .../stream/sql/WindowAggregateITCase.scala | 6 +-
 .../stream/table/AggregateITCase.scala | 4 +-
 .../runtime/stream/table/JoinITCase.scala | 14 +--
 .../stream/table/LegacyTableSinkITCase.scala | 28 ++--
 .../runtime/utils/BatchTableEnvUtil.scala | 5 +-
 .../table/planner/utils/TableTestBase.scala | 15 ++-
 .../table/api/internal/TableEnvImpl.scala | 49 ++-------
 .../runtime/batch/JavaTableSourceITCase.java | 5 +-
 .../table/api/TableEnvironmentITCase.scala | 32 +++---
 .../apache/flink/table/api/TableITCase.scala | 8 +-
 .../flink/table/api/TableSourceTest.scala | 51 ++++++---
.../flink/table/api/batch/ExplainTest.scala | 17 +-- .../validation/InsertIntoValidationTest.scala | 14 ++- .../validation/InsertIntoValidationTest.scala | 7 +- .../flink/table/api/stream/ExplainTest.scala | 17 +-- .../validation/InsertIntoValidationTest.scala | 11 +- .../api/stream/table/TableSourceTest.scala | 25 ++--- .../validation/InsertIntoValidationTest.scala | 8 +- .../validation/TableSinkValidationTest.scala | 10 +- .../TableSourceValidationTest.scala | 6 +- .../validation/TableSinksValidationTest.scala | 8 +- .../TableSourceValidationTest.scala | 26 ++--- .../batch/sql/PartitionableSinkITCase.scala | 7 +- .../batch/sql/TableEnvironmentITCase.scala | 47 ++++---- .../runtime/batch/sql/TableSourceITCase.scala | 8 +- .../batch/table/TableEnvironmentITCase.scala | 4 +- .../runtime/batch/table/TableSinkITCase.scala | 7 +- .../batch/table/TableSourceITCase.scala | 49 +++++---- .../runtime/stream/TimeAttributesITCase.scala | 9 +- .../runtime/stream/sql/InsertIntoITCase.scala | 19 ++-- .../table/runtime/stream/sql/SortITCase.scala | 5 +- .../table/runtime/stream/sql/SqlITCase.scala | 6 +- .../stream/sql/TableSourceITCase.scala | 4 +- .../stream/table/AggregateITCase.scala | 4 +- .../runtime/stream/table/JoinITCase.scala | 6 +- .../stream/table/TableSinkITCase.scala | 27 ++--- .../stream/table/TableSourceITCase.scala | 44 ++++---- .../table/utils/MockTableEnvironment.scala | 9 -- .../src/main/scala/SpendReport.scala | 6 +- 83 files changed, 659 insertions(+), 600 deletions(-) diff --git a/flink-connectors/flink-connector-cassandra/src/test/java/org/apache/flink/streaming/connectors/cassandra/CassandraConnectorITCase.java b/flink-connectors/flink-connector-cassandra/src/test/java/org/apache/flink/streaming/connectors/cassandra/CassandraConnectorITCase.java index 6a16d10b17b1e..c14e94d00f80e 100644 --- a/flink-connectors/flink-connector-cassandra/src/test/java/org/apache/flink/streaming/connectors/cassandra/CassandraConnectorITCase.java +++ b/flink-connectors/flink-connector-cassandra/src/test/java/org/apache/flink/streaming/connectors/cassandra/CassandraConnectorITCase.java @@ -45,6 +45,7 @@ import org.apache.flink.streaming.api.functions.sink.SinkContextUtil; import org.apache.flink.streaming.runtime.operators.WriteAheadSinkTestBase; import org.apache.flink.table.api.TableResult; +import org.apache.flink.table.api.internal.TableEnvironmentInternal; import org.apache.flink.table.api.java.StreamTableEnvironment; import org.apache.flink.testutils.junit.FailsOnJava11; import org.apache.flink.types.Row; @@ -463,7 +464,7 @@ public void testCassandraTableSink() throws Exception { DataStreamSource source = env.fromCollection(rowCollection); tEnv.createTemporaryView("testFlinkTable", source); - tEnv.registerTableSink( + ((TableEnvironmentInternal) tEnv).registerTableSinkInternal( "cassandraTable", new CassandraAppendTableSink(builder, injectTableName(INSERT_DATA_QUERY)).configure( new String[]{"f0", "f1", "f2"}, diff --git a/flink-connectors/flink-connector-hbase/src/main/java/org/apache/flink/connector/hbase/source/HBaseTableSource.java b/flink-connectors/flink-connector-hbase/src/main/java/org/apache/flink/connector/hbase/source/HBaseTableSource.java index e27749a14f30b..93af17969ea23 100644 --- a/flink-connectors/flink-connector-hbase/src/main/java/org/apache/flink/connector/hbase/source/HBaseTableSource.java +++ b/flink-connectors/flink-connector-hbase/src/main/java/org/apache/flink/connector/hbase/source/HBaseTableSource.java @@ -58,7 +58,7 @@ * hSrc.addColumn("fam1", "col2", Integer.class); * 
hSrc.addColumn("fam2", "col1", String.class); * - * tableEnv.registerTableSource("hTable", hSrc); + * tableEnv.registerTableSourceInternal("hTable", hSrc); * Table res = tableEnv.sqlQuery( * "SELECT t.fam2.col1, SUM(t.fam1.col2) FROM hTable AS t " + * "WHERE t.rowkey LIKE 'flink%' GROUP BY t.fam2.col1"); diff --git a/flink-connectors/flink-connector-hbase/src/test/java/org/apache/flink/connector/hbase/HBaseConnectorITCase.java b/flink-connectors/flink-connector-hbase/src/test/java/org/apache/flink/connector/hbase/HBaseConnectorITCase.java index d01f56d3b761a..12ee3f286935a 100644 --- a/flink-connectors/flink-connector-hbase/src/test/java/org/apache/flink/connector/hbase/HBaseConnectorITCase.java +++ b/flink-connectors/flink-connector-hbase/src/test/java/org/apache/flink/connector/hbase/HBaseConnectorITCase.java @@ -35,6 +35,7 @@ import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.TableEnvironment; import org.apache.flink.table.api.TableSchema; +import org.apache.flink.table.api.internal.TableEnvironmentInternal; import org.apache.flink.table.api.internal.TableImpl; import org.apache.flink.table.api.java.BatchTableEnvironment; import org.apache.flink.table.api.java.StreamTableEnvironment; @@ -100,7 +101,7 @@ public void testTableSourceFullScan() throws Exception { hbaseTable.addColumn(FAMILY3, F3COL1, Double.class); hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class); hbaseTable.addColumn(FAMILY3, F3COL3, String.class); - tEnv.registerTableSource("hTable", hbaseTable); + ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("hTable", hbaseTable); Table table = tEnv.sqlQuery("SELECT " + " h.family1.col1, " + @@ -135,7 +136,7 @@ public void testTableSourceProjection() throws Exception { hbaseTable.addColumn(FAMILY3, F3COL1, Double.class); hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class); hbaseTable.addColumn(FAMILY3, F3COL3, String.class); - tEnv.registerTableSource("hTable", hbaseTable); + ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("hTable", hbaseTable); Table table = tEnv.sqlQuery("SELECT " + " h.family1.col1, " + @@ -169,7 +170,7 @@ public void testTableSourceFieldOrder() throws Exception { hbaseTable.addColumn(FAMILY2, F2COL2, Long.class); hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class); hbaseTable.addColumn(FAMILY3, F3COL3, String.class); - tEnv.registerTableSource("hTable", hbaseTable); + ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("hTable", hbaseTable); Table table = tEnv.sqlQuery("SELECT * FROM hTable AS h"); @@ -194,7 +195,7 @@ public void testTableSourceReadAsByteArray() throws Exception { HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE_1); hbaseTable.addColumn(FAMILY2, F2COL1, byte[].class); hbaseTable.addColumn(FAMILY2, F2COL2, byte[].class); - tEnv.registerTableSource("hTable", hbaseTable); + ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("hTable", hbaseTable); tEnv.registerFunction("toUTF8", new ToUTF8()); tEnv.registerFunction("toLong", new ToLong()); @@ -293,7 +294,7 @@ public void testTableSink() throws Exception { DataStream ds = execEnv.fromCollection(testData1).returns(testTypeInfo1); tEnv.createTemporaryView("src", ds); - tEnv.registerTableSink("hbase", tableSink); + ((TableEnvironmentInternal) tEnv).registerTableSinkInternal("hbase", tableSink); String query = "INSERT INTO hbase SELECT ROW(f1c1), ROW(f2c1, f2c2), rowkey, ROW(f3c1, f3c2, f3c3) FROM src"; TableEnvUtil.execInsertSqlAndWaitResult(tEnv, query); @@ -310,7 +311,7 @@ public void 
testTableSink() throws Exception { hbaseTable.addColumn(FAMILY3, F3COL1, Double.class); hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class); hbaseTable.addColumn(FAMILY3, F3COL3, String.class); - batchTableEnv.registerTableSource("hTable", hbaseTable); + ((TableEnvironmentInternal) batchTableEnv).registerTableSourceInternal("hTable", hbaseTable); Table table = batchTableEnv.sqlQuery( "SELECT " + @@ -475,7 +476,7 @@ public void testHBaseLookupTableSource() throws Exception { TableSource source = TableFactoryService .find(HBaseTableFactory.class, tableProperties) .createTableSource(tableProperties); - streamTableEnv.registerTableSource("hbaseLookup", source); + ((TableEnvironmentInternal) streamTableEnv).registerTableSourceInternal("hbaseLookup", source); // perform a temporal table join query String query = "SELECT a,family1.col1, family3.col3 FROM src " + "JOIN hbaseLookup FOR SYSTEM_TIME AS OF src.proc as h ON src.a = h.rk"; diff --git a/flink-end-to-end-tests/flink-batch-sql-test/src/main/java/org/apache/flink/sql/tests/BatchSQLTestProgram.java b/flink-end-to-end-tests/flink-batch-sql-test/src/main/java/org/apache/flink/sql/tests/BatchSQLTestProgram.java index 903bc8c773a11..1c9d92f2b2c7a 100644 --- a/flink-end-to-end-tests/flink-batch-sql-test/src/main/java/org/apache/flink/sql/tests/BatchSQLTestProgram.java +++ b/flink-end-to-end-tests/flink-batch-sql-test/src/main/java/org/apache/flink/sql/tests/BatchSQLTestProgram.java @@ -28,6 +28,7 @@ import org.apache.flink.table.api.TableEnvironment; import org.apache.flink.table.api.TableResult; import org.apache.flink.table.api.TableSchema; +import org.apache.flink.table.api.internal.TableEnvironmentInternal; import org.apache.flink.table.sinks.CsvTableSink; import org.apache.flink.table.sources.InputFormatTableSource; import org.apache.flink.table.types.DataType; @@ -61,9 +62,9 @@ public static void main(String[] args) throws Exception { .inBatchMode() .build()); - tEnv.registerTableSource("table1", new GeneratorTableSource(10, 100, 60, 0)); - tEnv.registerTableSource("table2", new GeneratorTableSource(5, 0.2f, 60, 5)); - tEnv.registerTableSink("sinkTable", + ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("table1", new GeneratorTableSource(10, 100, 60, 0)); + ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("table2", new GeneratorTableSource(5, 0.2f, 60, 5)); + ((TableEnvironmentInternal) tEnv).registerTableSinkInternal("sinkTable", new CsvTableSink(outputPath) .configure(new String[]{"f0", "f1"}, new TypeInformation[]{Types.INT, Types.SQL_TIMESTAMP})); diff --git a/flink-end-to-end-tests/flink-stream-sql-test/src/main/java/org/apache/flink/sql/tests/StreamSQLTestProgram.java b/flink-end-to-end-tests/flink-stream-sql-test/src/main/java/org/apache/flink/sql/tests/StreamSQLTestProgram.java index 8d08466eec7cc..074b615931032 100644 --- a/flink-end-to-end-tests/flink-stream-sql-test/src/main/java/org/apache/flink/sql/tests/StreamSQLTestProgram.java +++ b/flink-end-to-end-tests/flink-stream-sql-test/src/main/java/org/apache/flink/sql/tests/StreamSQLTestProgram.java @@ -46,6 +46,7 @@ import org.apache.flink.table.api.EnvironmentSettings; import org.apache.flink.table.api.Table; import org.apache.flink.table.api.TableSchema; +import org.apache.flink.table.api.internal.TableEnvironmentInternal; import org.apache.flink.table.api.java.StreamTableEnvironment; import org.apache.flink.table.sources.DefinedFieldMapping; import org.apache.flink.table.sources.DefinedRowtimeAttributes; @@ -108,8 +109,8 @@ public static void 
main(String[] args) throws Exception { final StreamTableEnvironment tEnv = StreamTableEnvironment.create(sEnv, settings); - tEnv.registerTableSource("table1", new GeneratorTableSource(10, 100, 60, 0)); - tEnv.registerTableSource("table2", new GeneratorTableSource(5, 0.2f, 60, 5)); + ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("table1", new GeneratorTableSource(10, 100, 60, 0)); + ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("table2", new GeneratorTableSource(5, 0.2f, 60, 5)); int overWindowSizeSeconds = 1; int tumbleWindowSizeSeconds = 10; diff --git a/flink-end-to-end-tests/flink-tpcds-test/src/main/java/org/apache/flink/table/tpcds/TpcdsTestProgram.java b/flink-end-to-end-tests/flink-tpcds-test/src/main/java/org/apache/flink/table/tpcds/TpcdsTestProgram.java index ac1d6c9dab01d..258ce6ff6d5ba 100644 --- a/flink-end-to-end-tests/flink-tpcds-test/src/main/java/org/apache/flink/table/tpcds/TpcdsTestProgram.java +++ b/flink-end-to-end-tests/flink-tpcds-test/src/main/java/org/apache/flink/table/tpcds/TpcdsTestProgram.java @@ -26,6 +26,7 @@ import org.apache.flink.table.api.TableResult; import org.apache.flink.table.api.config.ExecutionConfigOptions; import org.apache.flink.table.api.config.OptimizerConfigOptions; +import org.apache.flink.table.api.internal.TableEnvironmentInternal; import org.apache.flink.table.catalog.ConnectorCatalogTable; import org.apache.flink.table.catalog.ObjectPath; import org.apache.flink.table.sinks.CsvTableSink; @@ -92,7 +93,7 @@ public static void main(String[] args) throws Exception { //register sink table String sinkTableName = QUERY_PREFIX + queryId + "_sinkTable"; - tableEnvironment.registerTableSink(sinkTableName, + ((TableEnvironmentInternal) tableEnvironment).registerTableSinkInternal(sinkTableName, new CsvTableSink( sinkTablePath + FILE_SEPARATOR + queryId + RESULT_SUFFIX, COL_DELIMITER, diff --git a/flink-formats/flink-orc/src/main/java/org/apache/flink/orc/OrcTableSource.java b/flink-formats/flink-orc/src/main/java/org/apache/flink/orc/OrcTableSource.java index 26094333dcbc3..0a553608f64d8 100644 --- a/flink-formats/flink-orc/src/main/java/org/apache/flink/orc/OrcTableSource.java +++ b/flink-formats/flink-orc/src/main/java/org/apache/flink/orc/OrcTableSource.java @@ -74,7 +74,7 @@ * .forOrcSchema("struct") * .build(); * - * tEnv.registerTableSource("orcTable", orcSrc); + * tEnv.registerTableSourceInternal("orcTable", orcSrc); * Table res = tableEnv.sqlQuery("SELECT * FROM orcTable"); * } * diff --git a/flink-formats/flink-orc/src/test/java/org/apache/flink/orc/OrcTableSourceITCase.java b/flink-formats/flink-orc/src/test/java/org/apache/flink/orc/OrcTableSourceITCase.java index 6b720c163b64f..b9c972d25c098 100644 --- a/flink-formats/flink-orc/src/test/java/org/apache/flink/orc/OrcTableSourceITCase.java +++ b/flink-formats/flink-orc/src/test/java/org/apache/flink/orc/OrcTableSourceITCase.java @@ -21,6 +21,7 @@ import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.internal.TableEnvironmentInternal; import org.apache.flink.table.api.java.BatchTableEnvironment; import org.apache.flink.test.util.MultipleProgramsTestBase; import org.apache.flink.types.Row; @@ -54,7 +55,7 @@ public void testFullScan() throws Exception { .path(getPath(TEST_FILE_FLAT)) .forOrcSchema(TEST_SCHEMA_FLAT) .build(); - tEnv.registerTableSource("OrcTable", orc); + ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("OrcTable", 
orc); String query = "SELECT COUNT(*), " + @@ -89,7 +90,7 @@ public void testScanWithProjectionAndFilter() throws Exception { .path(getPath(TEST_FILE_FLAT)) .forOrcSchema(TEST_SCHEMA_FLAT) .build(); - tEnv.registerTableSource("OrcTable", orc); + ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("OrcTable", orc); String query = "SELECT " + diff --git a/flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/ParquetTableSource.java b/flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/ParquetTableSource.java index dd853ad0a2e88..fbe4c977d9777 100644 --- a/flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/ParquetTableSource.java +++ b/flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/ParquetTableSource.java @@ -86,7 +86,7 @@ * .schema(messageType) * .build(); * - * tEnv.registerTableSource("parquetTable", parquetSrc); + * tEnv.registerTableSourceInternal("parquetTable", parquetSrc); * Table res = tableEnv.sqlQuery("SELECT * FROM parquetTable"); * } * diff --git a/flink-formats/flink-parquet/src/test/java/org/apache/flink/formats/parquet/ParquetTableSourceITCase.java b/flink-formats/flink-parquet/src/test/java/org/apache/flink/formats/parquet/ParquetTableSourceITCase.java index 6d5049fc7cc40..3d50ad47aaf81 100644 --- a/flink-formats/flink-parquet/src/test/java/org/apache/flink/formats/parquet/ParquetTableSourceITCase.java +++ b/flink-formats/flink-parquet/src/test/java/org/apache/flink/formats/parquet/ParquetTableSourceITCase.java @@ -23,6 +23,7 @@ import org.apache.flink.core.fs.Path; import org.apache.flink.formats.parquet.utils.TestUtil; import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.internal.TableEnvironmentInternal; import org.apache.flink.table.api.java.BatchTableEnvironment; import org.apache.flink.test.util.MultipleProgramsTestBase; import org.apache.flink.types.Row; @@ -64,7 +65,7 @@ public void testFullScan() throws Exception { ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); BatchTableEnvironment batchTableEnvironment = BatchTableEnvironment.create(env); ParquetTableSource tableSource = createParquetTableSource(testPath); - batchTableEnvironment.registerTableSource("ParquetTable", tableSource); + ((TableEnvironmentInternal) batchTableEnvironment).registerTableSourceInternal("ParquetTable", tableSource); String query = "SELECT foo " + "FROM ParquetTable"; @@ -81,7 +82,7 @@ public void testScanWithProjectionAndFilter() throws Exception { ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); BatchTableEnvironment batchTableEnvironment = BatchTableEnvironment.create(env); ParquetTableSource tableSource = createParquetTableSource(testPath); - batchTableEnvironment.registerTableSource("ParquetTable", tableSource); + ((TableEnvironmentInternal) batchTableEnvironment).registerTableSourceInternal("ParquetTable", tableSource); String query = "SELECT foo " + "FROM ParquetTable WHERE foo >= 1 AND bar.spam >= 30 AND CARDINALITY(arr) >= 1 AND arr[1] <= 50"; diff --git a/flink-python/pyflink/table/table_environment.py b/flink-python/pyflink/table/table_environment.py index af2e7f5f17eb7..d2ec22005ca92 100644 --- a/flink-python/pyflink/table/table_environment.py +++ b/flink-python/pyflink/table/table_environment.py @@ -190,7 +190,7 @@ def register_table_source(self, name, table_source): .. note:: Deprecated in 1.10. Use :func:`connect` instead. """ warnings.warn("Deprecated in 1.10. 
Use connect instead.", DeprecationWarning) - self._j_tenv.registerTableSource(name, table_source._j_table_source) + self._j_tenv.registerTableSourceInternal(name, table_source._j_table_source) def register_table_sink(self, name, table_sink): """ @@ -215,7 +215,7 @@ def register_table_sink(self, name, table_sink): .. note:: Deprecated in 1.10. Use :func:`connect` instead. """ warnings.warn("Deprecated in 1.10. Use connect instead.", DeprecationWarning) - self._j_tenv.registerTableSink(name, table_sink._j_table_sink) + self._j_tenv.registerTableSinkInternal(name, table_sink._j_table_sink) def scan(self, *table_path): """ diff --git a/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/local/ExecutionContext.java b/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/local/ExecutionContext.java index ac1af6c5053ba..6a5b9963546a4 100644 --- a/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/local/ExecutionContext.java +++ b/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/local/ExecutionContext.java @@ -39,6 +39,7 @@ import org.apache.flink.table.api.TableEnvironment; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; +import org.apache.flink.table.api.internal.TableEnvironmentInternal; import org.apache.flink.table.api.java.BatchTableEnvironment; import org.apache.flink.table.api.java.StreamTableEnvironment; import org.apache.flink.table.api.java.internal.BatchTableEnvironmentImpl; @@ -581,9 +582,9 @@ private void initializeCatalogs() { } }); // register table sources - tableSources.forEach(tableEnv::registerTableSource); + tableSources.forEach(((TableEnvironmentInternal) tableEnv)::registerTableSourceInternal); // register table sinks - tableSinks.forEach(tableEnv::registerTableSink); + tableSinks.forEach(((TableEnvironmentInternal) tableEnv)::registerTableSinkInternal); //-------------------------------------------------------------------------------------------------------------- // Step.4 Register temporal tables. 
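In the ExecutionContext change above, the receiver of a bound method reference is evaluated once, when the reference is created, so ((TableEnvironmentInternal) tableEnv)::registerTableSourceInternal performs the cast a single time rather than once per map entry. An equivalent, more explicit form of the bulk registration, as a sketch (the helper class and method names are hypothetical, not part of this patch):

    import java.util.Map;

    import org.apache.flink.table.api.internal.TableEnvironmentInternal;
    import org.apache.flink.table.sinks.TableSink;
    import org.apache.flink.table.sources.TableSource;

    final class BulkRegistrationSketch {
        private BulkRegistrationSketch() {}

        // Registers every legacy source and sink under its configured name;
        // the caller casts to TableEnvironmentInternal exactly once.
        static void registerAll(
                TableEnvironmentInternal internalEnv,
                Map<String, TableSource<?>> tableSources,
                Map<String, TableSink<?>> tableSinks) {
            tableSources.forEach(internalEnv::registerTableSourceInternal);
            tableSinks.forEach(internalEnv::registerTableSinkInternal);
        }
    }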
diff --git a/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/local/LocalExecutor.java b/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/local/LocalExecutor.java index 24563cce64baa..c19e147d8ed02 100644 --- a/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/local/LocalExecutor.java +++ b/flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/local/LocalExecutor.java @@ -39,6 +39,7 @@ import org.apache.flink.table.api.Table; import org.apache.flink.table.api.TableEnvironment; import org.apache.flink.table.api.TableSchema; +import org.apache.flink.table.api.internal.TableEnvironmentInternal; import org.apache.flink.table.catalog.exceptions.CatalogException; import org.apache.flink.table.client.SqlClientException; import org.apache.flink.table.client.config.Environment; @@ -627,7 +628,7 @@ private ResultDescriptor executeQueryInternal(String sessionId, ExecutionCon try { // writing to a sink requires an optimization step that might reference UDFs during code compilation context.wrapClassLoader(() -> { - context.getTableEnvironment().registerTableSink(tableName, result.getTableSink()); + ((TableEnvironmentInternal) context.getTableEnvironment()).registerTableSinkInternal(tableName, result.getTableSink()); table.insertInto(tableName); }); pipeline = context.createPipeline(jobName); diff --git a/flink-table/flink-sql-client/src/test/java/org/apache/flink/table/client/gateway/local/ExecutionContextTest.java b/flink-table/flink-sql-client/src/test/java/org/apache/flink/table/client/gateway/local/ExecutionContextTest.java index 871875e59b33c..b68ebefb91133 100644 --- a/flink-table/flink-sql-client/src/test/java/org/apache/flink/table/client/gateway/local/ExecutionContextTest.java +++ b/flink-table/flink-sql-client/src/test/java/org/apache/flink/table/client/gateway/local/ExecutionContextTest.java @@ -244,7 +244,7 @@ public void testTemporalTables() throws Exception { new String[]{"integerField", "stringField", "rowtimeField", "integerField0", "stringField0", "rowtimeField0"}, tableEnv.from("TemporalTableUsage").getSchema().getFieldNames()); - // Please delete this test after removing registerTableSource in SQL-CLI. + // Please delete this test after removing registerTableSourceInternal in SQL-CLI. TableSchema tableSchema = tableEnv.from("EnrichmentSource").getSchema(); LogicalType timestampType = tableSchema.getFieldDataTypes()[2].getLogicalType(); assertTrue(timestampType instanceof TimestampType); diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java index 9b6d53c5c88df..56a521e63c519 100644 --- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java +++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java @@ -21,7 +21,6 @@ import org.apache.flink.annotation.Experimental; import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.JobExecutionResult; -import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.table.api.internal.TableEnvironmentImpl; import org.apache.flink.table.catalog.Catalog; import org.apache.flink.table.descriptors.ConnectTableDescriptor; @@ -545,55 +544,6 @@ default Table fromValues(DataType rowType, Object... 
values) { */ void createTemporaryView(String path, Table view); - /** - * Registers an external {@link TableSource} in this {@link TableEnvironment}'s catalog. - * Registered tables can be referenced in SQL queries. - * - *

Temporary objects can shadow permanent ones. If a permanent object in a given path exists, it will - * be inaccessible in the current session. To make the permanent object available again one can drop the - * corresponding temporary object. - * - * @param name The name under which the {@link TableSource} is registered. - * @param tableSource The {@link TableSource} to register. - * @deprecated Use {@link #connect(ConnectorDescriptor)} instead. - */ - @Deprecated - void registerTableSource(String name, TableSource tableSource); - - /** - * Registers an external {@link TableSink} with given field names and types in this - * {@link TableEnvironment}'s catalog. - * Registered sink tables can be referenced in SQL DML statements. - * - *

Temporary objects can shadow permanent ones. If a permanent object in a given path exists, it will - * be inaccessible in the current session. To make the permanent object available again one can drop the - * corresponding temporary object. - * - * @param name The name under which the {@link TableSink} is registered. - * @param fieldNames The field names to register with the {@link TableSink}. - * @param fieldTypes The field types to register with the {@link TableSink}. - * @param tableSink The {@link TableSink} to register. - * @deprecated Use {@link #connect(ConnectorDescriptor)} instead. - */ - @Deprecated - void registerTableSink(String name, String[] fieldNames, TypeInformation[] fieldTypes, TableSink tableSink); - - /** - * Registers an external {@link TableSink} with already configured field names and field types in - * this {@link TableEnvironment}'s catalog. - * Registered sink tables can be referenced in SQL DML statements. - * - *

Temporary objects can shadow permanent ones. If a permanent object in a given path exists, it will - * be inaccessible in the current session. To make the permanent object available again one can drop the - * corresponding temporary object. - * - * @param name The name under which the {@link TableSink} is registered. - * @param configuredSink The configured {@link TableSink} to register. - * @deprecated Use {@link #connect(ConnectorDescriptor)} instead. - */ - @Deprecated - void registerTableSink(String name, TableSink configuredSink); - /** * Scans a registered table and returns the resulting {@link Table}. * @@ -924,7 +874,7 @@ default Table fromValues(DataType rowType, Object... values) { *

 	 * {@code
 	 *   // register the configured table sink into which the result is inserted.
-	 *   tEnv.registerTableSink("sinkTable", configuredSink);
+	 *   ((TableEnvironmentInternal) tEnv).registerTableSinkInternal("sinkTable", configuredSink);
 	 *   Table sourceTable = ...
 	 *   String tableName = sourceTable.toString();
 	 *   // sourceTable is not registered to the table environment
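The call-site migration below follows a single pattern: code that previously used the deprecated public methods now casts the environment to TableEnvironmentInternal first. A minimal, self-contained sketch of the pattern, assuming a CsvTableSink with one string column (the sink construction, output path, and class name are illustrative, not taken from this patch):

    import org.apache.flink.api.common.typeinfo.TypeInformation;
    import org.apache.flink.api.common.typeinfo.Types;
    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.TableEnvironment;
    import org.apache.flink.table.api.internal.TableEnvironmentInternal;
    import org.apache.flink.table.sinks.CsvTableSink;
    import org.apache.flink.table.sinks.TableSink;

    public class RegisterSinkMigrationSketch {
        public static void main(String[] args) {
            TableEnvironment tEnv = TableEnvironment.create(
                    EnvironmentSettings.newInstance().inBatchMode().build());

            TableSink<?> configuredSink = new CsvTableSink("/tmp/out.csv")
                    .configure(new String[]{"f0"}, new TypeInformation[]{Types.STRING});

            // Before this patch (deprecated public API, removed above):
            //   tEnv.registerTableSink("sinkTable", configuredSink);
            // After this patch, the method exists only on the internal interface:
            ((TableEnvironmentInternal) tEnv).registerTableSinkInternal("sinkTable", configuredSink);
        }
    }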
diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/TableEnvironmentImpl.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/TableEnvironmentImpl.java
index 4bae503af8594..f1aab3038be42 100644
--- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/TableEnvironmentImpl.java
+++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/TableEnvironmentImpl.java
@@ -21,7 +21,6 @@
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.VisibleForTesting;
 import org.apache.flink.api.common.JobExecutionResult;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.dag.Pipeline;
 import org.apache.flink.api.dag.Transformation;
 import org.apache.flink.core.execution.JobClient;
@@ -428,32 +427,6 @@ private void createTemporaryView(UnresolvedIdentifier identifier, Table view) {
 		catalogManager.createTemporaryTable(tableTable, tableIdentifier, false);
 	}
 
-	@Override
-	public void registerTableSource(String name, TableSource<?> tableSource) {
-		// only accept StreamTableSource and LookupableTableSource here
-		// TODO should add a validation, while StreamTableSource is in flink-table-api-java-bridge module now
-		registerTableSourceInternal(name, tableSource);
-	}
-
-	@Override
-	public void registerTableSink(
-			String name,
-			String[] fieldNames,
-			TypeInformation<?>[] fieldTypes,
-			TableSink<?> tableSink) {
-		registerTableSink(name, tableSink.configure(fieldNames, fieldTypes));
-	}
-
-	@Override
-	public void registerTableSink(String name, TableSink<?> configuredSink) {
-		// validate
-		if (configuredSink.getTableSchema().getFieldCount() == 0) {
-			throw new TableException("Table schema cannot be empty.");
-		}
-
-		registerTableSinkInternal(name, configuredSink);
-	}
-
 	@Override
 	public Table scan(String... tablePath) {
 		UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(tablePath);
@@ -1182,7 +1155,8 @@ protected ExplainDetail[] getExplainDetails(boolean extended) {
 		}
 	}
 
-	private void registerTableSourceInternal(String name, TableSource<?> tableSource) {
+	@Override
+	public void registerTableSourceInternal(String name, TableSource<?> tableSource) {
 		validateTableSource(tableSource);
 		ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
 	Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
@@ -1212,7 +1186,8 @@ private void registerTableSourceInternal(String name, TableSource<?> tableSource
 		}
 	}
 
-	private void registerTableSinkInternal(String name, TableSink<?> tableSink) {
+	@Override
+	public void registerTableSinkInternal(String name, TableSink<?> tableSink) {
 		ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
 	Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
 
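Every production TableEnvironment touched by this patch implements TableEnvironmentInternal, so the unchecked casts at the call sites cannot fail there; arbitrary implementations (for example the test-only MockTableEnvironment, whose overrides this patch deletes) give no such guarantee. A defensive variant of the cast, as a sketch (the helper class and method are hypothetical):

    import org.apache.flink.table.api.TableEnvironment;
    import org.apache.flink.table.api.TableException;
    import org.apache.flink.table.api.internal.TableEnvironmentInternal;

    final class TableEnvCasts {
        private TableEnvCasts() {}

        // Fails with a readable message instead of a raw ClassCastException
        // when the environment does not expose the internal interface.
        static TableEnvironmentInternal asInternal(TableEnvironment tEnv) {
            if (tEnv instanceof TableEnvironmentInternal) {
                return (TableEnvironmentInternal) tEnv;
            }
            throw new TableException(
                    "Legacy TableSource/TableSink registration requires a TableEnvironmentInternal, but got: "
                            + tEnv.getClass().getName());
        }
    }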
diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/TableEnvironmentInternal.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/TableEnvironmentInternal.java
index 0d587ab201646..370489b45b3f8 100644
--- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/TableEnvironmentInternal.java
+++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/TableEnvironmentInternal.java
@@ -27,6 +27,8 @@
 import org.apache.flink.table.operations.ModifyOperation;
 import org.apache.flink.table.operations.Operation;
 import org.apache.flink.table.operations.QueryOperation;
+import org.apache.flink.table.sinks.TableSink;
+import org.apache.flink.table.sources.TableSource;
 
 import java.util.List;
 
@@ -38,7 +40,7 @@
  * By then, these methods can be moved into TableEnvironmentImpl.
  */
 @Internal
-interface TableEnvironmentInternal extends TableEnvironment {
+public interface TableEnvironmentInternal extends TableEnvironment {
 
 	/**
 	 * Return a {@link Parser} that provides methods for parsing a SQL string.
@@ -79,4 +81,31 @@ interface TableEnvironmentInternal extends TableEnvironment {
 	 */
 	String explainInternal(List<Operation> operations, ExplainDetail... extraDetails);
 
+
+	/**
+	 * Registers an external {@link TableSource} in this {@link TableEnvironment}'s catalog.
+	 * Registered tables can be referenced in SQL queries.
+	 *
+	 * <p>
Temporary objects can shadow permanent ones. If a permanent object in a given path exists, it will + * be inaccessible in the current session. To make the permanent object available again one can drop the + * corresponding temporary object. + * + * @param name The name under which the {@link TableSource} is registered. + * @param tableSource The {@link TableSource} to register. + */ + void registerTableSourceInternal(String name, TableSource tableSource); + + /** + * Registers an external {@link TableSink} with already configured field names and field types in + * this {@link TableEnvironment}'s catalog. + * Registered sink tables can be referenced in SQL DML statements. + * + *

Temporary objects can shadow permanent ones. If a permanent object in a given path exists, it will + * be inaccessible in the current session. To make the permanent object available again one can drop the + * corresponding temporary object. + * + * @param name The name under which the {@link TableSink} is registered. + * @param configuredSink The configured {@link TableSink} to register. + */ + void registerTableSinkInternal(String name, TableSink configuredSink); } diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/descriptors/ConnectTableDescriptor.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/descriptors/ConnectTableDescriptor.java index 7208084f4f62b..2b38bfdeb251b 100644 --- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/descriptors/ConnectTableDescriptor.java +++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/descriptors/ConnectTableDescriptor.java @@ -86,7 +86,7 @@ public void createTemporaryTable(String path) { if (schemaDescriptor == null) { throw new TableException( "Table schema must be explicitly defined. To derive schema from the underlying connector" + - " use registerTableSource/registerTableSink/registerTableSourceAndSink."); + " use registerTableSourceInternal/registerTableSinkInternal/registerTableSourceAndSink."); } registration.createTemporaryTable(path, CatalogTableImpl.fromProperties(toProperties())); diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/TableEnvironmentITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/TableEnvironmentITCase.scala index d3c90347dcc75..a90b2c5b83732 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/TableEnvironmentITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/TableEnvironmentITCase.scala @@ -22,7 +22,7 @@ import org.apache.flink.api.common.typeinfo.Types.STRING import org.apache.flink.api.scala._ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment => ScalaStreamExecutionEnvironment} -import org.apache.flink.table.api.internal.TableEnvironmentImpl +import org.apache.flink.table.api.internal.{TableEnvironmentImpl, TableEnvironmentInternal} import org.apache.flink.table.api.java.StreamTableEnvironment import org.apache.flink.table.api.scala.{StreamTableEnvironment => ScalaStreamTableEnvironment, _} import org.apache.flink.table.planner.factories.utils.TestCollectionTableFactory @@ -31,16 +31,13 @@ import org.apache.flink.table.planner.utils.TableTestUtil.{readFromResource, rep import org.apache.flink.table.planner.utils.{TableTestUtil, TestTableSourceSinks, TestTableSourceWithTime} import org.apache.flink.types.Row import org.apache.flink.util.{FileUtils, TestLogger} - import org.apache.flink.shaded.guava18.com.google.common.collect.Lists - import org.hamcrest.Matchers.containsString import org.junit.Assert.{assertEquals, assertFalse, assertTrue} import org.junit.rules.{ExpectedException, TemporaryFolder} import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.{Assert, Before, Rule, Test} - import _root_.java.io.{File, FileFilter} import _root_.java.lang.{Long => JLong} import _root_.java.util @@ -619,7 +616,7 @@ class TableEnvironmentITCase(tableEnvName: String, isStreaming: Boolean) extends val sourceType = Types.STRING 
val tableSource = new TestTableSourceWithTime(true, schema, sourceType, data, null, "pt") // TODO refactor this after FLINK-16160 is finished - tEnv.registerTableSource("T", tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("T", tableSource) val tableResult = tEnv.executeSql("select * from T") assertTrue(tableResult.getJobClient.isPresent) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/batch/ExplainTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/batch/ExplainTest.scala index bc0e840c33c58..b60799d1d9722 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/batch/ExplainTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/batch/ExplainTest.scala @@ -21,10 +21,10 @@ package org.apache.flink.table.api.batch import org.apache.flink.api.scala._ import org.apache.flink.table.api.ExplainDetail import org.apache.flink.table.api.config.ExecutionConfigOptions +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.planner.utils.TableTestBase import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType} - import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.{Before, Test} @@ -106,12 +106,12 @@ class ExplainTest(extended: Boolean) extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT * FROM TempTable WHERE cnt > 10") val sink1 = util.createCollectTableSink(Array("a", "cnt"), Array(INT, LONG)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table1) val table2 = util.tableEnv.sqlQuery("SELECT * FROM TempTable WHERE cnt < 10") val sink2 = util.createCollectTableSink(Array("a", "cnt"), Array(INT, LONG)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table2) util.verifyExplain(stmtSet, extraDetails: _*) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/stream/ExplainTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/stream/ExplainTest.scala index 8c317afc9c53b..ee7222d0c85bf 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/stream/ExplainTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/stream/ExplainTest.scala @@ -24,13 +24,13 @@ import org.apache.flink.table.api.config.ExecutionConfigOptions import org.apache.flink.table.api.scala._ import org.apache.flink.table.planner.utils.TableTestBase import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType} - import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.{Before, Test} - import java.sql.Timestamp +import org.apache.flink.table.api.internal.TableEnvironmentInternal + @RunWith(classOf[Parameterized]) class ExplainTest(extended: Boolean) extends TableTestBase { @@ -105,12 +105,14 @@ class ExplainTest(extended: Boolean) extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT * FROM TempTable WHERE cnt > 10") val upsertSink1 = util.createUpsertTableSink(Array(0), Array("a", "cnt"), Array(INT, LONG)) - 
util.tableEnv.registerTableSink("upsertSink1", upsertSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink1", upsertSink1) stmtSet.addInsert("upsertSink1", table1) val table2 = util.tableEnv.sqlQuery("SELECT * FROM TempTable WHERE cnt < 10") val upsertSink2 = util.createUpsertTableSink(Array(0), Array("a", "cnt"), Array(INT, LONG)) - util.tableEnv.registerTableSink("upsertSink2", upsertSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink2", upsertSink2) stmtSet.addInsert("upsertSink2", table2) util.verifyExplain(stmtSet, extraDetails: _*) @@ -146,7 +148,8 @@ class ExplainTest(extended: Boolean) extends TableTestBase { |GROUP BY id1, TUMBLE(ts, INTERVAL '8' SECOND) """.stripMargin) val appendSink1 = util.createAppendTableSink(Array("a", "b"), Array(INT, STRING)) - util.tableEnv.registerTableSink("appendSink1", appendSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink1", appendSink1) stmtSet.addInsert("appendSink1", table1) val table2 = util.tableEnv.sqlQuery( @@ -156,7 +159,8 @@ class ExplainTest(extended: Boolean) extends TableTestBase { |GROUP BY id1, HOP(ts, INTERVAL '12' SECOND, INTERVAL '6' SECOND) """.stripMargin) val appendSink2 = util.createAppendTableSink(Array("a", "b"), Array(INT, STRING)) - util.tableEnv.registerTableSink("appendSink2", appendSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink2", appendSink2) stmtSet.addInsert("appendSink2", table2) util.verifyExplain(stmtSet, extraDetails: _*) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/DagOptimizationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/DagOptimizationTest.scala index 331f52a1e9ece..a25300824795c 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/DagOptimizationTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/DagOptimizationTest.scala @@ -23,11 +23,11 @@ import org.apache.flink.table.planner.plan.optimize.RelNodeBlockPlanBuilder import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.NonDeterministicUdf import org.apache.flink.table.planner.utils.{TableFunc1, TableTestBase} import org.apache.flink.table.types.logical._ - import org.junit.Test - import java.sql.Timestamp +import org.apache.flink.table.api.internal.TableEnvironmentInternal + class DagOptimizationTest extends TableTestBase { private val util = batchTestUtil() util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c) @@ -136,11 +136,11 @@ class DagOptimizationTest extends TableTestBase { val table3 = util.tableEnv.sqlQuery("SELECT MIN(sum_a) AS total_min FROM table1") val sink1 = util.createCollectTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table2) val sink2 = util.createCollectTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table3) util.verifyPlan(stmtSet) @@ -160,11 +160,11 @@ class DagOptimizationTest extends TableTestBase { val 
table3 = util.tableEnv.sqlQuery("SELECT * FROM table1 UNION ALL SELECT * FROM table2") val sink1 = util.createCollectTableSink(Array("a", "b1"), Array(INT, LONG)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table3) val sink2 = util.createCollectTableSink(Array("a", "b1"), Array(INT, LONG)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table3) util.verifyPlan(stmtSet) @@ -184,11 +184,11 @@ class DagOptimizationTest extends TableTestBase { val table3 = util.tableEnv.sqlQuery("SELECT * FROM table1 UNION ALL SELECT * FROM table2") val sink1 = util.createCollectTableSink(Array("a", "b1"), Array(INT, LONG)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table2) val sink2 = util.createCollectTableSink(Array("a", "b1"), Array(INT, LONG)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table3) util.verifyPlan(stmtSet) @@ -213,11 +213,11 @@ class DagOptimizationTest extends TableTestBase { val table6 = util.tableEnv.sqlQuery("SELECT a1, b, c1 FROM table4, table5 WHERE a1 = a3") val sink1 = util.createCollectTableSink(Array("a1", "b", "c2"), Array(INT, LONG, STRING)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table5) val sink2 = util.createCollectTableSink(Array("a1", "b", "c1"), Array(INT, LONG, STRING)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table6) util.verifyPlan(stmtSet) @@ -236,11 +236,11 @@ class DagOptimizationTest extends TableTestBase { val table3 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM table1") val sink1 = util.createCollectTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table2) val sink2 = util.createCollectTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table3) util.verifyPlan(stmtSet) @@ -255,7 +255,7 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT a, b, c FROM MyTable WHERE c LIKE '%hello%'") util.tableEnv.registerTable("TempTable1", table1) val sink1 = util.createCollectTableSink(Array("a", "b", "c"), Array(INT, LONG, STRING)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table1) val table2 = util.tableEnv.sqlQuery("SELECT a, b, c FROM MyTable WHERE c LIKE '%world%'") @@ -275,12 +275,12 @@ class DagOptimizationTest extends TableTestBase { val table4 = util.tableEnv.sqlQuery("SELECT b, cnt FROM TempTable3 WHERE b < 4") val sink2 = util.createCollectTableSink(Array("b", "cnt"), Array(LONG, LONG)) - 
util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table4) val table5 = util.tableEnv.sqlQuery("SELECT b, cnt FROM TempTable3 WHERE b >=4 AND b < 6") val sink3 = util.createCollectTableSink(Array("b", "cnt"), Array(LONG, LONG)) - util.tableEnv.registerTableSink("sink3", sink3) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink3", sink3) stmtSet.addInsert("sink3", table5) util.verifyPlan(stmtSet) @@ -316,13 +316,13 @@ class DagOptimizationTest extends TableTestBase { val sqlQuery5 = "SELECT * FROM table4 WHERE a > 50" val table5 = util.tableEnv.sqlQuery(sqlQuery5) val sink1 = util.createCollectTableSink(Array("a", "total_c"), Array(INT, LONG)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table5) val sqlQuery6 = "SELECT * FROM table4 WHERE a < 50" val table6 = util.tableEnv.sqlQuery(sqlQuery6) val sink2 = util.createCollectTableSink(Array("a", "total_c"), Array(INT, LONG)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table6) util.verifyPlan(stmtSet) @@ -340,12 +340,12 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT SUM(a) AS total_sum FROM TempTable") val sink1 = util.createCollectTableSink( Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table1) val table3 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM TempTable") val sink2 = util.createCollectTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table3) util.verifyPlan(stmtSet) @@ -373,18 +373,18 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT SUM(a) AS total_sum FROM TempTable") val sink1 = util.createCollectTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table1) val table2 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM TempTable") val sink2 = util.createCollectTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table2) val sqlQuery2 = "SELECT a FROM (SELECT a, c FROM MyTable UNION ALL SELECT d, f FROM MyTable1)" val table3 = util.tableEnv.sqlQuery(sqlQuery2) val sink3 = util.createCollectTableSink(Array("a"), Array(INT)) - util.tableEnv.registerTableSink("sink3", sink3) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink3", sink3) stmtSet.addInsert("sink3", table3) util.verifyPlan(stmtSet) @@ -402,7 +402,7 @@ class DagOptimizationTest extends TableTestBase { util.tableEnv.registerTable("TempTable", table) val sink1 = util.createCollectTableSink(Array("a", "c"), Array(INT, STRING)) - util.tableEnv.registerTableSink("sink1", sink1) + 
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table) val sqlQuery2 = "SELECT a, c FROM TempTable UNION ALL SELECT a, c FROM MyTable2" @@ -411,12 +411,12 @@ class DagOptimizationTest extends TableTestBase { val table2 = util.tableEnv.sqlQuery("SELECT SUM(a) AS total_sum FROM TempTable1") val sink2 = util.createCollectTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table2) val table3 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM TempTable1") val sink3 = util.createCollectTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("sink3", sink3) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink3", sink3) stmtSet.addInsert("sink3", table3) util.verifyPlan(stmtSet) @@ -442,12 +442,12 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT SUM(a) AS total_sum FROM TempTable") val sink1 = util.createCollectTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table1) val table2 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM TempTable") val sink2 = util.createCollectTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table2) util.verifyPlan(stmtSet) @@ -487,13 +487,13 @@ class DagOptimizationTest extends TableTestBase { val sink1 = util.createCollectTableSink( Array("a", "sum_c", "time", "window_start", "window_end"), Array(INT, DOUBLE, TIMESTAMP, TIMESTAMP, TIMESTAMP)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table1) val table2 = util.tableEnv.sqlQuery(sqlQuery2) val sink2 = util.createCollectTableSink(Array("a", "sum_c", "time"), Array(INT, DOUBLE, TIMESTAMP)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table2) util.verifyPlan(stmtSet) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/LegacySinkTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/LegacySinkTest.scala index 65ab4ad0378b0..d50b0ebe25e9d 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/LegacySinkTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/LegacySinkTest.scala @@ -19,11 +19,11 @@ package org.apache.flink.table.planner.plan.batch.sql import org.apache.flink.api.scala._ +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.planner.plan.optimize.RelNodeBlockPlanBuilder import org.apache.flink.table.planner.utils.TableTestBase import org.apache.flink.table.types.logical.{BigIntType, IntType} - import org.junit.Test class LegacySinkTest extends TableTestBase { @@ 
-52,11 +52,11 @@ class LegacySinkTest extends TableTestBase { val table3 = util.tableEnv.sqlQuery("SELECT MIN(sum_a) AS total_min FROM table1") val sink1 = util.createCollectTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("sink1", sink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1) stmtSet.addInsert("sink1", table2) val sink2 = util.createCollectTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("sink2", sink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2) stmtSet.addInsert("sink2", table3) util.verifyPlan(stmtSet) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/TableSourceTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/TableSourceTest.scala index 30ecf0e3436fb..0b9392f2efde7 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/TableSourceTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/TableSourceTest.scala @@ -20,6 +20,7 @@ package org.apache.flink.table.planner.plan.batch.sql import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.typeutils.RowTypeInfo +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.{DataTypes, TableException, TableSchema, Types, ValidationException} import org.apache.flink.table.planner.expressions.utils.Func1 import org.apache.flink.table.planner.utils._ @@ -36,7 +37,8 @@ class TableSourceTest extends TableTestBase { @Before def setup(): Unit = { - util.tableEnv.registerTableSource("ProjectableTable", new TestProjectableTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "ProjectableTable", new TestProjectableTableSource( true, tableSchema, new RowTypeInfo( @@ -102,7 +104,7 @@ class TableSourceTest extends TableTestBase { Array(Types.INT, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]], Array("id", "deepNested", "nested", "name")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestNestedProjectableTableSource(true, tableSchema, returnType, Seq())) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/PushProjectIntoLegacyTableSourceScanRuleTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/PushProjectIntoLegacyTableSourceScanRuleTest.scala index c62c8d80f3474..1fddf845be6b6 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/PushProjectIntoLegacyTableSourceScanRuleTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/PushProjectIntoLegacyTableSourceScanRuleTest.scala @@ -24,9 +24,9 @@ import org.apache.flink.table.api.{TableSchema, Types} import org.apache.flink.table.planner.expressions.utils.Func0 import org.apache.flink.table.planner.plan.optimize.program.{FlinkBatchProgram, FlinkHepRuleSetProgramBuilder, HEP_RULES_EXECUTION_TYPE} import org.apache.flink.table.planner.utils.{TableConfigUtils, TableTestBase, TestNestedProjectableTableSource, 
TestProjectableTableSource} - import org.apache.calcite.plan.hep.HepMatchOrder import org.apache.calcite.tools.RuleSets +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.junit.{Before, Test} /** @@ -138,7 +138,7 @@ class PushProjectIntoLegacyTableSourceScanRuleTest extends TableTestBase { Array(Types.INT, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]], Array("id", "deepNested", "nested", "name")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestNestedProjectableTableSource(true, tableSchema, returnType, Seq())) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/DagOptimizationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/DagOptimizationTest.scala index 0a05700be99c1..45ab089a412dc 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/DagOptimizationTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/DagOptimizationTest.scala @@ -19,12 +19,12 @@ package org.apache.flink.table.planner.plan.stream.sql import org.apache.flink.api.scala._ import org.apache.flink.table.api.ExplainDetail +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.planner.plan.optimize.RelNodeBlockPlanBuilder import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.NonDeterministicUdf import org.apache.flink.table.planner.utils.{TableFunc1, TableTestBase} import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType} - import org.junit.Test class DagOptimizationTest extends TableTestBase { @@ -136,11 +136,13 @@ class DagOptimizationTest extends TableTestBase { val table3 = util.tableEnv.sqlQuery("SELECT MIN(sum_a) AS total_min FROM table1") val retractSink1 = util.createRetractTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("retractSink1", retractSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink1", retractSink1) stmtSet.addInsert("retractSink1", table2) val retractSink2 = util.createRetractTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("retractSink2", retractSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink2", retractSink2) stmtSet.addInsert("retractSink2", table3) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -160,11 +162,13 @@ class DagOptimizationTest extends TableTestBase { val table3 = util.tableEnv.sqlQuery("SELECT * FROM table1 UNION ALL SELECT * FROM table2") val appendSink1 = util.createAppendTableSink(Array("a", "b1"), Array(INT, LONG)) - util.tableEnv.registerTableSink("appendSink1", appendSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink1", appendSink1) stmtSet.addInsert("appendSink1", table3) val appendSink2 = util.createAppendTableSink(Array("a", "b1"), Array(INT, LONG)) - util.tableEnv.registerTableSink("appendSink2", appendSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink2", appendSink2) stmtSet.addInsert("appendSink2", table3) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -184,11 +188,13 @@ class 
DagOptimizationTest extends TableTestBase { val table3 = util.tableEnv.sqlQuery("SELECT * FROM table1 UNION ALL SELECT * FROM table2") val appendSink1 = util.createAppendTableSink(Array("a", "b1"), Array(INT, LONG)) - util.tableEnv.registerTableSink("appendSink1", appendSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink1", appendSink1) stmtSet.addInsert("appendSink1", table2) val appendSink2 = util.createAppendTableSink(Array("a", "b1"), Array(INT, LONG)) - util.tableEnv.registerTableSink("appendSink2", appendSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink2", appendSink2) stmtSet.addInsert("appendSink2", table3) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -210,11 +216,13 @@ class DagOptimizationTest extends TableTestBase { val table6 = util.tableEnv.sqlQuery("SELECT a1, b, c1 FROM table4, table5 WHERE a1 = a3") val appendSink1 = util.createAppendTableSink(Array("a1", "b", "c2"), Array(INT, LONG, STRING)) - util.tableEnv.registerTableSink("appendSink1", appendSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink1", appendSink1) stmtSet.addInsert("appendSink1", table5) val appendSink2 = util.createAppendTableSink(Array("a1", "b", "c1"), Array(INT, LONG, STRING)) - util.tableEnv.registerTableSink("appendSink2", appendSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink2", appendSink2) stmtSet.addInsert("appendSink2", table6) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -233,11 +241,13 @@ class DagOptimizationTest extends TableTestBase { val table3 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM table1") val retractSink1 = util.createRetractTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("retractSink1", retractSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink1", retractSink1) stmtSet.addInsert("retractSink1", table2) val retractSink2 = util.createRetractTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("retractSink2", retractSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink2", retractSink2) stmtSet.addInsert("retractSink2", table3) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -273,13 +283,15 @@ class DagOptimizationTest extends TableTestBase { val sqlQuery5 = "SELECT * FROM table4 WHERE a > 50" val table5 = util.tableEnv.sqlQuery(sqlQuery5) val retractSink1 = util.createRetractTableSink(Array("a", "total_c"), Array(INT, LONG)) - util.tableEnv.registerTableSink("retractSink1", retractSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink1", retractSink1) stmtSet.addInsert("retractSink1", table5) val sqlQuery6 = "SELECT * FROM table4 WHERE a < 50" val table6 = util.tableEnv.sqlQuery(sqlQuery6) val retractSink2 = util.createRetractTableSink(Array("a", "total_c"), Array(INT, LONG)) - util.tableEnv.registerTableSink("retractSink2", retractSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink2", retractSink2) stmtSet.addInsert("retractSink2", table6) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -297,12 +309,14 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT SUM(a) AS total_sum FROM TempTable") val upsertSink = 
util.createUpsertTableSink(Array(), Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("upsertSink", upsertSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSinkInternal("upsertSink", upsertSink) stmtSet.addInsert("upsertSink", table1) val table3 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM TempTable") val retractSink = util.createRetractTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("retractSink", retractSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink", retractSink) stmtSet.addInsert("retractSink", table3) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -330,18 +344,21 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT SUM(a) AS total_sum FROM TempTable") val retractSink1 = util.createRetractTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("retractSink1", retractSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink1", retractSink1) stmtSet.addInsert("retractSink1", table1) val table2 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM TempTable") val retractSink2 = util.createRetractTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("retractSink2", retractSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink2", retractSink2) stmtSet.addInsert("retractSink2", table2) val sqlQuery2 = "SELECT a FROM (SELECT a, c FROM MyTable UNION ALL SELECT d, f FROM MyTable1)" val table3 = util.tableEnv.sqlQuery(sqlQuery2) val appendSink3 = util.createAppendTableSink(Array("a"), Array(INT)) - util.tableEnv.registerTableSink("appendSink3", appendSink3) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink3", appendSink3) stmtSet.addInsert("appendSink3", table3) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -359,7 +376,8 @@ class DagOptimizationTest extends TableTestBase { util.tableEnv.registerTable("TempTable", table) val appendSink = util.createAppendTableSink(Array("a", "c"), Array(INT, STRING)) - util.tableEnv.registerTableSink("appendSink", appendSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSinkInternal("appendSink", appendSink) stmtSet.addInsert("appendSink", table) val sqlQuery2 = "SELECT a, c FROM TempTable UNION ALL SELECT a, c FROM MyTable2" @@ -368,12 +386,14 @@ class DagOptimizationTest extends TableTestBase { val table2 = util.tableEnv.sqlQuery("SELECT SUM(a) AS total_sum FROM TempTable1") val retractSink = util.createRetractTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("retractSink", retractSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink", retractSink) stmtSet.addInsert("retractSink", table2) val table3 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM TempTable1") val upsertSink = util.createUpsertTableSink(Array(), Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("upsertSink", upsertSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSinkInternal("upsertSink", upsertSink) stmtSet.addInsert("upsertSink", table3) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -399,12 +419,14 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT SUM(a) AS total_sum FROM TempTable") val 
upsertSink = util.createUpsertTableSink(Array(), Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("upsertSink", upsertSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink", upsertSink) stmtSet.addInsert("upsertSink", table1) val table2 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM TempTable") val retractSink = util.createRetractTableSink(Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("retractSink", retractSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink", retractSink) stmtSet.addInsert("retractSink", table2) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -442,12 +464,14 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery(sqlQuery) val retractSink = util.createRetractTableSink( Array("a", "b", "c", "rank_num"), Array(INT, LONG, STRING, LONG)) - util.tableEnv.registerTableSink("retractSink", retractSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink", retractSink) stmtSet.addInsert("retractSink", table1) val upsertSink = util.createUpsertTableSink(Array(), Array("a", "b"), Array(INT, LONG)) val table2 = util.tableEnv.sqlQuery("SELECT a, b FROM TempTable WHERE a < 6") - util.tableEnv.registerTableSink("upsertSink", upsertSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink", upsertSink) stmtSet.addInsert("upsertSink", table2) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -469,12 +493,14 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT a FROM TempTable WHERE a > 6") val retractSink = util.createRetractTableSink(Array("a"), Array(INT)) - util.tableEnv.registerTableSink("retractSink", retractSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink", retractSink) stmtSet.addInsert("retractSink", table1) val table2 = util.tableEnv.sqlQuery("SELECT a, b FROM TempTable WHERE a < 6") val upsertSink = util.createUpsertTableSink(Array(), Array("a", "b"), Array(INT, LONG)) - util.tableEnv.registerTableSink("upsertSink", upsertSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink", upsertSink) stmtSet.addInsert("upsertSink", table2) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -489,7 +515,8 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT a, b, c FROM MyTable WHERE c LIKE '%hello%'") util.tableEnv.registerTable("TempTable1", table1) val appendSink = util.createAppendTableSink(Array("a", "b", "c"), Array(INT, LONG, STRING)) - util.tableEnv.registerTableSink("appendSink", appendSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink", appendSink) stmtSet.addInsert("appendSink", table1) val table2 = util.tableEnv.sqlQuery("SELECT a, b, c FROM MyTable WHERE c LIKE '%world%'") @@ -509,12 +536,14 @@ class DagOptimizationTest extends TableTestBase { val table4 = util.tableEnv.sqlQuery("SELECT b, cnt FROM TempTable3 WHERE b < 4") val retractSink = util.createRetractTableSink(Array("b", "cnt"), Array(LONG, LONG)) - util.tableEnv.registerTableSink("retractSink", retractSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink", retractSink) stmtSet.addInsert("retractSink", table4) val table5 = 
util.tableEnv.sqlQuery("SELECT b, cnt FROM TempTable3 WHERE b >=4 AND b < 6") val upsertSink = util.createUpsertTableSink(Array(), Array("b", "cnt"), Array(LONG, LONG)) - util.tableEnv.registerTableSink("upsertSink", upsertSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink", upsertSink) stmtSet.addInsert("upsertSink", table5) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -529,7 +558,8 @@ class DagOptimizationTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT a, b, c FROM MyTable WHERE c LIKE '%hello%'") util.tableEnv.registerTable("TempTable1", table1) val appendSink = util.createAppendTableSink(Array("a", "b", "c"), Array(INT, LONG, STRING)) - util.tableEnv.registerTableSink("appendSink", appendSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink", appendSink) stmtSet.addInsert("appendSink", table1) val table2 = util.tableEnv.sqlQuery("SELECT a, b, c FROM MyTable WHERE c LIKE '%world%'") @@ -546,7 +576,8 @@ class DagOptimizationTest extends TableTestBase { val table4 = util.tableEnv.sqlQuery("SELECT * FROM TempTable3 WHERE b >= 5") val retractSink1 = util.createRetractTableSink(Array("a", "b", "c"), Array(INT, LONG, STRING)) - util.tableEnv.registerTableSink("retractSink1", retractSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink1", retractSink1) stmtSet.addInsert("retractSink1", table4) val table5 = util.tableEnv.sqlQuery("SELECT b, count(a) as cnt FROM TempTable3 GROUP BY b") @@ -554,12 +585,14 @@ class DagOptimizationTest extends TableTestBase { val table6 = util.tableEnv.sqlQuery("SELECT b, cnt FROM TempTable4 WHERE b < 4") val retractSink2 = util.createRetractTableSink(Array("b", "cnt"), Array(LONG, LONG)) - util.tableEnv.registerTableSink("retractSink2", retractSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink2", retractSink2) stmtSet.addInsert("retractSink2", table6) util.tableEnv.sqlQuery("SELECT b, cnt FROM TempTable4 WHERE b >=4 AND b < 6") val upsertSink = util.createUpsertTableSink(Array(), Array("b", "cnt"), Array(LONG, LONG)) - util.tableEnv.registerTableSink("upsertSink", upsertSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink", upsertSink) stmtSet.addInsert("upsertSink", table6) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/LegacySinkTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/LegacySinkTest.scala index c3fdeb71e0780..1f72b868d8c16 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/LegacySinkTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/LegacySinkTest.scala @@ -19,11 +19,11 @@ package org.apache.flink.table.planner.plan.stream.sql import org.apache.flink.api.scala._ +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{ExplainDetail, TableException} import org.apache.flink.table.planner.utils.TableTestBase import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType} - import org.junit.Test class LegacySinkTest extends TableTestBase { @@ -53,13 +53,15 @@ class 
LegacySinkTest extends TableTestBase { util.tableEnv.createTemporaryView("TempTable", table) val retractSink = util.createRetractTableSink(Array("cnt"), Array(LONG)) - util.tableEnv.registerTableSink("retractSink1", retractSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink1", retractSink) stmtSet.addInsert("retractSink1", table) val table2 = util.tableEnv.sqlQuery( "SELECT cnt, SUM(cnt) OVER (ORDER BY PROCTIME()) FROM TempTable") val retractSink2 = util.createRetractTableSink(Array("cnt", "total"), Array(LONG, LONG)) - util.tableEnv.registerTableSink("retractSink2", retractSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink2", retractSink2) stmtSet.addInsert("retractSink2", table2) thrown.expect(classOf[TableException]) @@ -141,12 +143,14 @@ class LegacySinkTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery("SELECT b, cnt FROM TempTable WHERE b < 4") val retractSink = util.createRetractTableSink(Array("b", "cnt"), Array(LONG, LONG)) - util.tableEnv.registerTableSink("retractSink", retractSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink", retractSink) stmtSet.addInsert("retractSink", table1) val table2 = util.tableEnv.sqlQuery("SELECT b, cnt FROM TempTable WHERE b >= 4 AND b < 6") val upsertSink = util.createUpsertTableSink(Array(), Array("b", "cnt"), Array(LONG, LONG)) - util.tableEnv.registerTableSink("upsertSink", upsertSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink", upsertSink) stmtSet.addInsert("upsertSink", table2) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -161,12 +165,14 @@ class LegacySinkTest extends TableTestBase { val table1 = util.tableEnv.sqlQuery( "SELECT cnt, COUNT(b) AS frequency FROM TempTable WHERE b < 4 GROUP BY cnt") val upsertSink1 = util.createUpsertTableSink(Array(0), Array("b", "cnt"), Array(LONG, LONG)) - util.tableEnv.registerTableSink("upsertSink1", upsertSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink1", upsertSink1) stmtSet.addInsert("upsertSink1", table1) val table2 = util.tableEnv.sqlQuery("SELECT b, cnt FROM TempTable WHERE b >= 4 AND b < 6") val upsertSink2 = util.createUpsertTableSink(Array(), Array("b", "cnt"), Array(LONG, LONG)) - util.tableEnv.registerTableSink("upsertSink2", upsertSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink2", upsertSink2) stmtSet.addInsert("upsertSink2", table2) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) @@ -183,7 +189,8 @@ class LegacySinkTest extends TableTestBase { util.tableEnv.registerTable("TempTable", table) val appendSink = util.createAppendTableSink(Array("a", "b"), Array(INT, LONG)) - util.tableEnv.registerTableSink("appendSink", appendSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink", appendSink) stmtSet.addInsert("appendSink", table) val table1 = util.tableEnv.sqlQuery( @@ -192,12 +199,14 @@ class LegacySinkTest extends TableTestBase { val table2 = util.tableEnv.sqlQuery("SELECT SUM(a) AS total_sum FROM TempTable1") val retractSink = util.createRetractTableSink(Array("total_sum"), Array(INT)) - util.tableEnv.registerTableSink("retractSink", retractSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "retractSink", retractSink) stmtSet.addInsert("retractSink", table2) val 
table3 = util.tableEnv.sqlQuery("SELECT MIN(a) AS total_min FROM TempTable1") val upsertSink = util.createUpsertTableSink(Array(), Array("total_min"), Array(INT)) - util.tableEnv.registerTableSink("upsertSink", upsertSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "upsertSink", upsertSink) stmtSet.addInsert("upsertSink", table3) util.verifyPlan(stmtSet, ExplainDetail.CHANGELOG_MODE) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/MiniBatchIntervalInferTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/MiniBatchIntervalInferTest.scala index 973b32606bef1..fa94d4e06b254 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/MiniBatchIntervalInferTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/MiniBatchIntervalInferTest.scala @@ -22,11 +22,11 @@ import org.apache.flink.api.common.time.Time import org.apache.flink.api.scala._ import org.apache.flink.table.api.TableConfig import org.apache.flink.table.api.config.ExecutionConfigOptions +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.planner.plan.utils.WindowEmitStrategy.{TABLE_EXEC_EMIT_EARLY_FIRE_DELAY, TABLE_EXEC_EMIT_EARLY_FIRE_ENABLED} import org.apache.flink.table.planner.utils.{TableConfigUtils, TableTestBase} import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType} - import org.junit.{Before, Test} class MiniBatchIntervalInferTest extends TableTestBase { @@ -314,7 +314,8 @@ class MiniBatchIntervalInferTest extends TableTestBase { |GROUP BY HOP(ts, INTERVAL '12' SECOND, INTERVAL '4' SECOND), id1 """.stripMargin) val appendSink1 = util.createAppendTableSink(Array("a", "b"), Array(INT, STRING)) - util.tableEnv.registerTableSink("appendSink1", appendSink1) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink1", appendSink1) stmtSet.addInsert("appendSink1", table3) val table4 = util.tableEnv.sqlQuery( @@ -325,7 +326,8 @@ class MiniBatchIntervalInferTest extends TableTestBase { |GROUP BY TUMBLE(ts, INTERVAL '9' SECOND), id1 """.stripMargin) val appendSink2 = util.createAppendTableSink(Array("a", "b"), Array(INT, STRING)) - util.tableEnv.registerTableSink("appendSink2", appendSink2) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink2", appendSink2) stmtSet.addInsert("appendSink2", table4) val table5 = util.tableEnv.sqlQuery( @@ -336,7 +338,8 @@ class MiniBatchIntervalInferTest extends TableTestBase { |GROUP BY id1 """.stripMargin) val appendSink3 = util.createRetractTableSink(Array("a", "b"), Array(INT, LONG)) - util.tableEnv.registerTableSink("appendSink3", appendSink3) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "appendSink3", appendSink3) stmtSet.addInsert("appendSink3", table5) util.verifyExplain(stmtSet) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/TableSourceTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/TableSourceTest.scala index 14de3ca6b6240..f83a2ddfd56fb 100644 --- 
a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/TableSourceTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/TableSourceTest.scala @@ -20,6 +20,7 @@ package org.apache.flink.table.planner.plan.stream.sql import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.typeutils.RowTypeInfo +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.{DataTypes, TableSchema, Types} import org.apache.flink.table.planner.expressions.utils.Func1 import org.apache.flink.table.planner.utils._ @@ -66,7 +67,7 @@ class TableSourceTest extends TableTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "rowtime", "val", "name")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "rowTimeT", new TestTableSourceWithTime[Row](false, tableSchema, returnType, Seq(), rowtime = "rowtime")) @@ -83,7 +84,7 @@ class TableSourceTest extends TableTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "rowtime", "val", "name")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "rowTimeT", new TestTableSourceWithTime[Row](false, tableSchema, returnType, Seq(), rowtime = "rowtime")) @@ -100,7 +101,7 @@ class TableSourceTest extends TableTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "rowtime", "val", "name")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "rowTimeT", new TestTableSourceWithTime[Row](false, tableSchema, returnType, Seq(), rowtime = "rowtime")) @@ -125,7 +126,7 @@ class TableSourceTest extends TableTestBase { Array(Types.INT, Types.LONG, Types.STRING).asInstanceOf[Array[TypeInformation[_]]], Array("id", "val", "name")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "procTimeT", new TestTableSourceWithTime[Row](false, tableSchema, returnType, Seq(), proctime = "pTime")) @@ -142,7 +143,7 @@ class TableSourceTest extends TableTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "name", "val", "rtime")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(false, tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -159,7 +160,7 @@ class TableSourceTest extends TableTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "name", "val", "rtime")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(false, tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -176,7 +177,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(false, tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -192,7 +193,7 @@ class TableSourceTest extends TableTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "rtime", "val", "name")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new 
TestProjectableTableSource(false, tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -208,7 +209,7 @@ class TableSourceTest extends TableTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "rtime", "val", "name")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(false, tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -226,7 +227,7 @@ class TableSourceTest extends TableTestBase { Array("p-rtime", "p-id", "p-name", "p-val")) val mapping = Map("rtime" -> "p-rtime", "id" -> "p-id", "val" -> "p-val", "name" -> "p-name") - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource( false, tableSchema, returnType, Seq(), "rtime", "ptime", mapping)) @@ -259,7 +260,7 @@ class TableSourceTest extends TableTestBase { Array(Types.INT, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]], Array("id", "deepNested", "nested", "name")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestNestedProjectableTableSource(false, tableSchema, returnType, Seq())) @@ -282,7 +283,7 @@ class TableSourceTest extends TableTestBase { Array(Types.INT, Types.STRING).asInstanceOf[Array[TypeInformation[_]]], Array("id", "name")) - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(false, tableSchema, returnType, Seq(), null, null)) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/TableSourceTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/TableSourceTest.scala index cec7edfb24b50..e0d731082a78f 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/TableSourceTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/TableSourceTest.scala @@ -20,11 +20,11 @@ package org.apache.flink.table.planner.plan.stream.table import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.typeutils.RowTypeInfo +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{Over, TableSchema, Tumble, Types} import org.apache.flink.table.planner.utils.{TableTestBase, TestNestedProjectableTableSource, TestProjectableTableSource, TestTableSourceWithTime} import org.apache.flink.types.Row - import org.junit.Test class TableSourceTest extends TableTestBase { @@ -41,7 +41,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rowtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "rowTimeT", new TestTableSourceWithTime[Row]( false, tableSchema, returnType, Seq(), rowtime = "rowtime")) @@ -62,7 +62,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rowtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "rowTimeT", new TestTableSourceWithTime[Row]( false, tableSchema, returnType, Seq(), rowtime = 
"rowtime")) @@ -83,7 +83,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rowtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "rowTimeT", new TestTableSourceWithTime[Row]( false, tableSchema, returnType, Seq(), rowtime = "rowtime")) @@ -107,7 +107,7 @@ class TableSourceTest extends TableTestBase { Array("id", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "procTimeT", new TestTableSourceWithTime[Row]( false, tableSchema, returnType, Seq(), proctime = "proctime")) @@ -127,7 +127,7 @@ class TableSourceTest extends TableTestBase { Array("id", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "procTimeT", new TestTableSourceWithTime[Row]( false, tableSchema, returnType, Seq(), proctime = "proctime")) @@ -150,7 +150,7 @@ class TableSourceTest extends TableTestBase { Array("id", "name", "val", "rtime")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource( false, tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -170,7 +170,7 @@ class TableSourceTest extends TableTestBase { Array("id", "name", "val", "rtime")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource( false, tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -189,7 +189,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource( false, tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -208,7 +208,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource( false, tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -227,7 +227,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource( false, tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -248,7 +248,7 @@ class TableSourceTest extends TableTestBase { val mapping = Map("rtime" -> "p-rtime", "id" -> "p-id", "val" -> "p-val", "name" -> "p-name") val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource( false, tableSchema, returnType, Seq(), "rtime", "ptime", mapping)) @@ -284,7 +284,7 @@ class TableSourceTest extends TableTestBase { Array("id", "deepNested", "nested", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestNestedProjectableTableSource( false, tableSchema, 
returnType, Seq())) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/LegacyTableSinkValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/LegacyTableSinkValidationTest.scala index b86681a373592..51193f68513d9 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/LegacyTableSinkValidationTest.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/LegacyTableSinkValidationTest.scala @@ -21,12 +21,12 @@ package org.apache.flink.table.planner.plan.stream.table.validation import org.apache.flink.api.scala._ import org.apache.flink.streaming.api.TimeCharacteristic import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{DataTypes, TableException, TableSchema, ValidationException} import org.apache.flink.table.planner.runtime.utils.{TableEnvUtil, TestData, TestingAppendSink, TestingUpsertTableSink} import org.apache.flink.table.planner.utils.{MemoryTableSourceSinkUtil, TableTestBase, TableTestUtil} import org.apache.flink.types.Row - import org.junit.Test class LegacyTableSinkValidationTest extends TableTestBase { @@ -63,7 +63,7 @@ class LegacyTableSinkValidationTest extends TableTestBase { .select('len, 'id.count, 'num.sum) val schema = result.getSchema sink.configure(schema.getFieldNames, schema.getFieldTypes) - tEnv.registerTableSink("testSink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("testSink", sink) tEnv.insertInto("testSink", result) // must fail because table is updating table without full key env.execute() diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableScanITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableScanITCase.scala index 134205f092dd3..d34baa8aabdcf 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableScanITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableScanITCase.scala @@ -26,11 +26,11 @@ import org.apache.flink.table.planner.runtime.utils.BatchTestBase import org.apache.flink.table.planner.runtime.utils.BatchTestBase.row import org.apache.flink.table.planner.utils.{TestTableSourceWithTime, WithoutTimeAttributesTableSource} import org.apache.flink.table.runtime.functions.SqlDateTimeUtils.unixTimestampToLocalDateTime - import org.junit.Test - import java.lang.{Integer => JInt} +import org.apache.flink.table.api.internal.TableEnvironmentInternal + class TableScanITCase extends BatchTestBase { @Test @@ -53,7 +53,7 @@ class TableScanITCase extends BatchTestBase { val returnType = Types.STRING val tableSource = new TestTableSourceWithTime(true, schema, returnType, data, null, "ptime") - tEnv.registerTableSource(tableName, tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource) checkResult( s"SELECT name FROM $tableName", @@ -81,7 +81,7 @@ class TableScanITCase extends BatchTestBase { fieldNames) val tableSource = new TestTableSourceWithTime(true, schema, 
rowType, data, "rtime", null) - tEnv.registerTableSource(tableName, tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource) checkResult( s"SELECT * FROM $tableName", diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSourceITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSourceITCase.scala index c3ea5c5209344..71be60d8e2b23 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSourceITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSourceITCase.scala @@ -28,12 +28,13 @@ import org.apache.flink.table.planner.utils.{TestDataTypeTableSource, TestFileIn import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter import org.apache.flink.types.Row import org.junit.{Before, Test} - import java.io.FileWriter import java.lang.{Boolean => JBool, Integer => JInt, Long => JLong} import java.math.{BigDecimal => JDecimal} import java.time.{Instant, LocalDateTime, ZoneId} +import org.apache.flink.table.api.internal.TableEnvironmentInternal + import scala.collection.mutable class TableSourceITCase extends BatchTestBase { @@ -45,7 +46,8 @@ class TableSourceITCase extends BatchTestBase { val tableSchema = TableSchema.builder().fields( Array("a", "b", "c"), Array(DataTypes.INT(), DataTypes.BIGINT(), DataTypes.STRING())).build() - tEnv.registerTableSource("MyTable", new TestProjectableTableSource( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "MyTable", new TestProjectableTableSource( true, tableSchema, new RowTypeInfo( @@ -122,7 +124,7 @@ class TableSourceITCase extends BatchTestBase { Array(Types.LONG, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]], Array("id", "deepNested", "nested", "name")) - tEnv.registerTableSource( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestNestedProjectableTableSource(true, tableSchema, returnType, data)) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/agg/AggregateITCaseBase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/agg/AggregateITCaseBase.scala index c2384c31bdcf7..9d9a284b325f5 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/agg/AggregateITCaseBase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/agg/AggregateITCaseBase.scala @@ -852,7 +852,7 @@ abstract class AggregateITCaseBase(testName: String) extends BatchTestBase { // "7499,ALLEN,SALESMAN,7698,1981-02-20,1600.00,300.00,30$" + // "7521,WARD,SALESMAN,7698,1981-02-22,1250.00,500.00,30", // "csv-test", "tmp") -// tEnv.registerTableSource("emp", +// tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("emp", // CsvTableSource.builder() // .path(csvPath) // .fields(Array("empno", "ename", "job", "mgr", "hiredate", "sal", "comm", "deptno"), diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/join/JoinITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/join/JoinITCase.scala index 
ceb5797c0584c..47a1df2771feb 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/join/JoinITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/join/JoinITCase.scala @@ -34,13 +34,13 @@ import org.apache.flink.table.planner.runtime.utils.TestData._ import org.apache.flink.table.planner.sinks.CollectRowTableSink import org.apache.flink.table.planner.utils.{TestingStatementSet, TestingTableEnvironment} import org.apache.flink.table.runtime.operators.CodeGenOperatorFactory - import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.{Assert, Before, Test} - import java.util +import org.apache.flink.table.api.internal.TableEnvironmentInternal + import scala.collection.JavaConversions._ import scala.collection.Seq @@ -101,7 +101,7 @@ class JoinITCase(expectedJoinType: JoinType) extends BatchTestBase { def testLongHashJoinGenerator(): Unit = { if (expectedJoinType == HashJoin) { val sink = (new CollectRowTableSink).configure(Array("c"), Array(Types.STRING)) - tEnv.registerTableSink("outputTable", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("outputTable", sink) val stmtSet = tEnv.createStatementSet() val table = tEnv.sqlQuery("SELECT c FROM SmallTable3, Table5 WHERE b = e") stmtSet.addInsert("outputTable", table) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/table/LegacyTableSinkITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/table/LegacyTableSinkITCase.scala index cd4ab06972768..85e2b964bf1c3 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/table/LegacyTableSinkITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/table/LegacyTableSinkITCase.scala @@ -24,12 +24,12 @@ import org.apache.flink.table.planner.runtime.utils.TestData._ import org.apache.flink.table.planner.runtime.utils.{BatchTestBase, TestingRetractTableSink, TestingUpsertTableSink} import org.apache.flink.table.planner.utils.MemoryTableSourceSinkUtil import org.apache.flink.test.util.TestBaseUtils - import org.junit.Assert._ import org.junit._ - import java.util.TimeZone +import org.apache.flink.table.api.internal.TableEnvironmentInternal + import scala.collection.JavaConverters._ class LegacyTableSinkITCase extends BatchTestBase { @@ -122,7 +122,8 @@ class LegacyTableSinkITCase extends BatchTestBase { .field("b", DataTypes.DOUBLE()) .build() val sink = new TestingUpsertTableSink(Array(0), TimeZone.getDefault) - tEnv.registerTableSink("testSink", sink.configure(schema.getFieldNames, schema.getFieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "testSink", sink.configure(schema.getFieldNames, schema.getFieldTypes)) registerCollection("MyTable", simpleData2, simpleType2, "a, b", nullableOfSimpleData2) sink } @@ -173,7 +174,8 @@ class LegacyTableSinkITCase extends BatchTestBase { .field("b", DataTypes.DOUBLE()) .build() val sink = new TestingRetractTableSink(TimeZone.getDefault) - tEnv.registerTableSink("testSink", sink.configure(schema.getFieldNames, schema.getFieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "testSink", sink.configure(schema.getFieldNames, schema.getFieldTypes)) registerCollection("MyTable", simpleData2, 
simpleType2, "a, b", nullableOfSimpleData2) sink } diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/AggregateITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/AggregateITCase.scala index 14710de90855e..58867cbbf18ca 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/AggregateITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/AggregateITCase.scala @@ -39,15 +39,15 @@ import org.apache.flink.table.planner.runtime.utils._ import org.apache.flink.table.planner.utils.DateTimeTestUtil.{localDate, localDateTime, localTime => mLocalTime} import org.apache.flink.table.runtime.typeutils.BigDecimalTypeInfo import org.apache.flink.types.{Row, RowKind} - import org.junit.Assert.assertEquals import org.junit._ import org.junit.runner.RunWith import org.junit.runners.Parameterized - import java.lang.{Integer => JInt, Long => JLong} import java.math.{BigDecimal => JBigDecimal} +import org.apache.flink.table.api.internal.TableEnvironmentInternal + import scala.collection.{Seq, mutable} import scala.util.Random @@ -1241,7 +1241,7 @@ class AggregateITCase( val tableSink = new TestingUpsertTableSink(Array(0)).configure( Array[String]("c", "bMax"), Array[TypeInformation[_]](Types.STRING, Types.LONG)) - tEnv.registerTableSink("testSink", tableSink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("testSink", tableSink) execInsertSqlAndWaitResult( """ diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/CalcITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/CalcITCase.scala index c73faf554488a..718a5a6302b1c 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/CalcITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/CalcITCase.scala @@ -22,13 +22,13 @@ import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.typeutils.RowTypeInfo import org.apache.flink.api.scala._ import org.apache.flink.api.scala.typeutils.Types +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.data.{GenericRowData, RowData} import org.apache.flink.table.planner.runtime.utils._ import org.apache.flink.table.runtime.typeutils.RowDataTypeInfo import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType} import org.apache.flink.types.Row - import org.junit.Assert._ import org.junit._ @@ -215,7 +215,7 @@ class CalcITCase extends StreamingTestBase { val result = tEnv.sqlQuery(sqlQuery) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink()) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = List("0,0,0", "1,1,1", "2,2,2") diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/CorrelateITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/CorrelateITCase.scala index 
69dce1fb5291a..6aff49e1ab5e3 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/CorrelateITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/CorrelateITCase.scala @@ -25,12 +25,12 @@ import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedTableFunction import org.apache.flink.table.planner.runtime.utils.{StreamingTestBase, TestSinkUtil, TestingAppendSink, TestingAppendTableSink} import org.apache.flink.table.planner.utils.{RF, TableFunc7} import org.apache.flink.types.Row - import org.junit.Assert.assertEquals import org.junit.{Before, Test} - import java.lang.{Boolean => JBoolean} +import org.apache.flink.table.api.internal.TableEnvironmentInternal + import scala.collection.mutable class CorrelateITCase extends StreamingTestBase { @@ -238,7 +238,7 @@ class CorrelateITCase extends StreamingTestBase { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = List("1,2,,null", "1,3,,null") @@ -259,7 +259,7 @@ class CorrelateITCase extends StreamingTestBase { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = List("3018-06-10", "2018-06-03", "2018-06-01", "2018-06-02") @@ -280,7 +280,7 @@ class CorrelateITCase extends StreamingTestBase { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = List("1,3018-06-10", "1,2018-06-03", "1,2018-06-01", "1,2018-06-02") @@ -300,7 +300,7 @@ class CorrelateITCase extends StreamingTestBase { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = List("a") @@ -320,7 +320,7 @@ class CorrelateITCase extends StreamingTestBase { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") // output two null @@ -341,7 +341,7 @@ class CorrelateITCase extends StreamingTestBase { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = List("1,a") @@ -361,7 +361,7 @@ class CorrelateITCase extends StreamingTestBase { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink) - tEnv.registerTableSink("MySink", sink) + 
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = List("2,null", "3,null") diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/Limit0RemoveITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/Limit0RemoveITCase.scala index 747eaa615b2fb..4c61f608c7cbf 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/Limit0RemoveITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/Limit0RemoveITCase.scala @@ -19,9 +19,9 @@ package org.apache.flink.table.planner.runtime.stream.sql import org.apache.flink.api.scala._ +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.planner.runtime.utils.{StreamingTestBase, TestSinkUtil, TestingAppendTableSink, TestingRetractTableSink} - import org.junit.Assert.assertEquals import org.junit.Test @@ -37,7 +37,7 @@ class Limit0RemoveITCase extends StreamingTestBase() { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink()) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") assertEquals(0, sink.getAppendResults.size) @@ -53,7 +53,7 @@ class Limit0RemoveITCase extends StreamingTestBase() { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink()) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") assertEquals(0, sink.getAppendResults.size) @@ -69,7 +69,7 @@ class Limit0RemoveITCase extends StreamingTestBase() { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink()) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") assertEquals(0, sink.getAppendResults.size) @@ -89,7 +89,7 @@ class Limit0RemoveITCase extends StreamingTestBase() { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink()) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") assertEquals(0, sink.getAppendResults.size) @@ -109,7 +109,7 @@ class Limit0RemoveITCase extends StreamingTestBase() { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink()) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = Seq("1", "2", "3", "4", "5", "6") @@ -130,7 +130,7 @@ class Limit0RemoveITCase extends StreamingTestBase() { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingRetractTableSink()) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") 
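// Sketch only, not part of this patch: every migrated call site above follows
// the same mechanical pattern, downcasting the TableEnvironment to the internal
// interface before registering a test source or sink. A hypothetical helper that
// captures the pattern (the object name and location are illustrative; the patch
// itself keeps the cast inline at each call site):
import org.apache.flink.table.api.TableEnvironment
import org.apache.flink.table.api.internal.TableEnvironmentInternal
import org.apache.flink.table.sinks.TableSink
import org.apache.flink.table.sources.TableSource

object TestTableRegistration {
  // Registers a test source via the internal API, as the migrated tests do.
  def registerSource(tEnv: TableEnvironment, name: String, source: TableSource[_]): Unit =
    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(name, source)

  // Registers a test sink via the internal API, as the migrated tests do.
  def registerSink(tEnv: TableEnvironment, name: String, sink: TableSink[_]): Unit =
    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(name, sink)
}
// Usage at a call site would then read, e.g.:
//   TestTableRegistration.registerSink(tEnv, "MySink", sink)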
assertEquals(0, sink.getRawResults.size) @@ -150,7 +150,7 @@ class Limit0RemoveITCase extends StreamingTestBase() { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingRetractTableSink()) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = Seq("1", "2", "3", "4", "5", "6") @@ -171,7 +171,7 @@ class Limit0RemoveITCase extends StreamingTestBase() { val result = tEnv.sqlQuery(sql) val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink()) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") assertEquals(0, sink.getAppendResults.size) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/RankITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/RankITCase.scala index 46b904b3642b1..a422ed822ff33 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/RankITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/RankITCase.scala @@ -21,12 +21,12 @@ package org.apache.flink.table.planner.runtime.stream.sql import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation} import org.apache.flink.api.java.typeutils.RowTypeInfo import org.apache.flink.api.scala._ +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.StateBackendMode import org.apache.flink.table.planner.runtime.utils.{TestingRetractTableSink, TestingUpsertTableSink, _} import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter.fromDataTypeToTypeInfo import org.apache.flink.types.Row - import org.junit.Assert._ import org.junit.runner.RunWith import org.junit.runners.Parameterized @@ -130,7 +130,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 3)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val expected = List( @@ -192,7 +192,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 3)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val updatedExpected = List( @@ -254,7 +254,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 3)). 
configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val updatedExpected = List( @@ -299,7 +299,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 3)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val updatedExpected = List( @@ -352,7 +352,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 3)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val updatedExpected = List( @@ -396,7 +396,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 3)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val updatedExpected = List( @@ -530,7 +530,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 1)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val expected = List( @@ -588,7 +588,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 1)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val expected = List( @@ -647,7 +647,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val expected = List( @@ -708,7 +708,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0)). 
configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val expected = List( @@ -778,7 +778,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 1)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val expected = List( @@ -841,7 +841,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 3)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val expected = List( @@ -919,7 +919,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 1)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val expected = List( @@ -1003,7 +1003,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingUpsertTableSink(Array(0, 1)). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val expected = List( @@ -1069,7 +1069,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink = new TestingRetractTableSink(). configure(schema.getFieldNames, schema.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(table, "MySink") val expected = List( @@ -1197,7 +1197,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink1 = new TestingUpsertTableSink(Array(0, 3)). configure(schema1.getFieldNames, schema1 .getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink1", sink1) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink1", sink1) execInsertTableAndWaitResult(table1, "MySink1") val table2 = tEnv.sqlQuery( @@ -1213,7 +1213,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink2 = new TestingUpsertTableSink(Array(0, 3)). 
configure(schema2.getFieldNames, schema2 .getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink2", sink2) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink2", sink2) execInsertTableAndWaitResult(table2, "MySink2") val expected1 = List( @@ -1269,7 +1269,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink1 = new TestingRetractTableSink(). configure(schema1.getFieldNames, schema1.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink1", sink1) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink1", sink1) execInsertTableAndWaitResult(table1, "MySink1") val table2 = tEnv.sqlQuery( @@ -1285,7 +1285,7 @@ class RankITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val sink2 = new TestingRetractTableSink(). configure(schema2.getFieldNames, schema2.getFieldDataTypes.map(_.nullable()).map(fromDataTypeToTypeInfo)) - tEnv.registerTableSink("MySink2", sink2) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink2", sink2) execInsertTableAndWaitResult(table2, "MySink2") val expected1 = List( diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/TableScanITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/TableScanITCase.scala index 2843e8e4f02a9..34ab3154bdd1c 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/TableScanITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/TableScanITCase.scala @@ -28,12 +28,12 @@ import org.apache.flink.table.planner.runtime.utils.{StreamingTestBase, TestingA import org.apache.flink.table.planner.utils.{TestPreserveWMTableSource, TestTableSourceWithTime, WithoutTimeAttributesTableSource} import org.apache.flink.types.Row import org.apache.flink.util.Collector - import org.junit.Assert._ import org.junit.Test - import java.lang.{Integer => JInt, Long => JLong} +import org.apache.flink.table.api.internal.TableEnvironmentInternal + class TableScanITCase extends StreamingTestBase { @Test @@ -60,7 +60,7 @@ class TableScanITCase extends StreamingTestBase { val returnType = Types.STRING val tableSource = new TestTableSourceWithTime(false, schema, returnType, data, null, "ptime") - tEnv.registerTableSource(tableName, tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource) val sqlQuery = s"SELECT name FROM $tableName" val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row] @@ -95,7 +95,7 @@ class TableScanITCase extends StreamingTestBase { rowtime = "rowtime", mapping = mapping, existingTs = "ts") - tEnv.registerTableSource(tableName, tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource) val sqlQuery = s""" @@ -137,7 +137,7 @@ class TableScanITCase extends StreamingTestBase { fieldNames) val tableSource = new TestPreserveWMTableSource(schema, rowType, data, "rtime") - tEnv.registerTableSource(tableName, tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource) val sqlQuery = s"SELECT id, name FROM $tableName" val sink = new TestingAppendSink diff --git 
a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/TableSourceITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/TableSourceITCase.scala index 341fec7a5e026..b95a2f337e47d 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/TableSourceITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/TableSourceITCase.scala @@ -27,12 +27,12 @@ import org.apache.flink.table.planner.runtime.utils.BatchTestBase.row import org.apache.flink.table.planner.runtime.utils.{StreamingTestBase, TestData, TestingAppendSink} import org.apache.flink.table.planner.utils.{TestDataTypeTableSource, TestFilterableTableSource, TestInputFormatTableSource, TestNestedProjectableTableSource, TestPartitionableSourceFactory, TestProjectableTableSource, TestStreamTableSource, TestTableSourceSinks} import org.apache.flink.types.Row - import org.junit.Assert._ import org.junit.Test - import java.lang.{Boolean => JBool, Integer => JInt, Long => JLong} +import org.apache.flink.table.api.internal.TableEnvironmentInternal + import scala.collection.mutable class TableSourceITCase extends StreamingTestBase { @@ -53,7 +53,7 @@ class TableSourceITCase extends StreamingTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "name", "val", "rtime")) - tEnv.registerTableSource( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(false, tableSchema, returnType, data, "rtime", "ptime")) @@ -87,7 +87,7 @@ class TableSourceITCase extends StreamingTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "name", "val", "rtime")) - tEnv.registerTableSource( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(false, tableSchema, returnType, data, "rtime", "ptime")) @@ -120,7 +120,7 @@ class TableSourceITCase extends StreamingTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "name", "val", "rtime")) - tEnv.registerTableSource( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(false, tableSchema, returnType, data, "rtime", "ptime")) @@ -153,7 +153,7 @@ class TableSourceITCase extends StreamingTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "rtime", "val", "name")) - tEnv.registerTableSource( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(false, tableSchema, returnType, data, "rtime", "ptime")) @@ -182,7 +182,7 @@ class TableSourceITCase extends StreamingTestBase { .asInstanceOf[Array[TypeInformation[_]]], Array("id", "rtime", "val", "name")) - tEnv.registerTableSource( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(false, tableSchema, returnType, data, "rtime", "ptime")) @@ -216,7 +216,7 @@ class TableSourceITCase extends StreamingTestBase { Array("p-rtime", "p-id", "p-name", "p-val")) val mapping = Map("rtime" -> "p-rtime", "id" -> "p-id", "val" -> "p-val", "name" -> "p-name") - tEnv.registerTableSource( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource( false, tableSchema, returnType, data, "rtime", "ptime", mapping)) @@ -279,7 +279,7 @@ class TableSourceITCase extends StreamingTestBase { 
Array(Types.LONG, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]], Array("id", "deepNested", "nested", "name")) - tEnv.registerTableSource( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestNestedProjectableTableSource(false, tableSchema, returnType, data)) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/UnnestITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/UnnestITCase.scala index 1784dc01c5424..06ebe4cc03178 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/UnnestITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/UnnestITCase.scala @@ -21,12 +21,12 @@ package org.apache.flink.table.planner.runtime.stream.sql import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.scala._ import org.apache.flink.table.api.Types +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.StateBackendMode import org.apache.flink.table.planner.runtime.utils.TimeTestUtil.TimestampAndWatermarkWithOffset import org.apache.flink.table.planner.runtime.utils.{StreamingWithStateTestBase, TestData, TestingAppendSink, TestingRetractSink, TestingRetractTableSink} import org.apache.flink.types.Row - import org.junit.Assert.assertEquals import org.junit.Test import org.junit.runner.RunWith @@ -264,7 +264,7 @@ class UnnestITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mo val sink = new TestingRetractTableSink().configure( Array("a", "b", "v"), Array(Types.INT, Types.LONG, Types.STRING)) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = List("1,11,10", "1,11,11", "2,22,20", "3,33,30", "3,33,31") diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/WindowAggregateITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/WindowAggregateITCase.scala index f817c66994842..decefaa704115 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/WindowAggregateITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/WindowAggregateITCase.scala @@ -31,15 +31,15 @@ import org.apache.flink.table.planner.runtime.utils.TimeTestUtil.TimestampAndWat import org.apache.flink.table.planner.runtime.utils._ import org.apache.flink.table.planner.utils.TableConfigUtils.getMillisecondFromConfigDuration import org.apache.flink.types.Row - import org.junit.Assert.assertEquals import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.Parameterized - import java.math.BigDecimal import java.util.concurrent.TimeUnit +import org.apache.flink.table.api.internal.TableEnvironmentInternal + @RunWith(classOf[Parameterized]) class WindowAggregateITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode) { @@ -230,7 +230,7 @@ class WindowAggregateITCase(mode: StateBackendMode) val fieldNames = fieldTypes.indices.map("f" + _).toArray 
val sink = new TestingUpsertTableSink(Array(0, 1)).configure(fieldNames, fieldTypes) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(result, "MySink") val expected = Seq( diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/AggregateITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/AggregateITCase.scala index f921242f90aea..10dbef6c69ca8 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/AggregateITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/AggregateITCase.scala @@ -22,6 +22,7 @@ import org.apache.flink.api.common.time.Time import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.scala._ import org.apache.flink.table.api.Types +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.{CountDistinct, DataViewTestAgg, WeightedAvg} import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.StateBackendMode @@ -29,7 +30,6 @@ import org.apache.flink.table.planner.runtime.utils.TestData._ import org.apache.flink.table.planner.runtime.utils.{JavaUserDefinedAggFunctions, StreamingWithStateTestBase, TestingRetractSink, TestingUpsertTableSink} import org.apache.flink.table.planner.utils.CountMinMax import org.apache.flink.types.Row - import org.junit.Assert.assertEquals import org.junit.runner.RunWith import org.junit.runners.Parameterized @@ -307,7 +307,7 @@ class AggregateITCase(mode: StateBackendMode) extends StreamingWithStateTestBase val tableSink = new TestingUpsertTableSink(Array(0)).configure( Array[String]("c", "bMax"), Array[TypeInformation[_]](Types.STRING, Types.LONG)) - tEnv.registerTableSink("testSink", tableSink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("testSink", tableSink) execInsertTableAndWaitResult(t, "testSink") val expected = List("A,1", "B,2", "C,3") diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/JoinITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/JoinITCase.scala index 18e1f79262323..d82f02ca07cc0 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/JoinITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/JoinITCase.scala @@ -21,6 +21,7 @@ package org.apache.flink.table.planner.runtime.stream.table import org.apache.flink.api.common.time.Time import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.scala._ +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{Tumble, Types} import org.apache.flink.table.planner.expressions.utils.FuncWithOpen @@ -30,7 +31,6 @@ import org.apache.flink.table.planner.runtime.utils.TestData._ import org.apache.flink.table.planner.runtime.utils.{StreamingWithStateTestBase, TestingAppendSink, TestingRetractSink, TestingRetractTableSink, TestingUpsertTableSink} 
import org.apache.flink.table.planner.utils.CountAggFunction import org.apache.flink.types.Row - import org.junit.Assert._ import org.junit.runner.RunWith import org.junit.runners.Parameterized @@ -134,7 +134,7 @@ class JoinITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode Array[String]("a", "b", "c"), Array[TypeInformation[_]](Types.INT, Types.LONG, Types.LONG)) - tEnv.registerTableSink("upsertSink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("upsertSink", sink) execInsertTableAndWaitResult(t, "upsertSink") val expected = Seq("0,1,1", "1,2,3", "2,1,1", "3,1,1", "4,1,1", "5,2,3", "6,0,1") @@ -184,7 +184,7 @@ class JoinITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val t = leftTableWithPk .join(rightTable, 'a === 'bb && ('a < 4 || 'a > 4)) .select('a, 'b, 'c, 'd) - tEnv.registerTableSink("retractSink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("retractSink", sink) execInsertTableAndWaitResult(t, "retractSink") val expected = Seq("1,1,1,1", "1,1,1,1", "1,1,1,1", "1,1,1,1", "2,2,2,2", "3,3,3,3", @@ -788,7 +788,7 @@ class JoinITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val schema = t.getSchema val sink = new TestingUpsertTableSink(Array(0, 2)) .configure(schema.getFieldNames, schema.getFieldTypes) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(t, "MySink") val expected = Seq("1,5,1,2") @@ -868,7 +868,7 @@ class JoinITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val schema = t.getSchema val sink = new TestingRetractTableSink().configure( schema.getFieldNames, schema.getFieldTypes) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(t, "MySink") val expected = Seq("1,4,1,2", "1,5,1,2") @@ -1055,7 +1055,7 @@ class JoinITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val schema = t.getSchema val sink = new TestingUpsertTableSink(Array(0, 1)) .configure(schema.getFieldNames, schema.getFieldTypes) - tEnv.registerTableSink("MySink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", sink) execInsertTableAndWaitResult(t, "MySink") val expected = Seq("0,1,1", "1,2,3", "2,1,1", "3,1,1", "4,1,1", "5,2,3", "6,0,1") @@ -1337,7 +1337,7 @@ class JoinITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode val schema = t.getSchema val sink = new TestingUpsertTableSink(Array(0, 1, 2)) .configure(schema.getFieldNames, schema.getFieldTypes) - tEnv.registerTableSink("sinkTests", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sinkTests", sink) execInsertTableAndWaitResult(t, "sinkTests") val expected = Seq("4,1,1,1") diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/LegacyTableSinkITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/LegacyTableSinkITCase.scala index 8dac757dc8336..b0fd938fea547 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/LegacyTableSinkITCase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/LegacyTableSinkITCase.scala @@ -30,13 +30,13 @@ import 
org.apache.flink.table.planner.utils.{MemoryTableSourceSinkUtil, TableTes import org.apache.flink.table.sinks._ import org.apache.flink.test.util.{AbstractTestBase, TestBaseUtils} import org.apache.flink.types.Row - import org.junit.Assert._ import org.junit.Test - import java.io.File import java.util.TimeZone +import org.apache.flink.table.api.internal.TableEnvironmentInternal + import scala.collection.JavaConverters._ class LegacyTableSinkITCase extends AbstractTestBase { @@ -56,7 +56,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { val tEnv = StreamTableEnvironment.create(env, TableTestUtil.STREAM_SETTING) env.setParallelism(4) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "csvSink", new CsvTableSink(path).configure( Array[String]("nullableCol", "c", "b"), @@ -96,7 +96,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime) val sink = new TestingAppendTableSink(TimeZone.getDefault) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "appendSink", sink.configure( Array[String]("t", "icnt", "nsum"), @@ -130,7 +130,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { tEnv.registerTable("src", t) val sink = new TestingAppendTableSink() - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "appendSink", sink.configure( Array[String]("t", "item"), @@ -158,7 +158,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { val ds2 = env.fromCollection(tupleData5).toTable(tEnv, 'd, 'e, 'f, 'g, 'h) val sink = new TestingAppendTableSink - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "appendSink", sink.configure( Array[String]("c", "g"), @@ -185,7 +185,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { .toTable(tEnv, 'id, 'num, 'text) val sink = new TestingRetractTableSink() - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "retractSink", sink.configure( Array[String]("len", "icnt", "nsum"), @@ -221,7 +221,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime) val sink = new TestingRetractTableSink(TimeZone.getDefault) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "retractSink", sink.configure( Array[String]("t", "icnt", "nsum"), @@ -264,7 +264,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { Array[TypeInformation[_]](Types.LONG, Types.DECIMAL(), Types.BOOLEAN)) sink.expectedKeys = Some(Array("cnt", "cTrue")) sink.expectedIsAppendOnly = Some(false) - tEnv.registerTableSink("upsertSink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("upsertSink", sink) val table = t.select('id, 'num, 'text.charLength() as 'len, ('id > 0) as 'cTrue) .groupBy('len, 'cTrue) @@ -303,7 +303,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { Array[TypeInformation[_]](Types.LONG, Types.SQL_TIMESTAMP, Types.LONG)) sink.expectedKeys = Some(Array("wend", "num")) sink.expectedIsAppendOnly = Some(true) - tEnv.registerTableSink("upsertSink", sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("upsertSink", sink) val table = t.window(Tumble over 5.millis on 'rowtime as 'w) .groupBy('w, 'num) @@ -344,7 +344,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { val sink = new TestingUpsertTableSink(Array(0, 1, 2), 
TimeZone.getDefault) sink.expectedKeys = Some(Array("wend", "num")) sink.expectedIsAppendOnly = Some(true) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink", sink.configure( Array[String]("wstart", "wend", "num", "icnt"), @@ -388,7 +388,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { val sink = new TestingUpsertTableSink(Array(0), TimeZone.getDefault) sink.expectedIsAppendOnly = Some(true) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink", sink.configure( Array[String]("wend", "cnt"), @@ -431,7 +431,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { val sink = new TestingUpsertTableSink(Array(0), TimeZone.getDefault) sink.expectedIsAppendOnly = Some(true) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink", sink.configure( Array[String]("num", "cnt"), @@ -476,7 +476,7 @@ class LegacyTableSinkITCase extends AbstractTestBase { val sink = new TestingUpsertTableSink(Array(0)) sink.expectedIsAppendOnly = Some(false) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink", sink.configure( Array[String]("num", "cnt"), diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTableEnvUtil.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTableEnvUtil.scala index b7b3cddad9142..eea8842bc94ae 100644 --- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTableEnvUtil.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTableEnvUtil.scala @@ -24,7 +24,7 @@ import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.common.typeutils.TypeSerializer import org.apache.flink.api.java.io.CollectionInputFormat import org.apache.flink.streaming.api.datastream.DataStream -import org.apache.flink.table.api.internal.TableEnvironmentImpl +import org.apache.flink.table.api.internal.{TableEnvironmentImpl, TableEnvironmentInternal} import org.apache.flink.table.api.{Table, TableEnvironment} import org.apache.flink.table.expressions.ExpressionParser import org.apache.flink.table.planner.delegation.PlannerBase @@ -33,7 +33,6 @@ import org.apache.flink.table.planner.sinks.CollectTableSink import org.apache.flink.table.planner.utils.TableTestUtil import org.apache.flink.table.types.utils.TypeConversions.fromDataTypeToLegacyInfo import org.apache.flink.util.AbstractID - import _root_.java.util.{UUID, ArrayList => JArrayList} import _root_.scala.collection.JavaConversions._ @@ -54,7 +53,7 @@ object BatchTableEnvUtil { val id = new AbstractID().toString sink.init(typeSerializer.asInstanceOf[TypeSerializer[T]], id) val sinkName = UUID.randomUUID().toString - tEnv.registerTableSink(sinkName, sink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(sinkName, sink) val res = TableEnvUtil.execInsertTableAndWaitResult(table, s"`$sinkName`") val accResult: JArrayList[Array[Byte]] = res.getAccumulatorResult(id) diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala index 3b78afd874071..f4b6e5b838d4b 100644 --- 
a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala +++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala @@ -27,7 +27,7 @@ import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment => Scala import org.apache.flink.streaming.api.{TimeCharacteristic, environment} import org.apache.flink.table.api._ import org.apache.flink.table.api.config.ExecutionConfigOptions -import org.apache.flink.table.api.internal.{TableEnvironmentImpl, TableImpl} +import org.apache.flink.table.api.internal.{TableEnvironmentImpl, TableEnvironmentInternal, TableImpl} import org.apache.flink.table.api.java.internal.{StreamTableEnvironmentImpl => JavaStreamTableEnvImpl} import org.apache.flink.table.api.java.{StreamTableEnvironment => JavaStreamTableEnv} import org.apache.flink.table.api.scala.internal.{StreamTableEnvironmentImpl => ScalaStreamTableEnvImpl} @@ -61,7 +61,6 @@ import org.apache.flink.table.types.logical.LogicalType import org.apache.flink.table.types.utils.TypeConversions import org.apache.flink.table.typeutils.FieldInfoUtils import org.apache.flink.types.Row - import org.apache.calcite.avatica.util.TimeUnit import org.apache.calcite.rel.RelNode import org.apache.calcite.sql.parser.SqlParserPos @@ -70,7 +69,6 @@ import org.apache.commons.lang3.SystemUtils import org.junit.Assert.{assertEquals, assertTrue} import org.junit.Rule import org.junit.rules.{ExpectedException, TemporaryFolder, TestName} - import _root_.java.math.{BigDecimal => JBigDecimal} import _root_.java.util @@ -242,7 +240,8 @@ abstract class TableTestUtilBase(test: TableTestBase, isStreamingMode: Boolean) def addTableSource( name: String, tableSource: TableSource[_]): Table = { - getTableEnv.registerTableSource(name, tableSource) + getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + name, tableSource) getTableEnv.from(name) } @@ -333,7 +332,7 @@ abstract class TableTestUtilBase(test: TableTestBase, isStreamingMode: Boolean) targetPath: String, extraDetails: ExplainDetail*): Unit = { val stmtSet = getTableEnv.createStatementSet() - getTableEnv.registerTableSink(targetPath, sink) + getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink) stmtSet.addInsert(targetPath, table) verifyExplain(stmtSet, extraDetails: _*) } @@ -542,8 +541,8 @@ abstract class TableTestUtil( name: String, tableSource: TableSource[_], statistic: FlinkStatistic): Table = { - // TODO RichTableSourceQueryOperation should be deleted and use registerTableSource method - // instead of registerTable method here after unique key in TableSchema is ready + // TODO RichTableSourceQueryOperation should be deleted; use the registerTableSourceInternal + // method instead of the registerTable method here once unique key in TableSchema is ready // and setting catalog statistic to TableSourceTable in DatabaseCalciteSchema is ready val identifier = ObjectIdentifier.of( testingTableEnv.getCurrentCatalog, @@ -591,7 +590,7 @@ abstract class TableTestUtil( targetPath: String, extraDetails: ExplainDetail*): Unit = { val stmtSet = tableEnv.createStatementSet() - tableEnv.registerTableSink(targetPath, sink) + tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink) stmtSet.addInsert(targetPath, table) verifyPlan(stmtSet, extraDetails: _*) } diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/api/internal/TableEnvImpl.scala
b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/api/internal/TableEnvImpl.scala index 2d75534069c61..b57f906752417 100644 --- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/api/internal/TableEnvImpl.scala +++ b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/api/internal/TableEnvImpl.scala @@ -323,44 +323,6 @@ abstract class TableEnvImpl( false) } - override def registerTableSource(name: String, tableSource: TableSource[_]): Unit = { - validateTableSource(tableSource) - registerTableSourceInternal(name, tableSource) - } - - override def registerTableSink( - name: String, - fieldNames: Array[String], - fieldTypes: Array[TypeInformation[_]], - tableSink: TableSink[_]): Unit = { - - if (fieldNames == null) { - throw new TableException("fieldNames must not be null.") - } - if (fieldTypes == null) { - throw new TableException("fieldTypes must not be null.") - } - if (fieldNames.length == 0) { - throw new TableException("fieldNames must not be empty.") - } - if (fieldNames.length != fieldTypes.length) { - throw new TableException("Same number of field names and types required.") - } - - val configuredSink = tableSink.configure(fieldNames, fieldTypes) - registerTableSinkInternal(name, configuredSink) - } - - override def registerTableSink(name: String, configuredSink: TableSink[_]): Unit = { - // validate - if (configuredSink.getTableSchema.getFieldNames.length == 0) { - throw new TableException("Field names must not be empty.") - } - - validateTableSink(configuredSink) - registerTableSinkInternal(name, configuredSink) - } - override def fromTableSource(source: TableSource[_]): Table = { createTable(new TableSourceQueryOperation(source, isBatchTable)) } @@ -383,10 +345,11 @@ abstract class TableEnvImpl( */ protected def validateTableSink(tableSink: TableSink[_]): Unit - private def registerTableSourceInternal( + override def registerTableSourceInternal( name: String, tableSource: TableSource[_]) : Unit = { + validateTableSource(tableSource) val unresolvedIdentifier = UnresolvedIdentifier.of(name) val objectIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier) // check if a table (source or sink) is registered @@ -418,10 +381,16 @@ abstract class TableEnvImpl( } } - private def registerTableSinkInternal( + override def registerTableSinkInternal( name: String, tableSink: TableSink[_]) : Unit = { + // validate + if (tableSink.getTableSchema.getFieldNames.length == 0) { + throw new TableException("Field names must not be empty.") + } + + validateTableSink(tableSink) val unresolvedIdentifier = UnresolvedIdentifier.of(name) val objectIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier) // check if a table (source or sink) is registered diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/batch/JavaTableSourceITCase.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/batch/JavaTableSourceITCase.java index 6787c94a2c9d3..58c21166b5d31 100644 --- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/batch/JavaTableSourceITCase.java +++ b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/batch/JavaTableSourceITCase.java @@ -21,6 +21,7 @@ import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.internal.TableEnvironmentInternal; import 
org.apache.flink.table.api.java.BatchTableEnvironment; import org.apache.flink.table.runtime.utils.CommonTestData; import org.apache.flink.table.runtime.utils.TableProgramsCollectionTestBase; @@ -52,7 +53,7 @@ public void testBatchTableSourceTableAPI() throws Exception { BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env, config()); BatchTableSource csvTable = CommonTestData.getCsvTableSource(); - tableEnv.registerTableSource("persons", csvTable); + ((TableEnvironmentInternal) tableEnv).registerTableSourceInternal("persons", csvTable); Table result = tableEnv.scan("persons") .select($("id"), $("first"), $("last"), $("score")); @@ -78,7 +79,7 @@ public void testBatchTableSourceSQL() throws Exception { BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env, config()); BatchTableSource csvTable = CommonTestData.getCsvTableSource(); - tableEnv.registerTableSource("persons", csvTable); + ((TableEnvironmentInternal) tableEnv).registerTableSourceInternal("persons", csvTable); Table result = tableEnv .sqlQuery("SELECT `last`, FLOOR(id), score * 2 FROM persons WHERE score < 20"); diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableEnvironmentITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableEnvironmentITCase.scala index 70c1c62e98c6f..57abf503f3c60 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableEnvironmentITCase.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableEnvironmentITCase.scala @@ -25,7 +25,7 @@ import org.apache.flink.core.fs.FileSystem.WriteMode import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment => ScalaStreamExecutionEnvironment} import org.apache.flink.table.api.TableEnvironmentITCase.getPersonCsvTableSource -import org.apache.flink.table.api.internal.TableEnvironmentImpl +import org.apache.flink.table.api.internal.{TableEnvironmentImpl, TableEnvironmentInternal} import org.apache.flink.table.api.java.StreamTableEnvironment import org.apache.flink.table.api.scala.{StreamTableEnvironment => ScalaStreamTableEnvironment} import org.apache.flink.table.runtime.utils.StreamITCase @@ -42,7 +42,6 @@ import org.junit.rules.{ExpectedException, TemporaryFolder} import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.{After, Before, Rule, Test} - import _root_.java.io.{File, FileOutputStream, OutputStreamWriter} import _root_.java.lang.{Long => JLong} import _root_.java.util @@ -79,7 +78,8 @@ class TableEnvironmentITCase(tableEnvName: String) { StreamExecutionEnvironment.getExecutionEnvironment, settings) case _ => throw new UnsupportedOperationException("unsupported tableEnvName: " + tableEnvName) } - tEnv.registerTableSource("MyTable", getPersonCsvTableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "MyTable", getPersonCsvTableSource) } @After @@ -161,7 +161,8 @@ class TableEnvironmentITCase(tableEnvName: String) { } val streamEnv = StreamExecutionEnvironment.getExecutionEnvironment val streamTableEnv = StreamTableEnvironment.create(streamEnv, settings) - streamTableEnv.registerTableSource("MyTable", getPersonCsvTableSource) + streamTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "MyTable", getPersonCsvTableSource) val sink1Path = registerCsvTableSink(streamTableEnv, Array("first"), Array(STRING), "MySink1") 
checkEmptyFile(sink1Path) StreamITCase.clear @@ -198,7 +199,8 @@ class TableEnvironmentITCase(tableEnvName: String) { } val streamEnv = StreamExecutionEnvironment.getExecutionEnvironment val streamTableEnv = StreamTableEnvironment.create(streamEnv, settings) - streamTableEnv.registerTableSource("MyTable", getPersonCsvTableSource) + streamTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "MyTable", getPersonCsvTableSource) val sink1Path = registerCsvTableSink(streamTableEnv, Array("first"), Array(STRING), "MySink1") checkEmptyFile(sink1Path) StreamITCase.clear @@ -233,7 +235,8 @@ class TableEnvironmentITCase(tableEnvName: String) { } val streamEnv = ScalaStreamExecutionEnvironment.getExecutionEnvironment val streamTableEnv = ScalaStreamTableEnvironment.create(streamEnv, settings) - streamTableEnv.registerTableSource("MyTable", getPersonCsvTableSource) + streamTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "MyTable", getPersonCsvTableSource) val sink1Path = registerCsvTableSink(streamTableEnv, Array("first"), Array(STRING), "MySink1") checkEmptyFile(sink1Path) StreamITCase.clear @@ -280,7 +283,7 @@ class TableEnvironmentITCase(tableEnvName: String) { val sinkPath = resultFile.getAbsolutePath val configuredSink = new TestingOverwritableTableSink(sinkPath) .configure(Array("first"), Array(STRING)) - tEnv.registerTableSink("MySink", configuredSink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", configuredSink) checkEmptyFile(sinkPath) val tableResult1 = tEnv.executeSql("insert overwrite MySink select first from MyTable") @@ -335,7 +338,8 @@ class TableEnvironmentITCase(tableEnvName: String) { } val streamEnv = StreamExecutionEnvironment.getExecutionEnvironment val streamTableEnv = StreamTableEnvironment.create(streamEnv, settings) - streamTableEnv.registerTableSource("MyTable", getPersonCsvTableSource) + streamTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "MyTable", getPersonCsvTableSource) val sink1Path = registerCsvTableSink(streamTableEnv, Array("first"), Array(STRING), "MySink1") checkEmptyFile(sink1Path) StreamITCase.clear @@ -383,7 +387,7 @@ class TableEnvironmentITCase(tableEnvName: String) { val sinkPath = resultFile.getAbsolutePath val configuredSink = new TestingOverwritableTableSink(sinkPath) .configure(Array("first"), Array(STRING)) - tEnv.registerTableSink("MySink", configuredSink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", configuredSink) checkEmptyFile(sinkPath) val tableResult1 = tEnv.sqlQuery("select first from MyTable").executeInsert("MySink", true) @@ -435,13 +439,15 @@ class TableEnvironmentITCase(tableEnvName: String) { val sink1Path = _tempFolder.newFile().getAbsolutePath val configuredSink1 = new TestingOverwritableTableSink(sink1Path) .configure(Array("first"), Array(STRING)) - tEnv.registerTableSink("MySink1", configuredSink1) + tEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSinkInternal("MySink1", configuredSink1) checkEmptyFile(sink1Path) val sink2Path = _tempFolder.newFile().getAbsolutePath val configuredSink2 = new TestingOverwritableTableSink(sink2Path) .configure(Array("last"), Array(STRING)) - tEnv.registerTableSink("MySink2", configuredSink2) + tEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSinkInternal("MySink2", configuredSink2) checkEmptyFile(sink2Path) val stmtSet = tEnv.createStatementSet() @@ -524,7 +530,7 @@ class TableEnvironmentITCase(tableEnvName: String) { val 
schema = new TableSchema(Array("name", "pt"), Array(Types.STRING, Types.SQL_TIMESTAMP())) val sourceType = Types.STRING val tableSource = new TestTableSourceWithTime(schema, sourceType, data, null, "pt") - tEnv.registerTableSource("T", tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("T", tableSource) val tableResult = tEnv.executeSql("select * from T") assertTrue(tableResult.getJobClient.isPresent) @@ -553,7 +559,7 @@ class TableEnvironmentITCase(tableEnvName: String) { val configuredSink = new CsvTableSink(path, ",", 1, WriteMode.OVERWRITE) .configure(fieldNames, fieldTypes) - tEnv.registerTableSink(tableName, configuredSink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(tableName, configuredSink) path } diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableITCase.scala index 7b5f119baeb7d..b0eb4c7adab77 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableITCase.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableITCase.scala @@ -20,19 +20,16 @@ package org.apache.flink.table.api import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment import org.apache.flink.table.api.TableEnvironmentITCase.getPersonCsvTableSource -import org.apache.flink.table.api.internal.TableEnvironmentImpl +import org.apache.flink.table.api.internal.{TableEnvironmentImpl, TableEnvironmentInternal} import org.apache.flink.table.api.java.StreamTableEnvironment import org.apache.flink.types.Row - import org.apache.flink.shaded.guava18.com.google.common.collect.Lists - import org.hamcrest.Matchers.containsString import org.junit.Assert.{assertEquals, assertTrue} import org.junit.rules.{ExpectedException, TemporaryFolder} import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.{Before, Rule, Test} - import _root_.java.util @RunWith(classOf[Parameterized]) @@ -64,7 +61,8 @@ class TableITCase(tableEnvName: String) { StreamExecutionEnvironment.getExecutionEnvironment, settings) case _ => throw new UnsupportedOperationException("unsupported tableEnvName: " + tableEnvName) } - tEnv.registerTableSource("MyTable", getPersonCsvTableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "MyTable", getPersonCsvTableSource) } @Test diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableSourceTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableSourceTest.scala index 48cda7cb79fa3..22bb32542d6c3 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableSourceTest.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableSourceTest.scala @@ -21,8 +21,10 @@ package org.apache.flink.table.api import _root_.java.util.{HashMap => JHashMap} import _root_.java.util.{Map => JMap} import _root_.java.sql.{Date, Time, Timestamp} + import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, SqlTimeTypeInfo, TypeInformation} import org.apache.flink.api.java.typeutils.RowTypeInfo +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.descriptors.{ConnectorDescriptor, Schema} import org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR @@ -32,7 +34,6 
@@ import org.apache.flink.table.sources.{CsvTableSource, TableSource} import org.apache.flink.table.utils.TableTestUtil._ import org.apache.flink.table.utils.{TableTestBase, TestFilterableTableSource} import org.apache.flink.types.Row - import org.junit.{Assert, Test} class TableSourceTest extends TableTestBase { @@ -47,8 +48,10 @@ class TableSourceTest extends TableTestBase { val util = batchTestUtil() val tableEnv = util.tableEnv - tableEnv.registerTableSource("table1", tableSource1) - tableEnv.registerTableSource("table2", tableSource2) + tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal("table1", tableSource1) + tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal("table2", tableSource2) val table1 = tableEnv.scan("table1").where($"amount" > 2) val table2 = tableEnv.scan("table2").where($"amount" > 2) @@ -80,7 +83,8 @@ class TableSourceTest extends TableTestBase { val util = batchTestUtil() val tableEnv = util.tableEnv - tableEnv.registerTableSource(tableName, tableSource) + tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal(tableName, tableSource) val result = tableEnv .scan(tableName) @@ -100,7 +104,8 @@ class TableSourceTest extends TableTestBase { val (tableSource, tableName) = csvTable val util = batchTestUtil() - util.tableEnv.registerTableSource(tableName, tableSource) + util.tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal(tableName, tableSource) val sqlQuery = s"SELECT `last`, floor(id), score * 2 FROM $tableName" @@ -119,7 +124,8 @@ class TableSourceTest extends TableTestBase { val util = batchTestUtil() val tableEnv = util.tableEnv - tableEnv.registerTableSource(tableName, tableSource) + tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal(tableName, tableSource) val result = tableEnv .scan(tableName) @@ -135,7 +141,8 @@ class TableSourceTest extends TableTestBase { val util = batchTestUtil() val tableEnv = util.tableEnv - tableEnv.registerTableSource(tableName, tableSource) + tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal(tableName, tableSource) val result = tableEnv .scan(tableName) @@ -158,7 +165,8 @@ class TableSourceTest extends TableTestBase { val util = batchTestUtil() val tableEnv = util.tableEnv - tableEnv.registerTableSource(tableName, tableSource) + tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal(tableName, tableSource) val result = tableEnv .scan(tableName) @@ -185,7 +193,8 @@ class TableSourceTest extends TableTestBase { val util = batchTestUtil() val tableEnv = util.tableEnv - tableEnv.registerTableSource(tableName, tableSource) + tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal(tableName, tableSource) val result = tableEnv .scan(tableName) @@ -211,7 +220,8 @@ class TableSourceTest extends TableTestBase { val util = batchTestUtil() val tableEnv = util.tableEnv - tableEnv.registerTableSource(tableName, tableSource) + tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal(tableName, tableSource) val result = tableEnv .scan(tableName) @@ -232,7 +242,8 @@ class TableSourceTest extends TableTestBase { val util = batchTestUtil() val tableEnv = util.tableEnv - tableEnv.registerTableSource(tableName, tableSource) + tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal(tableName, tableSource) val result = tableEnv .scan(tableName) @@ -259,7 +270,8 @@ class TableSourceTest extends TableTestBase 
{
     val util = batchTestUtil()
     val tableEnv = util.tableEnv
-    tableEnv.registerTableSource(tableName, tableSource)
+    tableEnv.asInstanceOf[TableEnvironmentInternal]
+      .registerTableSourceInternal(tableName, tableSource)

     val func = Func0
     tableEnv.registerFunction("func0", func)

@@ -290,7 +302,8 @@ class TableSourceTest extends TableTestBase {

     val util = streamTestUtil()
     val tableEnv = util.tableEnv
-    tableEnv.registerTableSource(tableName, tableSource)
+    tableEnv.asInstanceOf[TableEnvironmentInternal]
+      .registerTableSourceInternal(tableName, tableSource)

     val result = tableEnv
       .scan(tableName)
@@ -310,7 +323,8 @@ class TableSourceTest extends TableTestBase {

     val (tableSource, tableName) = csvTable
     val util = streamTestUtil()
-    util.tableEnv.registerTableSource(tableName, tableSource)
+    util.tableEnv.asInstanceOf[TableEnvironmentInternal]
+      .registerTableSourceInternal(tableName, tableSource)

     val sqlQuery = s"SELECT `last`, floor(id), score * 2 FROM $tableName"

@@ -329,7 +343,8 @@ class TableSourceTest extends TableTestBase {

     val util = streamTestUtil()
     val tableEnv = util.tableEnv
-    tableEnv.registerTableSource(tableName, tableSource)
+    tableEnv.asInstanceOf[TableEnvironmentInternal]
+      .registerTableSourceInternal(tableName, tableSource)

     val result = tableEnv
       .scan(tableName)
@@ -345,7 +360,8 @@ class TableSourceTest extends TableTestBase {

     val util = streamTestUtil()
     val tableEnv = util.tableEnv
-    tableEnv.registerTableSource(tableName, tableSource)
+    tableEnv.asInstanceOf[TableEnvironmentInternal]
+      .registerTableSourceInternal(tableName, tableSource)

     val result = tableEnv
       .scan(tableName)
@@ -468,7 +484,8 @@ class TableSourceTest extends TableTestBase {

     val util = batchTestUtil()
     val tableEnv = util.tableEnv
-    tableEnv.registerTableSource(tableName, tableSource)
+    tableEnv.asInstanceOf[TableEnvironmentInternal]
+      .registerTableSourceInternal(tableName, tableSource)

     val sqlQuery =
       s"""
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/ExplainTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/ExplainTest.scala
index fc33f8d952476..34e7581857320 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/ExplainTest.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/ExplainTest.scala
@@ -20,6 +20,7 @@ package org.apache.flink.table.api.batch

 import org.apache.flink.api.common.typeinfo.TypeInformation
 import org.apache.flink.api.scala._
+import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.api.scala._
 import org.apache.flink.table.api.scala.internal.BatchTableEnvironmentImpl
 import org.apache.flink.table.api.{Table, Types}
@@ -27,7 +28,6 @@ import org.apache.flink.table.runtime.utils.CommonTestData
 import org.apache.flink.table.utils.MemoryTableSourceSinkUtil
 import org.apache.flink.table.utils.TableTestUtil.{batchTableNode, readFromResource, replaceStageId}
 import org.apache.flink.test.util.MultipleProgramsTestBase
-
 import org.junit.Assert.assertEquals
 import org.junit._

@@ -136,12 +136,14 @@ class ExplainTest

     val env = ExecutionEnvironment.getExecutionEnvironment
     val tEnv = BatchTableEnvironment.create(env)
-    tEnv.registerTableSource("sourceTable", CommonTestData.getCsvTableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
+      "sourceTable", CommonTestData.getCsvTableSource)
     val fieldNames = Array("d", "e")
     val fieldTypes: Array[TypeInformation[_]] =
       Array(Types.STRING(), Types.INT())
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes))
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable", sink.configure(fieldNames, fieldTypes))

     tEnv.sqlUpdate("insert into targetTable select first, id from sourceTable")

@@ -155,13 +157,16 @@ class ExplainTest

     val env = ExecutionEnvironment.getExecutionEnvironment
     val tEnv = BatchTableEnvironment.create(env)
-    tEnv.registerTableSource("sourceTable", CommonTestData.getCsvTableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
+      "sourceTable", CommonTestData.getCsvTableSource)
     val fieldNames = Array("d", "e")
     val fieldTypes: Array[TypeInformation[_]] =
       Array(Types.STRING(), Types.INT())
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    tEnv.registerTableSink("targetTable1", sink.configure(fieldNames, fieldTypes))
-    tEnv.registerTableSink("targetTable2", sink.configure(fieldNames, fieldTypes))
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable1", sink.configure(fieldNames, fieldTypes))
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable2", sink.configure(fieldNames, fieldTypes))

     tEnv.sqlUpdate("insert into targetTable1 select first, id from sourceTable")
     tEnv.sqlUpdate("insert into targetTable2 select last, id from sourceTable")
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/sql/validation/InsertIntoValidationTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/sql/validation/InsertIntoValidationTest.scala
index ab0703950e52b..60208f99d1b78 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/sql/validation/InsertIntoValidationTest.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/sql/validation/InsertIntoValidationTest.scala
@@ -20,6 +20,7 @@ package org.apache.flink.table.api.batch.sql.validation

 import org.apache.flink.api.common.typeinfo.TypeInformation
 import org.apache.flink.api.scala._
+import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.api.scala._
 import org.apache.flink.table.api.{Types, ValidationException}
 import org.apache.flink.table.utils.{MemoryTableSourceSinkUtil, TableTestBase}
@@ -35,7 +36,8 @@ class InsertIntoValidationTest extends TableTestBase {
     val fieldNames = Array("d", "e")
     val fieldTypes: Array[TypeInformation[_]] = Array(Types.INT, Types.LONG)
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    util.tableEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes))
+    util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable", sink.configure(fieldNames, fieldTypes))

     val sql = "INSERT INTO targetTable SELECT a, b, c FROM sourceTable"

@@ -53,7 +55,8 @@ class InsertIntoValidationTest extends TableTestBase {
     val fieldNames = Array("d", "e", "f")
     val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.INT, Types.LONG)
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    util.tableEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes))
+    util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable", sink.configure(fieldNames, fieldTypes))

     val sql = "INSERT INTO targetTable SELECT a, b, c FROM sourceTable"

@@ -71,7 +74,8 @@ class InsertIntoValidationTest extends TableTestBase {
     val fieldNames = Array("d", "e", "f")
     val fieldTypes = util.tableEnv.scan("sourceTable").getSchema.getFieldTypes
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    util.tableEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes))
+    util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable", sink.configure(fieldNames, fieldTypes))

     val sql = "INSERT INTO targetTable (d, f) SELECT a, c FROM sourceTable"

@@ -92,8 +96,8 @@ class InsertIntoValidationTest extends TableTestBase {
     val fieldTypes: Array[TypeInformation[_]] = Array(Types.INT, Types.ROW
     (Types.INT, Types.INT, Types.INT))
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    util.tableEnv.registerTableSink("targetTable", sink.configure(fieldNames,
-      fieldTypes))
+    util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable", sink.configure(fieldNames, fieldTypes))

     val sql = "INSERT INTO targetTable SELECT a, b FROM sourceTable"

diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/table/validation/InsertIntoValidationTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/table/validation/InsertIntoValidationTest.scala
index 210a5982feada..4e39475528d36 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/table/validation/InsertIntoValidationTest.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/table/validation/InsertIntoValidationTest.scala
@@ -20,6 +20,7 @@ package org.apache.flink.table.api.batch.table.validation

 import org.apache.flink.api.common.typeinfo.TypeInformation
 import org.apache.flink.api.scala._
+import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.api.{Types, ValidationException}
 import org.apache.flink.table.api.scala._
 import org.apache.flink.table.utils.{MemoryTableSourceSinkUtil, TableTestBase}
@@ -35,7 +36,8 @@ class InsertIntoValidationTest extends TableTestBase {
     val fieldNames = Array("d", "e")
     val fieldTypes: Array[TypeInformation[_]] = Array(Types.INT, Types.LONG)
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    util.tableEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes))
+    util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable", sink.configure(fieldNames, fieldTypes))

     // must fail because TableSink accepts fewer fields.
     util.tableEnv.scan("sourceTable")
@@ -53,7 +55,8 @@ class InsertIntoValidationTest extends TableTestBase {
     val fieldNames = Array("d", "e", "f")
     val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.INT, Types.LONG)
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    util.tableEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes))
+    util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable", sink.configure(fieldNames, fieldTypes))

     // must fail because types of result and TableSink do not match.
util.tableEnv.scan("sourceTable") diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/ExplainTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/ExplainTest.scala index 14ba09b04c79b..6f99d9abc4dac 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/ExplainTest.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/ExplainTest.scala @@ -21,13 +21,13 @@ package org.apache.flink.table.api.stream import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.scala._ import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{EnvironmentSettings, Table, Types} import org.apache.flink.table.runtime.utils.CommonTestData import org.apache.flink.table.utils.MemoryTableSourceSinkUtil import org.apache.flink.table.utils.TableTestUtil.{readFromResource, replaceStageId, streamTableNode} import org.apache.flink.test.util.AbstractTestBase - import org.junit.Assert.assertEquals import org.junit._ @@ -72,12 +72,14 @@ class ExplainTest extends AbstractTestBase { val settings = EnvironmentSettings.newInstance().useOldPlanner().build() val tEnv = StreamTableEnvironment.create(env, settings) - tEnv.registerTableSource("sourceTable", CommonTestData.getCsvTableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "sourceTable", CommonTestData.getCsvTableSource) val fieldNames = Array("d", "e") val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING(), Types.INT()) val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) tEnv.sqlUpdate("INSERT INTO targetTable SELECT first, id FROM sourceTable") @@ -92,13 +94,16 @@ class ExplainTest extends AbstractTestBase { val settings = EnvironmentSettings.newInstance().useOldPlanner().build() val tEnv = StreamTableEnvironment.create(env, settings) - tEnv.registerTableSource("sourceTable", CommonTestData.getCsvTableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "sourceTable", CommonTestData.getCsvTableSource) val fieldNames = Array("d", "e") val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING(), Types.INT()) val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable1", sink.configure(fieldNames, fieldTypes)) - tEnv.registerTableSink("targetTable2", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable1", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable2", sink.configure(fieldNames, fieldTypes)) tEnv.sqlUpdate("INSERT INTO targetTable1 SELECT first, id FROM sourceTable") tEnv.sqlUpdate("INSERT INTO targetTable2 SELECT last, id FROM sourceTable") diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/sql/validation/InsertIntoValidationTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/sql/validation/InsertIntoValidationTest.scala index 
0f7cf80cb18bc..293b9fb2b62ac 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/sql/validation/InsertIntoValidationTest.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/sql/validation/InsertIntoValidationTest.scala @@ -20,11 +20,11 @@ package org.apache.flink.table.api.stream.sql.validation import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{EnvironmentSettings, Types, ValidationException} import org.apache.flink.table.runtime.utils.StreamTestData import org.apache.flink.table.utils.MemoryTableSourceSinkUtil - import org.junit.Test class InsertIntoValidationTest { @@ -41,7 +41,8 @@ class InsertIntoValidationTest { val fieldNames = Array("d", "e") val fieldTypes: Array[TypeInformation[_]] = Array(Types.INT, Types.LONG) val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) val sql = "INSERT INTO targetTable SELECT a, b, c FROM sourceTable" @@ -59,7 +60,8 @@ class InsertIntoValidationTest { val fieldNames = Array("d", "e", "f") val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.INT, Types.LONG) val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) val sql = "INSERT INTO targetTable SELECT a, b, c FROM sourceTable" @@ -77,7 +79,8 @@ class InsertIntoValidationTest { val fieldNames = Array("d", "e", "f") val fieldTypes = tEnv.scan("sourceTable").getSchema.getFieldTypes val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) val sql = "INSERT INTO targetTable (d, f) SELECT a, c FROM sourceTable" diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/TableSourceTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/TableSourceTest.scala index 3bf4128fabc51..8ea3b6703dd89 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/TableSourceTest.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/TableSourceTest.scala @@ -20,6 +20,7 @@ package org.apache.flink.table.api.stream.table import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.typeutils.RowTypeInfo +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.{Over, TableSchema, Tumble, Types} import org.apache.flink.table.api.scala._ import org.apache.flink.table.utils.TableTestUtil._ @@ -41,7 +42,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rowtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + 
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "rowTimeT", new TestTableSourceWithTime[Row](tableSchema, returnType, Seq(), rowtime = "rowtime")) @@ -65,7 +66,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rowtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "rowTimeT", new TestTableSourceWithTime[Row](tableSchema, returnType, Seq(), rowtime = "rowtime")) @@ -89,7 +90,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rowtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "rowTimeT", new TestTableSourceWithTime[Row](tableSchema, returnType, Seq(), rowtime = "rowtime")) @@ -132,7 +133,7 @@ class TableSourceTest extends TableTestBase { Array("id", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "procTimeT", new TestTableSourceWithTime[Row](tableSchema, returnType, Seq(), proctime = "proctime")) @@ -160,7 +161,7 @@ class TableSourceTest extends TableTestBase { Array("id", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "procTimeT", new TestTableSourceWithTime[Row](tableSchema, returnType, Seq(), proctime = "proctime")) @@ -199,7 +200,7 @@ class TableSourceTest extends TableTestBase { Array("id", "name", "val", "rtime")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -222,7 +223,7 @@ class TableSourceTest extends TableTestBase { Array("id", "name", "val", "rtime")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -248,7 +249,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -270,7 +271,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -292,7 +293,7 @@ class TableSourceTest extends TableTestBase { Array("id", "rtime", "val", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -316,7 +317,7 @@ class TableSourceTest extends TableTestBase { val mapping = Map("rtime" -> "p-rtime", "id" -> "p-id", "val" -> "p-val", "name" -> "p-name") val util = streamTestUtil() - util.tableEnv.registerTableSource( + 
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime", mapping)) @@ -355,7 +356,7 @@ class TableSourceTest extends TableTestBase { Array("id", "deepNested", "nested", "name")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestNestedProjectableTableSource(tableSchema, returnType, Seq())) diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/InsertIntoValidationTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/InsertIntoValidationTest.scala index 0b5117711da2a..75f708364e76c 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/InsertIntoValidationTest.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/InsertIntoValidationTest.scala @@ -20,11 +20,11 @@ package org.apache.flink.table.api.stream.table.validation import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{EnvironmentSettings, Types, ValidationException} import org.apache.flink.table.runtime.utils.StreamTestData import org.apache.flink.table.utils.MemoryTableSourceSinkUtil - import org.junit.Test class InsertIntoValidationTest { @@ -41,7 +41,8 @@ class InsertIntoValidationTest { val fieldNames = Array("d", "f") val fieldTypes: Array[TypeInformation[_]] = Array(Types.INT, Types.LONG) val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) // must fail because table sink has too few fields. tEnv.scan("sourceTable") @@ -64,7 +65,8 @@ class InsertIntoValidationTest { val fieldNames = Array("d", "e", "f") val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.INT, Types.LONG) val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) // must fail because field types of table sink are incompatible. 
tEnv.scan("sourceTable") diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/TableSinkValidationTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/TableSinkValidationTest.scala index 8b71b973d8708..5d1355eb320c6 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/TableSinkValidationTest.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/TableSinkValidationTest.scala @@ -21,6 +21,7 @@ package org.apache.flink.table.api.stream.table.validation import org.apache.flink.streaming.api.TimeCharacteristic import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment import org.apache.flink.table.api.TableException +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.runtime.stream.table.{TestAppendSink, TestUpsertSink} import org.apache.flink.table.runtime.utils.StreamTestData @@ -35,7 +36,8 @@ class TableSinkValidationTest extends TableTestBase { val tEnv = StreamTableEnvironment.create(env) val t = StreamTestData.get3TupleDataStream(env).toTable(tEnv, 'id, 'num, 'text) - tEnv.registerTableSink("testSink", new TestAppendSink) + tEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSinkInternal("testSink", new TestAppendSink) t.groupBy('text) .select('text, 'id.count, 'num.sum) @@ -54,7 +56,8 @@ class TableSinkValidationTest extends TableTestBase { val t = StreamTestData.get3TupleDataStream(env) .assignAscendingTimestamps(_._1.toLong) .toTable(tEnv, 'id, 'num, 'text) - tEnv.registerTableSink("testSink", new TestUpsertSink(Array("len", "cTrue"), false)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "testSink", new TestUpsertSink(Array("len", "cTrue"), false)) t.select('id, 'num, 'text.charLength() as 'len, ('id > 0) as 'cTrue) .groupBy('len, 'cTrue) @@ -72,7 +75,8 @@ class TableSinkValidationTest extends TableTestBase { val ds1 = StreamTestData.get3TupleDataStream(env).toTable(tEnv, 'a, 'b, 'c) val ds2 = StreamTestData.get5TupleDataStream(env).toTable(tEnv, 'd, 'e, 'f, 'g, 'h) - tEnv.registerTableSink("testSink", new TestAppendSink) + tEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSinkInternal("testSink", new TestAppendSink) ds1.leftOuterJoin(ds2, 'a === 'd && 'b === 'h) .select('c, 'g) diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/TableSourceValidationTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/TableSourceValidationTest.scala index e60c55f1229a2..31baa1d6c0655 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/TableSourceValidationTest.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/TableSourceValidationTest.scala @@ -20,6 +20,7 @@ package org.apache.flink.table.api.stream.table.validation import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.typeutils.RowTypeInfo +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{TableException, TableSchema, Types} import org.apache.flink.table.utils.{TableTestBase, 
TestFilterableTableSourceWithoutExplainSourceOverride, TestProjectableTableSourceWithoutExplainSourceOverride} @@ -41,7 +42,7 @@ class TableSourceValidationTest extends TableTestBase { Array("id", "name", "val", "rtime")) val util = streamTestUtil() - util.tableEnv.registerTableSource( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( "T", new TestProjectableTableSourceWithoutExplainSourceOverride( tableSchema, returnType, Seq(), "rtime", "ptime")) @@ -59,7 +60,8 @@ class TableSourceValidationTest extends TableTestBase { val tableSource = TestFilterableTableSourceWithoutExplainSourceOverride() val util = batchTestUtil() - util.tableEnv.registerTableSource("T", tableSource) + util.tableEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSourceInternal("T", tableSource) val t = util.tableEnv .scan("T") diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/validation/TableSinksValidationTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/validation/TableSinksValidationTest.scala index 864e7e1b424cf..e72811e0bdb2a 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/validation/TableSinksValidationTest.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/validation/TableSinksValidationTest.scala @@ -20,6 +20,7 @@ package org.apache.flink.table.api.validation import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.scala._ +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{TableException, Types, ValidationException} import org.apache.flink.table.runtime.stream.table.TestAppendSink @@ -34,7 +35,8 @@ class TableSinksValidationTest extends TableTestBase { val util = streamTestUtil() val t = util.addTable[(Int, Long, String)]("MyTable", 'id, 'num, 'text) - util.tableEnv.registerTableSink("testSink", new TestAppendSink) + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "testSink", new TestAppendSink) t.groupBy('text) .select('text, 'id.count, 'num.sum) @@ -50,7 +52,7 @@ class TableSinksValidationTest extends TableTestBase { val fieldNames = Array("a", "b", "c") val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.INT, Types.LONG) // table name already registered - util.tableEnv.registerTableSink( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "TargetTable", new UnsafeMemoryAppendTableSink().configure(fieldNames, fieldTypes)) } @@ -63,7 +65,7 @@ class TableSinksValidationTest extends TableTestBase { val fieldNames = Array("a", "b", "c") val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.LONG) - util.tableEnv.registerTableSink( + util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "TargetTable", new UnsafeMemoryAppendTableSink().configure(fieldNames, fieldTypes)) } diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/validation/TableSourceValidationTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/validation/TableSourceValidationTest.scala index 5b96e73dc650d..b7a816a22c07d 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/validation/TableSourceValidationTest.scala +++ 
b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/validation/TableSourceValidationTest.scala @@ -29,12 +29,12 @@ import org.apache.flink.table.sources.tsextractors.ExistingField import org.apache.flink.table.sources.wmstrategies.AscendingTimestamps import org.apache.flink.table.utils.{TableTestBase, TestTableSourceWithTime} import org.apache.flink.types.Row - import org.junit.Test - import java.util import java.util.Collections +import org.apache.flink.table.api.internal.TableEnvironmentInternal + class TableSourceValidationTest extends TableTestBase{ val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment @@ -53,7 +53,7 @@ class TableSourceValidationTest extends TableTestBase{ val ts = new TestTableSourceWithTime(schema, rowType, Seq[Row]()) // should fail because schema field "value" cannot be resolved in result type - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } @Test(expected = classOf[ValidationException]) @@ -67,7 +67,7 @@ class TableSourceValidationTest extends TableTestBase{ val ts = new TestTableSourceWithTime(schema, rowType, Seq[Row]()) // should fail because types of "name" fields are different - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } @Test(expected = classOf[ValidationException]) @@ -80,7 +80,7 @@ class TableSourceValidationTest extends TableTestBase{ val ts = new TestTableSourceWithTime(schema, rowType, Seq[Row](), mapping = mapping) // should fail because mapping maps field "id" to unknown field - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } @Test(expected = classOf[ValidationException]) @@ -93,7 +93,7 @@ class TableSourceValidationTest extends TableTestBase{ val ts = new TestTableSourceWithTime(schema, rowType, Seq[Row](), mapping = mapping) // should fail because mapping maps fields with different types - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } @Test(expected = classOf[ValidationException]) @@ -107,7 +107,7 @@ class TableSourceValidationTest extends TableTestBase{ val ts = new TestTableSourceWithTime(schema, rowType, Seq[Row](), proctime = "ptime") // should fail because processing time field has invalid type - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } @Test @@ -129,7 +129,7 @@ class TableSourceValidationTest extends TableTestBase{ val ts = new TestTableSourceWithTime(schema, rowType, Seq[Row](), rowtime = "rowtime") // should fail because rowtime field does not exist in the TableSchema - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } @Test @@ -151,7 +151,7 @@ class TableSourceValidationTest extends TableTestBase{ val ts = new TestTableSourceWithTime(schema, rowType, Seq[Row](), proctime = "proctime") // should fail because proctime field does not exist in the TableSchema - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } @Test(expected = classOf[ValidationException]) @@ -166,7 +166,7 @@ class TableSourceValidationTest extends TableTestBase{ val ts = new TestTableSourceWithTime(schema, 
rowType, Seq[Row](), rowtime = "rtime") // should fail because rowtime field has invalid type - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } @Test(expected = classOf[ValidationException]) @@ -182,7 +182,7 @@ class TableSourceValidationTest extends TableTestBase{ new TestTableSourceWithTime(schema, rowType, Seq[Row](), rowtime = "time", proctime = "time") // should fail because rowtime field has invalid type - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } @Test(expected = classOf[ValidationException]) @@ -206,7 +206,7 @@ class TableSourceValidationTest extends TableTestBase{ } // should fail because timestamp extractor argument field does not exist - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } @Test(expected = classOf[ValidationException]) @@ -221,7 +221,7 @@ class TableSourceValidationTest extends TableTestBase{ val ts = new TestTableSourceWithTime(schema, rowType, Seq[Row](), rowtime = "amount") // should fail because configured rowtime field is not of type Long or Timestamp - tEnv.registerTableSource("testTable", ts) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("testTable", ts) } // CsvTableSource Tests diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/PartitionableSinkITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/PartitionableSinkITCase.scala index 9032ed135eb89..409d23ad46abb 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/PartitionableSinkITCase.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/PartitionableSinkITCase.scala @@ -31,6 +31,7 @@ import org.apache.flink.api.java.operators.DataSink import org.apache.flink.api.java.typeutils.RowTypeInfo import org.apache.flink.api.scala.ExecutionEnvironment import org.apache.flink.configuration.Configuration +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala.BatchTableEnvironment import org.apache.flink.table.api.{DataTypes, TableSchema} import org.apache.flink.table.factories.utils.TestCollectionTableFactory.TestCollectionInputFormat @@ -40,7 +41,6 @@ import org.apache.flink.table.sources.BatchTableSource import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType} import org.apache.flink.test.util.AbstractTestBase import org.apache.flink.types.Row - import org.junit.Assert.assertEquals import org.junit.rules.ExpectedException import org.junit.{Before, Rule, Test} @@ -75,7 +75,8 @@ class PartitionableSinkITCase extends AbstractTestBase { .field("b", DataTypes.BIGINT()) .field("c", DataTypes.STRING()) .build() - tEnv.registerTableSource(name, new CollectionTableSource(data, 100, tableSchema)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + name, new CollectionTableSource(data, 100, tableSchema)) } @Test @@ -121,7 +122,7 @@ class PartitionableSinkITCase extends AbstractTestBase { rowType: RowTypeInfo = type3, partitionColumns: Array[String] = Array[String]("a")): TestSink = { val testSink = new TestSink(rowType, partitionColumns) - tEnv.registerTableSink(tableName, testSink) + 
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(tableName, testSink) testSink } diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/TableEnvironmentITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/TableEnvironmentITCase.scala index bfbd7f4d50b9f..4964a6fbe85ee 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/TableEnvironmentITCase.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/TableEnvironmentITCase.scala @@ -34,19 +34,18 @@ import org.apache.flink.table.utils.{MemoryTableSourceSinkUtil, TestingOverwrita import org.apache.flink.test.util.TestBaseUtils import org.apache.flink.types.Row import org.apache.flink.util.FileUtils - import org.apache.flink.shaded.guava18.com.google.common.collect.Lists - import org.junit.Assert.{assertEquals, assertFalse, assertTrue, fail} import org.junit._ import org.junit.rules.TemporaryFolder import org.junit.runner.RunWith import org.junit.runners.Parameterized - import java.io.File import java.lang.{Long => JLong} import java.util +import org.apache.flink.table.api.internal.TableEnvironmentInternal + import scala.collection.JavaConverters._ import scala.io.Source @@ -149,7 +148,8 @@ class TableEnvironmentITCase( val fieldNames = Array("d", "e", "f") val fieldTypes = tEnv.scan("sourceTable").getSchema.getFieldTypes val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) val sql = "INSERT INTO targetTable SELECT a, b, c FROM sourceTable" tEnv.sqlUpdate(sql) @@ -171,7 +171,8 @@ class TableEnvironmentITCase( val fieldNames = Array("d", "e", "f") val fieldTypes = tEnv.scan("sourceTable").getSchema.getFieldTypes val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) val sql = "INSERT INTO targetTable SELECT a, b, c FROM sourceTable" tEnv.sqlUpdate(sql) @@ -211,12 +212,14 @@ class TableEnvironmentITCase( val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env) MemoryTableSourceSinkUtil.clear() - tEnv.registerTableSource("sourceTable", TableEnvironmentITCase.getPersonCsvTableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + "sourceTable", TableEnvironmentITCase.getPersonCsvTableSource) val fieldNames = Array("d", "e", "f", "g") val fieldTypes = tEnv.scan("sourceTable").getSchema.getFieldTypes val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) val sql = "INSERT INTO targetTable SELECT * FROM sourceTable where id > 7" tEnv.sqlUpdate(sql) @@ -254,7 +257,8 @@ class TableEnvironmentITCase( val fieldNames = Array("d", "e", "f") val fieldTypes = tEnv.scan("sourceTable").getSchema.getFieldTypes val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", 
sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) val result = tEnv.sqlQuery("SELECT c, b, a FROM sourceTable").select('a.avg, 'b.sum, 'c.count) val resultFile = _tempFolder.newFile().getAbsolutePath @@ -288,7 +292,8 @@ class TableEnvironmentITCase( val fieldNames = Array("d", "e", "f") val fieldTypes = tEnv.scan("sourceTable").getSchema.getFieldTypes val sink1 = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink1.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink1.configure(fieldNames, fieldTypes)) val tableResult = tEnv.executeSql("INSERT INTO targetTable SELECT a, b, c FROM sourceTable") checkInsertTableResult(tableResult, "default_catalog.default_database.targetTable") @@ -312,7 +317,7 @@ class TableEnvironmentITCase( val sinkPath = resultFile.getAbsolutePath val configuredSink = new TestingOverwritableTableSink(sinkPath) .configure(Array("d"), Array(STRING)) - tEnv.registerTableSink("MySink", configuredSink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", configuredSink) val tableResult1 = tEnv.executeSql("INSERT overwrite MySink SELECT c FROM sourceTable") checkInsertTableResult(tableResult1, "default_catalog.default_database.MySink") @@ -347,7 +352,8 @@ class TableEnvironmentITCase( val fieldNames = Array("d", "e", "f") val fieldTypes = tEnv.scan("sourceTable").getSchema.getFieldTypes val sink1 = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink1.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink1.configure(fieldNames, fieldTypes)) val sink1Path = registerCsvTableSink(tEnv, fieldNames, fieldTypes, "MySink1") assertTrue(FileUtils.readFileUtf8(new File(sink1Path)).isEmpty) @@ -384,7 +390,8 @@ class TableEnvironmentITCase( val fieldNames = Array("d", "e", "f") val fieldTypes = tEnv.scan("sourceTable").getSchema.getFieldTypes val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) val result = tEnv.sqlQuery("SELECT c, b, a FROM sourceTable").select('a.avg, 'b.sum, 'c.count) val resultFile = _tempFolder.newFile().getAbsolutePath @@ -422,7 +429,8 @@ class TableEnvironmentITCase( val fieldNames = Array("d", "e", "f") val fieldTypes = tEnv.scan("sourceTable").getSchema.getFieldTypes val sink1 = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink1.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink1.configure(fieldNames, fieldTypes)) val table = tEnv.sqlQuery("SELECT a, b, c FROM sourceTable") val tableResult = table.executeInsert("targetTable") @@ -447,7 +455,7 @@ class TableEnvironmentITCase( val sinkPath = resultFile.getAbsolutePath val configuredSink = new TestingOverwritableTableSink(sinkPath) .configure(Array("d"), Array(STRING)) - tEnv.registerTableSink("MySink", configuredSink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", configuredSink) val tableResult1 = tEnv.sqlQuery("SELECT c FROM 
sourceTable").executeInsert("MySink", true) checkInsertTableResult(tableResult1, "default_catalog.default_database.MySink") @@ -482,13 +490,15 @@ class TableEnvironmentITCase( val sink1Path = _tempFolder.newFile().getAbsolutePath val configuredSink1 = new TestingOverwritableTableSink(sink1Path) .configure(Array("d", "e", "f"), Array(INT, LONG, STRING)) - tEnv.registerTableSink("MySink1", configuredSink1) + tEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSinkInternal("MySink1", configuredSink1) assertTrue(FileUtils.readFileUtf8(new File(sink1Path)).isEmpty) val sink2Path = _tempFolder.newFile().getAbsolutePath val configuredSink2 = new TestingOverwritableTableSink(sink2Path) .configure(Array("i", "j", "k"), Array(INT, LONG, STRING)) - tEnv.registerTableSink("MySink2", configuredSink2) + tEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSinkInternal("MySink2", configuredSink2) assertTrue(FileUtils.readFileUtf8(new File(sink2Path)).isEmpty) val stmtSet = tEnv.createStatementSet() @@ -531,7 +541,7 @@ class TableEnvironmentITCase( val sinkPath = _tempFolder.newFile().getAbsolutePath val configuredSink = new TestingOverwritableTableSink(sinkPath) .configure(Array("d", "e", "f"), Array(INT, LONG, STRING)) - tEnv.registerTableSink("MySink", configuredSink) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("MySink", configuredSink) assertTrue(FileUtils.readFileUtf8(new File(sinkPath)).isEmpty) val stmtSet = tEnv.createStatementSet() @@ -589,7 +599,8 @@ class TableEnvironmentITCase( val configuredSink = new CsvTableSink(path, ",", 1, WriteMode.OVERWRITE) .configure(fieldNames, fieldTypes) - tEnv.registerTableSink(tableName, configuredSink) + tEnv.asInstanceOf[TableEnvironmentInternal] + .registerTableSinkInternal(tableName, configuredSink) path } diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/TableSourceITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/TableSourceITCase.scala index 2e6821be2c94f..b66013c4432d0 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/TableSourceITCase.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/TableSourceITCase.scala @@ -19,6 +19,7 @@ package org.apache.flink.table.runtime.batch.sql import org.apache.flink.api.scala.ExecutionEnvironment +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.runtime.utils.{CommonTestData, TableProgramsCollectionTestBase} import org.apache.flink.table.runtime.utils.TableProgramsTestBase.TableConfigMode @@ -42,7 +43,7 @@ class TableSourceITCase( val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) - tEnv.registerTableSource("csvTable", csvTable) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("csvTable", csvTable) val results = tEnv.sqlQuery( "SELECT id, `first`, `last`, score FROM csvTable").collect() @@ -66,7 +67,7 @@ class TableSourceITCase( val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) - tEnv.registerTableSource("csvTable", csvTable) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("csvTable", csvTable) val results = tEnv.sqlQuery( "SELECT id, `first`, `last`, score FROM csvTable").collect() @@ -83,7 +84,8 @@ class TableSourceITCase( 
     val tableEnv = BatchTableEnvironment.create(env, config)

     val nestedTable = CommonTestData.getNestedTableSource
-    tableEnv.registerTableSource("NestedPersons", nestedTable)
+    tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
+      "NestedPersons", nestedTable)

     val result = tableEnv.sqlQuery("SELECT NestedPersons.firstName, NestedPersons.lastName," +
       "NestedPersons.address.street, NestedPersons.address.city AS city " +
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableEnvironmentITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableEnvironmentITCase.scala
index 73aee33b15402..f7861cc1ace38 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableEnvironmentITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableEnvironmentITCase.scala
@@ -22,6 +22,7 @@ import java.util

 import org.apache.flink.api.scala._
 import org.apache.flink.api.scala.util.CollectionDataSets
+import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.api.scala._
 import org.apache.flink.table.runtime.utils.TableProgramsTestBase.TableConfigMode
 import org.apache.flink.table.runtime.utils.{TableProgramsCollectionTestBase, TableProgramsTestBase}
@@ -193,7 +194,8 @@ class TableEnvironmentITCase(
     val fieldNames = Array("d", "e", "f")
     val fieldTypes = tEnv.scan("sourceTable").getSchema.getFieldTypes
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes))
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable", sink.configure(fieldNames, fieldTypes))

     tEnv.scan("sourceTable")
       .select('a, 'b, 'c)
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableSinkITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableSinkITCase.scala
index 8043e80abd6f7..f198799d16d3b 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableSinkITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableSinkITCase.scala
@@ -19,10 +19,12 @@ package org.apache.flink.table.runtime.batch.table

 import java.io.File
+
 import org.apache.flink.api.common.typeinfo.TypeInformation
 import org.apache.flink.api.scala.util.CollectionDataSets
 import org.apache.flink.api.scala.{ExecutionEnvironment, _}
 import org.apache.flink.table.api.Types
+import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.api.scala._
 import org.apache.flink.table.runtime.utils.TableProgramsCollectionTestBase
 import org.apache.flink.table.runtime.utils.TableProgramsTestBase.TableConfigMode
@@ -52,7 +54,7 @@ class TableSinkITCase(
     val tEnv = BatchTableEnvironment.create(env, config)
     env.setParallelism(4)

-    tEnv.registerTableSink(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
       "testSink",
       new CsvTableSink(path, "|").configure(
         Array[String]("c", "b"), Array[TypeInformation[_]](Types.STRING, Types.LONG)))
@@ -83,7 +85,8 @@ class TableSinkITCase(
     val fieldNames = Array("c", "b")
     val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.LONG)
     val sink = new UnsafeMemoryOutputFormatTableSink
-    tEnv.registerTableSink("testSink", sink.configure(fieldNames, fieldTypes))
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "testSink", sink.configure(fieldNames, fieldTypes))

     val input = CollectionDataSets.get3TupleDataSet(env)
       .map(x => x).setParallelism(4) // increase DOP to 4
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableSourceITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableSourceITCase.scala
index 6ce14fc4d991c..c43900bfebec8 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableSourceITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableSourceITCase.scala
@@ -26,6 +26,7 @@ import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, SqlTimeTypeInfo, Typ
 import org.apache.flink.api.java.typeutils.{GenericTypeInfo, RowTypeInfo}
 import org.apache.flink.api.java.{DataSet, ExecutionEnvironment => JExecEnv}
 import org.apache.flink.api.scala.ExecutionEnvironment
+import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.api.scala._
 import org.apache.flink.table.api.{TableException, TableSchema, Tumble, Types}
 import org.apache.flink.table.runtime.utils.TableProgramsTestBase.TableConfigMode
@@ -63,7 +64,7 @@ class TableSourceITCase(
       override def getReturnType: TypeInformation[Row] = new RowTypeInfo(fieldTypes, fieldNames)
       override def getTableSchema: TableSchema = new TableSchema(fieldNames, fieldTypes)
     }
-    tEnv.registerTableSource("T", tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("T", tableSource)

     tEnv.scan("T")
       .select('value, 'name)
@@ -91,7 +92,7 @@ class TableSourceITCase(
       fieldNames)

     val tableSource = new TestInputFormatTableSource(schema, rowType, data)
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)

     val results = tEnv.scan(tableName)
       .groupBy('name)
@@ -112,7 +113,7 @@ class TableSourceITCase(
     val env = ExecutionEnvironment.getExecutionEnvironment
     val tEnv = BatchTableEnvironment.create(env, config)

-    tEnv.registerTableSource("csvTable", csvTable)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("csvTable", csvTable)

     val results = tEnv
       .scan("csvTable")
@@ -134,7 +135,8 @@ class TableSourceITCase(
     val tableName = "MyTable"
     val env = ExecutionEnvironment.getExecutionEnvironment
     val tableEnv = BatchTableEnvironment.create(env, config)
-    tableEnv.registerTableSource(tableName, TestFilterableTableSource())
+    tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
+      tableName, TestFilterableTableSource())
     val results = tableEnv
       .scan(tableName)
       .where($"amount" > 4 && $"price" < 9)
@@ -165,7 +167,7 @@ class TableSourceITCase(
       fieldNames)

     val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", null)
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)

     val results = tEnv.scan(tableName)
       .window(Tumble over 1.second on 'rtime as 'w)
@@ -201,7 +203,7 @@ class TableSourceITCase(
       fieldNames)

     val tableSource = new TestTableSourceWithTime(schema, rowType, data, null, "ptime")
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)

     val results = tEnv.scan(tableName)
       .where('ptime.cast(Types.LONG) > 0L)
@@ -237,7 +239,7 @@ class TableSourceITCase(
       fieldNames)

     val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", "ptime")
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)

     val results = tEnv.scan(tableName)
       .window(Tumble over 1.second on 'rtime as 'w)
@@ -271,7 +273,7 @@ class TableSourceITCase(
       fieldNames)

     val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", null)
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)

     val results = tEnv.scan(tableName)
       .window(Tumble over 1.second on 'rtime as 'w)
@@ -308,7 +310,8 @@ class TableSourceITCase(
         |where date_val >= DATE '2017-04-24' and date_val < DATE '2017-04-26'
       """.stripMargin
     val tableSource = TestFilterableTableSource(rowTypeInfo, rows, Set("date_val"))
-    tableEnv.registerTableSource(tableName, tableSource)
+    tableEnv.asInstanceOf[TableEnvironmentInternal]
+      .registerTableSourceInternal(tableName, tableSource)
     val results = tableEnv
       .sqlQuery(query)
       .collect()
@@ -329,7 +332,7 @@ class TableSourceITCase(
     val returnType = Types.LONG

     val tableSource = new TestTableSourceWithTime(schema, returnType, data, "rtime", null)
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)

     val results = tEnv.scan(tableName)
       .window(Tumble over 1.second on 'rtime as 'w)
@@ -356,7 +359,7 @@ class TableSourceITCase(
     val returnType = Types.STRING

     val tableSource = new TestTableSourceWithTime(schema, returnType, data, null, "ptime")
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)

     val results = tEnv.scan(tableName)
       .where('ptime.cast(Types.LONG) > 1)
@@ -381,7 +384,7 @@ class TableSourceITCase(
     val returnType = Types.LONG

     val tableSource = new TestTableSourceWithTime(schema, returnType, data, "rtime", "ptime")
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)

     val results = tEnv.scan(tableName)
       .where('ptime.cast(Types.LONG) > 1)
@@ -416,7 +419,7 @@ class TableSourceITCase(
     val mapping = Map("amount" -> "f2", "name" -> "f0", "rtime" -> "f1")

     val source = new TestTableSourceWithTime(schema, returnType, data, "rtime", "ptime", mapping)
-    tEnv.registerTableSource(tableName, source)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, source)

     val results = tEnv.scan(tableName)
       .window(Tumble over 1.second on 'rtime as 'w)
@@ -450,7 +453,7 @@ class TableSourceITCase(
         .asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "name", "val", "rtime"))

-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))

@@ -485,7 +488,7 @@ class TableSourceITCase(
         .asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "name", "val", "rtime"))

-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))

@@ -520,7 +523,7 @@ class TableSourceITCase(
         .asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "name", "val", "rtime"))

-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))

@@ -556,7 +559,7 @@ class TableSourceITCase(
         .asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "rtime", "val", "name"))

-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))

@@ -588,7 +591,7 @@ class TableSourceITCase(
         .asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "rtime", "val", "name"))

-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))

@@ -624,7 +627,7 @@ class TableSourceITCase(
       Array("p-rtime", "p-id", "p-name", "p-val"))
     val mapping = Map("rtime" -> "p-rtime", "id" -> "p-id", "val" -> "p-val", "name" -> "p-name")

-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime", mapping))

@@ -689,7 +692,7 @@ class TableSourceITCase(
       Array(Types.LONG, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "deepNested", "nested", "name"))

-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestNestedProjectableTableSource(tableSchema, returnType, data))

@@ -733,7 +736,8 @@ class TableSourceITCase(
         |where time_val >= TIME '11:45:00' and time_val < TIME '12:14:23'
       """.stripMargin
     val tableSource = TestFilterableTableSource(rowTypeInfo, rows, Set("time_val"))
-    tableEnv.registerTableSource(tableName, tableSource)
+    tableEnv.asInstanceOf[TableEnvironmentInternal]
+      .registerTableSourceInternal(tableName, tableSource)
     val results = tableEnv
       .sqlQuery(query)
       .collect()
@@ -766,7 +770,8 @@ class TableSourceITCase(
         |where ts >= TIMESTAMP '2017-07-12 11:45:00' and ts < TIMESTAMP '2017-07-14 12:14:23'
       """.stripMargin
     val tableSource = TestFilterableTableSource(rowTypeInfo, rows, Set("ts"))
-    tableEnv.registerTableSource(tableName, tableSource)
+    tableEnv.asInstanceOf[TableEnvironmentInternal]
+      .registerTableSourceInternal(tableName, tableSource)
     val results = tableEnv
       .sqlQuery(query)
       .collect()
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/TimeAttributesITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/TimeAttributesITCase.scala
index b61274ce066a3..07871f6fd60e4 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/TimeAttributesITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/TimeAttributesITCase.scala
@@ -35,14 +35,14 @@ import org.apache.flink.table.runtime.utils.StreamITCase
 import org.apache.flink.table.utils.{MemoryTableSourceSinkUtil, TestTableSourceWithTime}
 import org.apache.flink.test.util.AbstractTestBase
 import org.apache.flink.types.Row
-
 import org.junit.Assert._
 import org.junit.{Before, Test}
-
 import java.lang.{Integer => JInt, Long => JLong}
 import java.math.BigDecimal
 import java.sql.Timestamp

+import org.apache.flink.table.api.internal.TableEnvironmentInternal
+
 import scala.collection.JavaConverters._
 import scala.collection.mutable

@@ -176,7 +176,7 @@ class TimeAttributesITCase extends AbstractTestBase {
   def testTableSink(): Unit = {
     MemoryTableSourceSinkUtil.clear()

-    tEnv.registerTableSink(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
       "testSink",
       (new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink).configure(
         Array[String]("rowtime", "floorDay", "ceilDay"),
@@ -549,7 +549,8 @@ class TimeAttributesITCase extends AbstractTestBase {
       fieldNames)

     val tableSource = new TestTableSourceWithTime(schema, rowType, rows, "rowtime", "proctime")
-    tEnv.registerTableSource("testTable", tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal]
+      .registerTableSourceInternal("testTable", tableSource)

     val result = tEnv
       .scan("testTable")
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/InsertIntoITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/InsertIntoITCase.scala
index 828a25e82bb3e..a820f9207a74d 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/InsertIntoITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/InsertIntoITCase.scala
@@ -21,13 +21,13 @@ package org.apache.flink.table.runtime.stream.sql

 import org.apache.flink.api.common.typeinfo.TypeInformation
 import org.apache.flink.streaming.api.TimeCharacteristic
 import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
+import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.api.scala._
 import org.apache.flink.table.api.{EnvironmentSettings, Types}
 import org.apache.flink.table.runtime.stream.table.{RowCollector, TestRetractSink, TestUpsertSink}
 import org.apache.flink.table.runtime.utils.{StreamTestData, StreamingWithStateTestBase}
 import org.apache.flink.table.utils.MemoryTableSourceSinkUtil
 import org.apache.flink.test.util.TestBaseUtils
-
 import org.junit.Assert._
 import org.junit.Test

@@ -54,7 +54,8 @@ class InsertIntoITCase extends StreamingWithStateTestBase {
     val fieldTypes: Array[TypeInformation[_]] =
       Array(Types.STRING, Types.SQL_TIMESTAMP, Types.LONG)
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes))
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable", sink.configure(fieldNames, fieldTypes))

     tEnv.sqlUpdate(
       s"""INSERT INTO targetTable
@@ -80,7 +81,7 @@ class InsertIntoITCase extends StreamingWithStateTestBase {
       .assignAscendingTimestamps(_._1.toLong)
     tEnv.createTemporaryView("sourceTable", t, 'id, 'num, 'text)

-    tEnv.registerTableSink(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
       "targetTable",
       new TestRetractSink().configure(
         Array("len", "cntid", "sumnum"),
@@ -115,7 +116,7 @@ class InsertIntoITCase extends StreamingWithStateTestBase {
       .assignAscendingTimestamps(_._1.toLong)
     tEnv.createTemporaryView("sourceTable", t, 'id, 'num, 'text, 'rowtime.rowtime)

-    tEnv.registerTableSink(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
       "targetTable",
       new TestRetractSink().configure(
         Array("wend", "cntid", "sumnum"),
@@ -158,7 +159,7 @@ class InsertIntoITCase extends StreamingWithStateTestBase {
       .assignAscendingTimestamps(_._1.toLong)
     tEnv.createTemporaryView("sourceTable", t, 'id, 'num, 'text)

-    tEnv.registerTableSink(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
       "targetTable",
       new TestUpsertSink(Array("cnt", "cTrue"), false).configure(
         Array("cnt", "cntid", "cTrue"),
@@ -200,7 +201,7 @@ class InsertIntoITCase extends StreamingWithStateTestBase {
       .assignAscendingTimestamps(_._1.toLong)
     tEnv.createTemporaryView("sourceTable", t, 'id, 'num, 'text, 'rowtime.rowtime)

-    tEnv.registerTableSink(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
       "targetTable",
       new TestUpsertSink(Array("wend", "num"), true).configure(
         Array("num", "wend", "cntid"),
@@ -246,7 +247,7 @@ class InsertIntoITCase extends StreamingWithStateTestBase {
       .assignAscendingTimestamps(_._1.toLong)
     tEnv.createTemporaryView("sourceTable", t, 'id, 'num, 'text, 'rowtime.rowtime)

-    tEnv.registerTableSink(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
       "targetTable",
       new TestUpsertSink(Array("wstart", "wend", "num"), true).configure(
         Array("wstart", "wend", "num", "cntid"),
@@ -293,7 +294,7 @@ class InsertIntoITCase extends StreamingWithStateTestBase {
       .assignAscendingTimestamps(_._1.toLong)
     tEnv.createTemporaryView("sourceTable", t, 'id, 'num, 'text, 'rowtime.rowtime)

-    tEnv.registerTableSink(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
       "targetTable",
       new TestUpsertSink(null, true).configure(
         Array("wend", "cntid"),
@@ -338,7 +339,7 @@ class InsertIntoITCase extends StreamingWithStateTestBase {
       .assignAscendingTimestamps(_._1.toLong)
     tEnv.createTemporaryView("sourceTable", t, 'id, 'num, 'text, 'rowtime.rowtime)

-    tEnv.registerTableSink(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
       "targetTable",
       new TestUpsertSink(null, true).configure(
         Array("num", "cntid"),
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/SortITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/SortITCase.scala
index 24c226bd113c5..1e038b689371f 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/SortITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/SortITCase.scala
@@ -23,6 +23,7 @@ import org.apache.flink.api.scala._
 import org.apache.flink.streaming.api.TimeCharacteristic
 import org.apache.flink.streaming.api.functions.sink.RichSinkFunction
 import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
+import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.api.{EnvironmentSettings, Types}
 import org.apache.flink.table.api.scala._
 import org.apache.flink.table.runtime.utils.TimeTestUtil.EventTimeSourceFunction
@@ -30,7 +31,6 @@ import org.apache.flink.table.runtime.stream.sql.SortITCase.StringRowSelectorSin
 import org.apache.flink.table.runtime.utils.{StreamITCase, StreamTestData, StreamingWithStateTestBase}
 import org.apache.flink.table.utils.MemoryTableSourceSinkUtil
 import org.apache.flink.types.Row
-
 import org.junit.Assert._
 import org.junit._

@@ -127,7 +127,8 @@ class SortITCase extends StreamingWithStateTestBase {
     val fieldTypes = Array(Types.INT, Types.LONG, Types.STRING, Types.SQL_TIMESTAMP)
       .asInstanceOf[Array[TypeInformation[_]]]
     val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink
-    tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes))
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "targetTable", sink.configure(fieldNames, fieldTypes))

     val sql = "INSERT INTO targetTable SELECT a, b, c, rowtime " +
       "FROM sourceTable ORDER BY rowtime, a desc"
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/SqlITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/SqlITCase.scala
index fcc4ed94be586..318e528601ba9 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/SqlITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/SqlITCase.scala
@@ -25,6 +25,7 @@ import org.apache.flink.streaming.api.TimeCharacteristic
 import org.apache.flink.streaming.api.functions.AssignerWithPunctuatedWatermarks
 import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
 import org.apache.flink.streaming.api.watermark.Watermark
+import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.api.scala._
 import org.apache.flink.table.api.{EnvironmentSettings, Types}
 import org.apache.flink.table.descriptors.{Rowtime, Schema}
@@ -35,7 +36,6 @@ import org.apache.flink.table.runtime.utils.TimeTestUtil.EventTimeSourceFunction
 import org.apache.flink.table.runtime.utils.{JavaUserDefinedTableFunctions, StreamITCase, StreamTestData, StreamingWithStateTestBase}
 import org.apache.flink.table.utils.{InMemoryTableFactory, MemoryTableSourceSinkUtil}
 import org.apache.flink.types.Row
-
 import org.junit.Assert._
 import org.junit._

@@ -769,9 +769,9 @@ class SqlITCase extends StreamingWithStateTestBase {
       .toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
     tEnv.registerTable("sourceTable", t)

-    tEnv.registerTableSource("targetTable",
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("targetTable",
       new InMemoryTableFactory(3).createStreamTableSource(properties))
-    tEnv.registerTableSink("targetTable",
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("targetTable",
      new InMemoryTableFactory(3).createStreamTableSink(properties))

     tEnv.sqlUpdate("INSERT INTO targetTable SELECT a, b, c, rowtime FROM sourceTable")
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/TableSourceITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/TableSourceITCase.scala
index 07fc66073e477..63eff0311942b 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/TableSourceITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/sql/TableSourceITCase.scala
@@ -21,11 +21,11 @@ package org.apache.flink.table.runtime.stream.sql

 import org.apache.flink.api.scala._
 import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
 import org.apache.flink.table.api.EnvironmentSettings
+import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.api.scala._
 import org.apache.flink.table.runtime.utils.{CommonTestData, StreamITCase}
 import org.apache.flink.test.util.AbstractTestBase
 import org.apache.flink.types.Row
-
 import org.junit.Assert._
 import org.junit.Test

@@ -41,7 +41,7 @@ class TableSourceITCase extends AbstractTestBase {
     val tEnv = StreamTableEnvironment.create(env, settings)
     StreamITCase.testResults = mutable.MutableList()

-    tEnv.registerTableSource("persons", csvTable)
+
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("persons", csvTable) tEnv.sqlQuery( "SELECT id, `first`, `last`, score FROM persons WHERE id < 4 ") diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/AggregateITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/AggregateITCase.scala index 28976797d8145..1abd87f188543 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/AggregateITCase.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/AggregateITCase.scala @@ -21,6 +21,7 @@ package org.apache.flink.table.runtime.stream.table import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.scala._ import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{EnvironmentSettings, Types} import org.apache.flink.table.runtime.utils.JavaUserDefinedAggFunctions.{CountDistinct, DataViewTestAgg, WeightedAvg} @@ -28,7 +29,6 @@ import org.apache.flink.table.runtime.utils.StreamITCase.RetractingSink import org.apache.flink.table.runtime.utils.{JavaUserDefinedAggFunctions, StreamITCase, StreamTestData, StreamingWithStateTestBase} import org.apache.flink.table.utils.CountMinMax import org.apache.flink.types.Row - import org.junit.Assert.assertEquals import org.junit.{Before, Test} @@ -269,7 +269,7 @@ class AggregateITCase extends StreamingWithStateTestBase { data.+=((4, 3L, "C")) data.+=((5, 3L, "C")) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "testSink", new TestUpsertSink(Array("c"), false).configure( Array[String]("c", "bMax"), Array[TypeInformation[_]](Types.STRING, Types.LONG))) diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/JoinITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/JoinITCase.scala index bad08839f86e9..b5d2abac56bd4 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/JoinITCase.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/JoinITCase.scala @@ -21,6 +21,7 @@ package org.apache.flink.table.runtime.stream.table import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.scala._ import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{EnvironmentSettings, Tumble, Types} import org.apache.flink.table.expressions.utils.Func20 @@ -28,7 +29,6 @@ import org.apache.flink.table.functions.aggfunctions.CountAggFunction import org.apache.flink.table.runtime.utils.JavaUserDefinedAggFunctions.{CountDistinct, WeightedAvg} import org.apache.flink.table.runtime.utils.{StreamITCase, StreamTestData, StreamingWithStateTestBase} import org.apache.flink.types.Row - import org.junit.Assert._ import org.junit.{Before, Test} @@ -83,7 +83,7 @@ class JoinITCase extends StreamingWithStateTestBase { val leftTable = env.fromCollection(data1).toTable(tEnv, 'a, 'b) val rightTable = env.fromCollection(data2).toTable(tEnv, 'bb, 'c) - tEnv.registerTableSink( + 
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink", new TestUpsertSink(Array("a,b"), false).configure( Array[String]("a", "b", "c"), @@ -142,7 +142,7 @@ class JoinITCase extends StreamingWithStateTestBase { val leftTable = env.fromCollection(data1).toTable(tEnv, 'a, 'b) val rightTable = env.fromCollection(data2).toTable(tEnv, 'bb, 'c, 'd) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "retractSink", new TestRetractSink().configure( Array[String]("a", "b", "c", "d"), diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/TableSinkITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/TableSinkITCase.scala index 9971a0f9d290e..2ecc4ad574664 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/TableSinkITCase.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/TableSinkITCase.scala @@ -36,13 +36,13 @@ import org.apache.flink.table.utils.MemoryTableSourceSinkUtil import org.apache.flink.test.util.{AbstractTestBase, TestBaseUtils} import org.apache.flink.types.Row import org.apache.flink.util.Collector - import org.junit.Assert._ import org.junit.{Before, Test} - import java.io.File import java.lang.{Boolean => JBool} +import org.apache.flink.table.api.internal.TableEnvironmentInternal + import scala.collection.JavaConverters._ import scala.collection.mutable @@ -68,7 +68,8 @@ class TableSinkITCase extends AbstractTestBase { val fieldNames = Array("d", "e", "t") val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.SQL_TIMESTAMP, Types.LONG) val sink = new MemoryTableSourceSinkUtil.UnsafeMemoryAppendTableSink - tEnv.registerTableSink("targetTable", sink.configure(fieldNames, fieldTypes)) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( + "targetTable", sink.configure(fieldNames, fieldTypes)) input.toTable(tEnv, 'a, 'b, 'c, 't.rowtime) .where('a < 3 || 'a > 19) @@ -99,7 +100,7 @@ class TableSinkITCase extends AbstractTestBase { val settings = EnvironmentSettings.newInstance().useOldPlanner().build() val tEnv = StreamTableEnvironment.create(env, settings) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "csvSink", new CsvTableSink(path).configure( Array[String]("nullableCol", "c", "b"), @@ -136,7 +137,7 @@ class TableSinkITCase extends AbstractTestBase { .assignAscendingTimestamps(_._1.toLong) .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "appendSink", new TestAppendSink().configure( Array[String]("t", "icnt", "nsum"), @@ -165,7 +166,7 @@ class TableSinkITCase extends AbstractTestBase { val ds1 = StreamTestData.getSmall3TupleDataStream(env).toTable(tEnv, 'a, 'b, 'c) val ds2 = StreamTestData.get5TupleDataStream(env).toTable(tEnv, 'd, 'e, 'f, 'g, 'h) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "appendSink", new TestAppendSink().configure( Array[String]("c", "g"), @@ -188,7 +189,7 @@ class TableSinkITCase extends AbstractTestBase { .assignAscendingTimestamps(_._1.toLong) .toTable(tEnv, 'id, 'num, 'text) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "retractSink", new TestRetractSink().configure( Array[String]("len", 
"icnt", "nsum"), @@ -221,7 +222,7 @@ class TableSinkITCase extends AbstractTestBase { .assignAscendingTimestamps(_._1.toLong) .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "retractSink", new TestRetractSink().configure( Array[String]("t", "icnt", "nsum"), @@ -257,7 +258,7 @@ class TableSinkITCase extends AbstractTestBase { .assignAscendingTimestamps(_._1.toLong) .toTable(tEnv, 'id, 'num, 'text) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink", new TestUpsertSink(Array("cnt", "cTrue"), false).configure( Array[String]("cnt", "lencnt", "cTrue"), @@ -294,7 +295,7 @@ class TableSinkITCase extends AbstractTestBase { .assignAscendingTimestamps(_._1.toLong) .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink", new TestUpsertSink(Array("wend", "num"), true).configure( Array[String]("num", "wend", "icnt"), @@ -334,7 +335,7 @@ class TableSinkITCase extends AbstractTestBase { .assignAscendingTimestamps(_._1.toLong) .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink", new TestUpsertSink(Array("wstart", "wend", "num"), true).configure( Array[String]("wstart", "wend", "num", "icnt"), @@ -374,7 +375,7 @@ class TableSinkITCase extends AbstractTestBase { .assignAscendingTimestamps(_._1.toLong) .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink", new TestUpsertSink(null, true).configure( Array[String]("wend", "cnt"), @@ -413,7 +414,7 @@ class TableSinkITCase extends AbstractTestBase { .assignAscendingTimestamps(_._1.toLong) .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime) - tEnv.registerTableSink( + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink", new TestUpsertSink(null, true).configure( Array[String]("num", "cnt"), diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/TableSourceITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/TableSourceITCase.scala index 6b011af27872f..70608049e0a4e 100644 --- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/TableSourceITCase.scala +++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/TableSourceITCase.scala @@ -19,6 +19,7 @@ package org.apache.flink.table.runtime.stream.table import java.lang.{Boolean => JBool, Integer => JInt, Long => JLong} + import org.apache.calcite.runtime.SqlFunctions.{internalToTimestamp => toTimestamp} import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.typeutils.{GenericTypeInfo, RowTypeInfo} @@ -28,6 +29,7 @@ import org.apache.flink.streaming.api.datastream.DataStream import org.apache.flink.streaming.api.environment.{StreamExecutionEnvironment => JExecEnv} import org.apache.flink.streaming.api.functions.ProcessFunction import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment +import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.api.scala._ import org.apache.flink.table.api.{EnvironmentSettings, TableException, TableSchema, 
Tumble, Types} import org.apache.flink.table.runtime.utils.{CommonTestData, StreamITCase} @@ -36,7 +38,6 @@ import org.apache.flink.table.utils._ import org.apache.flink.test.util.AbstractTestBase import org.apache.flink.types.Row import org.apache.flink.util.Collector - import org.junit.Assert._ import org.junit.{Before, Test} @@ -69,7 +70,7 @@ class TableSourceITCase extends AbstractTestBase { override def getReturnType: TypeInformation[Row] = new RowTypeInfo(fieldTypes, fieldNames) override def getTableSchema: TableSchema = new TableSchema(fieldNames, fieldTypes) } - tEnv.registerTableSource("T", tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("T", tableSource) tEnv.scan("T") .select('value, 'name) @@ -110,7 +111,7 @@ class TableSourceITCase extends AbstractTestBase { val settings = EnvironmentSettings.newInstance().useOldPlanner().build() val tEnv = StreamTableEnvironment.create(env, settings) - tEnv.registerTableSource("csvTable", csvTable) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("csvTable", csvTable) tEnv.scan("csvTable") .where('id > 4) .select('last, 'score * 2) @@ -134,7 +135,8 @@ class TableSourceITCase extends AbstractTestBase { val settings = EnvironmentSettings.newInstance().useOldPlanner().build() val tEnv = StreamTableEnvironment.create(env, settings) - tEnv.registerTableSource(tableName, TestFilterableTableSource()) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal( + tableName, TestFilterableTableSource()) tEnv.scan(tableName) .where($"amount" > 4 && $"price" < 9) .select($"id", $"name") @@ -167,7 +169,7 @@ class TableSourceITCase extends AbstractTestBase { fieldNames) val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", null) - tEnv.registerTableSource(tableName, tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource) tEnv.scan(tableName) .window(Tumble over 1.second on 'rtime as 'w) @@ -206,7 +208,7 @@ class TableSourceITCase extends AbstractTestBase { fieldNames) val tableSource = new TestTableSourceWithTime(schema, rowType, data, null, "ptime") - tEnv.registerTableSource(tableName, tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource) tEnv.scan(tableName) .where('ptime.cast(Types.LONG) > 0L) @@ -245,7 +247,7 @@ class TableSourceITCase extends AbstractTestBase { fieldNames) val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", "ptime") - tEnv.registerTableSource(tableName, tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource) tEnv.scan(tableName) .window(Tumble over 1.second on 'rtime as 'w) @@ -282,7 +284,7 @@ class TableSourceITCase extends AbstractTestBase { fieldNames) val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", null) - tEnv.registerTableSource(tableName, tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource) tEnv.scan(tableName) .window(Tumble over 1.second on 'rtime as 'w) @@ -312,7 +314,7 @@ class TableSourceITCase extends AbstractTestBase { val returnType = Types.LONG val tableSource = new TestTableSourceWithTime(schema, returnType, data, "rtime", null) - tEnv.registerTableSource(tableName, tableSource) + tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource) tEnv.scan(tableName) .window(Tumble over 1.second on 'rtime as 'w) 
@@ -347,7 +349,7 @@ class TableSourceITCase extends AbstractTestBase {
     val returnType = Types.STRING
     val tableSource = new TestTableSourceWithTime(schema, returnType, data, "rtime", null)
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
 
     tEnv.scan(tableName)
       .window(Tumble over 1.second on 'rtime as 'w)
@@ -378,7 +380,7 @@ class TableSourceITCase extends AbstractTestBase {
     val returnType = Types.STRING
     val tableSource = new TestTableSourceWithTime(schema, returnType, data, null, "ptime")
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
 
     tEnv.scan(tableName)
       .where('ptime.cast(Types.LONG) > 1)
@@ -406,7 +408,7 @@ class TableSourceITCase extends AbstractTestBase {
     val returnType = Types.LONG
     val tableSource = new TestTableSourceWithTime(schema, returnType, data, "rtime", "ptime")
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
 
     tEnv.scan(tableName)
       .where('ptime.cast(Types.LONG) > 1)
@@ -444,7 +446,7 @@ class TableSourceITCase extends AbstractTestBase {
     val mapping = Map("amount" -> "f2", "name" -> "f0", "rtime" -> "f1")
     val source = new TestTableSourceWithTime(schema, returnType, data, "rtime", "ptime", mapping)
-    tEnv.registerTableSource(tableName, source)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, source)
 
     tEnv.scan(tableName)
       .window(Tumble over 1.second on 'rtime as 'w)
@@ -481,7 +483,7 @@ class TableSourceITCase extends AbstractTestBase {
         .asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "name", "val", "rtime"))
 
-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
 
@@ -519,7 +521,7 @@ class TableSourceITCase extends AbstractTestBase {
         .asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "name", "val", "rtime"))
 
-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
 
@@ -557,7 +559,7 @@ class TableSourceITCase extends AbstractTestBase {
         .asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "name", "val", "rtime"))
 
-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
 
@@ -595,7 +597,7 @@ class TableSourceITCase extends AbstractTestBase {
         .asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "rtime", "val", "name"))
 
-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
 
@@ -629,7 +631,7 @@ class TableSourceITCase extends AbstractTestBase {
         .asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "rtime", "val", "name"))
 
-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
 
@@ -668,7 +670,7 @@ class TableSourceITCase extends AbstractTestBase {
       Array("p-rtime", "p-id", "p-name", "p-val"))
     val mapping = Map("rtime" -> "p-rtime", "id" -> "p-id", "val" -> "p-val", "name" -> "p-name")
 
-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime", mapping))
 
@@ -735,7 +737,7 @@ class TableSourceITCase extends AbstractTestBase {
       Array(Types.LONG, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
       Array("id", "deepNested", "nested", "name"))
 
-    tEnv.registerTableSource(
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
       "T",
       new TestNestedProjectableTableSource(tableSchema, returnType, data))
 
@@ -783,7 +785,7 @@ class TableSourceITCase extends AbstractTestBase {
       fieldNames)
     val tableSource = new TestPreserveWMTableSource(schema, rowType, data, "rtime")
-    tEnv.registerTableSource(tableName, tableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
 
     tEnv.scan(tableName)
       .where('rtime.cast(Types.LONG) > 3L)
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/utils/MockTableEnvironment.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/utils/MockTableEnvironment.scala
index 928559d0c895a..6b3adb1429af2 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/utils/MockTableEnvironment.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/utils/MockTableEnvironment.scala
@@ -41,15 +41,6 @@ class MockTableEnvironment extends TableEnvironment {
 
   override def registerTable(name: String, table: Table): Unit = ???
 
-  override def registerTableSource(name: String, tableSource: TableSource[_]): Unit = ???
-
-  override def registerTableSink(
-    name: String,
-    fieldNames: Array[String],
-    fieldTypes: Array[TypeInformation[_]], tableSink: TableSink[_]): Unit = ???
-
-  override def registerTableSink(name: String, configuredSink: TableSink[_]): Unit = ???
-
   override def scan(tablePath: String*): Table = ???
 
   override def connect(connectorDescriptor: ConnectorDescriptor): ConnectTableDescriptor = ???
diff --git a/flink-walkthroughs/flink-walkthrough-table-scala/src/main/resources/archetype-resources/src/main/scala/SpendReport.scala b/flink-walkthroughs/flink-walkthrough-table-scala/src/main/resources/archetype-resources/src/main/scala/SpendReport.scala
index b421bd0b6009f..7289521c79d80 100644
--- a/flink-walkthroughs/flink-walkthrough-table-scala/src/main/resources/archetype-resources/src/main/scala/SpendReport.scala
+++ b/flink-walkthroughs/flink-walkthrough-table-scala/src/main/resources/archetype-resources/src/main/scala/SpendReport.scala
@@ -27,8 +27,10 @@ object SpendReport {
     val env = ExecutionEnvironment.getExecutionEnvironment
     val tEnv = BatchTableEnvironment.create(env)
 
-    tEnv.registerTableSource("transactions", new BoundedTransactionTableSource)
-    tEnv.registerTableSink("spend_report", new SpendReportTableSink)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
+      "transactions", new BoundedTransactionTableSource)
+    tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
+      "spend_report", new SpendReportTableSink)
 
     val truncateDateToHour = new TruncateDateToHour
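
Every hunk above applies the same mechanical rewrite: a call to the registration
method on the public TableEnvironment becomes a downcast to
TableEnvironmentInternal followed by the corresponding *Internal method, plus the
matching import. A minimal, self-contained sketch of the pattern, assuming the
old-planner StreamTableEnvironment and CsvTableSink used elsewhere in this patch
(the object name, output path, and field layout are illustrative only, not part
of the patch):

    import org.apache.flink.api.common.typeinfo.TypeInformation
    import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
    import org.apache.flink.table.api.{EnvironmentSettings, Types}
    import org.apache.flink.table.api.internal.TableEnvironmentInternal
    import org.apache.flink.table.api.scala.StreamTableEnvironment
    import org.apache.flink.table.sinks.CsvTableSink

    object RegistrationMigrationSketch {
      def main(args: Array[String]): Unit = {
        val env = StreamExecutionEnvironment.getExecutionEnvironment
        val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
        val tEnv = StreamTableEnvironment.create(env, settings)

        // A configured sink; the path and fields are hypothetical.
        val sink = new CsvTableSink("/tmp/spend.csv").configure(
          Array[String]("word", "cnt"),
          Array[TypeInformation[_]](Types.STRING, Types.LONG))

        // Before this patch (method removed from the public TableEnvironment):
        //   tEnv.registerTableSink("csvSink", sink)

        // After this patch: downcast to the internal interface instead.
        tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("csvSink", sink)
      }
    }

The source side follows the identical shape with registerTableSourceInternal, as
the hunks above show.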