這篇文章主要講解了“FLINK 1.12 upsertSql怎么使用”,文中的講解內(nèi)容簡(jiǎn)單清晰,易于學(xué)習(xí)與理解,下面請(qǐng)大家跟著小編的思路慢慢深入,一起來研究和學(xué)習(xí)“FLINK 1.12 upsertSql怎么使用”吧!
創(chuàng)新互聯(lián)-云計(jì)算及IDC服務(wù)提供商,涵蓋公有云、IDC機(jī)房租用、成都移動(dòng)云計(jì)算中心、等保安全、私有云建設(shè)等企業(yè)級(jí)互聯(lián)網(wǎng)基礎(chǔ)服務(wù),咨詢電話:18982081108
package com.konka.dsp; import org.apache.flink.api.common.JobExecutionResult; import org.apache.flink.api.common.restartstrategy.RestartStrategies; import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.connector.jdbc.dialect.MySQLDialect; import org.apache.flink.connector.jdbc.internal.options.JdbcOptions; import org.apache.flink.connector.jdbc.table.JdbcUpsertTableSink; import org.apache.flink.formats.json.JsonFormatFactory; import org.apache.flink.formats.json.canal.CanalJsonFormatFactory; import org.apache.flink.shaded.curator4.org.apache.curator.framework.schema.Schema; import org.apache.flink.streaming.api.CheckpointingMode; import org.apache.flink.streaming.api.TimeCharacteristic; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.*; import org.apache.flink.table.api.bridge.java.BatchTableEnvironment; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.table.expressions.TimeIntervalUnit; import org.apache.flink.table.types.DataType; import org.apache.flink.types.Row; import org.apache.flink.util.CloseableIterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.Duration; import java.util.concurrent.ExecutionException; import static org.apache.flink.table.api.Expressions.*; public class SalesOrderStream { private static Logger log = LoggerFactory.getLogger(SalesOrderStream.class.getName()); public static Table report(Table transactions) { return transactions.select( $("customer_name"), $("created_date"), $("total_amount")) .groupBy($("customer_name"),$("created_date")) .select( $("customer_name"), $("total_amount").sum().as("total_amount"), $("created_date") ); } public static void 
main(String[] args) throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment .getExecutionEnvironment(); // env.setParallelism(4); // env.getConfig().setRestartStrategy(RestartStrategies.noRestart()); StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); // set default parallelism to 4 // tEnv.executeSql("CREATE TABLE sales_order_header_stream (\n" + //// " id BIGINT not null,\n" + // " customer_name STRING,\n"+ //// " dsp_org_name STRING,\n"+ // " total_amount DECIMAL(38,2),\n" + //// " total_discount DECIMAL(16,2),\n" + //// " pay_amount DECIMAL(16,2),\n" + //// " total_amount DECIMAL(16,2),\n" + // " created_date TIMESTAMP(3)\n" + // ") WITH (\n" + // " 'connector' = 'mysql-cdc',\n" + // " 'hostname' = '192.168.8.73',\n" + // " 'port' = '4000',\n"+ // " 'username' = 'flink',\n"+ // " 'password' = 'flink',\n"+ // " 'database-name' = 'dspdev',\n"+ // " 'table-name' = 'sales_order_header'\n"+ // ")"); //pay_type,over_sell tEnv.executeSql("CREATE TABLE sales_order_header_stream (\n" + " `id` BIGINT,\n"+ " `total_amount` DECIMAL(16,2) ,\n"+ " `customer_name` STRING,\n"+ " `order_no` STRING,\n"+ " `doc_type` STRING,\n"+ " `sales_org` STRING,\n"+ " `distr_chan` STRING,\n"+ " `division` STRING,\n"+ " `sales_grp` STRING,\n"+ " `sales_off` STRING,\n"+ " `purch_no_c` STRING,\n"+ " `purch_date` STRING,\n"+ " `sold_to` STRING,\n"+ " `ship_to` STRING,\n"+ " `r3_sales_order` STRING,\n"+ " `created_by_employee_name` STRING,\n"+ " `created_by_dept_name` STRING,\n"+ " `created_by_dept_name` STRING,\n"+ " `is_enable` BIGINT,\n"+ " `is_delete` BIGINT,\n"+ " `sale_order_status` STRING,\n"+ " `created_by_parent_dept_name` STRING,\n"+ " `total_discount` DECIMAL(16,2),\n"+ " `customer_sapcode` STRING,\n"+ " `sold_to_name` STRING,\n"+ " `ship_to_name` STRING,\n"+ " `total_discount_amount` DECIMAL(16,2),\n"+ " `other_discount` DECIMAL(16,2),\n"+ " `other_amount` DECIMAL(16,2),\n"+ " `pay_amount` DECIMAL(16,2),\n"+ " `dsp_org_name` STRING,\n"+ " 
`delivery_address` STRING,\n"+ " `delivery_person` STRING,\n"+ " `delivery_phone` STRING,\n"+ " `pay_type` STRING,\n"+ " `over_sell` STRING,\n"+ " `created_date` TIMESTAMP(3),\n"+ " PRIMARY KEY (`id`) NOT ENFORCED "+ ") WITH (\n" + "'connector' = 'kafka',\n"+ "'topic' = 'canal-data',\n"+ "'properties.bootstrap.servers' = '192.168.8.71:9092',\n"+ "'properties.group.id' = 'test',\n"+ "'format' = 'canal-json'\n"+ ")"); // tEnv.executeSql("CREATE TABLE total_day_report (\n" + // " customer_name STRING,\n" + //// " total_amount DECIMAL(16,2),\n" + //// " total_discount DECIMAL(16,2),\n" + //// " pay_amount DECIMAL(16,2),\n" + // " total_amount DECIMAL(16,2),\n" + // " created_date STRING,\n" + // " PRIMARY KEY (created_date) NOT ENFORCED" + // ") WITH (\n" + // " 'connector' = 'upsert-kafka',\n" + // " 'topic' = 'customer_amount',\n" + // " 'properties.bootstrap.servers' = '192.168.8.71:9092',\n"+ // " 'key.format' = 'json',\n"+ // " 'value.format' = 'json',\n"+ // " 'value.fields-include' = 'ALL'\n"+ // ")"); tEnv.executeSql("CREATE TABLE upsertSink (\n" + " customer_name STRING,\n" + // " total_amount DECIMAL(16,2),\n" + // " total_discount DECIMAL(16,2),\n" + // " pay_amount DECIMAL(16,2),\n" + " total_amount DECIMAL(16,2),\n" + " created_date STRING,\n" + " PRIMARY KEY (customer_name,created_date) NOT ENFORCED" + ") WITH (\n" + " 'connector' = 'tidb',\n" + " 'tidb.database.url' = 'jdbc:mysql://192.168.8.73:4000/dspdev',\n" + " 'tidb.username' = 'flink',\n"+ " 'tidb.password' = 'flink',\n"+ " 'tidb.database.name' = 'dspdev',\n"+ " 'tidb.table.name' = 'spend_report'\n"+ // " 'connector.type'='jdbc'," + // " 'connector.url'='jdbc:mysql://192.168.8.73:4000/dspdev',\n" + // " 'connector.username' = 'flink',\n"+ // " 'connector.password' = 'flink',\n"+ // " 'connector.table' = 'spend_report'" + ")"); // TableSchema tableSche = TableSchema.builder() // .field("customer_name",DataTypes.STRING().notNull()) // .field("total_amount",DataTypes.DECIMAL(16,2)) // 
.field("created_date",DataTypes.STRING().notNull()).build(); // // JdbcOptions jdbcOptions = JdbcOptions.builder() // .setDBUrl("jdbc:mysql://192.168.8.73:4000/dspdev") // .setTableName("spend_report") // .setUsername("flink") // .setPassword("flink") // .setDialect(new MySQLDialect()) // .setDriverName("com.mysql.jdbc.Driver") // .build(); // JdbcUpsertTableSink jdbcUpsertTableSink = JdbcUpsertTableSink.builder() // .setTableSchema(tableSche) // .setOptions(jdbcOptions) // .build(); // jdbcUpsertTableSink.setKeyFields(new String[]{"id"}); /** * SINK End */ // tEnv.re("spend_report",jdbcUpsertTableSink); Table transactions = tEnv.from("sales_order_header_stream"); // tEnv.executeSql("delete from total_day_report"); tEnv.executeSql("insert into upsertSink select dsp_org_name as customer_name,cast(sum(t.pay_amount) as decimal(16,2)) as amount,DATE_FORMAT(t.created_date,'yyyy-MM-dd') as created_date from sales_order_header_stream t group by DATE_FORMAT(t.created_date,'yyyy-MM-dd'),dsp_org_name").print(); // tEnv.executeSql("insert into spend_report select * from total_day_report"); // Table transactions = tEnv.from("total_day_report"); // report(transactions).executeInsert("spend_report"); tEnv.execute("-----------"); } }
最后數(shù)據(jù)庫結(jié)果如下:
每次都是更新替換,這樣的話省去很多麻煩,不用轉(zhuǎn)DataStream再處理了,而且1.12支持upsert-kafka,最后數(shù)據(jù)疊加如下:
upsert-kafka上面已經(jīng)體現(xiàn)
感謝各位的閱讀,以上就是“FLINK 1.12 upsertSql怎么使用”的內(nèi)容了,經(jīng)過本文的學(xué)習(xí)后,相信大家對(duì)FLINK 1.12 upsertSql怎么使用這一問題有了更深刻的體會(huì),具體使用情況還需要大家實(shí)踐驗(yàn)證。這里是創(chuàng)新互聯(lián),小編將為大家推送更多相關(guān)知識(shí)點(diǎn)的文章,歡迎關(guān)注!