This article explains how Spark Streaming (strictly speaking, Spark Structured Streaming, which is what the code below uses) writes data into MySQL. The pipeline is simple and practical: read GPS records from Kafka, parse them into a case class, and insert each record into a MySQL table through a custom ForeachWriter.
First create the target database and table in MySQL. Note the composite primary key on (deployNum, plateNum, timeStr), which makes each GPS record unique:
create database test;
use test;

DROP TABLE IF EXISTS car_gps;
CREATE TABLE IF NOT EXISTS car_gps(
  deployNum VARCHAR(30) COMMENT 'dispatch number',
  plateNum  VARCHAR(10) COMMENT 'license plate number',
  timeStr   VARCHAR(20) COMMENT 'timestamp',
  lng       VARCHAR(20) COMMENT 'longitude',
  lat       VARCHAR(20) COMMENT 'latitude',
  dbtime    TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'time the row was inserted',
  PRIMARY KEY(deployNum, plateNum, timeStr)
);
First, add the MySQL JDBC driver to the project, e.g. as a Maven dependency:
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>5.1.44</version>
</dependency>
Next, implement a ForeachWriter that opens a JDBC connection and inserts each BusInfo record into the car_gps table:

package com.hoult.Streaming.work

import java.sql.{Connection, DriverManager, PreparedStatement}

import com.hoult.structed.bean.BusInfo
import org.apache.spark.sql.ForeachWriter

class JdbcHelper extends ForeachWriter[BusInfo] {
  var conn: Connection = _
  var statement: PreparedStatement = _

  override def open(partitionId: Long, epochId: Long): Boolean = {
    if (conn == null) {
      conn = JdbcHelper.openConnection
    }
    true
  }

  override def process(value: BusInfo): Unit = {
    // write one record into the MySQL table; lglat holds "lng_lat"
    val arr: Array[String] = value.lglat.split("_")
    val sql = "insert into car_gps(deployNum,plateNum,timeStr,lng,lat) values(?,?,?,?,?)"
    statement = conn.prepareStatement(sql)
    statement.setString(1, value.deployNum)
    statement.setString(2, value.plateNum)
    statement.setString(3, value.timeStr)
    statement.setString(4, arr(0))
    statement.setString(5, arr(1))
    statement.executeUpdate()
    statement.close() // avoid leaking one PreparedStatement per record
  }

  override def close(errorOrNull: Throwable): Unit = {
    // close the statement before the connection, not the other way around
    if (null != statement) statement.close()
    if (null != conn) conn.close()
  }
}

object JdbcHelper {
  var conn: Connection = _
  val url = "jdbc:mysql://hadoop1:3306/test?useUnicode=true&characterEncoding=utf8"
  val username = "root"
  val password = "123456"

  def openConnection: Connection = {
    // reopen only if the cached connection is missing or has been closed
    if (null == conn || conn.isClosed) {
      Class.forName("com.mysql.jdbc.Driver")
      conn = DriverManager.getConnection(url, username, password)
    }
    conn
  }
}

(The unused java.util.Properties import and the unused val p = new Properties have been removed, and close() now closes the statement before the connection.)
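One caveat: the foreach sink gives at-least-once semantics, so after a failure and restart the same record can be processed twice, and with the composite primary key above a plain INSERT then fails with a duplicate-key error. A minimal sketch of a more tolerant process method, using MySQL's INSERT ... ON DUPLICATE KEY UPDATE (not part of the original article), could look like this:

  override def process(value: BusInfo): Unit = {
    val arr: Array[String] = value.lglat.split("_")
    // the upsert makes replayed records idempotent instead of aborting the query
    val sql =
      """insert into car_gps(deployNum,plateNum,timeStr,lng,lat)
        |values(?,?,?,?,?)
        |on duplicate key update lng = values(lng), lat = values(lat)""".stripMargin
    statement = conn.prepareStatement(sql)
    statement.setString(1, value.deployNum)
    statement.setString(2, value.plateNum)
    statement.setString(3, value.timeStr)
    statement.setString(4, arr(0))
    statement.setString(5, arr(1))
    statement.executeUpdate()
    statement.close()
  }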
The main program reads the stream from Kafka, parses each message into a BusInfo, and hands the typed Dataset to the writer:

package com.hoult.Streaming.work

import com.hoult.structed.bean.BusInfo
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

object KafkaToJdbc {
  def main(args: Array[String]): Unit = {
    System.setProperty("HADOOP_USER_NAME", "root")

    // 1. create the SparkSession
    val spark: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName(KafkaToJdbc.getClass.getName)
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")
    import spark.implicits._

    // 2. define the Kafka source
    val kafkaDf: DataFrame = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "linux121:9092")
      .option("subscribe", "test_bus_info")
      .load()

    // 3. process the data: the Kafka value column is binary, so cast it to a string
    val kafkaValDf: DataFrame = kafkaDf.selectExpr("CAST(value AS STRING)")
    // convert to a typed Dataset
    val kafkaDs: Dataset[String] = kafkaValDf.as[String]
    // wrap each message in a case class so fields can be accessed by name;
    // unparseable messages come back as null and are filtered out
    val busInfoDs: Dataset[BusInfo] = kafkaDs.map(BusInfo(_)).filter(_ != null)

    // 4. write the stream into MySQL through the ForeachWriter
    busInfoDs.writeStream
      .foreach(new JdbcHelper)
      .outputMode("append")
      .start()
      .awaitTermination()
  }
}

(The original comment at step 3 said the data was written to Redis; this pipeline writes to MySQL, so the comment has been corrected, and the unused Column import dropped.)
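The BusInfo bean from com.hoult.structed.bean is not shown in the original article. Judging from how it is used — the fields deployNum, plateNum, timeStr and a combined lglat string, plus an apply(String) that returns null for bad input — a minimal sketch might look like the following; the comma-separated message format is an assumption:

package com.hoult.structed.bean

// fields match what JdbcHelper reads; lglat holds "lng_lat"
case class BusInfo(
    deployNum: String,
    plateNum: String,
    timeStr: String,
    lglat: String
)

object BusInfo {
  // hypothetical parser, assuming messages look like
  // "deployNum,plateNum,timeStr,lng_lat"; returns null on malformed input
  def apply(msg: String): BusInfo = {
    val fields = msg.split(",")
    if (fields.length == 4) BusInfo(fields(0), fields(1), fields(2), fields(3))
    else null
  }
}

For anything beyond a local test you would typically also set a checkpoint location on the writeStream with .option("checkpointLocation", ...) so the query can recover its Kafka offsets after a restart.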
Finally, create the Kafka topic and start a console producer to feed in test data:

kafka-topics.sh --zookeeper linux121:2181/myKafka --create --topic test_bus_info --partitions 2 --replication-factor 1

kafka-console-producer.sh --broker-list linux121:9092 --topic test_bus_info
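With the producer running, type a few messages in whatever format your BusInfo parser expects; under the hypothetical format sketched above, a line would look like D0001,AB1234,20210301120000,116.40_39.91. A quick select * from car_gps; in MySQL should then show the inserted rows, with dbtime filled in automatically.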
That is the whole pipeline for writing Spark Streaming data into MySQL: a Kafka source, a typed Dataset, and a ForeachWriter that performs the JDBC inserts. The best way to make it stick is to run the example yourself.