wudla 4 years ago
parent
commit
f5015f4cc7

+ 38 - 29
data_collect/src/main/scala/com/persagy/iot/app/IOTApp.scala

@@ -1,11 +1,10 @@
 package com.persagy.iot.app
 
 import com.persagy.iot.bean.IOTData
-import com.persagy.iot.func.{IOTOriginalSinkFunction, OriginalDataAlarm, SplitData}
-import com.persagy.iot.utils.KafkaUtil
+import com.persagy.iot.func.{IOTAccuracySinkFunction, IOTOriginalSinkFunction, OriginalDataAlarm, SplitData}
+import com.persagy.iot.utils._
 import org.apache.flink.api.common.serialization.SimpleStringSchema
 import org.apache.flink.streaming.api.TimeCharacteristic
-import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor
 import org.apache.flink.streaming.api.scala._
 import org.apache.flink.streaming.api.windowing.time.Time
 import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
@@ -19,13 +18,20 @@ object IOTApp {
   val no_value: String = "ff ff ff ff"
 
   def main(args: Array[String]): Unit = {
+
+//    val params: ParameterTool = ParameterTool.fromArgs(args)
+//    val lastTime: Long = params.getLong("lastTime")
+//    val loseTime: Long = params.getLong("loseTime")
+//    val earlyTime: Long = params.getLong("earlyTime")
+//    val timeOffer: Long = params.getLong("timeOffer")
+
     val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
 
     /* 设置并行度 */
     env.setParallelism(1)
 
     /* 设置时间语义 */
-    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
+    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
 
     val inputStream: DataStream[String] =
       env.addSource( new FlinkKafkaConsumer[String](KafkaUtil.topic, new SimpleStringSchema(), KafkaUtil.kafkaConsumerProperties()) )
@@ -42,44 +48,47 @@ object IOTApp {
       }
     })
 
-    /* 转化为实体类流 */
-    val iotFilterDataStream: DataStream[IOTData] = filterStream.flatMap(new SplitData)
-
-    val waterStream: DataStream[IOTData] = iotFilterDataStream
-      /* 设置水位时间 */
-      .assignAscendingTimestamps(_.eventTime - 1)
+    /* 转化为实体类流,此处转换的实体类的 rowKey 不包含时间,需要在后续的处理中加入 */
+    val iotFilterDataStream: DataStream[IOTData] = filterStream.flatMap(new SplitData(1800L, 3600L, 1000L,0))
+//    val iotFilterDataStream: DataStream[IOTData] = filterStream.flatMap(new SplitData(lastTime, loseTime, earlyTime, timeOffer))
 
     /** ------------------------ 处理原始数据 start ----------------------- */
-
-    val keyedStream: DataStream[IOTData] = waterStream.keyBy(_.rowKey)
+    val keyedStream: DataStream[IOTData] = iotFilterDataStream.keyBy(_.rowKey)
       .process(new OriginalDataAlarm)
+
     keyedStream.getSideOutput(new OutputTag[IOTData]("late-data")).print("outputTag:")
-//    keyedStream.addSink(new IOTOriginalSinkFunction)
+    keyedStream.addSink(new IOTOriginalSinkFunction)
 
     /** ------------------------- 处理原始数据 end ------------------------- */
 
-    /** ------------------------ 处理分精度数据 start ------------------------ */
 
+    /** ------------------------ 处理分精度数据 start ------------------------ */
+    val waterStream: DataStream[IOTData] = iotFilterDataStream
+      .map(iotData => {
+        /* 添加时间 */
+        val key: String = iotData.rowKey
+        val time: Time = Time.minutes(15)
+        val l: Long = IOTUtils.accuracyTime(iotData.eventTime, time)
+        iotData.copy(rowKey = key + ":" + l)
+        iotData
+      })
+      .assignAscendingTimestamps(_.eventTime - 1)
 
     /* 定义侧输出流 */
-//    val sideOutputTag = new OutputTag[IOTData]("late-data")
-//
-//    val windowsStream: DataStream[IOTData] = waterStream.keyBy(_.rowKey)
-//      .timeWindow(Time.minutes(15))
-//      /* 允许处理数据的最迟时间 */
-//      .allowedLateness(Time.minutes(60))
-//      /* 侧输出流 */
-//      .sideOutputLateData(sideOutputTag)
-//      .aggregate(new IOTAgg(), new IOTWindowResult())
-    /** ------------------------ 处理分精度数据 end ------------------------ */
+    val sideOutputTag = new OutputTag[IOTData]("late-data")
 
-    /* 设置要选取的事件时间 */
-//    val assignStream: DataStream[IOTData] = iotDataStream.assignAscendingTimestamps(_.eventTime)
+    val windowsStream: DataStream[IOTData] = waterStream.keyBy(_.rowKey)
+      .timeWindow(Time.minutes(15))
+      .sideOutputLateData(sideOutputTag)
+      .aggregate(new IOTAgg(), new IOTWindowResult())
 
-    /* 开窗数据保存hbase,侧输出流的数据查询hbase对比后再存入hbase */
-//    windowsStream.addSink(new IOTOriginalSinkFunction)
-//    windowsStream.getSideOutput(sideOutputTag).addSink(new IOTOriginalSinkFunction)
+    /* 开窗数据保存hbase,侧输出流的数据查询hbase,对比后再存入hbase */
+    windowsStream.addSink(new IOTAccuracySinkFunction)
+    windowsStream.getSideOutput(sideOutputTag).addSink(new IOTAccuracySinkFunction)
+    /** ------------------------ 处理分精度数据 end ------------------------ */
 
+    windowsStream.print("result: ")
+    windowsStream.getSideOutput(sideOutputTag).print("sideOutputTag:")
     env.execute("iot data collect")
   }
 

+ 1 - 1
data_collect/src/main/scala/com/persagy/iot/app/IOTWindowResult.scala

@@ -10,6 +10,6 @@ class IOTWindowResult() extends WindowFunction[IOTData, IOTData, String, TimeWin
                      window: TimeWindow,
                      input: Iterable[IOTData],
                      out: Collector[IOTData]): Unit = {
-    out.collect(input.head.copy(windowEnd = window.getEnd))
+    out.collect(input.head)
   }
 }

+ 1 - 0
data_collect/src/main/scala/com/persagy/iot/app/SendMessage.scala

@@ -2,6 +2,7 @@ package com.persagy.iot.app
 
 import com.persagy.iot.utils.KafkaUtil
 
+
 object SendMessage {
 
   def main(args: Array[String]): Unit = {

+ 2 - 2
data_collect/src/main/scala/com/persagy/iot/func/IOTAccuracySinkFunction.scala

@@ -23,12 +23,12 @@ class IOTAccuracySinkFunction extends RichSinkFunction[IOTData] {
   var table: Table = _
 
   override def open(parameters: Configuration): Unit = {
-    println("init Hbase 数据。。。")
+    println("init Hbase ......")
     conn = HbaseUtil.getHbaseConnection()
   }
 
   /**
-   *
+   * 查询数据,对比时间戳,再修改
    */
   override def invoke(iotData: IOTData, context: SinkFunction.Context[_]): Unit = {
     val put = new Put(Bytes.toBytes(iotData.rowKey))

+ 1 - 1
data_collect/src/main/scala/com/persagy/iot/func/IOTOriginalSinkFunction.scala

@@ -34,7 +34,7 @@ class IOTOriginalSinkFunction extends RichSinkFunction[IOTData] {
 
   override def invoke(iotData: IOTData, context: SinkFunction.Context[_]): Unit = {
     // rowKey 按照 楼号:表号:功能号:时间
-    val rowKey: String = iotData.rowKey
+    val rowKey: String = iotData.rowKey + ":" + iotData.eventTime
     val put = new Put(Bytes.toBytes(rowKey))
     put.addColumn(Bytes.toBytes(family), Bytes.toBytes(value), Bytes.toBytes(iotData.value))
     put.addColumn(Bytes.toBytes(family), Bytes.toBytes(status), Bytes.toBytes(iotData.status))

+ 27 - 10
data_collect/src/main/scala/com/persagy/iot/func/OriginalDataAlarm.scala

@@ -6,31 +6,48 @@ import org.apache.flink.streaming.api.functions.KeyedProcessFunction
 import org.apache.flink.streaming.api.scala._
 import org.apache.flink.util.Collector
 
+/**
+ * 数据三种状态
+ * 1、正常数据,数据产生时间与实际入库的时间差小于 1800s
+ * 2、迟到数据,数据产生时间与实际入库的时间差大于 1800s,小于3600s
+ * 3、丢数
+ *  1、数据未到:state的时间触发器超过 3600s,侧输出流输出 rowKey 报警信息
+ *  2、数据已到:数据产生时间与实际入库的时间差大于 3600s
+ *  3、数据的产生时间如果是晚于进入系统时间,那么根据业务规则的阈值 3600s(暂定),超过阈值的数据按异常数据处理
+ */
 class OriginalDataAlarm extends KeyedProcessFunction[String, IOTData, IOTData] {
 
-//  lazy val alarmTimeState: ValueState[Long] = getRuntimeContext.getState(new ValueStateDescriptor[Long]("alarm-time", classOf[Long]))
   lazy val alarmState: ValueState[IOTData] = getRuntimeContext.getState(new ValueStateDescriptor[IOTData]("alarm-IOTData", classOf[IOTData]))
+
+  lazy val loseTime: ValueState[Long] = getRuntimeContext.getState(new ValueStateDescriptor[Long]("loseTime", classOf[Long]))
+
   override def processElement(iotData: IOTData,
                               ctx: KeyedProcessFunction[String, IOTData, IOTData]#Context,
                               out: Collector[IOTData]): Unit = {
-    // 迟到时间触发
-    val lateTime: Long = iotData.eventTime + 1000 * 60 * 30
-    // 丢数时间触发
-    val loseTime: Long = iotData.eventTime + 1000 * 60 * 60
-//    ctx.timerService().registerEventTimeTimer(loseTime)
-//    ctx.timerService().registerEventTimeTimer(lateTime)
-//    ctx.timerService().registerEventTimeTimer(System.currentTimeMillis() + 1000L * 3)
-    ctx.timerService().registerProcessingTimeTimer(System.currentTimeMillis() + 1000L * 3)
+
+    val now: Long = System.currentTimeMillis() + (10 * 1000L)
+
+    // 数据第一次过来,定义触发器
+    if (alarmState.value() != null && loseTime.value() != 0L) {
+      ctx.timerService().deleteProcessingTimeTimer(loseTime.value())
+    }
+
+    loseTime.update(now)
     alarmState.update(iotData)
+    ctx.timerService().registerProcessingTimeTimer(loseTime.value())
 
-//    out.collect(IOTData(rowKey = ???, build = ???, sign = ???, funId = ???, value = ???, eventTimeStr = ???, eventTime = ???, sysTime = ???, windowEnd = ???, status = ???))
+    out.collect(iotData)
   }
 
   override def onTimer(timestamp: Long,
                        ctx: KeyedProcessFunction[String, IOTData, IOTData]#OnTimerContext,
                        out: Collector[IOTData]): Unit = {
+
+    // 将丢数的点位放入侧输出流
     val lateDataTag = new OutputTag[IOTData]("late-data")
     ctx.output(lateDataTag, alarmState.value())
+
     alarmState.clear()
+    loseTime.clear()
   }
 }

+ 26 - 12
data_collect/src/main/scala/com/persagy/iot/func/SplitData.scala

@@ -10,8 +10,12 @@ import org.apache.flink.util.Collector
 /**
  * 由于每一条数据包含多个功能号,表号
  * 自定义 flatMap 函数
+ * @param lastTime 迟到阈值
+ * @param loseTime 丢数阈值
+ * @param earlyTime 数据产生时间大于入库时间的阈值
+ * @param timeOffer 数据产生时间的偏移量(时间差)
  */
-class SplitData extends FlatMapFunction[String, IOTData] {
+class SplitData(lastTime: Long, loseTime: Long, earlyTime: Long, timeOffer: Long) extends FlatMapFunction[String, IOTData] {
 
 
   override def flatMap(input: String, collector: Collector[IOTData]): Unit = {
@@ -27,15 +31,15 @@ class SplitData extends FlatMapFunction[String, IOTData] {
         val build: String = arr2(0).toString
         val sign: String = arr2(5).toString
         val eventTimeStr: String = arr2(3)
-        val eventTime: Long = timestampConverter("yyyyMMddHHmmss", eventTimeStr, Time.hours(8).toMilliseconds)
+        val eventTime: Long = timestampConverter("yyyyMMddHHmmss", eventTimeStr, Time.hours(0).toMilliseconds)
         val sysTime: Long = System.currentTimeMillis()
-
         for (i <- 7 until (arr2.length, 2) if (!no_value.equals(arr2(i + 1)))) {
           val funId: String = arr2(i)
           val value: Double = arr2(i + 1).toDouble
-          val status: Int = getStatus(eventTime, sysTime)
+          val status: Int = getStatus(eventTime, sysTime, lastTime, loseTime, earlyTime)
+
           /** rowKey */
-          val rowKey = build + ":" + sign + ":" + funId + ":" + eventTime
+          val rowKey = build + ":" + sign + ":" + funId
 
           /** 转为 iotData 实体类 */
           val iotData: IOTData = IOTData(rowKey, build, sign, funId, value, eventTimeStr, eventTime, sysTime, 0L, status)
@@ -44,26 +48,36 @@ class SplitData extends FlatMapFunction[String, IOTData] {
         }
       }
     } catch {
-      case ex: Exception => return
+      case ex: Exception => ex.printStackTrace()
     }
 
   }
 
   /**
-   * 判断该数据是正常,迟到,丢数
+   * 判断该数据状态:
+   * 0:正常
+   * 1:迟到
+   * 2:丢数
+   * 3:异常(数据产生时间晚于系统时间大于阈值的状态)
    * @param eventTime 数据产生时间
    * @param sysTime 系统时间
+   * @param lastTime 迟到阈值
+   * @param loseTime 丢数阈值
+   * @param earlyTime 数据产生时间大于入库时间的阈值
+   * @return 数据状态码(0:正常,1:迟到,2:丢数,3:异常)
    */
-  def getStatus(eventTime: Long, sysTime: Long): Int ={
+  def getStatus(eventTime: Long, sysTime: Long, lastTime: Long, loseTime: Long, earlyTime: Long): Int ={
 
     val var1: Long = sysTime - eventTime
-
-    if (var1 <= 1800){
+    println("sysTime: " + sysTime + "  -----  eventTime: " + eventTime + "  -----  时间差为: " + var1)
+    if (var1 < -earlyTime){
+      3
+    } else if (var1 > -earlyTime && var1 <= lastTime * 1000){
       0
-    } else if (1800 < var1 && var1 <= 3600) {
+    } else if (lastTime * 1000 < var1 && var1 <= loseTime * 1000) {
       1
     } else {
-      3
+      2
     }
 
   }

+ 16 - 3
data_collect/src/main/scala/com/persagy/iot/utils/IOTUtils.scala

@@ -1,25 +1,38 @@
 package com.persagy.iot.utils
 
+import org.apache.flink.streaming.api.windowing.time.Time
+
 import java.text.SimpleDateFormat
 
 object IOTUtils {
 
   /**
+   * 分精度时间的转换
+   * @param time 要转换的时间
+   * @param intervalTime 间隔时间段
+   * @return
+   */
+  def accuracyTime(time: Long, intervalTime: Time): Long = {
+    val var1: Long = intervalTime.toMilliseconds
+    (time / var1 * var1 + var1)
+  }
+
+  /**
    * 将时间字段转换成时间戳
    * @param dateStr 时间字段
    * @param formatPattern 要转换时间字段的格式
    * @param offer 时差(单位:毫秒)
    * @return
    */
-  def timestampConverter(formatPattern: String, dateStr: String, offer: Long): Long ={
+  def timestampConverter(formatPattern: String, dateStr: String, offer: Long): Long = {
     val simpleDateFormat = new SimpleDateFormat(formatPattern)
     val timestamp = simpleDateFormat.parse(dateStr).getTime
-    timestamp + offer
+    (timestamp + offer)
   }
 
   /**
    * 获取当前时间
    * @return
    */
-  def now(): Long = {System.currentTimeMillis ()}
+  def now(): Long = System.currentTimeMillis()
 }

+ 11 - 14
data_collect/src/main/scala/com/persagy/iot/utils/TestFunction.scala

@@ -4,14 +4,16 @@ import com.persagy.iot.app.IOTApp.no_value
 import com.persagy.iot.bean.IOTData
 import com.persagy.iot.utils.IOTUtils.timestampConverter
 
+import org.apache.flink.streaming.api.windowing.time.Time
 import java.text.SimpleDateFormat
 
 
 object TestFunction {
 
-  def main(args: Array[String]): Unit = {
-    var elem: String = "1101080259;4;report;20210111150020;58440;9919;1;11301;635.0;"
-    val arr2: Array[String] = elem.split(";")
+  def main1(args: Array[String]): Unit = {
+    var data: String = "1101080259;4;report;20210111150020;58440;9919;1;11301;635.0;"
+
+    val arr2: Array[String] = data.split(";")
 
     val build: String = arr2(0).toString
     val sign: String = arr2(5).toString
@@ -32,20 +34,15 @@ object TestFunction {
     }
   }
 
-  def main1(args: Array[String]): Unit = {
+  def main(args: Array[String]): Unit = {
     val sdf: SimpleDateFormat = new SimpleDateFormat("yyyy MM dd HH:mm:ss")
     val result1=sdf.format(1610394300000L)
     val result2=sdf.format(1610393400000L)
-    println(result1)
-    println(result2)
+    println(System.currentTimeMillis())
+    val milliseconds: Long = Time.minutes(15).toMilliseconds
+    val l: Long = System.currentTimeMillis() / milliseconds * milliseconds + milliseconds
 
-    println("to:")
-    for (i <- 2 to 5){
-      println(i)
-    }
-    println("until:")
-    for (i <- 2 until 5){
-      println(i)
-    }
+    println(l)
+    println(sdf.format(l))
   }
 }

+ 19 - 0
data_transfer/pom.xml

@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>IOT</artifactId>
+        <groupId>org.example</groupId>
+        <version>1.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>data_transfer</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+    </properties>
+
+</project>

+ 46 - 0
data_transfer/src/main/scala/com/persagy/energy/app/File2FileApp.scala

@@ -0,0 +1,46 @@
+package com.persagy.energy.app
+
+import com.persagy.energy.bean.EnergyBean
+import com.persagy.energy.utils.DTUtils
+import net.minidev.json.JSONObject
+import net.minidev.json.parser.JSONParser
+
+import java.io.{File, FileWriter, PrintWriter}
+import java.lang.reflect.Field
+import scala.io.BufferedSource
+
+object File2FileApp {
+
+  val filePath: String = "/Users/king/Downloads/energy-1d.txt"
+  val outPath: String = "/Users/king/Downloads/energy-1d-to-hdfs1.txt"
+
+  def main(args: Array[String]): Unit = {
+
+    /** 从文件读取数据,逐条发送 */
+    val bufferedSource: BufferedSource = scala.io.Source.fromFile(filePath)
+    var count: Int = 0
+    val writer = new PrintWriter(outPath)
+    for (line <- bufferedSource.getLines()) {
+      count += 1
+      println(line)
+      val parser = new JSONParser(1)
+      val nObject: JSONObject = parser.parse(line).asInstanceOf[JSONObject]
+
+      val energyModelSign: String = nObject.get("energyModelSign").toString
+      val energyModelNodeSign: String = nObject.get("energyModelNodeSign").toString
+      val dataTime: String = DTUtils.timestampConverter("yyyyMMddHHmmss", nObject.get("data_time").toString, 0L).toString
+      val dataValue: String = nObject.get("data_value").toString
+      val building: String = nObject.get("building").toString
+
+      val result: String = energyModelSign + "\t" + energyModelNodeSign + "\t" + dataTime + "\t" + dataValue + "\t" + building
+
+      writer.println(result)
+
+      if (count == 1000){
+        writer.flush()
+      }
+    }
+    writer.flush()
+    writer.close()
+  }
+}

+ 10 - 0
data_transfer/src/main/scala/com/persagy/energy/app/File2HDFSApp.scala

@@ -0,0 +1,10 @@
+package com.persagy.energy.app
+
+object File2HDFSApp {
+
+  def main(args: Array[String]): Unit = {
+
+
+  }
+
+}

+ 9 - 0
data_transfer/src/main/scala/com/persagy/energy/bean/EnergyBean.scala

@@ -0,0 +1,9 @@
+package com.persagy.energy.bean
+
+case class EnergyBean(
+                       energyModelSign: String,
+                       energyModelNodeSign: String,
+                       dataTime: Long,
+                       dataValue: Long,
+                       building: String
+                     )

+ 34 - 0
data_transfer/src/main/scala/com/persagy/energy/sink/JDBCSink.scala

@@ -0,0 +1,34 @@
+package com.persagy.energy.sink
+
+import com.persagy.energy.bean.EnergyBean
+import org.apache.flink.configuration.Configuration
+import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
+
+import java.sql.{Connection, DriverManager, PreparedStatement}
+
+class JDBCSink extends RichSinkFunction[EnergyBean]{
+
+  var conn: Connection = _
+  var insertStmt: PreparedStatement = _
+  var updateStmt: PreparedStatement = _
+
+  // open 主要是创建连接
+  override def open(parameters: Configuration): Unit = {
+    super.open(parameters)
+
+    conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/test", "root", "123456")
+    insertStmt = conn.prepareStatement("INSERT INTO temperatures (energyModelSign, energyModelNodeSign, dataTime, dataValue, building) VALUES (?, ?, ?, ?, ?)")
+    updateStmt = conn.prepareStatement("UPDATE temperatures SET temp = ? WHERE sensor = ?")
+  }
+  // 调用连接,执行sql
+  override def invoke(value: EnergyBean, context: SinkFunction.Context[_]): Unit = {
+
+  }
+
+  override def close(): Unit = {
+    insertStmt.close()
+    updateStmt.close()
+    conn.close()
+  }
+
+}

+ 38 - 0
data_transfer/src/main/scala/com/persagy/energy/utils/DTUtils.scala

@@ -0,0 +1,38 @@
+package com.persagy.energy.utils
+
+import org.apache.flink.streaming.api.windowing.time.Time
+
+import java.text.SimpleDateFormat
+
+object DTUtils {
+
+  /**
+   * 分精度时间的转换
+   * @param time 要转换的时间
+   * @param intervalTime 间隔时间段
+   * @return
+   */
+  def accuracyTime(time: Long, intervalTime: Time): Long = {
+    val var1: Long = intervalTime.toMilliseconds
+    (time / var1 * var1 + var1)
+  }
+
+  /**
+   * 将时间字段转换成时间戳
+   * @param dateStr 时间字段
+   * @param formatPattern 要转换时间字段的格式
+   * @param offer 时差(单位:毫秒)
+   * @return
+   */
+  def timestampConverter(formatPattern: String, dateStr: String, offer: Long): Long = {
+    val simpleDateFormat = new SimpleDateFormat(formatPattern)
+    val timestamp = simpleDateFormat.parse(dateStr).getTime
+    (timestamp + offer)
+  }
+
+  /**
+   * 获取当前时间
+   * @return
+   */
+  def now(): Long = System.currentTimeMillis()
+}

+ 54 - 0
pom.xml

@@ -10,6 +10,7 @@
     <version>1.0-SNAPSHOT</version>
     <modules>
         <module>data_collect</module>
+        <module>data_transfer</module>
     </modules>
 
     <properties>
@@ -46,6 +47,30 @@
             <artifactId>mysql-connector-java</artifactId>
             <version>5.1.49</version>
         </dependency>
+
+
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-statebackend-rocksdb_2.11</artifactId>
+            <version>1.10.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-table-planner_2.11</artifactId>
+            <version>1.10.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-table-planner-blink_2.11</artifactId>
+            <version>1.10.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-csv</artifactId>
+            <version>1.10.0</version>
+        </dependency>
+
+
         <!-- https://mvnrepository.com/artifact/com.alibaba/druid -->
         <dependency>
             <groupId>com.alibaba</groupId>
@@ -71,6 +96,35 @@
             <version>1.3.1</version>
         </dependency>
 
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>5.1.44</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/net.minidev/json-smart -->
+        <dependency>
+            <groupId>net.minidev</groupId>
+            <artifactId>json-smart</artifactId>
+            <version>2.3</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-connector-filesystem_2.12</artifactId>
+            <version>${flink.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-connector-kafka-0.11_2.12</artifactId>
+            <version>${flink.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-streaming-java_2.12</artifactId>
+            <version>${flink.version}</version>
+        </dependency>
+
     </dependencies>
 
     <build>