import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

object H extends App {
  val conf = new SparkConf().setMaster("local[2]").setAppName("hello")
  // 5-second micro-batches
  val ssc = new StreamingContext(conf, Seconds(5))

  // Broker list for the direct (receiver-less) Kafka stream
  val kafkaParams = Map[String, String]("metadata.broker.list" -> "myhadoop1:9092")
  // Checkpointing is required by updateStateByKey to persist state across batches
  ssc.checkpoint("hdfs://myhadoop1:8020/data")
  val topic = Set("wordcount1")

  // Read (key, message) pairs directly from Kafka
  val lines = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topic)

  // Word count with running totals maintained across batches
  lines.flatMap(_._2.split(" "))
    .map((_, 1))
    .updateStateByKey((newCounts: Seq[Int], state: Option[Int]) =>
      Some(state.getOrElse(0) + newCounts.sum))
    .print()

  ssc.start()
  ssc.awaitTermination()
}
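To feed the wordcount1 topic for a quick local test, one option is a small producer built on the Kafka 0.8 Scala client, which matches the metadata.broker.list and StringDecoder usage above. This is a minimal sketch, not part of the original job; the broker address and the message text are placeholder assumptions.

import java.util.Properties
import kafka.producer.{KeyedMessage, Producer, ProducerConfig}

object TestProducer extends App {
  // Assumed broker address, kept consistent with the streaming job above
  val props = new Properties()
  props.put("metadata.broker.list", "myhadoop1:9092")
  // Send plain strings so the StringDecoder on the consumer side can read them
  props.put("serializer.class", "kafka.serializer.StringEncoder")

  val producer = new Producer[String, String](new ProducerConfig(props))
  // Hypothetical test message; each word should show up in the running counts
  producer.send(new KeyedMessage[String, String]("wordcount1", "hello spark hello kafka"))
  producer.close()
}

With the streaming job running, sending a few such messages should make the updated totals print to the console every 5 seconds.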