package com.cn.wordcount
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Entry point for a batch word-count job.
 *
 * Reads text from HDFS, counts occurrences of each whitespace-separated
 * word, and writes the (word, count) pairs back to HDFS as text files.
 *
 * @param args unused command-line arguments
 */
object Driver {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("wordCount")
    val sc = new SparkContext(conf)
    try {
      // Read the input with a minimum of 2 partitions.
      val data = sc.textFile("hdfs://hadoop01:9000/data", 2)

      // Split lines into words, pair each word with 1, then sum per word.
      val result = data
        .flatMap(line => line.split(" "))
        .map(word => (word, 1))
        .reduceByKey(_ + _)

      // result.foreach(line=>println(line))

      // saveAsTextFile is an action: it triggers the actual computation.
      // NOTE: fails if the output path already exists on HDFS.
      result.saveAsTextFile("hdfs://hadoop01:9000/result03")
    } finally {
      // Always release the SparkContext so executors and the driver's
      // resources are freed, even when the job throws.
      sc.stop()
    }
  }
}