Tags: spark, countbykey
package com.latrobe.spark

import org.apache.spark.{SparkContext, SparkConf}

/**
 * Created by spark on 15-1-18.
 * Computes the number of values associated with each key.
 */
object CountByKey {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("spark-demo").setMaster("local")
    val sc = new SparkContext(conf)

    /**
     * Bring in the implicit def rddToPairRDDFunctions so that
     * the countByKey method is available on an RDD of pairs.
     */
    import org.apache.spark.SparkContext._

    val c = sc.parallelize(List((1, "java"), (2, "c"), (3, "c++"), (3, "iOS")))

    // Prints: Map(2 -> 1, 1 -> 1, 3 -> 2)
    println(c.countByKey())
  }
}
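Note that countByKey returns its result to the driver as a local Map, so it is only appropriate when the number of distinct keys is small. For illustration, here is a minimal sketch of a roughly equivalent computation written with mapValues and reduceByKey; the object name CountByKeyEquivalent is mine, not from the original post:

package com.latrobe.spark

import org.apache.spark.{SparkContext, SparkConf}

object CountByKeyEquivalent {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("spark-demo").setMaster("local")
    val sc = new SparkContext(conf)

    import org.apache.spark.SparkContext._

    val c = sc.parallelize(List((1, "java"), (2, "c"), (3, "c++"), (3, "iOS")))

    // Roughly what c.countByKey() does: map every value to 1,
    // sum the counts per key, then collect the (small) result
    // to the driver as a local Map.
    val counts = c.mapValues(_ => 1L).reduceByKey(_ + _).collect().toMap

    println(counts) // Map(1 -> 1, 2 -> 1, 3 -> 2)
    sc.stop()
  }
}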
Original post: http://blog.csdn.net/hi_1234567/article/details/42835561