package com.fengrui

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Given the key-value pairs ("spark",2),("hadoop",6),("javaee",3),("spark",4),("hadoop",8),("javaee",5),
 * where the key is a book title and the value is that book's sales on one day,
 * compute the average daily sales for each book.
 */
object SellingBooks {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("SellingBooks").setMaster("local[*]")
    val sc = new SparkContext(conf)
    val books: RDD[(String, Int)] = sc.parallelize(Array(("spark",2),("hadoop",6),("javaee",3),("spark",4),("hadoop",8),("javaee",5)))
    // Turn each element into ("spark",(2,1)),("spark",(4,1)),("hadoop",(6,1))...;
    // mapValues transforms only the value of each pair and leaves the key untouched
    val A: RDD[(String, (Int, Int))] = books.mapValues(x => (x, 1))
    // reduceByKey aggregates the values of all pairs sharing the same key; for the book
    // "spark", for example, the two pairs combine into ("spark",(6,2)): total sales and day count
    val B: RDD[(String, (Int, Int))] = A.reduceByKey((x, y) => (x._1 + y._1, x._2 + y._2))
    // Divide the total sales (x._1) by the number of days (x._2) to get the daily average;
    // convert to Double first so the division is not truncated to an integer
    val avg: RDD[(String, Double)] = B.mapValues(x => x._1.toDouble / x._2)
    // foreach is an action: it triggers the computation and prints each result
    avg.foreach(println)
    sc.stop()
  }
}
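For comparison, the same (sum, count) bookkeeping can be expressed with Spark's aggregateByKey, which takes a zero value plus a within-partition fold and a cross-partition merge, so the intermediate (x, 1) pairs never have to be built by hand. This is a minimal sketch reusing the books RDD from above; the name avgAlt is just illustrative:

    // Zero value (0, 0) holds (running sum, running count).
    // First function folds one day's sales into a partition-local accumulator;
    // second function merges accumulators from different partitions.
    val avgAlt: RDD[(String, Double)] = books
      .aggregateByKey((0, 0))(
        (acc, sales) => (acc._1 + sales, acc._2 + 1),
        (a, b) => (a._1 + b._1, a._2 + b._2)
      )
      .mapValues { case (sum, days) => sum.toDouble / days }
    avgAlt.foreach(println)

With this dataset, both variants produce ("spark",3.0), ("hadoop",7.0), and ("javaee",4.0), though foreach prints them in no guaranteed order since the RDD is partitioned.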