赞
踩
pom.xml configuration:
<!-- Build configuration for a Spark Streaming project (Scala 2.11.8 / Spark 2.2.1 / Hadoop 2.7.5, Java 8). -->
<properties>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <scala.version>2.11.8</scala.version>
    <spark.version>2.2.1</spark.version>
    <hadoop.version>2.7.5</hadoop.version>
    <encoding>UTF-8</encoding>
</properties>
<dependencies>
    <dependency>
        <groupId>org.scala-lang</groupId>
        <artifactId>scala-library</artifactId>
        <!-- FIX: was "${ scala.version}" — the space inside ${...} stops Maven from
             interpolating the property, so the literal string was used as the version. -->
        <version>${scala.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming_2.11</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
        <version>${spark.version}</version>
    </dependency>
</dependencies>
<build>
    <pluginManagement>
        <plugins>
            <plugin>
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
                <version>3.2.2</version>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.5.1</version>
            </plugin>
        </plugins>
    </pluginManagement>
    <plugins>
        <!-- Compile Scala sources before Java so mixed projects link correctly. -->
        <plugin>
            <groupId>net.alchim31.maven</groupId>
            <artifactId>scala-maven-plugin</artifactId>
            <executions>
                <execution>
                    <id>scala-compile-first</id>
                    <phase>process-resources</phase>
                    <goals>
                        <goal>add-source</goal>
                        <goal>compile</goal>
                    </goals>
                </execution>
                <execution>
                    <id>scala-test-compile</id>
                    <phase>process-test-resources</phase>
                    <goals>
                        <goal>testCompile</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <executions>
                <execution>
                    <phase>compile</phase>
                    <goals>
                        <goal>compile</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
        <!-- Build a shaded (fat) jar for spark-submit; strip jar signature files so the
             merged jar is not rejected with a SecurityException at runtime. -->
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
            <version>2.4.3</version>
            <executions>
                <execution>
                    <phase>package</phase>
                    <goals>
                        <goal>shade</goal>
                    </goals>
                    <configuration>
                        <filters>
                            <filter>
                                <artifact>*:*</artifact>
                                <excludes>
                                    <exclude>META-INF/*.SF</exclude>
                                    <exclude>META-INF/*.DSA</exclude>
                                    <exclude>META-INF/*.RSA</exclude>
                                </excludes>
                            </filter>
                        </filters>
                    </configuration>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>
/**
 * Streaming word count over a socket text source, in 1-second micro-batches.
 * Feed it with: nc -lk 9999
 */
object WordCount {
  def main(args: Array[String]): Unit = {
    // Step 1: initialize the streaming entry point (2 local threads: receiver + processing).
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount")
    val streamingContext = new StreamingContext(sparkConf, Seconds(1))

    // Step 2: acquire the input stream.
    val textLines = streamingContext.socketTextStream("localhost", 9999)

    // Step 3: split into words, pair each with 1, and sum per word.
    val counts = textLines
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)

    // Step 4: print each batch's counts.
    counts.print()

    // Step 5: start the job and block until it terminates.
    streamingContext.start()
    streamingContext.awaitTermination()
    streamingContext.stop()
  }
}
/** * 单词统计 */ public class WordCount { public static void main(String[] args) throws Exception{ //步骤一:初始化程序入口 SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount"); JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1)); //步骤二:获取数据源 JavaReceiverInputDStream<String> lines = jssc.socketTextStream("10.148.15.10", 9999); //步骤三:数据处理 JavaDStream<String> words = lines.flatMap(x -> Arrays.asList(x.split(" ")).iterator()); JavaPairDStream<String, Integer> pairs = words.mapToPair(s -> new Tuple2<>(s, 1)); JavaPairDStream<String, Integer> wordCounts = pairs.reduceByKey((i1, i2) -> i1 + i2); //步骤四:数据输出 wordCounts.print(); //步骤五:启动程序 jssc.start(); jssc.awaitTermination(); jssc.stop(); } }
/**
 * Streaming word count over an HDFS directory source: each new file that
 * appears under /tmp is read as a batch of text lines.
 */
object WordCountForHDFSSource {
  def main(args: Array[String]): Unit = {
    // Step 1: initialize the streaming entry point.
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount")
    val streamingContext = new StreamingContext(sparkConf, Seconds(1))

    // Step 2: acquire the input stream (monitors /tmp for newly created files).
    val textLines = streamingContext.textFileStream("/tmp")

    // Step 3: split into words, pair each with 1, and sum per word.
    val counts = textLines
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)

    // Step 4: print each batch's counts.
    counts.print()

    // Step 5: start the job and block until it terminates.
    streamingContext.start()
    streamingContext.awaitTermination()
    streamingContext.stop()
  }
}
/**
 * A custom Receiver that reads data from a socket and
 * parses the received bytes into newline-separated text.
 *
 * Usage: nc -lk 9999
 */
object CustomReceiver {
def main(args
Copyright © 2003-2013 www.wpsshop.cn 版权所有,并保留所有权利。