To expose the Flink web UI from a local execution environment, add the flink-runtime-web dependency to the POM (${flink.version} and ${scope.type} are Maven properties defined elsewhere in the project):

<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-runtime-web_2.11</artifactId>
    <version>${flink.version}</version>
    <scope>${scope.type}</scope>
</dependency>
The following example creates a local execution environment with the web UI enabled and reads from two Kafka topics:

import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.configuration.{Configuration, RestOptions}
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, createTypeInformation}
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

object DataStreamReadKafka {

  def main(args: Array[String]): Unit = {

    // Create a local execution environment with the web UI enabled
    val conf = new Configuration()
    conf.setString(RestOptions.BIND_PORT, "8081") // port used to access the web UI
    val env = StreamExecutionEnvironment
      .createLocalEnvironmentWithWebUI(conf)
    env.setParallelism(1)
    val tab_env = StreamTableEnvironment.create(env)

    // Read data from Kafka
    val topics = "test,test_2".split(",").toList
    val source = KafkaSource
      .builder[String]
      .setBootstrapServers("localhost:9092")
      .setTopics(topics: _*)
      .setGroupId("shy_test")
      .setStartingOffsets(OffsetsInitializer.earliest)
      .setValueOnlyDeserializer(new SimpleStringSchema())
      .build
    val input_stream = env.fromSource(source, WatermarkStrategy.noWatermarks(), "kafka_source")
    input_stream.print()

    // Submit the job and start execution
    env.execute()

  }

}
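The StreamTableEnvironment (tab_env) created above is not used in this snippet. Below is a minimal sketch of how it could be wired in, placed inside main before env.execute(); it assumes the default column name f0 that Flink assigns to an atomic String stream, and the view name kafka_raw is illustrative:

    // Hedged sketch: register the Kafka stream as a table and query it with SQL.
    // Assumes the default column name `f0` for an atomic String stream; the view
    // name "kafka_raw" is illustrative.
    val input_table = tab_env.fromDataStream(input_stream)
    tab_env.createTemporaryView("kafka_raw", input_table)
    val non_empty = tab_env.sqlQuery("SELECT f0 FROM kafka_raw WHERE f0 <> ''")
    tab_env.toDataStream(non_empty).print()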

Note: assign each job its own port for accessing the web UI.
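A minimal sketch of one way to follow this note, assuming the port is passed per job on the command line (the --webui_port key and the PerJobWebUI object name are illustrative, not part of the original example):

import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.configuration.{Configuration, RestOptions}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment

object PerJobWebUI {

  def main(args: Array[String]): Unit = {
    // e.g. start job A with "--webui_port 8081" and job B with "--webui_port 8082"
    val params = ParameterTool.fromArgs(args)
    val conf = new Configuration()
    conf.setString(RestOptions.BIND_PORT, params.get("webui_port", "8081"))
    val env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf)
    // ... define sources and sinks as in the example above, then:
    // env.execute()
  }

}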