
Writing from Flink to Elasticsearch: sample sink code



Preface

Two simple examples of a Flink sink writing streaming data to Elasticsearch 5 and Elasticsearch 7.


1. Writing to Elasticsearch 5

  • Maven dependency (pom.xml)
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-elasticsearch5_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>
  • Sample code:
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch5.ElasticsearchSink;
import org.apache.flink.types.Row;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class Es5SinkDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        Row row=Row.of("张三","001",getTimestamp("2016-10-24 21:59:06"));
        Row row2=Row.of("张三","002",getTimestamp("2016-10-24 21:50:06"));
        Row row3=Row.of("张三","002",getTimestamp("2016-10-24 21:51:06"));
        Row row4=Row.of("李四","003",getTimestamp("2016-10-24 21:50:56"));
        Row row5=Row.of("李四","004",getTimestamp("2016-10-24 00:48:36"));
        Row row6=Row.of("王五","005",getTimestamp("2016-10-24 00:48:36"));
        DataStreamSource<Row> source =env.fromElements(row,row2,row3,row4,row5,row6);

        Map<String, String> config = new HashMap<>();
//        config.put("cluster.name", "my-cluster-name");
//        config.put("bulk.flush.max.actions", "1");

        List<InetSocketAddress> transportAddresses = new ArrayList<>();

        transportAddresses.add(new InetSocketAddress(InetAddress.getByName("10.68.8.60"), 9300));
        // add the Elasticsearch 5 sink
        DataStreamSink<Row> rowDataStreamSink = source.addSink(new ElasticsearchSink<>(config, transportAddresses, new ElasticsearchSinkFunction<Row>() {
            public IndexRequest createIndexRequest(Row element) {
                Map<String, Object> json = new HashMap<>();
                json.put("name22", element.getField(0).toString());
                json.put("no22", element.getField(1));
                json.put("age", 34);
                json.put("create_time", element.getField(2));

                return Requests.indexRequest()
                        .index("cc")
                        .type("mtype")
                        .id(element.getField(1).toString())
                        .source(json);
            }

            @Override
            public void process(Row element, RuntimeContext ctx, RequestIndexer indexer) {
                // use the RequestIndexer to send the request and write the data
                indexer.add(createIndexRequest(element));
            }
        }));
        env.execute("es demo");
    }
    private static java.sql.Timestamp getTimestamp(String str) throws Exception {
//		String string = "2016-10-24 21:59:06";
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        java.util.Date date=sdf.parse(str);
        java.sql.Timestamp s = new java.sql.Timestamp(date.getTime());
        return s;
    }
}
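For local testing it can help to enable the two options that are commented out in the config map above. A minimal sketch; "my-cluster-name" is a placeholder and must match the cluster.name of your ES5 cluster:

Map<String, String> config = new HashMap<>();
// must match the cluster.name of the target ES5 cluster (placeholder value)
config.put("cluster.name", "my-cluster-name");
// flush after every element so documents are visible immediately while testing
config.put("bulk.flush.max.actions", "1");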

2. Writing to Elasticsearch 7

  • Maven dependency (pom.xml)
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-elasticsearch7_2.11</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>

  • Sample code:
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch7.ElasticsearchSink;
import org.apache.flink.types.Row;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class EsSinkDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        Row row=Row.of("张三","001",getTimestamp("2016-10-24 21:59:06"));
        Row row2=Row.of("张三","002",getTimestamp("2016-10-24 21:50:06"));
        Row row3=Row.of("张三","002",getTimestamp("2016-10-24 21:51:06"));
        Row row4=Row.of("李四","003",getTimestamp("2016-10-24 21:50:56"));
        Row row5=Row.of("李四","004",getTimestamp("2016-10-24 00:48:36"));
        Row row6=Row.of("王五","005",getTimestamp("2016-10-24 00:48:36"));
        DataStreamSource<Row> source =env.fromElements(row,row2,row3,row4,row5,row6);

        // With the ES7 connector, bulk flushing is configured on the
        // ElasticsearchSink.Builder below (e.g. setBulkFlushMaxActions)
        // rather than through a config map as with the ES5 transport client.

        List<HttpHost> hosts = new ArrayList<>();
        hosts.add(new HttpHost("10.68.8.69",9200,"http"));

        ElasticsearchSink.Builder<Row> esSinkBuilder = new ElasticsearchSink.Builder<Row>(hosts,new ElasticsearchSinkFunction<Row>() {
            public IndexRequest createIndexRequest(Row element) {
                Map<String, Object> json = new HashMap<>();
                json.put("name22", element.getField(0).toString());
                json.put("no22", element.getField(1));
                json.put("age", 34);
//                json.put("create_time", element.getField(2));

                return Requests.indexRequest()
                        .index("cc")
                        .id(element.getField(1).toString())
                        .source(json);
            }

            @Override
            public void process(Row element, RuntimeContext ctx, RequestIndexer indexer) {
                // use the RequestIndexer to send the request and write the data
                indexer.add(createIndexRequest(element));
            }
        });
        esSinkBuilder.setBulkFlushMaxActions(100);
        // add the Elasticsearch 7 sink
        source.addSink(esSinkBuilder.build());

        env.execute("es demo");
    }

    private static java.sql.Timestamp getTimestamp(String str) throws Exception {
//		String string = "2016-10-24 21:59:06";
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        java.util.Date date=sdf.parse(str);
        java.sql.Timestamp s = new java.sql.Timestamp(date.getTime());
        return s;
    }
}
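Beyond setBulkFlushMaxActions, the ElasticsearchSink.Builder exposes further bulk-flush tuning and a failure handler. A minimal sketch, assuming the esSinkBuilder from the example above and the RetryRejectedExecutionFailureHandler shipped with the Flink Elasticsearch connector:

import org.apache.flink.streaming.connectors.elasticsearch.util.RetryRejectedExecutionFailureHandler;

// flush buffered requests at least every 5 seconds, or once 5 MB is buffered
esSinkBuilder.setBulkFlushInterval(5000);
esSinkBuilder.setBulkFlushMaxSizeMb(5);
// re-enqueue requests that Elasticsearch rejected because its bulk queue was full
esSinkBuilder.setFailureHandler(new RetryRejectedExecutionFailureHandler());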


Summary

The difference between writing to ES5 and ES7 from Flink is which flink-connector-elasticsearch artifact you depend on; ES7 has dropped the concept of document types, so no type needs to be set on the index request.
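As a side-by-side sketch of that difference, reusing the index name and the id/json values from the examples above:

// ES5: a document type must still be set
Requests.indexRequest().index("cc").type("mtype").id(id).source(json);

// ES7: types are gone, so the request only names the index
Requests.indexRequest().index("cc").id(id).source(json);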
