This post uses Flink CDC to monitor MySQL databases and tables for changes in real time. Here the change events are only printed; a follow-up post will show how to write them into another MySQL database.
Enable binlog in my.cnf (restricted here to the test database), then restart MySQL. Note that Flink CDC reads row-based binlog; MySQL defaults to binlog_format=ROW since 5.7.7, so set it explicitly on older versions.

server-id=1
log-bin=mysql-bin
binlog-do-db=test

Create the test database and a sample table:

mysql> create database test;
mysql> create table user_info(id int unsigned not null auto_increment primary key, username varchar(60), sex tinyint(1), nickname varchar(60), addr varchar(255)) ENGINE=InnoDB default charset=utf8mb4;
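A quick way to confirm the binlog settings took effect is to query the server variables. Below is a minimal JDBC sketch, not part of the original walkthrough: the class name BinlogCheck is hypothetical, and the host/credentials (s1, root/123456) are the ones assumed by the Flink job later in this post. It reuses the mysql-connector-java dependency from the pom below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class BinlogCheck {
    public static void main(String[] args) throws Exception {
        // Host and credentials match the ones used by the Flink CDC source below.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://s1:3306/test", "root", "123456");
             Statement stmt = conn.createStatement();
             // log_bin should be ON and binlog_format should be ROW.
             ResultSet rs = stmt.executeQuery(
                 "SHOW VARIABLES WHERE Variable_name IN ('log_bin', 'binlog_format', 'server_id')")) {
            while (rs.next()) {
                System.out.println(rs.getString(1) + " = " + rs.getString(2));
            }
        }
    }
}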
Create a new project named flinkcdc in IDEA.
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.zsoft.flinkcdc</groupId>
    <artifactId>flinkcdc</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <flink.version>1.13.1</flink.version>
    </properties>

    <dependencies>
        <!-- Flink CDC via the DataStream API -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-java</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_2.12</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_2.12</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.1.3</version>
        </dependency>

        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.22</version>
        </dependency>

        <dependency>
            <groupId>com.alibaba.ververica</groupId>
            <artifactId>flink-connector-mysql-cdc</artifactId>
            <version>1.4.0</version>
        </dependency>

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.75</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>3.0.0</version>
                <configuration>
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

</project>

resources/log4j.properties
log4j.rootLogger=warn,stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
The custom deserialization schema:
com/zsoft/flinkcdc/MyDeserializationSchema.java
package com.zsoft.flinkcdc;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

public class MyDeserializationSchema implements DebeziumDeserializationSchema<String> {
    @Override
    public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {
        Struct valueStruct = (Struct) sourceRecord.value();
        Struct sourceStruct = valueStruct.getStruct("source");

        // Database name
        String database = sourceStruct.getString("db");
        // Table name
        String table = sourceStruct.getString("table");

        // Operation type: Envelope.Operation.toString() yields
        // CREATE/UPDATE/DELETE/READ; map "create" to "insert"
        String type = Envelope.operationFor(sourceRecord).toString().toLowerCase();
        if (type.equals("create")) {
            type = "insert";
        }
        JSONObject jsonObj = new JSONObject();
        jsonObj.put("database", database);
        jsonObj.put("table", table);
        jsonObj.put("type", type);

        // Row data: the "after" image is null for delete events,
        // in which case "data" stays an empty object
        Struct afterStruct = valueStruct.getStruct("after");
        JSONObject dataJsonObj = new JSONObject();
        if (afterStruct != null) {
            for (Field field : afterStruct.schema().fields()) {
                String fieldName = field.name();
                Object fieldValue = afterStruct.get(field);
                dataJsonObj.put(fieldName, fieldValue);
            }
        }
        jsonObj.put("data", dataJsonObj);

        collector.collect(jsonObj.toJSONString());
    }

    @Override
    public TypeInformation<String> getProducedType() {
        return TypeInformation.of(String.class);
    }
}
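The schema above emits only the after image, so a delete arrives with an empty data object. If the old row is also needed (for updates and deletes), the Debezium change-event envelope carries a before struct that can be read the same way. Here is a sketch of the extra lines one might add inside deserialize(); the "before" field is standard Debezium, while the JSON key used below is just an example:

// Optionally capture the pre-update/pre-delete row image.
// "before" is null for insert and snapshot (read) events.
Struct beforeStruct = valueStruct.getStruct("before");
JSONObject beforeJsonObj = new JSONObject();
if (beforeStruct != null) {
    for (Field field : beforeStruct.schema().fields()) {
        beforeJsonObj.put(field.name(), beforeStruct.get(field));
    }
}
jsonObj.put("before", beforeJsonObj);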

The main class:
com/zsoft/flinkcdc/FlinkCdcDataStream.java
package com.zsoft.flinkcdc;

import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

public class FlinkCdcDataStream {
    public static void main(String[] args) throws Exception {
        // TODO 1. Set up the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2. Enable checkpointing
        // 2.1 Checkpoint every 5 s with exactly-once semantics
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // 2.2 Checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        // 2.3 Restart strategy for automatic recovery from checkpoints
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1, 6000L));
        // 2.4 Retain the last checkpoint when the job is cancelled
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.5 State backend
        env.setStateBackend(new FsStateBackend("hdfs://s1:8020/flinkCDC_DS"));
        // 2.6 User for HDFS access
        System.setProperty("HADOOP_USER_NAME", "hadoop");

        // TODO 3. Create the Flink-MySQL-CDC source.
        // The startup mode is set via StartupOptions below. Passing
        // "scan.startup.mode" as a Debezium property (as some examples do)
        // has no effect, since that key is a Flink SQL connector option.
        SourceFunction<String> sourceFunction = MySQLSource.<String>builder()
                .hostname("s1")
                .port(3306)
                .username("root")
                .password("123456")
                .databaseList("test")
                .tableList("test.user_info")
                .startupOptions(StartupOptions.earliest())
                .deserializer(new MyDeserializationSchema())
                .build();

        // TODO 4. Read from MySQL with the CDC source
        DataStreamSource<String> mysqlDS = env.addSource(sourceFunction).setParallelism(1);

        // TODO 5. Print the change events
        mysqlDS.print();

        // TODO 6. Execute the job
        env.execute();
    }
}
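If you first want to run this locally from the IDE, note that steps 2.5 and 2.6 assume a Hadoop cluster at s1:8020. A minimal local variant is sketched below, assuming Flink 1.13+ (where HashMapStateBackend and setCheckpointStorage were introduced); it replaces the two HDFS-specific lines:

// Local-test variant of steps 2.5/2.6: keep exactly-once checkpointing,
// hold state on the JVM heap, and write checkpoints to a local directory
// (import org.apache.flink.runtime.state.hashmap.HashMapStateBackend).
env.setStateBackend(new HashMapStateBackend());
env.getCheckpointConfig().setCheckpointStorage("file:///tmp/flinkCDC_DS");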

Package the project in IDEA with Maven's package goal; this produces flinkcdc-1.0-SNAPSHOT-jar-with-dependencies.jar.
Upload the jar through Flink's WebUI: on the Submit New Job page, click the + Add New button and select the jar.
Under the uploaded jar, fill in the entry class (here, com.zsoft.flinkcdc.FlinkCdcDataStream).
Click "Submit" to launch the application.
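Equivalently, the job can be submitted from the command line instead of the WebUI (assuming the flink client script is on your PATH):

flink run -c com.zsoft.flinkcdc.FlinkCdcDataStream flinkcdc-1.0-SNAPSHOT-jar-with-dependencies.jar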
Now insert some test rows in MySQL:
mysql> insert into user_info values(null, 'zhangsan', 1, 'zhs','beijing');
mysql> insert into user_info values(null, 'lisi', 1, 'ls','shanghai');
mysql> insert into user_info values(null, 'wangwu', 1, 'ww','wangwu');
In the Flink WebUI, click the task under Task Managers; the Stdout tab shows the change events:
- {"database":"test","data":{"sex":1,"nickname":"zhs","id":1,"addr":"beijing","username":"zhangsan"},"type":"insert","table":"user_info"}
- {"database":"test","data":{"sex":1,"nickname":"ls","id":2,"addr":"shanghai","username":"lisi"},"type":"insert","table":"user_info"}
- {"database":"test","data":{"sex":1,"nickname":"ww","id":3,"addr":"wangwu","username":"wangwu"},"type":"insert","table":"user_info"}