赞
踩
# Extract the FlinkX 1.10 distribution into /usr/local/soft/
unzip flinkx-1.10.zip -d /usr/local/soft/
# Make the flinkx launcher script executable (run from the flinkx bin directory)
chmod a+x flinkx
# Edit the bundled Flink configuration to pin the web UI port (see rest.bind-port below)
vim flinkconf/flink-conf.yaml
## Web UI port for the Flink dashboard; if not set, a random port is assigned
rest.bind-port: 8888
{ "job": { "content": [ { "reader": { "parameter": { "username": "root", "password": "123456", "connection": [ { "jdbcUrl": [ "jdbc:mysql://master:3306/student" ], "table": [ "student" ] } ], "column": [ "*" ], "customSql": "", "where": "clazz = '理科二班'", "splitPk": "", "queryTimeOut": 1000, "requestAccumulatorInterval": 2 }, "name": "mysqlreader" }, "writer": { "name": "hdfswriter", "parameter": { "path": "hdfs://master:9000/data/flinkx/student", "defaultFS": "hdfs://master:9000", "column": [ { "name": "col1", "index": 0, "type": "string" }, { "name": "col2", "index": 1, "type": "string" }, { "name": "col3", "index": 2, "type": "string" }, { "name": "col4", "index": 3, "type": "string" }, { "name": "col5", "index": 4, "type": "string" }, { "name": "col6", "index": 5, "type": "string" } ], "fieldDelimiter": ",", "fileType": "text", "writeMode": "overwrite" } } } ], "setting": { "restore": { "isRestore": false, "isStream": false }, "errorLimit": {}, "speed": { "channel": 1 } } } }
# Run the MySQL -> HDFS sync job (mysqlToHDFS.json) in local mode,
# pointing FlinkX at its plugin directory and the Flink configuration edited above
flinkx -mode local -job /usr/local/soft/flinkx-1.10/jsonConf/mysqlToHDFS.json -pluginRoot /usr/local/soft/flinkx-1.10/syncplugins/ -flinkconf /usr/local/soft/flinkx-1.10/flinkconf/
flinkx 任务启动后,会在执行命令的目录下生成一个nohup.out文件
# Follow the job log written to nohup.out in the directory the command was run from
tail -f nohup.out
http://master:8888
{ "job": { "content": [ { "reader": { "parameter": { "username": "root", "password": "123456", "connection": [ { "jdbcUrl": [ "jdbc:mysql://master:3306/student" ], "table": [ "student" ] } ], "column": [ "*" ], "customSql": "", "where": "clazz < '文科二班'", "splitPk": "", "queryTimeOut": 1000, "requestAccumulatorInterval": 2 }, "name": "mysqlreader" }, "writer": { "name": "hivewriter", "parameter": { "jdbcUrl": "jdbc:hive2://master:10000/testflinkx", "username": "", "password": "", "fileType": "text", "fieldDelimiter": ",", "writeMode": "overwrite", "compress": "", "charsetName": "UTF-8", "maxFileSize": 1073741824, "tablesColumn": "{\"student\":[{\"key\":\"id\",\"type\":\"string\"},{\"key\":\"name\",\"type\":\"string\"},{\"key\":\"age\",\"type\":\"string\"}]}", "defaultFS": "hdfs://master:9000" } } } ], "setting": { "restore": { "isRestore": false, "isStream": false }, "errorLimit": {}, "speed": { "channel": 1 } } } }
-- Target database/table for the FlinkX MySQL -> Hive sync job (mysqlToHive.json).
-- IF NOT EXISTS makes this DDL idempotent: re-running the script is safe.
CREATE DATABASE IF NOT EXISTS testflinkx;
USE testflinkx;

-- Columns must match the writer's "tablesColumn" mapping (id, name, age — all strings).
-- Comma-delimited text storage matches the job's "fileType": "text" and
-- "fieldDelimiter": ",".
CREATE TABLE IF NOT EXISTS `student` (
    `id`   string,
    `name` string,
    `age`  string
)
PARTITIONED BY (
    `pt` string)          -- partition column; stored in the path, not in the data files
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ',';
# Option 1: start HiveServer2 directly
hiveserver2
# Option 2: start it through the hive service wrapper (equivalent)
hive --service hiveserver2
# Run the MySQL -> Hive sync job (mysqlToHive.json) in local mode;
# requires HiveServer2 (started above) listening on master:10000
flinkx -mode local -job /usr/local/soft/flinkx-1.10/jsonConf/mysqlToHive.json -pluginRoot /usr/local/soft/flinkx-1.10/syncplugins/ -flinkconf /usr/local/soft/flinkx-1.10/flinkconf/
{ "job": { "content": [ { "reader": { "parameter": { "username": "root", "password": "123456", "connection": [ { "jdbcUrl": [ "jdbc:mysql://master:3306/student" ], "table": [ "score" ] } ], "column": [ "*" ], "customSql": "", "splitPk": "", "queryTimeOut": 1000, "requestAccumulatorInterval": 2 }, "name": "mysqlreader" }, "writer": { "name": "hbasewriter", "parameter": { "hbaseConfig": { "hbase.zookeeper.property.clientPort": "2181", "hbase.rootdir": "hdfs://master:9000/hbase", "hbase.cluster.distributed": "true", "hbase.zookeeper.quorum": "master,node1,node2", "zookeeper.znode.parent": "/hbase" }, "table": "testFlinkx", "rowkeyColumn": "$(info:student_id)_$(info:course_id)", "column": [ { "name": "info:student_id", "type": "string" }, { "name": "info:course_id", "type": "string" }, { "name": "info:score", "type": "string" } ] } } } ], "setting": { "restore": { "isRestore": false, "isStream": false }, "errorLimit": {}, "speed": { "channel": 1 } } } }
# HBase shell: create table 'testFlinkx' with a single column family 'info',
# matching the writer's "table" and "column" settings in mysqlToHBase.json
create 'testFlinkx','info'
# Run the MySQL -> HBase sync job (mysqlToHBase.json) in local mode;
# requires the HBase table created above and ZooKeeper reachable on master,node1,node2
flinkx -mode local -job /usr/local/soft/flinkx-1.10/jsonConf/mysqlToHBase.json -pluginRoot /usr/local/soft/flinkx-1.10/syncplugins/ -flinkconf /usr/local/soft/flinkx-1.10/flinkconf/
Copyright © 2003-2013 www.wpsshop.cn 版权所有,并保留所有权利。