当前位置:   article > 正文

云计算与大数据入门实验三——熟悉常用的 HBase 操作

熟悉常用的hbase操作

云计算与大数据入门实验三——熟悉常用的 HBase 操作

实验目的

理解HBase在Hadoop体系结构中的角色

熟练使用HBase操作常用的shell命令

熟悉HBase操作常用的Java API

实验要求

  1. 保存程序,并自行存档

  2. 最终的程序都必须经过测试,验证是正确的

  3. 按照实验报告格式,认真记录实验过程及结果,回答实验报告中的问题。实验报告模板在学习通的资料里面下载。学生提交的实验报告需转换成PDF文件提交

image

实验步骤

Hbase 常用命令

  1. 在 Hbase 中建表
create 'student','Sname','Ssex','Sage','Sdept','course'

image

  1. 查看表结构
describe 'student'

image

  1. 添加数据
  1. put 'student', '95001','Sname','LiYing'
  2. put 'student','95001','course:math','80'

image

  1. 查看数据
get 'student','95001'

image

  1. 删除数据
  1. delete 'student','95001','Sname'
  2. deleteall 'student','95001'

image

  1. 删除表
  1. disable 'student' #让表不可用
  2. drop 'student' #删除表

image

  1. 查询历史数据
  1. create 'teacher',{NAME=>'username',VERSIONS=>5}
  2. put 'teacher','91001','username','Mary'
  3. put 'teacher','91001','username','Mary1'
  4. put 'teacher','91001','username','Mary2'
  5. put 'teacher','91001','username','Mary3'
  6. put 'teacher','91001','username','Mary4'
  7. put 'teacher','91001','username','Mary5'
  8. get 'teacher','91001',{COLUMN=>'username',VERSIONS=>3}

image

  1. 退出 HBase
exit

image

编程实践

例子:创建表,插入数据,查看表中数据

  1. import org.apache.hadoop.conf.Configuration;
  2. import org.apache.hadoop.hbase.*;
  3. import org.apache.hadoop.hbase.client.*;
  4. import org.apache.hadoop.hbase.util.Bytes;
  5. import java.io.IOException;
  6. public class ExampleForHBase {
  7. public static Configuration configuration;
  8. public static Connection connection;
  9. public static Admin admin;
  10. public static void main(String[] args)throws IOException{
  11. init();    //主要操作就是为了连接到数据库hbase
  12. createTable("student",new String[]{"score"});    //创建表,shell命令:create '表名','列族名1','列族名2','列族名3' ...
  13. insertData("student","zhangsan","score","English","69"); //shell命令: put 'student','张三','score:English','69'
  14. insertData("student","zhangsan","score","Math","86");
  15. insertData("student","zhangsan","score","Computer","77");
  16. getData("student", "zhangsan", "score","English");
  17. close();
  18. }
  19. public static void init(){
  20. configuration = HBaseConfiguration.create();
  21. configuration.set("hbase.rootdir","hdfs://localhost:9000/hbase");
  22. try{
  23. connection = ConnectionFactory.createConnection(configuration);
  24. admin = connection.getAdmin();
  25. }catch (IOException e){
  26. e.printStackTrace();
  27. }
  28. }
  29. public static void close(){
  30. try{
  31. if(admin != null){
  32. admin.close();
  33. }
  34. if(null != connection){
  35. connection.close();
  36. }
  37. }catch (IOException e){
  38. e.printStackTrace();
  39. }
  40. }
  41. public static void createTable(String myTableName,String[] colFamily) throws IOException {
  42. TableName tableName = TableName.valueOf(myTableName);
  43. if(admin.tableExists(tableName)){
  44. System.out.println("talbe is exists!");
  45. }else {
  46. TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
  47. for(String str:colFamily){
  48. ColumnFamilyDescriptor family =
  49. ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(str)).build();
  50. tableDescriptor.setColumnFamily(family);
  51. }
  52. admin.createTable(tableDescriptor.build());
  53. }
  54. }
  55. public static void insertData(String tableName,String rowKey,String colFamily,String col,String val) throws IOException {
  56. Table table = connection.getTable(TableName.valueOf(tableName));
  57. Put put = new Put(rowKey.getBytes());
  58. put.addColumn(colFamily.getBytes(),col.getBytes(), val.getBytes());
  59. table.put(put);
  60. table.close();
  61. }
  62. public static void getData(String tableName,String rowKey,String colFamily, String col)throws IOException{
  63. Table table = connection.getTable(TableName.valueOf(tableName));
  64. Get get = new Get(rowKey.getBytes());
  65. get.addColumn(colFamily.getBytes(),col.getBytes());
  66. Result result = table.get(get);
  67. System.out.println(new String(result.getValue(colFamily.getBytes(),col==null?null:col.getBytes())));
  68. table.close();
  69. }
  70. }

image

实验三 熟悉常用的HBase 操作

编程实现以下指定功能,并用 HBase 提供的 Shell 命令完成相同任务:

  • 列出 HBase 所有的表的相关信息,例如表名;

  • 在终端打印出指定的表的所有记录数据;

  • 向已经创建好的表添加和删除指定的列族或列;

  • 清空指定的表的所有记录数据;

  • 统计表的行数。

列出 HBase 所有的表的相关信息,例如表名:

  1. /**
  2. * 同样是正常的建立 数据库连接,执行操作,然后最后关闭连接
  3. * 重点是:HTableDescriptor hTableDescriptors[] = admin.listTables(); 获取到 表格列表,然后遍历
  4. */
  5. import java.io.IOException;
  6. import org.apache.hadoop.conf.Configuration;
  7. import org.apache.hadoop.hbase.*;
  8. import org.apache.hadoop.hbase.client.*;
  9. import java.io.IOException;
  10. public class Test_1 {
  11. public static Configuration configuration;
  12. public static Connection connection;
  13. public static Admin admin;
  14. /**
  15. * 建立连接
  16. */
  17. public static void init() {
  18. configuration = HBaseConfiguration.create();
  19. configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
  20. try {
  21. connection = ConnectionFactory.createConnection(configuration);
  22. admin = connection.getAdmin();
  23. } catch (IOException e) {
  24. e.printStackTrace();
  25. }
  26. }
  27. /**
  28. * 关闭连接
  29. */
  30. public static void close() {
  31. try {
  32. if (admin != null) {
  33. admin.close();
  34. }
  35. if (null != connection) {
  36. connection.close();
  37. }
  38. } catch (IOException e) {
  39. e.printStackTrace();
  40. }
  41. }
  42. /**
  43. *
  44. * 查看已有表,通过方法listTables()
  45. *
  46. * @throws IOException
  47. *
  48. */
  49. public static void listTables() throws IOException {
  50. init();
  51. HTableDescriptor hTableDescriptors[] = admin.listTables();
  52. for (HTableDescriptor hTableDescriptor : hTableDescriptors) {
  53. System.out.println(hTableDescriptor.getNameAsString());
  54. }
  55. close();
  56. }
  57. public static void main(String[] args) {
  58. Test_1 t = new Test_1();
  59. try {
  60. System.out.println("以下为Hbase 数据库中所存的表信息");
  61. t.listTables();
  62. } catch (IOException e) {
  63. e.printStackTrace();
  64. }
  65. }
  66. }

image

在终端打印出指定的表的所有记录数据;

  1. /**
  2. * 同样是正常的建立 数据库连接,执行操作,然后最后关闭连接
  3. * 重点是:
  4. * Table table = connection.getTable(TableName.valueOf(tableName));获取到表格对象
  5. * Scan scan = new Scan(); ResultScanner scanner = table.getScanner(scan); 然后通过Scanner对象,获取到ResultScanner扫描结果对象,遍历输出
  6. */
  7. import java.io.IOException;
  8. import org.apache.hadoop.conf.Configuration;
  9. import org.apache.hadoop.hbase.*;
  10. import org.apache.hadoop.hbase.client.*;
  11. import java.io.IOException;
  12. import java.util.Scanner;
  13. public class Test_2 {
  14. public static Configuration configuration;
  15. public static Connection connection;
  16. public static Admin admin;
  17. // 建立连接
  18. public static void init() {
  19. configuration = HBaseConfiguration.create();
  20. configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
  21. try {
  22. connection = ConnectionFactory.createConnection(configuration);
  23. admin = connection.getAdmin();
  24. } catch (IOException e) {
  25. e.printStackTrace();
  26. }
  27. }
  28. // 关闭连接
  29. public static void close() {
  30. try {
  31. if (admin != null) {
  32. admin.close();
  33. }
  34. if (null != connection) {
  35. connection.close();
  36. }
  37. } catch (IOException e) {
  38. e.printStackTrace();
  39. }
  40. }
  41. /**
  42. *
  43. * 根据表名查找表信息
  44. *
  45. */
  46. public static void getData(String tableName) throws IOException {
  47. init();
  48. Table table = connection.getTable(TableName.valueOf(tableName));
  49. Scan scan = new Scan();
  50. ResultScanner scanner = table.getScanner(scan);
  51. for (Result result : scanner)
  52. {
  53. showCell((result));
  54. }
  55. close();
  56. }
  57. /**
  58. *
  59. * 格式化输出
  60. *
  61. * @param result
  62. *
  63. */
  64. public static void showCell(Result result) {
  65. Cell[] cells = result.rawCells();
  66. for (Cell cell : cells) {
  67. System.out.println("RowName(行键):" + new String(CellUtil.cloneRow(cell)) + " ");
  68. System.out.println("Timetamp(时间戳):" + cell.getTimestamp() + " ");
  69. System.out.println("column Family(列簇):" + new String(CellUtil.cloneFamily(cell)) + " ");
  70. System.out.println("column Name(列名):" + new String(CellUtil.cloneQualifier(cell)) + " ");
  71. System.out.println("value:(值)" + new String(CellUtil.cloneValue(cell)) + " ");
  72. System.out.println();
  73. }
  74. }
  75. public static void main(String[] args) throws IOException {
  76. // TODO Auto-generated method stub
  77. Test_2 t = new Test_2();
  78. System.out.println("请输入要查看的表名");
  79. Scanner scan = new Scanner(System.in);
  80. String tableName = scan.nextLine();
  81. System.out.println("信息如下:");
  82. t.getData(tableName);
  83. }
  84. }

image

向已经创建好的表添加和删除指定的列族或列:

  1. put 'student','95003','Sname','wangjinxuan'     (向列族 Sname 添加数据,列限定符为空)
  2. put 'student','95003','Sname:nickName','wang'    (添加列 Sname:nickName)
  3. put 'student','95003','Sname:firstName','jinxuan' (添加列 Sname:firstName)
  4. put 的反向操作是 delete:
  5. delete 'student','95003','Sname'
  6. delete 'student','95003','Sname:nickName'
  7. deleteall 'student','95003'  (删除整个行记录)

image

image

image

image

  1. /**
  2. * hbase只关注rowkey,column Family(列族),并没有说在创建表的时候指定cq(列限定修饰符)有多少,这也是hbase列式存储的特点,
  3. * 所以在hbase API中是没有提供delete 一个列下的所有数据的
  4. *
  5. * 同样是正常的建立 数据库连接,执行操作,然后最后关闭连接
  6. * 1,Table table = connection.getTable(TableName.valueOf(tableName)); 先获取到表
  7. * 2,插入:(① 创建Put对象,② 然后通过方法 addColumn将列、列限定符、值 放到put对象,③ 最后将put对象put到表格)
  8. * Put put = new Put(rowKey.getBytes());
  9. * put.addColumn(colFamily.getBytes(), col.getBytes(), val.getBytes());
  10. * table.put(put);
  11. * 3,删除:
  12. * Table table = connection.getTable(TableName.valueOf(tableName)); 同样首先获取到表
  13. * Delete delete = new Delete(rowKey.getBytes()); //通过传入行键,new一个删除对象
  14. * //删除对象添加要被删除的列或列族
  15. * ① 删除指定列族的所有数据(此情况是列族下无列限定符时的情况):delete.addFamily(colFamily.getBytes());
  16. * ② 删除指定列的数据(此列主要说的是列限定修饰符):delete.addColumn(colFamily.getBytes(), col.getBytes());
  17. * table.delete(delete); //最后就是表格delete掉 delete对象
  18. */
  19. import java.io.IOException;
  20. import java.util.Scanner;
  21. import org.apache.hadoop.conf.Configuration;
  22. import org.apache.hadoop.hbase.Cell;
  23. import org.apache.hadoop.hbase.CellUtil;
  24. import org.apache.hadoop.hbase.HBaseConfiguration;
  25. import org.apache.hadoop.hbase.TableName;
  26. import org.apache.hadoop.hbase.client.Admin;
  27. import org.apache.hadoop.hbase.client.Connection;
  28. import org.apache.hadoop.hbase.client.ConnectionFactory;
  29. import org.apache.hadoop.hbase.client.Delete;
  30. import org.apache.hadoop.hbase.client.Put;
  31. import org.apache.hadoop.hbase.client.Result;
  32. import org.apache.hadoop.hbase.client.ResultScanner;
  33. import org.apache.hadoop.hbase.client.Scan;
  34. import org.apache.hadoop.hbase.client.Table;
  35. public class Test_3 {
  36. public static Configuration configuration;
  37. public static Connection connection;
  38. public static Admin admin;
  39. // 建立连接
  40. public static void init() {
  41. configuration = HBaseConfiguration.create();
  42. configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
  43. try {
  44. connection = ConnectionFactory.createConnection(configuration);
  45. admin = connection.getAdmin();
  46. } catch (IOException e) {
  47. e.printStackTrace();
  48. }
  49. }
  50. // 关闭连接
  51. public static void close() {
  52. try {
  53. if (admin != null) {
  54. admin.close();
  55. }
  56. if (null != connection) {
  57. connection.close();
  58. }
  59. } catch (IOException e) {
  60. e.printStackTrace();
  61. }
  62. }
  63. /**
  64. * 向某一行的某一列插入数据
  65. *
  66. * @param tableName 表名
  67. * @param rowKey 行键
  68. * @param colFamily 列族名
  69. * @param col 列名(如果其列族下没有子列,此参数可为空)
  70. * @param val 值
  71. * @throws IOException
  72. */
  73. public static void insertRow(String tableName, String rowKey, String colFamily, String col, String val)throws IOException {
  74. init();
  75. Table table = connection.getTable(TableName.valueOf(tableName));
  76. Put put = new Put(rowKey.getBytes());
  77. put.addColumn(colFamily.getBytes(), col.getBytes(), val.getBytes());
  78. table.put(put);
  79. table.close();
  80. close();
  81. }
  82. /**
  83. * 根据表名查找表信息
  84. */
  85. public static void getData(String tableName) throws IOException {
  86. init();
  87. Table table = connection.getTable(TableName.valueOf(tableName));
  88. Scan scan = new Scan();
  89. ResultScanner scanner = table.getScanner(scan);
  90. for (Result result : scanner) {
  91. showCell((result));
  92. }
  93. close();
  94. }
  95. /**
  96. *
  97. * 格式化输出
  98. *
  99. * @param result
  100. *
  101. */
  102. public static void showCell(Result result) {
  103. Cell[] cells = result.rawCells();
  104. for (Cell cell : cells) {
  105. System.out.println("RowName(行键):" + new String(CellUtil.cloneRow(cell)) + " ");
  106. System.out.println("Timetamp(时间戳):" + cell.getTimestamp() + " ");
  107. System.out.println("column Family(列簇):" + new String(CellUtil.cloneFamily(cell)) + " ");
  108. System.out.println("column Name(列名):" + new String(CellUtil.cloneQualifier(cell)) + " ");
  109. System.out.println("value:(值)" + new String(CellUtil.cloneValue(cell)) + " ");
  110. System.out.println();
  111. }
  112. }
  113. /**
  114. *
  115. * 删除数据
  116. *
  117. * @param tableName 表名
  118. *
  119. * @param rowKey 行键
  120. *
  121. * @param colFamily 列族名
  122. *
  123. * @param col 列名
  124. *
  125. * @throws IOException
  126. *
  127. */
  128. public static void deleteRow(String tableName, String rowKey, String colFamily, String col) throws IOException {
  129. init();
  130. Table table = connection.getTable(TableName.valueOf(tableName));
  131. Delete delete = new Delete(rowKey.getBytes());
  132. if(col == null) {
  133. //删除指定列族的所有数据(此情况是列族下无列限定符时的情况)
  134. delete.addFamily(colFamily.getBytes());
  135. table.delete(delete);
  136. table.close();
  137. }else {
  138. //删除指定列的数据(此列主要说的是列限定修饰符)
  139. delete.addColumn(colFamily.getBytes(), col.getBytes());
  140. table.delete(delete);
  141. table.close();
  142. }
  143. close();
  144. }
  145. public static void main(String[] args) {
  146. Test_3 t = new Test_3();
  147. boolean flag = true;
  148. while (flag){
  149. System.out.println("------------向已经创建好的表中添加和删除指定的列簇或列--------------------");
  150. System.out.println(" 请输入您要进行的操作 1- 添加 2-删除 ");
  151. Scanner scan = new Scanner(System.in);
  152. String choose1 = scan.nextLine();
  153. switch (choose1) {
  154. case "1":
  155. try {
  156. //put 'student','95003','Sname','wangjinxuan'     (添加列)
  157. //put 'student','95003','Sname:nickName','wang'    (添加列族)
  158. //put 'student','95003','Sname:firstName','jinxuan' (添加列族)
  159. // t.insertRow(tableName, rowKey, colFamily, col, val);
  160. t.insertRow("student", "95003", "Sname",null, "wangjingxuan");
  161. t.insertRow("student", "95003", "Sname", "nickName", "wang");
  162. t.insertRow("student", "95003", "Sname", "firstName", "jingxuan");
  163. System.out.println("插入成功:");
  164. t.getData(tableName);
  165. } catch (IOException e) {
  166. e.getMessage();
  167. }
  168. break;
  169. case "2":
  170. try {
  171. System.out.println("----------------------删除前,表的原本信息如下---------------------");
  172. t.getData(tableName);
  173. //delete 'student' ,’95003’,’Sname’
  174. //delete 'student' ,’95003’,’Sname:nickName’
  175. // t.deleteRow(tableName, rowKey, colFamily, col);
  176. t.deleteRow("student", "95003", "Sname", "firstName");
  177. System.out.println("-----------------------删除成功-----------------------------\n");
  178. System.out.println("---------------------删除后,表的信息如下---------------------");
  179. t.getData(tableName);
  180. } catch (IOException e) {
  181. e.getMessage();
  182. }
  183. break;
  184. }
  185. System.out.println(" 你要继续操作吗? 是-true 否-false ");
  186. flag = scan.nextBoolean();
  187. }
  188. System.out.println(" 程序已退出! ");
  189. }
  190. }

image

清空指定的表的所有记录数据:

  1. import java.io.IOException;
  2. import java.util.Scanner;
  3. import org.apache.hadoop.conf.Configuration;
  4. import org.apache.hadoop.hbase.Cell;
  5. import org.apache.hadoop.hbase.CellUtil;
  6. import org.apache.hadoop.hbase.HBaseConfiguration;
  7. import org.apache.hadoop.hbase.HColumnDescriptor;
  8. import org.apache.hadoop.hbase.HTableDescriptor;
  9. import org.apache.hadoop.hbase.TableName;
  10. import org.apache.hadoop.hbase.client.Admin;
  11. import org.apache.hadoop.hbase.client.Connection;
  12. import org.apache.hadoop.hbase.client.ConnectionFactory;
  13. import org.apache.hadoop.hbase.client.HBaseAdmin;
  14. import org.apache.hadoop.hbase.client.Result;
  15. import org.apache.hadoop.hbase.client.ResultScanner;
  16. import org.apache.hadoop.hbase.client.Scan;
  17. import org.apache.hadoop.hbase.client.Table;
  18. import org.apache.hadoop.hbase.util.Bytes;
  19. public class Test_4 {
  20. public static Configuration configuration;
  21. public static Connection connection;
  22. public static Admin admin;
  23. // 建立连接
  24. public static void init() {
  25. configuration = HBaseConfiguration.create();
  26. configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
  27. try {
  28. connection = ConnectionFactory.createConnection(configuration);
  29. admin = connection.getAdmin();
  30. } catch (IOException e) {
  31. e.printStackTrace();
  32. }
  33. }
  34. // 关闭连接
  35. public static void close() {
  36. try {
  37. if (admin != null) {
  38. admin.close();
  39. }
  40. if (null != connection) {
  41. connection.close();
  42. }
  43. } catch (IOException e) {
  44. e.printStackTrace();
  45. }
  46. }
  47. /**
  48. *
  49. * 清空制定的表的所有记录数据
  50. *
  51. * @param args
  52. *
  53. * @throws IOException
  54. *
  55. */
  56. public static void clearRows(String tableName) throws IOException {
  57. init();
  58. HBaseAdmin admin1 = new HBaseAdmin(configuration);
  59. // 读取了之前表的表名 列簇等信息,然后再进行删除操作。
  60. HTableDescriptor tDescriptor = admin1.getTableDescriptor(Bytes.toBytes(tableName));
  61. // 总思想是先将原表结构保留下来,然后进行删除,再重新依据保存的信息重新创建表。
  62. TableName tablename = TableName.valueOf(tableName);
  63. // 删除表
  64. admin.disableTable(tablename);
  65. admin.deleteTable(tablename);
  66. // 重新建表
  67. admin.createTable(tDescriptor);
  68. close();
  69. }
  70. /**
  71. *
  72. * 根据表名查找表信息
  73. *
  74. */
  75. public static void getData(String tableName) throws IOException {
  76. init();
  77. Table table = connection.getTable(TableName.valueOf(tableName));
  78. Scan scan = new Scan();
  79. ResultScanner scanner = table.getScanner(scan);
  80. for (Result result : scanner){
  81. showCell((result));
  82. }
  83. close();
  84. }
  85. /**
  86. *
  87. * 格式化输出
  88. *
  89. * @param result
  90. *
  91. */
  92. public static void showCell(Result result) {
  93. Cell[] cells = result.rawCells();
  94. for (Cell cell : cells) {
  95. System.out.println("RowName(行键):" + new String(CellUtil.cloneRow(cell)) + " ");
  96. System.out.println("Timetamp(时间戳):" + cell.getTimestamp() + " ");
  97. System.out.println("column Family(列簇):" + new String(CellUtil.cloneFamily(cell)) + " ");
  98. System.out.println("column Name(列名):" + new String(CellUtil.cloneQualifier(cell)) + " ");
  99. System.out.println("value:(值)" + new String(CellUtil.cloneValue(cell)) + " ");
  100. System.out.println();
  101. }
  102. }
  103. public static void main(String[] args) {
  104. Test_4 test_4 = new Test_4();
  105. Scanner scan = new Scanner(System.in);
  106. System.out.println("请输入要清空的表名");
  107. String tableName = scan.nextLine();
  108. try {
  109. System.out.println("表原来的信息:");
  110. test_4.getData(tableName);
  111. test_4.clearRows(tableName);
  112. System.out.println("表已清空:");
  113. } catch (IOException e) {
  114. e.printStackTrace();
  115. }
  116. }
  117. }

image

统计表的行数:

  1. import java.io.IOException;
  2. import java.util.Scanner;
  3. import org.apache.hadoop.conf.Configuration;
  4. import org.apache.hadoop.hbase.HBaseConfiguration;
  5. import org.apache.hadoop.hbase.TableName;
  6. import org.apache.hadoop.hbase.client.Admin;
  7. import org.apache.hadoop.hbase.client.Connection;
  8. import org.apache.hadoop.hbase.client.ConnectionFactory;
  9. import org.apache.hadoop.hbase.client.Result;
  10. import org.apache.hadoop.hbase.client.ResultScanner;
  11. import org.apache.hadoop.hbase.client.Scan;
  12. import org.apache.hadoop.hbase.client.Table;
  13. public class Test_5 {
  14. public static Configuration configuration;
  15. public static Connection connection;
  16. public static Admin admin;
  17. //建立连接
  18. public static void init() {
  19. configuration = HBaseConfiguration.create();
  20. configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
  21. try {
  22. connection = ConnectionFactory.createConnection(configuration);
  23. admin = connection.getAdmin();
  24. } catch (IOException e) {
  25. e.printStackTrace();
  26. }
  27. }
  28. // 关闭连接
  29. public static void close() {
  30. try {
  31. if (admin != null) {
  32. admin.close();
  33. }
  34. if (null != connection) {
  35. connection.close();
  36. }
  37. } catch (IOException e) {
  38. e.printStackTrace();
  39. }
  40. }
  41. public static void countRows(String tableName) throws IOException{
  42. init();
  43. Table table = connection.getTable(TableName.valueOf(tableName));
  44. Scan scan = new Scan();
  45. ResultScanner scanner = table.getScanner(scan);
  46. int num = 0;
  47. for (Result result = scanner.next(); result != null; result = scanner.next()){
  48. num++;
  49. }
  50. System.out.println("行数:" + num);
  51. scanner.close();
  52. close();
  53. }
  54. public static void main(String[] args) throws IOException {
  55. Test_5 test_5 = new Test_5();
  56. Scanner scan = new Scanner(System.in);
  57. System.out.println("请输入要统计行数的表名");
  58. String tableName = scan.nextLine();
  59. test_5.countRows(tableName);
  60. }
  61. }

image

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/羊村懒王/article/detail/673506
推荐阅读
相关标签
  

闽ICP备14008679号