Java操作HBase的基本用法

本文总阅读量

一、在maven工程中加入Hbase的依赖

1
2
3
4
5
6
7
8
9
10
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>0.98.8-hadoop2</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<version>0.98.8-hadoop2</version>
</dependency>

二、将hbase-site.xml放入src/main/resources目录下

三、确保HDFS、YARN、HBase都正常启动

四、编写代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
/*
* HBaseTest.java
*
* Created on: 2016年4月10日 下午4:30:48
* Author: Wayne 13186259527@163.com
*/

package HBase;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;

/**
* @author Administrator
*
*/

public class HBasetest {
    // Shared admin handle; created once in the constructor, closed in main().
    static HBaseAdmin admin = null;
    // Client configuration loaded from hbase-site.xml on the classpath.
    Configuration conf = null;

    /**
     * Connects to HBase.
     * Precondition: hbase-site.xml must be under src/main/resources so it is
     * picked up by HBaseConfiguration.create().
     *
     * @throws IOException if the connection cannot be established
     */
    public HBasetest() throws MasterNotRunningException,
            ZooKeeperConnectionException, IOException {
        conf = HBaseConfiguration.create();
        admin = new HBaseAdmin(conf);
        System.out.println("hbase连接成功!");
    }

    /**
     * Demonstrates the basic table lifecycle: create, put, scan, get,
     * delete row, delete cell, drop table.
     */
    public static void main(String[] args) throws Exception {
        HBasetest hBase = new HBasetest();
        // Create a table with two column families.
        hBase.createTable("Tabletest", "info", "scores");
        // Insert two cells into row "one".
        hBase.addOneData("Tabletest", "one", "info", "weight", "50kg");
        hBase.addOneData("Tabletest", "one", "info", "hight", "175cm");
        // Print every cell in the table.
        hBase.getAllData("Tabletest");
        // Print every cell of a single row.
        hBase.getOneRcord("Tabletest", "one");
        // Delete the whole row.
        hBase.deleteAllOnedata("Tabletest", "one");
        // Delete a single cell.
        hBase.deleteOneRcord("Tabletest", "one", "info", "hight");
        // Drop the table.
        hBase.deleteTable("Tabletest");

        admin.close();
    }

    /**
     * Creates a table with two column families; does nothing if the table
     * already exists.
     *
     * @param tableName table to create
     * @param family1   first column family name
     * @param family2   second column family name
     * @throws IOException on RPC failure
     */
    private static void createTable(String tableName, String family1,
            String family2) throws IOException {
        if (admin.tableExists(tableName)) {
            System.out.println(tableName + "表已存在");
        } else {
            System.out.println("start to create table");
            HTableDescriptor hTableDescriptor = new HTableDescriptor(
                    TableName.valueOf(tableName));
            hTableDescriptor.addFamily(new HColumnDescriptor(family1));
            hTableDescriptor.addFamily(new HColumnDescriptor(family2));
            admin.createTable(hTableDescriptor);
            System.out.println(tableName + "表创建成功!");
        }
    }

    /**
     * Scans the whole table and prints "rowkey \t value" for every cell.
     * Table and scanner are closed via try-with-resources (the original
     * leaked both).
     *
     * @param tableName table to scan
     */
    private void getAllData(String tableName) {
        try (HTable hTable = new HTable(conf, tableName);
                ResultScanner scanner = hTable.getScanner(new Scan())) {
            for (Result result : scanner) {
                for (KeyValue kv : result.raw()) {
                    System.out.println(new String(kv.getRow()) + "\t"
                            + new String(kv.getValue()));
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Puts a single cell into the table.
     *
     * @param tableName target table
     * @param rowkey    row key
     * @param family    column family
     * @param qualifier column qualifier
     * @param value     cell value
     */
    private void addOneData(String tableName, String rowkey, String family,
            String qualifier, String value) {
        // Pool and table both implement Closeable; close them so the
        // connection resources are released (the original leaked them).
        try (HTablePool hTablePool = new HTablePool(conf, 1000);
                HTableInterface table = hTablePool.getTable(tableName)) {
            Put put = new Put(rowkey.getBytes());
            put.add(family.getBytes(), qualifier.getBytes(), value.getBytes());
            table.put(put);
            System.out.println("记录" + rowkey + "添加成功!");
        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("记录" + rowkey + "添加失败!");
        }
    }

    /**
     * Prints the names of all tables known to the cluster.
     */
    private void getAllTable() {
        if (admin != null) {
            try {
                HTableDescriptor[] listTables = admin.listTables();
                for (HTableDescriptor hTableDescriptor : listTables) {
                    System.out.println(hTableDescriptor.getNameAsString());
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Fetches a single row and prints "rowkey \t value" for every cell.
     *
     * Bug fix: the original printed new String(kv.getRowArray()), which
     * returns the whole shared backing buffer (row + family + qualifier +
     * timestamp + value) and therefore produced garbled output. getRow()
     * and getValue() copy exactly the wanted bytes.
     *
     * @param tableName table to read from
     * @param rowkey    row key to fetch
     */
    private void getOneRcord(String tableName, String rowkey) {
        try (HTablePool hTablePool = new HTablePool(conf, 1000);
                HTableInterface table = hTablePool.getTable(tableName)) {
            Result result = table.get(new Get(rowkey.getBytes()));
            for (KeyValue kv : result.raw()) {
                System.out.println(new String(kv.getRow()) + "\t"
                        + new String(kv.getValue()));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Deletes an entire row.
     *
     * @param tableName target table
     * @param rowkey    row to delete
     */
    private void deleteAllOnedata(String tableName, String rowkey) {
        try (HTablePool hTablePool = new HTablePool(conf, 1000);
                HTableInterface table = hTablePool.getTable(tableName)) {
            table.delete(new Delete(rowkey.getBytes()));
            System.out.println(rowkey + " 记录删除成功!");
        } catch (IOException e) {
            e.printStackTrace();
            System.out.println(rowkey + " 记录删除失败!");
        }
    }

    /**
     * Deletes a single cell (family:qualifier) from a row.
     *
     * @param tableName target table
     * @param rowkey    row key
     * @param family    column family
     * @param qualifier column qualifier
     */
    private void deleteOneRcord(String tableName, String rowkey, String family,
            String qualifier) {
        try (HTablePool hTablePool = new HTablePool(conf, 1000);
                HTableInterface table = hTablePool.getTable(tableName)) {
            Delete delete = new Delete(rowkey.getBytes());
            delete.deleteColumn(family.getBytes(), qualifier.getBytes());
            table.delete(delete);
            System.out.println(tableName + " " + rowkey + "," + family + ":"
                    + qualifier + "值删除成功!");
        } catch (IOException e) {
            e.printStackTrace();
            // Fixed message typo: was "值删除失败s!"
            System.out.println(tableName + " " + rowkey + "," + family + ":"
                    + qualifier + "值删除失败!");
        }
    }

    /**
     * Disables and then drops a table (HBase requires disable before delete).
     *
     * @param tableName table to drop
     */
    private void deleteTable(String tableName) {
        if (admin != null) {
            try {
                admin.disableTable(tableName);
                admin.deleteTable(tableName);
                System.out.println(tableName + "表删除成功!");
            } catch (IOException e) {
                e.printStackTrace();
                System.out.println(tableName + "表删除失败!");
            }
        }
    }
}

五、遇到的问题

  • 点击运行,可以打印出admin的信息,但是一开始测试的创建表的操作始终不出结果。
    一直等啊,等,等啊,等
    最后终于出了结果:

unknown crxy

crxy 是我的虚拟机的主机名

我一想,可能是主机名没有在windows映射成地址,试试~

打开:C:\Windows\System32\drivers\etc\hosts

增加一行,格式为「IP地址 主机名」,例如:192.168.1.100 crxy

搞定~

六、追加

上面实现的扫描一张表的实现不完善,没有把表中的每一项(行键、列族、列、值)都显示出来。
下面是更新的方法:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
/*
* Scanner.java
*
* Created on: 2016年4月10日 下午6:27:10
* Author: Wayne 13186259527@163.com
*/

package HBase;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;

/**
* @author Administrator
*
*/

public class Scanner {
    /**
     * Scans table "test1" and prints row key, column family, qualifier and
     * value of every cell, one cell per line.
     *
     * Fix: table and scanner are now closed via try-with-resources; the
     * original leaked both (HTable and ResultScanner implement Closeable).
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (HTable hTable = new HTable(conf, "test1");
                ResultScanner scanner = hTable.getScanner(new Scan())) {
            for (Result result : scanner) {
                /*
                 * The deprecated KeyValue getters (getRow/getFamily/
                 * getQualifier/getValue) copy exactly the wanted bytes.
                 * The newer get*Array() variants return the shared backing
                 * buffer and must be paired with the matching offset/length
                 * accessors, otherwise the output is garbled.
                 */
                for (KeyValue keyValue : result.raw()) {
                    System.out.println("row:" + new String(keyValue.getRow())
                            + "\tcolumnfamily:" + new String(keyValue.getFamily())
                            + "\tcolumn:" + new String(keyValue.getQualifier())
                            + "\tvalue:" + new String(keyValue.getValue()));
                }
            }
        }
    }
}