package org.apache.hadoop.hbase.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.util.ToolRunner;

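/** Tests the stand-alone {@link Merge} tool by merging arbitrary regions of a test table. */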
public class TestMergeTool extends HBaseTestCase {
  static final Log LOG = LogFactory.getLog(TestMergeTool.class);
  HBaseTestingUtility TEST_UTIL;

  static final byte [] FAMILY = Bytes.toBytes("contents");
  static final byte [] QUALIFIER = Bytes.toBytes("dc");

  private final HRegionInfo[] sourceRegions = new HRegionInfo[5];
  private final HRegion[] regions = new HRegion[5];
  private HTableDescriptor desc;
  private byte [][][] rows;
  private MiniDFSCluster dfsCluster = null;

  @Override
  public void setUp() throws Exception {
    // Shorten the recoverable ZooKeeper wait time so failures surface quickly.
    this.conf.setLong("hbase.zookeeper.recoverable.waittime", 1000);
    // Use a non-default ZooKeeper client port so this test does not
    // accidentally talk to an ensemble started by another test.
    this.conf.setInt("hbase.zookeeper.property.clientPort", 10001);
    // Lower the store file compaction threshold.
    this.conf.set("hbase.hstore.compactionThreshold", "2");

    // Create the table descriptor with a single column family.
    this.desc = new HTableDescriptor("TestMergeTool");
    this.desc.addFamily(new HColumnDescriptor(FAMILY));

    /*
     * Create the HRegionInfos for the source regions.
     */
    // Region 0 covers the key range [row_0200, row_0300).
    sourceRegions[0] = new HRegionInfo(this.desc.getName(),
        Bytes.toBytes("row_0200"),
        Bytes.toBytes("row_0300"));

    // Region 1 covers [row_0250, row_0400) and overlaps region 0.
    sourceRegions[1] =
      new HRegionInfo(this.desc.getName(),
          Bytes.toBytes("row_0250"),
          Bytes.toBytes("row_0400"));

    // Region 2 covers [row_0100, row_0200) and is adjacent to region 0.
    sourceRegions[2] =
      new HRegionInfo(this.desc.getName(),
          Bytes.toBytes("row_0100"),
          Bytes.toBytes("row_0200"));

    // Region 3 covers [row_0500, row_0600) and does not touch any of the
    // regions above.
    sourceRegions[3] =
      new HRegionInfo(this.desc.getName(),
          Bytes.toBytes("row_0500"),
          Bytes.toBytes("row_0600"));

    // Region 4 has empty start and end keys, so it overlaps all the others.
    sourceRegions[4] =
      new HRegionInfo(this.desc.getName(),
          HConstants.EMPTY_BYTE_ARRAY,
          HConstants.EMPTY_BYTE_ARRAY);

    /*
     * Create the row keys that will be loaded into each source region.
     */
    this.rows = new byte [5][][];
    this.rows[0] = Bytes.toByteArrays(new String[] { "row_0210", "row_0280" });
    this.rows[1] = Bytes.toByteArrays(new String[] { "row_0260", "row_0350",
      "row_035" });
    this.rows[2] = Bytes.toByteArrays(new String[] { "row_0110", "row_0175",
      "row_0175", "row_0175" });
    this.rows[3] = Bytes.toByteArrays(new String[] { "row_0525", "row_0560",
      "row_0560", "row_0560", "row_0560" });
    this.rows[4] = Bytes.toByteArrays(new String[] { "row_0050", "row_1000",
      "row_1000", "row_1000", "row_1000", "row_1000" });

    // Start a mini-DFS cluster and point the HBase root directory at it.
    TEST_UTIL = new HBaseTestingUtility(conf);
    this.dfsCluster = TEST_UTIL.startMiniDFSCluster(2);
    this.fs = this.dfsCluster.getFileSystem();
    System.out.println("fs=" + this.fs);
    this.conf.set("fs.defaultFS", fs.getUri().toString());
    Path parentdir = fs.getHomeDirectory();
    conf.set(HConstants.HBASE_DIR, parentdir.toString());
    fs.mkdirs(parentdir);
    FSUtils.setVersion(fs, parentdir);

    // super.setUp() is called only after the mini-DFS cluster is up and the
    // configuration points at it, so the rest of the fixture uses the
    // DFS-backed file system rather than the local one.
    super.setUp();
    try {
      // Create the root and meta regions and write the table descriptor.
      createRootAndMetaRegions();
      FSTableDescriptors.createTableDescriptor(this.fs, this.testDir, this.desc);

      /*
       * Create the source regions and load each one with its rows.
       */
      for (int i = 0; i < sourceRegions.length; i++) {
        regions[i] =
          HRegion.createHRegion(this.sourceRegions[i], this.testDir, this.conf,
            this.desc);

        // Insert the rows that belong to this region.
        for (int j = 0; j < rows[i].length; j++) {
          byte [] row = rows[i][j];
          Put put = new Put(row);
          put.add(FAMILY, QUALIFIER, row);
          regions[i].put(put);
        }
        HRegion.addRegionToMETA(meta, regions[i]);
      }
      // Close root and meta regions before attempting the merges.
      closeRootAndMeta();

    } catch (Exception e) {
      TEST_UTIL.shutdownMiniCluster();
      throw e;
    }
  }

  @Override
  public void tearDown() throws Exception {
    super.tearDown();
    TEST_UTIL.shutdownMiniCluster();
  }
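
  /**
   * Runs the Merge tool on the two named regions, reopens the result, and
   * verifies that it contains all the rows in this.rows up to, but not
   * including, index upperbound.
   *
   * @param msg message describing this merge
   * @param regionName1 first region to merge
   * @param regionName2 second region to merge
   * @param log log to use when opening the merged region
   * @param upperbound how far up in this.rows to go when verifying
   * @return the merged region
   * @throws Exception
   */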
  private HRegion mergeAndVerify(final String msg, final String regionName1,
      final String regionName2, final HLog log, final int upperbound)
  throws Exception {
    Merge merger = new Merge(this.conf);
    LOG.info(msg);
    System.out.println("fs2=" + this.conf.get("fs.defaultFS"));
    int errCode = ToolRunner.run(this.conf, merger,
      new String[] {this.desc.getNameAsString(), regionName1, regionName2}
    );
    assertTrue("'" + msg + "' failed with errCode " + errCode, errCode == 0);
    HRegionInfo mergedInfo = merger.getMergedHRegionInfo();

    // Reopen the merged region and verify that all the expected rows are
    // present in it.
    HRegion merged = HRegion.openHRegion(mergedInfo, this.desc, log, this.conf);
    verifyMerge(merged, upperbound);
    merged.close();
    LOG.info("Verified " + msg);
    return merged;
  }

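  /**
   * Scans the merged region end to end and then checks, row by row, that it
   * contains the rows from this.rows[0] through this.rows[upperbound - 1]
   * with the values they were written with.
   */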
  private void verifyMerge(final HRegion merged, final int upperbound)
  throws IOException {
    // Scan the merged region from end to end; the results are discarded, this
    // just exercises the scanner over the merged store files.
    Scan scan = new Scan();
    scan.addFamily(FAMILY);
    InternalScanner scanner = merged.getScanner(scan);
    try {
      List<KeyValue> testRes = null;
      while (true) {
        testRes = new ArrayList<KeyValue>();
        boolean hasNext = scanner.next(testRes);
        if (!hasNext) {
          break;
        }
      }
    } finally {
      scanner.close();
    }

    // Now verify that every row written to the merged source regions can be
    // read back with a Get and carries the value it was written with.
    for (int i = 0; i < upperbound; i++) {
      for (int j = 0; j < rows[i].length; j++) {
        Get get = new Get(rows[i][j]);
        get.addFamily(FAMILY);
        Result result = merged.get(get, null);
        assertEquals(1, result.size());
        byte [] bytes = result.raw()[0].getValue();
        assertNotNull(Bytes.toStringBinary(rows[i][j]), bytes);
        assertTrue(Bytes.equals(bytes, rows[i][j]));
      }
    }
  }
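
  /**
   * Merges the five source regions one at a time and verifies the data after
   * each merge.
   * @throws Exception
   */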
  public void testMergeTool() throws Exception {
    // First verify we can read the rows from the source regions and that they
    // contain the data they were loaded with, then close each region.
    for (int i = 0; i < regions.length; i++) {
      for (int j = 0; j < rows[i].length; j++) {
        Get get = new Get(rows[i][j]);
        get.addFamily(FAMILY);
        Result result = regions[i].get(get, null);
        byte [] bytes = result.raw()[0].getValue();
        assertNotNull(bytes);
        assertTrue(Bytes.equals(bytes, rows[i][j]));
      }
      // Close the region and delete its log.
      regions[i].close();
      regions[i].getLog().closeAndDelete();
    }

    // Create a log that we can reuse when opening the merged regions.
    Path logPath = new Path("/tmp", HConstants.HREGION_LOGDIR_NAME + "_" +
      System.currentTimeMillis());
    LOG.info("Creating log " + logPath.toString());
    Path oldLogDir = new Path("/tmp", HConstants.HREGION_OLDLOGDIR_NAME);
    HLog log = new HLog(this.fs, logPath, oldLogDir, this.conf);
    try {
      // Merge regions 0 and 1 (their key ranges overlap).
      HRegion merged = mergeAndVerify("merging regions 0 and 1",
        this.sourceRegions[0].getRegionNameAsString(),
        this.sourceRegions[1].getRegionNameAsString(), log, 2);

      // Merge the result of merging regions 0 and 1 with region 2.
      merged = mergeAndVerify("merging regions 0+1 and 2",
        merged.getRegionInfo().getRegionNameAsString(),
        this.sourceRegions[2].getRegionNameAsString(), log, 3);

      // Merge the result of merging regions 0, 1 and 2 with region 3.
      merged = mergeAndVerify("merging regions 0+1+2 and 3",
        merged.getRegionInfo().getRegionNameAsString(),
        this.sourceRegions[3].getRegionNameAsString(), log, 4);

      // Merge the result of merging regions 0, 1, 2 and 3 with region 4, the
      // region with empty start and end keys.
      merged = mergeAndVerify("merging regions 0+1+2+3 and 4",
        merged.getRegionInfo().getRegionNameAsString(),
        this.sourceRegions[4].getRegionNameAsString(), log, rows.length);
    } finally {
      log.closeAndDelete();
    }
  }
}