/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing permissions
 * and limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue.KeyComparator;
import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Writable;
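
/**
 * Basic HFile read/write tests: empty files, record round-trips with and
 * without compression, meta blocks, and a custom key comparator.
 */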
public class TestHFile extends HBaseTestCase {
  static final Log LOG = LogFactory.getLog(TestHFile.class);

  private String ROOT_DIR;
  private final int minBlockSize = 512;
  private static String localFormatter = "%010d";
  private static CacheConfig cacheConf = null;

  @Override
  public void setUp() throws Exception {
    super.setUp();
    ROOT_DIR = this.getUnitTestdir("TestHFile").toString();
  }
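
  /**
   * Write an HFile with no entries and make sure it reads back cleanly:
   * the first and last keys should both be null.
   */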
  public void testEmptyHFile() throws IOException {
    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    Path f = new Path(ROOT_DIR, getName());
    Writer w = HFile.getWriterFactory(conf, cacheConf).createWriter(this.fs, f);
    w.close();
    Reader r = HFile.createReader(fs, f, cacheConf);
    r.loadFileInfo();
    assertNull(r.getFirstKey());
    assertNull(r.getLastKey());
  }
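
  // Write n records with zero-padded numeric keys starting at start;
  // returns the next unused key index.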
  private int writeSomeRecords(Writer writer, int start, int n)
      throws IOException {
    String value = "value";
    for (int i = start; i < (start + n); i++) {
      String key = String.format(localFormatter, Integer.valueOf(i));
      writer.append(Bytes.toBytes(key), Bytes.toBytes(value + key));
    }
    return (start + n);
  }

  private void readAllRecords(HFileScanner scanner) throws IOException {
    readAndCheckbytes(scanner, 0, 100);
  }
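
  // Read n records off the scanner and verify each key and value matches
  // what writeSomeRecords produced.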
  private int readAndCheckbytes(HFileScanner scanner, int start, int n)
      throws IOException {
    String value = "value";
    int i = start;
    for (; i < (start + n); i++) {
      ByteBuffer key = scanner.getKey();
      ByteBuffer val = scanner.getValue();
      String keyStr = String.format(localFormatter, Integer.valueOf(i));
      String valStr = value + keyStr;
      byte [] keyBytes = Bytes.toBytes(key);
      assertTrue("bytes for keys do not match " + keyStr + " " +
          Bytes.toString(keyBytes),
        Arrays.equals(Bytes.toBytes(keyStr), keyBytes));
      byte [] valBytes = Bytes.toBytes(val);
      assertTrue("bytes for vals do not match " + valStr + " " +
          Bytes.toString(valBytes),
        Arrays.equals(Bytes.toBytes(valStr), valBytes));
      if (!scanner.next()) {
        break;
      }
    }
    assertEquals(i, start + n - 1);
    return (start + n);
  }

  private byte[] getSomeKey(int rowId) {
    return String.format(localFormatter, Integer.valueOf(rowId)).getBytes();
  }

  private void writeRecords(Writer writer) throws IOException {
    writeSomeRecords(writer, 0, 100);
    writer.close();
  }

  private FSDataOutputStream createFSOutput(Path name) throws IOException {
    return fs.create(name);
  }
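
  /**
   * Write 100 records with the given codec, read them back, and verify
   * both sequential scans and point seeks.
   */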
  void basicWithSomeCodec(String codec) throws IOException {
    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    Path ncTFile = new Path(ROOT_DIR, "basic.hfile." + codec);
    FSDataOutputStream fout = createFSOutput(ncTFile);
    Writer writer = HFile.getWriterFactory(conf, cacheConf).createWriter(fout,
        minBlockSize, Compression.getCompressionAlgorithmByName(codec), null);
    LOG.info(writer);
    writeRecords(writer);
    fout.close();
    FSDataInputStream fin = fs.open(ncTFile);
    Reader reader = HFile.createReader(ncTFile, fin,
        fs.getFileStatus(ncTFile).getLen(), cacheConf);
    LOG.info(cacheConf.toString());
    reader.loadFileInfo();

    HFileScanner scanner = reader.getScanner(true, false);
    // Position the scanner at the first record and scan everything back.
    scanner.seekTo();
    readAllRecords(scanner);
    assertTrue("location lookup failed",
        scanner.seekTo(getSomeKey(50)) == 0);
    ByteBuffer readKey = scanner.getKey();
    assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50),
        Bytes.toBytes(readKey)));

    // Two identical seeks must yield the same value.
    scanner.seekTo(new byte[0]);
    ByteBuffer val1 = scanner.getValue();
    scanner.seekTo(new byte[0]);
    ByteBuffer val2 = scanner.getValue();
    assertTrue(Arrays.equals(Bytes.toBytes(val1), Bytes.toBytes(val2)));

    reader.close();
    fin.close();
    fs.delete(ncTFile, true);
  }

  public void testTFileFeatures() throws IOException {
    basicWithSomeCodec("none");
    basicWithSomeCodec("gz");
  }

  private void writeNumMetablocks(Writer writer, int n) {
    for (int i = 0; i < n; i++) {
      writer.appendMetaBlock("HFileMeta" + i, new Writable() {
        private int val;
        public Writable setVal(int val) { this.val = val; return this; }

        @Override
        public void write(DataOutput out) throws IOException {
          out.write(("something to test" + val).getBytes());
        }

        @Override
        public void readFields(DataInput in) throws IOException { }
      }.setVal(i));
    }
  }

  private void someTestingWithMetaBlock(Writer writer) {
    writeNumMetablocks(writer, 10);
  }

  private void readNumMetablocks(Reader reader, int n) throws IOException {
    for (int i = 0; i < n; i++) {
      ByteBuffer actual = reader.getMetaBlock("HFileMeta" + i, false);
      ByteBuffer expected =
        ByteBuffer.wrap(("something to test" + i).getBytes());
      assertTrue("failed to match metadata", actual.compareTo(expected) == 0);
    }
  }

  private void someReadingWithMetaBlock(Reader reader) throws IOException {
    readNumMetablocks(reader, 10);
  }

  private void metablocks(final String compress) throws Exception {
    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    Path mFile = new Path(ROOT_DIR, "meta.hfile");
    FSDataOutputStream fout = createFSOutput(mFile);
    Writer writer = HFile.getWriterFactory(conf, cacheConf).createWriter(fout,
        minBlockSize, Compression.getCompressionAlgorithmByName(compress),
        null);
    someTestingWithMetaBlock(writer);
    writer.close();
    fout.close();
    FSDataInputStream fin = fs.open(mFile);
    Reader reader = HFile.createReader(mFile, fin,
        this.fs.getFileStatus(mFile).getLen(), cacheConf);
    reader.loadFileInfo();

    // The file has only meta blocks, no data blocks, so seekTo finds nothing.
    assertFalse(reader.getScanner(false, false).seekTo());
    someReadingWithMetaBlock(reader);
    reader.close();
    fin.close();
    fs.delete(mFile, true);
  }
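
  // Round-trip meta blocks with and without compression.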
  public void testMetaBlocks() throws Exception {
    metablocks("none");
    metablocks("gz");
  }

  public void testNullMetaBlocks() throws Exception {
    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    for (Compression.Algorithm compressAlgo :
        HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
      Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
      FSDataOutputStream fout = createFSOutput(mFile);
      Writer writer = HFile.getWriterFactory(conf, cacheConf).createWriter(fout,
          minBlockSize, compressAlgo, null);
      writer.append("foo".getBytes(), "value".getBytes());
      writer.close();
      fout.close();
      Reader reader = HFile.createReader(fs, mFile, cacheConf);
      reader.loadFileInfo();
      assertNull(reader.getMetaBlock("non-existent", false));
      reader.close();
    }
  }
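
  /**
   * Guard against reordering of the Compression.Algorithm enum: the codec
   * used is recorded in written files by ordinal, so these must stay fixed.
   */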
  public void testCompressionOrdinance() {
    assertTrue(Compression.Algorithm.LZO.ordinal() == 0);
    assertTrue(Compression.Algorithm.GZ.ordinal() == 1);
    assertTrue(Compression.Algorithm.NONE.ordinal() == 2);
  }

  public void testComparator() throws IOException {
    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    Path mFile = new Path(ROOT_DIR, "meta.tfile");
    FSDataOutputStream fout = createFSOutput(mFile);
    Writer writer = HFile.getWriterFactory(conf, cacheConf).createWriter(fout,
        minBlockSize, (Compression.Algorithm) null, new KeyComparator() {
          @Override
          public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,
              int l2) {
            return -Bytes.compareTo(b1, s1, l1, b2, s2, l2);
          }
          @Override
          public int compare(byte[] o1, byte[] o2) {
            return compare(o1, 0, o1.length, o2, 0, o2.length);
          }
        });
    // These keys descend, which is ascending order under the reversed
    // comparator; the writer must accept them without complaint.
    writer.append("3".getBytes(), "0".getBytes());
    writer.append("2".getBytes(), "0".getBytes());
    writer.append("1".getBytes(), "0".getBytes());
    writer.close();
    fout.close();
  }

}