package org.apache.hadoop.hbase.thrift;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
import org.apache.hadoop.hbase.thrift.generated.TCell;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.util.Bytes;
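/**
 * Utility methods that convert between the Thrift structs used by the HBase
 * Thrift gateway (ColumnDescriptor, TCell, TRowResult) and the corresponding
 * native HBase client objects (HColumnDescriptor, KeyValue, Result).
 */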
public class ThriftUtilities {
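  /**
   * This utility method creates a new HBase HColumnDescriptor object based on
   * a Thrift ColumnDescriptor "struct".
   *
   * @param in Thrift ColumnDescriptor object
   * @return an HColumnDescriptor that mirrors the Thrift argument
   * @throws IllegalArgument if the column name is missing or empty
   */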
  static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
      throws IllegalArgument {
    Compression.Algorithm comp =
      Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
    StoreFile.BloomType bt =
      BloomType.valueOf(in.bloomFilterType);

    if (in.name == null || !in.name.hasRemaining()) {
      throw new IllegalArgument("column name is empty");
    }
    // The Thrift name may be in "family:qualifier" form; only the family part
    // is used for the column descriptor.
    byte [] parsedName = KeyValue.parseColumn(Bytes.getBytes(in.name))[0];
    HColumnDescriptor col = new HColumnDescriptor(parsedName,
        in.maxVersions, comp.getName(), in.inMemory, in.blockCacheEnabled,
        in.timeToLive, bt.toString());
    return col;
  }
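  /**
   * This utility method creates a new Thrift ColumnDescriptor "struct" based
   * on an HBase HColumnDescriptor object.
   *
   * @param in HBase HColumnDescriptor object
   * @return Thrift ColumnDescriptor
   */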
  static public ColumnDescriptor colDescFromHbase(HColumnDescriptor in) {
    ColumnDescriptor col = new ColumnDescriptor();
    col.name = ByteBuffer.wrap(Bytes.add(in.getName(), KeyValue.COLUMN_FAMILY_DELIM_ARRAY));
    col.maxVersions = in.getMaxVersions();
    col.compression = in.getCompression().toString();
    col.inMemory = in.isInMemory();
    col.blockCacheEnabled = in.isBlockCacheEnabled();
    col.bloomFilterType = in.getBloomFilterType().toString();
    return col;
  }
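  /**
   * This utility method creates a list of Thrift TCell "struct" based on an
   * HBase KeyValue object. The empty list is returned if the input is null.
   *
   * @param in HBase KeyValue object
   * @return Thrift TCell list
   */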
  static public List<TCell> cellFromHBase(KeyValue in) {
    List<TCell> list = new ArrayList<TCell>(1);
    if (in != null) {
      list.add(new TCell(ByteBuffer.wrap(in.getValue()), in.getTimestamp()));
    }
    return list;
  }
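  /**
   * This utility method creates a list of Thrift TCell "struct" based on an
   * array of HBase KeyValue objects. The empty list is returned if the input
   * is null.
   *
   * @param in array of HBase KeyValue objects
   * @return Thrift TCell list
   */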
  static public List<TCell> cellFromHBase(KeyValue[] in) {
    List<TCell> list = null;
    if (in != null) {
      list = new ArrayList<TCell>(in.length);
      for (int i = 0; i < in.length; i++) {
        list.add(new TCell(ByteBuffer.wrap(in[i].getValue()), in[i].getTimestamp()));
      }
    } else {
      list = new ArrayList<TCell>(0);
    }
    return list;
  }
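  /**
   * This utility method creates a list of Thrift TRowResult "struct" based on
   * an array of HBase Result objects. Null or empty Result objects are
   * skipped, so the returned list may be shorter than the input array.
   *
   * @param in array of HBase Result objects
   * @return Thrift TRowResult list
   */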
  static public List<TRowResult> rowResultFromHBase(Result[] in) {
    List<TRowResult> results = new ArrayList<TRowResult>();
    for (Result result_ : in) {
      // Skip rows that came back null or with no cells.
      if (result_ == null || result_.isEmpty()) {
        continue;
      }
      TRowResult result = new TRowResult();
      result.row = ByteBuffer.wrap(result_.getRow());
      result.columns = new TreeMap<ByteBuffer, TCell>();
      for (KeyValue kv : result_.raw()) {
        // Key each cell by its full "family:qualifier" column name.
        result.columns.put(
            ByteBuffer.wrap(KeyValue.makeColumn(kv.getFamily(),
                kv.getQualifier())),
            new TCell(ByteBuffer.wrap(kv.getValue()), kv.getTimestamp()));
      }
      results.add(result);
    }
    return results;
  }
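  /**
   * This utility method creates a list of Thrift TRowResult "struct" based on
   * a single HBase Result object.
   *
   * @param in HBase Result object
   * @return Thrift TRowResult list
   */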
  static public List<TRowResult> rowResultFromHBase(Result in) {
    Result [] result = { in };
    return rowResultFromHBase(result);
  }
}