/*
 * Copyright 2011 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableUtil;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheColumnFamilySummary;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Tests the per-column-family block cache summary functionality exposed by
 * the {@link BlockCache} that backs StoreFile reads.
 */
public class TestStoreFileBlockCacheSummary {
  final Log LOG = LogFactory.getLog(getClass());
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final String TEST_TABLE = "testTable";
  private static final String TEST_TABLE2 = "testTable2";
  private static final String TEST_CF = "testFamily";
  private static byte[] FAMILY = Bytes.toBytes(TEST_CF);
  private static byte[] QUALIFIER = Bytes.toBytes("testQualifier");
  private static byte[] VALUE = Bytes.toBytes("testValue");

  private final int TOTAL_ROWS = 4;

  /**
   * Starts the mini cluster shared by the tests in this class.
   * @throws java.lang.Exception if the mini cluster fails to start
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.startMiniCluster();
  }

  /**
   * Shuts the mini cluster down.
   * @throws java.lang.Exception if the mini cluster fails to shut down
   */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  private Put createPut(byte[] family, String row) {
    Put put = new Put(Bytes.toBytes(row));
    put.add(family, QUALIFIER, VALUE);
    return put;
  }

  /**
   * Inserts data into two tables, flushes it to store files, and then scans both
   * tables so their blocks are loaded into the block cache. Finally it verifies
   * the per-column-family block cache summaries.
   *
   * @throws Exception exception
   */
  @Test
  public void testBlockCacheSummary() throws Exception {
    HTable ht = TEST_UTIL.createTable(Bytes.toBytes(TEST_TABLE), FAMILY);
    addRows(ht, FAMILY);

    HTable ht2 = TEST_UTIL.createTable(Bytes.toBytes(TEST_TABLE2), FAMILY);
    addRows(ht2, FAMILY);

    // Flush the memstores so the newly written rows land in store files.
    TEST_UTIL.flush();

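    // Scan both tables end to end; reading the store files pulls their blocks
    // into the block cache.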
    scan(ht, FAMILY);
    scan(ht2, FAMILY);

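    // Fetch the global block cache and ask it for its per-table,
    // per-column-family summaries.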
    BlockCache bc =
      new CacheConfig(TEST_UTIL.getConfiguration()).getBlockCache();
    List<BlockCacheColumnFamilySummary> bcs =
      bc.getBlockCacheColumnFamilySummaries(TEST_UTIL.getConfiguration());
    LOG.info("blockCacheSummary: " + bcs);

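    // Three summaries are expected: one for the -ROOT- catalog table's "info"
    // family, plus one for the single family of each of the two test tables.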
    assertEquals("blockCache summary has entries", 3, bcs.size());

    BlockCacheColumnFamilySummary e = bcs.get(0);
    assertEquals("table", "-ROOT-", e.getTable());
    assertEquals("cf", "info", e.getColumnFamily());

    e = bcs.get(1);
    assertEquals("table", TEST_TABLE, e.getTable());
    assertEquals("cf", TEST_CF, e.getColumnFamily());

    e = bcs.get(2);
    assertEquals("table", TEST_TABLE2, e.getTable());
    assertEquals("cf", TEST_CF, e.getColumnFamily());
  }

  private void addRows(HTable ht, byte[] family) throws IOException {
    List<Row> rows = new ArrayList<Row>();
    for (int i = 0; i < TOTAL_ROWS; i++) {
      rows.add(createPut(family, "row" + i));
    }

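    // HTableUtil.bucketRsBatch buckets the Puts by the region server hosting
    // each row and submits them as batches.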
    HTableUtil.bucketRsBatch(ht, rows);
  }

  private void scan(HTable ht, byte[] family) throws IOException {
    Scan scan = new Scan();
    scan.addColumn(family, QUALIFIER);

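    // Walk the entire scanner so every block holding this family is read,
    // then sanity-check the row count.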
    int count = 0;
    ResultScanner scanner = ht.getScanner(scan);
    try {
      for (@SuppressWarnings("unused") Result result : scanner) {
        count++;
      }
    } finally {
      scanner.close();
    }
    if (TOTAL_ROWS != count) {
      throw new IOException("Expected " + TOTAL_ROWS + " rows, but scanned " + count);
    }
  }
}