/**
 * Copyright 2009 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.nio.ByteBuffer;
import java.util.LinkedList;

import junit.framework.TestCase;

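/**
 * Tests {@link CachedBlockQueue}: once the queue has accumulated at least
 * maxSize worth of blocks, a further block is only kept if its access time is
 * older than that of a block already queued, so the queue ends up holding the
 * oldest-accessed blocks.
 */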
public class TestCachedBlockQueue extends TestCase {

  public void testQueue() throws Exception {

    CachedBlock cb1 = new CachedBlock(1000, "cb1", 1);
    CachedBlock cb2 = new CachedBlock(1500, "cb2", 2);
    CachedBlock cb3 = new CachedBlock(1000, "cb3", 3);
    CachedBlock cb4 = new CachedBlock(1500, "cb4", 4);
    CachedBlock cb5 = new CachedBlock(1000, "cb5", 5);
    CachedBlock cb6 = new CachedBlock(1750, "cb6", 6);
    CachedBlock cb7 = new CachedBlock(1000, "cb7", 7);
    CachedBlock cb8 = new CachedBlock(1500, "cb8", 8);
    CachedBlock cb9 = new CachedBlock(1000, "cb9", 9);
    CachedBlock cb10 = new CachedBlock(1500, "cb10", 10);

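    // maxSize = 10,000 bytes of heap; 1,000 is the expected average block
    // size, presumably used to size the backing queue.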
    CachedBlockQueue queue = new CachedBlockQueue(10000, 1000);

    queue.add(cb1);
    queue.add(cb2);
    queue.add(cb3);
    queue.add(cb4);
    queue.add(cb5);
    queue.add(cb6);
    queue.add(cb7);
    queue.add(cb8);
    queue.add(cb9);
    queue.add(cb10);

    // We expect cb1 through cb8 to be in the queue. Adding cb1..cb8 pushes
    // the queue's heap size past maxSize, and cb9 and cb10 have newer access
    // times than everything already queued, so they are rejected.
    long expectedSize = cb1.heapSize() + cb2.heapSize() + cb3.heapSize() +
      cb4.heapSize() + cb5.heapSize() + cb6.heapSize() + cb7.heapSize() +
      cb8.heapSize();

    assertEquals(expectedSize, queue.heapSize());

    LinkedList<org.apache.hadoop.hbase.io.hfile.CachedBlock> blocks =
      queue.get();
    assertEquals("cb1", blocks.poll().getName());
    assertEquals("cb2", blocks.poll().getName());
    assertEquals("cb3", blocks.poll().getName());
    assertEquals("cb4", blocks.poll().getName());
    assertEquals("cb5", blocks.poll().getName());
    assertEquals("cb6", blocks.poll().getName());
    assertEquals("cb7", blocks.poll().getName());
    assertEquals("cb8", blocks.poll().getName());

  }

  public void testQueueSmallBlockEdgeCase() throws Exception {

    CachedBlock cb1 = new CachedBlock(1000, "cb1", 1);
    CachedBlock cb2 = new CachedBlock(1500, "cb2", 2);
    CachedBlock cb3 = new CachedBlock(1000, "cb3", 3);
    CachedBlock cb4 = new CachedBlock(1500, "cb4", 4);
    CachedBlock cb5 = new CachedBlock(1000, "cb5", 5);
    CachedBlock cb6 = new CachedBlock(1750, "cb6", 6);
    CachedBlock cb7 = new CachedBlock(1000, "cb7", 7);
    CachedBlock cb8 = new CachedBlock(1500, "cb8", 8);
    CachedBlock cb9 = new CachedBlock(1000, "cb9", 9);
    CachedBlock cb10 = new CachedBlock(1500, "cb10", 10);

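    // Same setup as testQueue(); the only difference is the tiny cb0 block
    // added after the queue has already filled past maxSize.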
    CachedBlockQueue queue = new CachedBlockQueue(10000, 1000);

    queue.add(cb1);
    queue.add(cb2);
    queue.add(cb3);
    queue.add(cb4);
    queue.add(cb5);
    queue.add(cb6);
    queue.add(cb7);
    queue.add(cb8);
    queue.add(cb9);
    queue.add(cb10);

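    // cb0: a tiny block (10 bytes beyond the per-block overhead) with the
    // oldest access time (0) of any block added so far.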
    CachedBlock cb0 =
      new CachedBlock(10 + CachedBlock.PER_BLOCK_OVERHEAD, "cb0", 0);
    queue.add(cb0);

    // cb0 is older than everything in the queue, so it must be included. It
    // does not kick anything out, though: swapping the newest block (cb8) for
    // cb0 would leave heapSize - cb8.heapSize() + cb0.heapSize() < maxSize,
    // and the queue must keep heapSize >= maxSize once that threshold has
    // been reached.

    // We expect cb0 through cb8 to be in the queue
    long expectedSize = cb1.heapSize() + cb2.heapSize() + cb3.heapSize() +
      cb4.heapSize() + cb5.heapSize() + cb6.heapSize() + cb7.heapSize() +
      cb8.heapSize() + cb0.heapSize();

    assertEquals(expectedSize, queue.heapSize());

    LinkedList<org.apache.hadoop.hbase.io.hfile.CachedBlock> blocks =
      queue.get();
    assertEquals("cb0", blocks.poll().getName());
    assertEquals("cb1", blocks.poll().getName());
    assertEquals("cb2", blocks.poll().getName());
    assertEquals("cb3", blocks.poll().getName());
    assertEquals("cb4", blocks.poll().getName());
    assertEquals("cb5", blocks.poll().getName());
    assertEquals("cb6", blocks.poll().getName());
    assertEquals("cb7", blocks.poll().getName());
    assertEquals("cb8", blocks.poll().getName());

  }

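  /**
   * Test stub extending the real CachedBlock. It wraps a dummy Cacheable
   * whose heapSize() is the requested size minus the per-block overhead, so
   * the resulting block's heap footprint roughly matches the size asked for.
   */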
  private static class CachedBlock
      extends org.apache.hadoop.hbase.io.hfile.CachedBlock {
    public CachedBlock(final long heapSize, String name, long accessTime) {
      super(name,
          new Cacheable() {
            @Override
            public long heapSize() {
              // Keep the full long value; casting to int could truncate
              // large sizes.
              return heapSize - CachedBlock.PER_BLOCK_OVERHEAD;
            }

            @Override
            public int getSerializedLength() {
              return 0;
            }

            @Override
            public void serialize(ByteBuffer destination) {
            }

            @Override
            public CacheableDeserializer<Cacheable> getDeserializer() {
              // Deserialization is not exercised by this test.
              return null;
            }
          },
          accessTime, false);
    }
  }
}