View Javadoc

1   /**
2    * Copyright 2011 The Apache Software Foundation
3    *
4    * Licensed to the Apache Software Foundation (ASF) under one
5    * or more contributor license agreements.  See the NOTICE file
6    * distributed with this work for additional information
7    * regarding copyright ownership.  The ASF licenses this file
8    * to you under the Apache License, Version 2.0 (the
9    * "License"); you may not use this file except in compliance
10   * with the License.  You may obtain a copy of the License at
11   *
12   *     http://www.apache.org/licenses/LICENSE-2.0
13   *
14   * Unless required by applicable law or agreed to in writing, software
15   * distributed under the License is distributed on an "AS IS" BASIS,
16   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17   * See the License for the specific language governing permissions and
18   * limitations under the License.
19   */
20  package org.apache.hadoop.hbase;
21  
22  import java.util.ArrayList;
23  import java.util.Comparator;
24  import java.util.List;
25  import java.util.Map;
26  import java.util.NavigableSet;
27  import java.util.TreeMap;
28  import java.util.TreeSet;
29  
30  
31  /**
32   * Data structure to describe the distribution of HDFS blocks amount hosts
33   */
/**
 * Data structure to describe the distribution of HDFS blocks among hosts.
 *
 * Tracks, per hostname, the total weight (currently file-size bytes) of the
 * HDFS blocks replicated on that host, plus the total weight of all unique
 * blocks counted once regardless of replication factor.
 *
 * Not thread-safe except where noted; callers must synchronize externally
 * if instances are shared across threads.
 */
public class HDFSBlocksDistribution {

  /** Per-host accumulated block weight, keyed and sorted by hostname. */
  private final Map<String,HostAndWeight> hostAndWeights;

  /** Sum of the weights of all unique blocks (each block counted once). */
  private long uniqueBlocksTotalWeight = 0;

  /**
   * Stores the hostname and weight for that hostname.
   *
   * This is used when determining the physical locations of the blocks making
   * up a region.
   *
   * To make a prioritized list of the hosts holding the most data of a region,
   * this class is used to count the total weight for each host.  The weight is
   * currently just the size of the file.
   */
  public static class HostAndWeight {

    private final String host;
    private long weight;

    /**
     * Constructor
     * @param host the host name
     * @param weight the initial weight
     */
    public HostAndWeight(String host, long weight) {
      this.host = host;
      this.weight = weight;
    }

    /**
     * Adds to this host's accumulated weight.
     * @param weight the weight to add
     */
    public void addWeight(long weight) {
      this.weight += weight;
    }

    /**
     * @return the host name
     */
    public String getHost() {
      return host;
    }

    /**
     * @return the accumulated weight
     */
    public long getWeight() {
      return weight;
    }

    /**
     * Comparator used to sort hosts based on weight, ascending; ties are
     * broken by hostname so the ordering is total (required for use in a
     * {@link java.util.TreeSet}, which treats comparator-equal elements as
     * duplicates).
     */
    public static class WeightComparator implements Comparator<HostAndWeight> {
      @Override
      public int compare(HostAndWeight l, HostAndWeight r) {
        if (l.getWeight() == r.getWeight()) {
          return l.getHost().compareTo(r.getHost());
        }
        // Explicit comparison rather than subtraction to avoid long overflow.
        return l.getWeight() < r.getWeight() ? -1 : 1;
      }
    }
  }

  /**
   * Constructor; starts with an empty distribution.
   */
  public HDFSBlocksDistribution() {
    this.hostAndWeights = new TreeMap<String,HostAndWeight>();
  }

  /**
   * @see java.lang.Object#toString()
   */
  @Override
  public synchronized String toString() {
    // Fixed typo: was "disribution".
    return "number of unique hosts in the distribution=" +
      this.hostAndWeights.size();
  }

  /**
   * Adds some weight to a list of hosts and updates the unique block weight.
   * @param hosts the hosts replicating the block; must be non-null, non-empty
   * @param weight the weight of the block
   * @throws NullPointerException if {@code hosts} is null or empty
   */
  public void addHostsAndBlockWeight(String[] hosts, long weight) {
    if (hosts == null || hosts.length == 0) {
      // NPE retained (rather than IllegalArgumentException) for
      // backward compatibility with existing callers.
      throw new NullPointerException("empty hosts");
    }
    // The block is unique, so its weight is counted once here ...
    addUniqueWeight(weight);
    // ... but accrues to every host holding a replica.
    for (String hostname : hosts) {
      addHostAndBlockWeight(hostname, weight);
    }
  }

  /**
   * Adds some weight to the total unique-block weight.
   * @param weight the weight to add
   */
  private void addUniqueWeight(long weight) {
    uniqueBlocksTotalWeight += weight;
  }

  /**
   * Adds some weight to a specific host.
   * @param host the host name
   * @param weight the weight to add
   * @throws NullPointerException if {@code host} is null
   */
  private void addHostAndBlockWeight(String host, long weight) {
    if (host == null) {
      throw new NullPointerException("Passed hostname is null");
    }

    HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
    if (hostAndWeight == null) {
      hostAndWeight = new HostAndWeight(host, weight);
      this.hostAndWeights.put(host, hostAndWeight);
    } else {
      hostAndWeight.addWeight(weight);
    }
  }

  /**
   * @return the hosts and their weights; this is the live internal map, so
   *   callers must not mutate it (kept mutable for backward compatibility)
   */
  public Map<String,HostAndWeight> getHostAndWeights() {
    return this.hostAndWeights;
  }

  /**
   * Returns the weight for a specific host, i.e. the total bytes of all
   * blocks replicated on that host.
   * @param host the host name; may be null
   * @return the weight of the given host, or 0 if the host is null or unknown
   */
  public long getWeight(String host) {
    long weight = 0;
    if (host != null) {
      HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
      if (hostAndWeight != null) {
        weight = hostAndWeight.getWeight();
      }
    }
    return weight;
  }

  /**
   * @return the sum of all unique blocks' weight
   */
  public long getUniqueBlocksTotalWeight() {
    return uniqueBlocksTotalWeight;
  }

  /**
   * Returns the locality index of a given host: the fraction of the unique
   * block weight that is local to that host, in the range [0, 1].
   * @param host the host name; may be null
   * @return the locality index of the given host, or 0 if the host is null,
   *   unknown, or no blocks have been recorded
   */
  public float getBlockLocalityIndex(String host) {
    float localityIndex = 0;
    // Null guard added for consistency with getWeight(); previously a null
    // host threw NullPointerException from TreeMap.get(null).
    if (host != null) {
      HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
      if (hostAndWeight != null && uniqueBlocksTotalWeight != 0) {
        localityIndex =
          (float) hostAndWeight.weight / (float) uniqueBlocksTotalWeight;
      }
    }
    return localityIndex;
  }

  /**
   * Merges the distribution from the input into this object.
   * @param otherBlocksDistribution the other HDFS blocks distribution
   */
  public void add(HDFSBlocksDistribution otherBlocksDistribution) {
    Map<String,HostAndWeight> otherHostAndWeights =
      otherBlocksDistribution.getHostAndWeights();
    for (Map.Entry<String, HostAndWeight> otherHostAndWeight :
      otherHostAndWeights.entrySet()) {
      addHostAndBlockWeight(otherHostAndWeight.getValue().host,
        otherHostAndWeight.getValue().weight);
    }
    addUniqueWeight(otherBlocksDistribution.getUniqueBlocksTotalWeight());
  }

  /**
   * Returns the hosts sorted by descending weight (heaviest first); ties are
   * ordered by reverse hostname order, per {@link HostAndWeight.WeightComparator}.
   * @return the sorted list of hosts in terms of their weights
   */
  public List<String> getTopHosts() {
    NavigableSet<HostAndWeight> orderedHosts = new TreeSet<HostAndWeight>(
      new HostAndWeight.WeightComparator());
    orderedHosts.addAll(this.hostAndWeights.values());
    List<String> topHosts = new ArrayList<String>(orderedHosts.size());
    for (HostAndWeight haw : orderedHosts.descendingSet()) {
      topHosts.add(haw.getHost());
    }
    return topHosts;
  }

}