Points worth noting
- Choice of hash function
  - MurmurHash: a non-cryptographic hash with high performance and a reportedly very low collision probability
  - MD5
  - SHA
  - Guava is a convenient choice here: it provides a rich set of hash-function APIs (see the sketch after this list)
- Virtual nodes plus weighting are supported. Real nodes may differ in resource configuration, so weighting lets load balancing make the most of each node, while virtual nodes soften the impact on the hash ring when a single node fails.
- Different scenarios may hash on different keys, so a wrapper class Node is provided that accepts a custom key and a custom weight.
- Safety: adding or removing a node requires rebuilding the hash ring, so concurrency has to be considered (not implemented here; a sketch of one approach follows the implementation). In the common case the ring only needs to be initialized once.
- The hash value is kept as a String here for generality; it can be optimized for specific scenarios. In my view there is no single best representation, only the most suitable one.
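
As a quick illustration of the Guava APIs mentioned in the first bullet, the snippet below (a minimal sketch; the class name and sample key are made up for demonstration) hashes the same key with three of the functions Guava ships:

```java
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

public class HashFunctionDemo {
    public static void main(String[] args) {
        String key = "192.168.4.175#0";              // an illustrative virtual-node key
        HashFunction murmur = Hashing.murmur3_128(); // non-cryptographic, fast
        HashFunction md5 = Hashing.md5();            // deprecated in recent Guava, shown for comparison
        HashFunction sha256 = Hashing.sha256();
        System.out.println("murmur3_128: " + murmur.hashString(key, StandardCharsets.UTF_8));
        System.out.println("md5:         " + md5.hashString(key, StandardCharsets.UTF_8));
        System.out.println("sha256:      " + sha256.hashString(key, StandardCharsets.UTF_8));
    }
}
```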
Implementation
- Wrapper class Node
```java
public final class Node<T> {

    public static final int DEFAULT_WEIGHT = 1;

    private T target;                    // the real node (e.g. a server)
    private String key;                  // the key hashed onto the ring
    private int weight = DEFAULT_WEIGHT; // multiplies the virtual-node count

    public Node(T target) {
        this.target = target;
        this.key = target.toString();
    }

    public Node(T target, String key) {
        this.target = target;
        this.key = key;
    }

    public Node(T target, String key, int weight) {
        this.target = target;
        this.key = key;
        this.weight = weight;
    }

    public T getTarget() {
        return target;
    }

    public void setTarget(T target) {
        this.target = target;
    }

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public int getWeight() {
        return weight;
    }

    public void setWeight(int weight) {
        this.weight = weight;
    }
}
```
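
For example, a resource-rich node can be given a heavier weight and a custom key (a usage sketch; the addresses and weight values are illustrative):

```java
Node<String> heavy = new Node<>("192.168.4.175", "cache-a", 3); // 3x the virtual nodes
Node<String> plain = new Node<>("127.0.0.1"); // key defaults to target.toString(), weight 1
```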
- Consistent hashing
```java
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;

import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * Consistent hashing
 * @param <T> the type of the real node wrapped by Node
 */
public class ConsistentHash<T> {

    // hash function (murmur3 by default)
    private HashFunction hashFunction = Hashing.murmur3_32();
    // the hash ring: hash of a virtual node -> its real node
    private TreeMap<String, Node<T>> nodeMappings = new TreeMap<>();
    // virtual nodes per unit of weight
    private int replicas = 160;
    // real nodes
    private List<Node<T>> realNodes;

    public ConsistentHash(HashFunction hashFunction, List<Node<T>> realNodes, int replicas) {
        this.hashFunction = hashFunction;
        this.realNodes = realNodes;
        this.replicas = replicas;
    }

    public ConsistentHash(HashFunction hashFunction, List<Node<T>> realNodes) {
        this.hashFunction = hashFunction;
        this.realNodes = realNodes;
    }

    public ConsistentHash(List<Node<T>> realNodes, int replicas) {
        this.realNodes = realNodes;
        this.replicas = replicas;
    }

    public ConsistentHash(List<Node<T>> realNodes) {
        this.realNodes = realNodes;
    }

    /**
     * Initialize the hash ring: each real node is placed on the ring
     * replicas * weight times, under distinct virtual-node keys.
     */
    public void init() {
        realNodes.forEach(node -> {
            for (int i = 0; i < replicas * node.getWeight(); i++) {
                // "#" separates key and index so "node1"+"1" and "node"+"11" cannot collide
                nodeMappings.put(hashCode(node.getKey() + "#" + i), node);
            }
        });
    }

    public String hashCode(String key) {
        // HashCode.toString() renders the hash bytes as a hex string; the original
        // asBytes().toString() returned the byte array's identity string, not the hash.
        // UTF-8 is fixed so results do not vary with the platform's default charset.
        return hashFunction.hashString(key, StandardCharsets.UTF_8).toString();
    }

    public Node<T> getNode(String key) {
        // first virtual node clockwise from the key's position; wrap around if none
        SortedMap<String, Node<T>> tailMap = nodeMappings.tailMap(hashCode(key));
        if (tailMap.isEmpty()) {
            return nodeMappings.get(nodeMappings.firstKey());
        }
        return tailMap.get(tailMap.firstKey());
    }

    public static void main(String[] args) {
        List<Node<String>> realNodes = Arrays.asList(
                new Node<>("127.0.0.1"), new Node<>("192.168.4.175"),
                new Node<>("192.168.3.175"), new Node<>("172.147.0.101"));
        ConsistentHash<String> consistentHash = new ConsistentHash<>(Hashing.md5(), realNodes);
        consistentHash.init();
        int one = 0, two = 0, three = 0, four = 0;
        for (int i = 0; i < 10000; i++) {
            Node<String> node = consistentHash.getNode("node" + i);
            if (node.getKey().equals("127.0.0.1")) {
                one++;
            } else if (node.getKey().equals("192.168.4.175")) {
                two++;
            } else if (node.getKey().equals("192.168.3.175")) {
                three++;
            } else if (node.getKey().equals("172.147.0.101")) {
                four++;
            }
        }
        System.out.println(one);
        System.out.println(two);
        System.out.println(three);
        System.out.println(four);
    }
}
```
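
Run as-is, the four counters should each land somewhere near 2,500, since every real node carries equal weight and 160 virtual nodes; the exact split varies with the hash function chosen. As for the concurrency caveat in the notes above, one way to make ring rebuilds safe (a sketch of an approach, not implemented in the class above; addNode is a hypothetical method) is to build a fresh ring off to the side and publish it atomically through a volatile field:

```java
// Hypothetical additions to ConsistentHash<T>; assumes the fields shown above.
private volatile TreeMap<String, Node<T>> nodeMappings = new TreeMap<>();

public synchronized void addNode(Node<T> node) {
    // copy the current ring, add the new node's virtual nodes to the copy
    TreeMap<String, Node<T>> rebuilt = new TreeMap<>(nodeMappings);
    for (int i = 0; i < replicas * node.getWeight(); i++) {
        rebuilt.put(hashCode(node.getKey() + "#" + i), node);
    }
    nodeMappings = rebuilt; // readers see either the old ring or the complete new one
}
```

With this scheme, readers calling getNode() never observe a half-built ring: synchronized serializes writers, and the volatile write publishes the finished map in one step.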