diff --git a/ObjectLayout-examples/src/main/java/org/ObjectLayout/examples/util/SAAbstractMap.java b/ObjectLayout-examples/src/main/java/org/ObjectLayout/examples/util/SAAbstractMap.java
index dc7ff57..1eeee18 100644
--- a/ObjectLayout-examples/src/main/java/org/ObjectLayout/examples/util/SAAbstractMap.java
+++ b/ObjectLayout-examples/src/main/java/org/ObjectLayout/examples/util/SAAbstractMap.java
@@ -309,9 +309,28 @@ public void clear() {
* Each of these fields are initialized to contain an instance of the
* appropriate view the first time this view is requested. The views are
* stateless, so there's no reason to create more than one of each.
+ *
+ *
+ * <p>Since there is no synchronization performed while accessing these fields,
+ * it is expected that java.util.Map view classes using these fields have
+ * no non-final fields (or any fields at all except for outer-this). Adhering
+ * to this rule would make the races on these fields benign.
+ *
+ *
+ * <p>It is also imperative that implementations read the field only once,
+ * as in:
+ *
+ *
+ * <pre> {@code
+ * public Set<K> keySet() {
+ *   Set<K> ks = keySet;  // single racy read
+ * if (ks == null) {
+ * ks = new KeySet();
+ * keySet = ks;
+ * }
+ * return ks;
+ * }
+ *}</pre>
*/
-    transient volatile Set<K> keySet;
-    transient volatile Collection<V> values;
+    transient Set<K> keySet;
+    transient Collection<V> values;
/**
* {@inheritDoc}
@@ -330,8 +349,9 @@ public void clear() {
* method will not all return the same set.
*/
public Set keySet() {
- if (keySet == null) {
- keySet = new AbstractSet() {
+ Set ks = keySet;
+ if (ks == null) {
+ ks = new AbstractSet() {
public Iterator iterator() {
return new Iterator() {
private Iterator> i = entrySet().iterator();
@@ -366,8 +386,9 @@ public boolean contains(Object k) {
return SAAbstractMap.this.containsKey(k);
}
};
+ keySet = ks;
}
- return keySet;
+ return ks;
}
/**
@@ -387,8 +408,9 @@ public boolean contains(Object k) {
* method will not all return the same collection.
*/
public Collection values() {
- if (values == null) {
- values = new AbstractCollection() {
+ Collection vals = values;
+ if (vals == null) {
+ vals = new AbstractCollection() {
public Iterator iterator() {
return new Iterator() {
private Iterator> i = entrySet().iterator();
@@ -423,8 +445,9 @@ public boolean contains(Object v) {
return SAAbstractMap.this.containsValue(v);
}
};
+ values = vals;
}
- return values;
+ return vals;
}
public abstract Set> entrySet();
@@ -587,7 +610,7 @@ private static boolean eq(Object o1, Object o2) {
* @since 1.6
*/
public static class SimpleEntry
- implements Entry, java.io.Serializable
+ implements Entry, java.io.Serializable
{
private static final long serialVersionUID = -8499721149061103585L;
@@ -691,7 +714,7 @@ public boolean equals(Object o) {
*/
public int hashCode() {
return (key == null ? 0 : key.hashCode()) ^
- (value == null ? 0 : value.hashCode());
+ (value == null ? 0 : value.hashCode());
}
/**
@@ -717,7 +740,7 @@ public String toString() {
* @since 1.6
*/
public static class SimpleImmutableEntry
- implements Entry, java.io.Serializable
+ implements Entry, java.io.Serializable
{
private static final long serialVersionUID = 7138329143949025153L;
@@ -822,7 +845,7 @@ public boolean equals(Object o) {
*/
public int hashCode() {
return (key == null ? 0 : key.hashCode()) ^
- (value == null ? 0 : value.hashCode());
+ (value == null ? 0 : value.hashCode());
}
/**
diff --git a/ObjectLayout-examples/src/main/java/org/ObjectLayout/examples/util/SAHashMap.java b/ObjectLayout-examples/src/main/java/org/ObjectLayout/examples/util/SAHashMap.java
index 688e357..6232f0a 100644
--- a/ObjectLayout-examples/src/main/java/org/ObjectLayout/examples/util/SAHashMap.java
+++ b/ObjectLayout-examples/src/main/java/org/ObjectLayout/examples/util/SAHashMap.java
@@ -286,21 +286,21 @@ public class SAHashMap extends SAAbstractMap
* TreeNode subclass, and in LinkedHashMap for its Entry subclass.)
*/
static class Node implements Map.Entry {
+ private static final Object SENTINEL = new Object();
int hash;
K key;
V value;
Node next;
- private boolean isSentinel = true;
Node(int hash, K key, V value, Node next) {
this.hash = hash;
this.key = key;
this.value = value;
this.next = next;
- this.isSentinel = false;
}
Node() {
+ this.key = (K) SENTINEL;
}
public void setContents(int hash, K key, V value, Node next) {
@@ -308,7 +308,6 @@ public void setContents(int hash, K key, V value, Node next) {
this.key = key;
this.value = value;
this.next = next;
- this.isSentinel = false;
}
public void setContents(Node e) {
@@ -317,18 +316,20 @@ public void setContents(Node e) {
this.key = e.key;
this.value = e.value;
this.next = e.next;
- this.isSentinel = e.isSentinel;
} else {
- this.isSentinel = true;
+ this.setSentinel();
}
}
public boolean isSentinel() {
- return isSentinel;
+ return this.key == SENTINEL;
}
- public void setSentinel(boolean isSentinel) {
- this.isSentinel = isSentinel;
+ public void setSentinel() {
+ this.key = (K) SENTINEL;
+ // make value and next eligible for garbage collection
+ this.value = null;
+ this.next = null;
}
public final K getKey() { return key; }
@@ -351,7 +352,7 @@ public final boolean equals(Object o) {
if (o instanceof Map.Entry) {
Map.Entry,?> e = (Map.Entry,?>)o;
if (Objects.equals(key, e.getKey()) &&
- Objects.equals(value, e.getValue()))
+ Objects.equals(value, e.getValue()))
return true;
}
return false;
@@ -393,10 +394,10 @@ static Class> comparableClassFor(Object x) {
if ((ts = c.getGenericInterfaces()) != null) {
for (int i = 0; i < ts.length; ++i) {
if (((t = ts[i]) instanceof ParameterizedType) &&
- ((p = (ParameterizedType)t).getRawType() ==
- Comparable.class) &&
- (as = p.getActualTypeArguments()) != null &&
- as.length == 1 && as[0] == c) // type arg is c
+ ((p = (ParameterizedType)t).getRawType() ==
+ Comparable.class) &&
+ (as = p.getActualTypeArguments()) != null &&
+ as.length == 1 && as[0] == c) // type arg is c
return c;
}
}
@@ -517,12 +518,12 @@ public static NodeTable newInstance(final long length) {
public SAHashMap(int initialCapacity, float loadFactor) {
if (initialCapacity < 0)
throw new IllegalArgumentException("Illegal initial capacity: " +
- initialCapacity);
+ initialCapacity);
if (initialCapacity > MAXIMUM_CAPACITY)
initialCapacity = MAXIMUM_CAPACITY;
if (loadFactor <= 0 || Float.isNaN(loadFactor))
throw new IllegalArgumentException("Illegal load factor: " +
- loadFactor);
+ loadFactor);
this.loadFactor = loadFactor;
this.threshold = tableSizeFor(initialCapacity);
}
@@ -573,7 +574,7 @@ final void putMapEntries(Map extends K, ? extends V> m, boolean evict) {
if (saTable == null) { // pre-size
float ft = ((float)s / loadFactor) + 1.0F;
int t = ((ft < (float)MAXIMUM_CAPACITY) ?
- (int)ft : MAXIMUM_CAPACITY);
+ (int)ft : MAXIMUM_CAPACITY);
if (t > threshold)
threshold = tableSizeFor(t);
}
@@ -637,16 +638,16 @@ public V get(Object key) {
final Node getNode(int hash, Object key) {
NodeTable tab; Node first, e; int n; K k;
if ((tab = saTable) != null && (n = (int)tab.getLength()) > 0 &&
- (first = tab.get((n - 1) & hash)).isSentinel() != true) {
+ (first = tab.get((n - 1) & hash)).isSentinel() != true) {
if (first.hash == hash && // always check first node
- ((k = first.key) == key || (key != null && key.equals(k))))
+ ((k = first.key) == key || (key != null && key.equals(k))))
return first;
if ((e = first.next) != null) {
-// if (first instanceof TreeNode)
-// return ((TreeNode)first).getTreeNode(hash, key);
+                if (e instanceof TreeNode)
+                    return ((TreeNode<K,V>)e).getTreeNode(hash, key);
do {
if (e.hash == hash &&
- ((k = e.key) == key || (key != null && key.equals(k))))
+ ((k = e.key) == key || (key != null && key.equals(k))))
return e;
} while ((e = e.next) != null);
}
@@ -703,16 +704,16 @@ final V putVal(int hash, K key, V value, boolean onlyIfAbsent,
else {
Node e; K k;
if (p.hash == hash &&
- ((k = p.key) == key || (key != null && key.equals(k))))
+ ((k = p.key) == key || (key != null && key.equals(k))))
e = p;
-// else if (p instanceof TreeNode)
-// e = ((TreeNode)p).putTreeVal(this, tab, hash, key, value);
+            else if (p.next instanceof TreeNode)
+                e = ((TreeNode<K,V>)p.next).putTreeVal(this, tab, hash, key, value);
else {
for (int binCount = 0; ; ++binCount) {
if ((e = p.next) == null) {
p.next = newNode(hash, key, value, null);
-// if (binCount >= TREEIFY_THRESHOLD - 1) // -1 for 1st
-// treeifyBin(tab, hash);
+ if (binCount >= TREEIFY_THRESHOLD - 1) // -1 for 1st
+ treeifyBin(tab, hash);
break;
}
if (e.hash == hash &&
@@ -748,7 +749,7 @@ final V putVal(int hash, K key, V value, boolean onlyIfAbsent,
final NodeTable resize() {
// Node[] oldTab = table;
NodeTable oldTab = saTable;
- int oldCap = (oldTab == null) ? 0 : (int) oldTab.getLength();
+ int oldCap = (oldTab == null) ? 0 : (int)oldTab.getLength();
int oldThr = threshold;
int newCap, newThr = 0;
if (oldCap > 0) {
@@ -757,7 +758,7 @@ final NodeTable resize() {
return oldTab;
}
else if ((newCap = oldCap << 1) < MAXIMUM_CAPACITY &&
- oldCap >= DEFAULT_INITIAL_CAPACITY)
+ oldCap >= DEFAULT_INITIAL_CAPACITY)
newThr = oldThr << 1; // double threshold
}
else if (oldThr > 0) // initial capacity was placed in threshold
@@ -769,26 +770,22 @@ else if (oldThr > 0) // initial capacity was placed in threshold
if (newThr == 0) {
float ft = (float)newCap * loadFactor;
newThr = (newCap < MAXIMUM_CAPACITY && ft < (float)MAXIMUM_CAPACITY ?
- (int)ft : Integer.MAX_VALUE);
+ (int)ft : Integer.MAX_VALUE);
}
threshold = newThr;
@SuppressWarnings({"rawtypes","unchecked"})
-// Node[] newTab = (Node[])new Node[newCap];
- NodeTable newTab = NodeTable.newInstance(newCap);
+// Node[] newTab = (Node[])new Node[newCap];
+ NodeTable newTab = NodeTable.newInstance(newCap);
saTable = newTab;
if (oldTab != null) {
for (int j = 0; j < oldCap; ++j) {
Node e;
- if ((e = oldTab.get(j)).isSentinel != true) {
+ if ((e = oldTab.get(j)).isSentinel() != true) {
// oldTab[j] = null;
-// oldTab.get(j).setSentinel(true);
-// if (e.next == null)
-// newTab[e.hash & (newCap - 1)] = e;
- if (e.next == null) {
+ if (e.next == null)
newTab.get(e.hash & (newCap - 1)).setContents(e);
- }
-// else if (e instanceof TreeNode)
-// ((TreeNode)e).split(this, newTab, j, oldCap);
+                    else if (e.next instanceof TreeNode)
+                        ((TreeNode<K,V>)e.next).split(this, newTab, j, oldCap, e);
else { // preserve order
Node loHead = null, loTail = null;
Node hiHead = null, hiTail = null;
@@ -821,6 +818,7 @@ else if (oldThr > 0) // initial capacity was placed in threshold
newTab.get(j + oldCap).setContents(hiHead);
}
}
+ oldTab.get(j).setSentinel();
}
}
}
@@ -831,26 +829,28 @@ else if (oldThr > 0) // initial capacity was placed in threshold
* Replaces all linked nodes in bin at index for given hash unless
* table is too small, in which case resizes instead.
*/
-// final void treeifyBin(NodeTable tab, int hash) {
-// int n, index; Node e;
-// if (tab == null || (n = (int)tab.getLength()) < MIN_TREEIFY_CAPACITY)
-// resize();
-// else if ((e = tab.get(index = (n - 1) & hash)).isSentinel != true) {
-// TreeNode hd = null, tl = null;
-// do {
-// TreeNode p = replacementTreeNode(e, null);
-// if (tl == null)
-// hd = p;
-// else {
-// p.prev = tl;
-// tl.next = p;
-// }
-// tl = p;
-// } while ((e = e.next) != null);
-// if ((tab[index] = hd) != null)
-// hd.treeify(tab);
-// }
-// }
+    final void treeifyBin(NodeTable<K,V> tab, int hash) {
+        int n, index; Node<K,V> first, e;
+        if (tab == null || (n = (int)tab.getLength()) < MIN_TREEIFY_CAPACITY)
+            resize();
+        else if ((first = tab.get((n - 1) & hash)) != null) { // NOTE(review): StructuredArray slots are never null -- should this test !first.isSentinel()? TODO confirm
+            e = first.next; // NOTE(review): the do-while below NPEs if first.next is null -- confirm callers guarantee a chain here
+            TreeNode<K,V> hd = null, tl = null;
+            do {
+                TreeNode<K,V> p = replacementTreeNode(e, null);
+                if (tl == null)
+                    hd = p;
+                else {
+                    p.prev = tl;
+                    tl.next = p;
+                }
+                tl = p;
+            } while ((e = e.next) != null);
+            first.next = hd;
+            if (hd != null)
+                hd.treeify(tab);
+        }
+    }
/**
* Copies all of the mappings from the specified map to this map.
@@ -876,7 +876,7 @@ public void putAll(Map extends K, ? extends V> m) {
public V remove(Object key) {
Node e;
return (e = removeNode(hash(key), key, null, false, true)) == null ?
- null : e.value;
+ null : e.value;
}
/**
@@ -893,40 +893,40 @@ final Node removeNode(int hash, Object key, Object value,
boolean matchValue, boolean movable) {
NodeTable tab; Node p; int n, index;
if ((tab = saTable) != null && (n = (int)tab.getLength()) > 0 &&
- (p = tab.get(index = (n - 1) & hash)).isSentinel != true) {
+ (p = tab.get(index = (n - 1) & hash)).isSentinel() != true) {
Node node = null, e; K k; V v;
if (p.hash == hash &&
- ((k = p.key) == key || (key != null && key.equals(k))))
+ ((k = p.key) == key || (key != null && key.equals(k))))
node = p;
else if ((e = p.next) != null) {
-// if (p instanceof TreeNode)
-// node = ((TreeNode)p).getTreeNode(hash, key);
-// else {
+                if (e instanceof TreeNode)
+                    node = ((TreeNode<K,V>)e).getTreeNode(hash, key);
+ else {
do {
if (e.hash == hash &&
- ((k = e.key) == key ||
- (key != null && key.equals(k)))) {
+ ((k = e.key) == key ||
+ (key != null && key.equals(k)))) {
node = e;
break;
}
p = e;
} while ((e = e.next) != null);
-// }
+ }
}
if (node != null && (!matchValue || (v = node.value) == value ||
- (value != null && value.equals(v)))) {
-// if (node instanceof TreeNode)
-// ((TreeNode)node).removeTreeNode(this, tab, movable);
-// else if (node == p)
-// tab[index] = node.next;
- if (node == p) {
+ (value != null && value.equals(v)))) {
+                if (node instanceof TreeNode)
+                    ((TreeNode<K,V>)node).removeTreeNode(this, tab, movable);
+ else if (node == p) {
// We are removing the head of the list. We need to create an
// independent node with the same contents that will be returned.
// Also needs to be passed to afterNodeRemoval().
// Danger. Danger. (what if afterNodeRemoval was holding some links on a specific node)
// XXX GGG TODO
node = new Node(node.hash, node.key, node.value, node.next);
- tab.get(index).setContents(node.next);
+ p.setContents(node.next);
+                if (node.next instanceof TreeNode)
+                    ((TreeNode<K,V>) node.next).removeTreeNode(this, tab, movable);
}
else
p.next = node.next;
@@ -948,9 +948,9 @@ public void clear() {
modCount++;
if ((tab = saTable) != null && size > 0) {
size = 0;
- for (int i = 0; i < tab.getLength(); ++i)
-// tab[i] = null;
- tab.get(i).setSentinel(true);
+ for (int i = 0; i < (int)tab.getLength(); ++i)
+// tab[i] = null;
+ tab.get(i).setSentinel();
}
}
@@ -965,10 +965,10 @@ public void clear() {
public boolean containsValue(Object value) {
NodeTable tab; V v;
if ((tab = saTable) != null && size > 0) {
- for (int i = 0; i < tab.getLength(); ++i) {
+ for (int i = 0; i < (int)tab.getLength(); ++i) {
for (Node e = tab.get(i); (e != null) && (!e.isSentinel()); e = e.next) {
if ((v = e.value) == value ||
- (value != null && value.equals(v)))
+ (value != null && value.equals(v)))
return true;
}
}
@@ -992,8 +992,12 @@ public boolean containsValue(Object value) {
* @return a set view of the keys contained in this map
*/
public Set keySet() {
- Set ks;
- return (ks = keySet) == null ? (keySet = new KeySet()) : ks;
+ Set ks = keySet;
+ if (ks == null) {
+ ks = new KeySet();
+ keySet = ks;
+ }
+ return ks;
}
final class KeySet extends AbstractSet {
@@ -1005,7 +1009,7 @@ public final boolean remove(Object key) {
return removeNode(hash(key), key, null, false, true) != null;
}
public final Spliterator spliterator() {
- return new KeySpliterator(SAHashMap.this, 0, -1, 0, 0);
+ return new KeySpliterator<>(SAHashMap.this, 0, -1, 0, 0);
}
public final void forEach(Consumer super K> action) {
NodeTable tab;
@@ -1013,8 +1017,8 @@ public final void forEach(Consumer super K> action) {
throw new NullPointerException();
if (size > 0 && (tab = saTable) != null) {
int mc = modCount;
- for (int i = 0; i < tab.getLength(); ++i) {
- for (Node e = tab.get(i); ((e != null) && !e.isSentinel()) ; e = e.next)
+ for (int i = 0; i < (int)tab.getLength(); ++i) {
+ for (Node e = tab.get(i); ((e != null) && !e.isSentinel()); e = e.next)
action.accept(e.key);
}
if (modCount != mc)
@@ -1039,8 +1043,12 @@ public final void forEach(Consumer super K> action) {
* @return a view of the values contained in this map
*/
public Collection values() {
- Collection vs;
- return (vs = values) == null ? (values = new Values()) : vs;
+ Collection vs = values;
+ if (vs == null) {
+ vs = new Values();
+ values = vs;
+ }
+ return vs;
}
final class Values extends AbstractCollection {
@@ -1049,7 +1057,7 @@ final class Values extends AbstractCollection {
public final Iterator iterator() { return new ValueIterator(); }
public final boolean contains(Object o) { return containsValue(o); }
public final Spliterator spliterator() {
- return new ValueSpliterator(SAHashMap.this, 0, -1, 0, 0);
+ return new ValueSpliterator<>(SAHashMap.this, 0, -1, 0, 0);
}
public final void forEach(Consumer super V> action) {
NodeTable tab;
@@ -1057,7 +1065,7 @@ public final void forEach(Consumer super V> action) {
throw new NullPointerException();
if (size > 0 && (tab = saTable) != null) {
int mc = modCount;
- for (int i = 0; i < tab.getLength(); ++i) {
+ for (int i = 0; i < (int)tab.getLength(); ++i) {
for (Node e = tab.get(i); ((e != null) && !e.isSentinel()); e = e.next)
action.accept(e.value);
}
@@ -1112,7 +1120,7 @@ public final boolean remove(Object o) {
return false;
}
public final Spliterator> spliterator() {
- return new EntrySpliterator(SAHashMap.this, 0, -1, 0, 0);
+ return new EntrySpliterator<>(SAHashMap.this, 0, -1, 0, 0);
}
public final void forEach(Consumer super Map.Entry> action) {
NodeTable tab;
@@ -1120,7 +1128,7 @@ public final void forEach(Consumer super Map.Entry> action) {
throw new NullPointerException();
if (size > 0 && (tab = saTable) != null) {
int mc = modCount;
- for (int i = 0; i < tab.getLength(); ++i) {
+ for (int i = 0; i < (int)tab.getLength(); ++i) {
for (Node e = tab.get(i); ((e != null) && !e.isSentinel()); e = e.next)
action.accept(e);
}
@@ -1152,7 +1160,7 @@ public boolean remove(Object key, Object value) {
public boolean replace(K key, V oldValue, V newValue) {
Node e; V v;
if ((e = getNode(hash(key), key)) != null &&
- ((v = e.value) == oldValue || (v != null && v.equals(oldValue)))) {
+ ((v = e.value) == oldValue || (v != null && v.equals(oldValue)))) {
e.value = newValue;
afterNodeAccess(e);
return true;
@@ -1178,27 +1186,32 @@ public V computeIfAbsent(K key,
if (mappingFunction == null)
throw new NullPointerException();
int hash = hash(key);
- NodeTable tab; Node first; int n, i;
+ NodeTable tab; Node first, e; int n, i;
int binCount = 0;
-// TreeNode t = null;
+ TreeNode t = null;
Node old = null;
if (size > threshold || (tab = saTable) == null ||
- (n = (int)tab.getLength()) == 0)
+ (n = (int)tab.getLength()) == 0)
n = (int)(tab = resize()).getLength();
if ((first = tab.get(i = (n - 1) & hash)).isSentinel() != true) {
-// if (first instanceof TreeNode)
-// old = (t = (TreeNode)first).getTreeNode(hash, key);
-// else {
- Node e = first; K k;
- do {
- if (e.hash == hash &&
+ K k;
+ if (first.hash == hash && // always check first node
+ ((k = first.key) == key || (key != null && key.equals(k))))
+ old = first;
+ else if ((e = first.next) != null) {
+            if (e instanceof TreeNode)
+                old = (t = (TreeNode<K,V>)e).getTreeNode(hash, key);
+ else {
+ do {
+ if (e.hash == hash &&
((k = e.key) == key || (key != null && key.equals(k)))) {
- old = e;
- break;
- }
- ++binCount;
- } while ((e = e.next) != null);
-// }
+ old = e;
+ break;
+ }
+ ++binCount;
+ } while ((e = e.next) != null);
+ }
+ }
V oldValue;
if (old != null && (oldValue = old.value) != null) {
afterNodeAccess(old);
@@ -1213,15 +1226,15 @@ public V computeIfAbsent(K key,
afterNodeAccess(old);
return v;
}
-// else if (t != null)
-// t.putTreeVal(this, tab, hash, key, v);
-// else {
-// tab[i] = newNode(hash, key, v, first);
-// if (binCount >= TREEIFY_THRESHOLD - 1)
-// treeifyBin(tab, hash);
-// }
- tab.get(i).setContents(hash, key, v, first);
-
+ else if (t != null)
+ t.putTreeVal(this, tab, hash, key, v);
+ else {
+ // we need a copy of the first node since it's part of the StructuredArray
+                Node<K,V> firstCopy = new Node<>(first.hash, first.key, first.value, first.next);
+ tab.get(i).setContents(hash, key, v, firstCopy);
+ if (binCount >= TREEIFY_THRESHOLD - 1)
+ treeifyBin(tab, hash);
+ }
++modCount;
++size;
afterNodeInsertion(true);
@@ -1235,7 +1248,7 @@ public V computeIfPresent(K key,
Node e; V oldValue;
int hash = hash(key);
if ((e = getNode(hash, key)) != null &&
- (oldValue = e.value) != null) {
+ (oldValue = e.value) != null) {
V v = remappingFunction.apply(key, oldValue);
if (v != null) {
e.value = v;
@@ -1254,27 +1267,32 @@ public V compute(K key,
if (remappingFunction == null)
throw new NullPointerException();
int hash = hash(key);
- NodeTable tab; Node first; int n, i;
+ NodeTable tab; Node first, e; int n, i;
int binCount = 0;
-// TreeNode t = null;
+ TreeNode t = null;
Node old = null;
if (size > threshold || (tab = saTable) == null ||
- (n = (int)tab.getLength()) == 0)
+ (n = (int)tab.getLength()) == 0)
n = (int)(tab = resize()).getLength();
if ((first = tab.get(i = (n - 1) & hash)).isSentinel() != true) {
-// if (first instanceof TreeNode)
-// old = (t = (TreeNode)first).getTreeNode(hash, key);
-// else {
- Node e = first; K k;
- do {
- if (e.hash == hash &&
+ K k;
+ if (first.hash == hash && // always check first node
+ ((k = first.key) == key || (key != null && key.equals(k))))
+ old = first;
+ else if ((e = first.next) != null) {
+            if (e instanceof TreeNode)
+                old = (t = (TreeNode<K,V>)e).getTreeNode(hash, key);
+ else {
+ do {
+ if (e.hash == hash &&
((k = e.key) == key || (key != null && key.equals(k)))) {
- old = e;
- break;
- }
- ++binCount;
- } while ((e = e.next) != null);
-// }
+ old = e;
+ break;
+ }
+ ++binCount;
+ } while ((e = e.next) != null);
+ }
+ }
}
V oldValue = (old == null) ? null : old.value;
V v = remappingFunction.apply(key, oldValue);
@@ -1287,15 +1305,15 @@ public V compute(K key,
removeNode(hash, key, null, false, true);
}
else if (v != null) {
-// if (t != null)
-// t.putTreeVal(this, tab, hash, key, v);
-// else {
-// tab[i] = newNode(hash, key, v, first);
-// if (binCount >= TREEIFY_THRESHOLD - 1)
-// treeifyBin(tab, hash);
-// }
- tab.get(i).setContents(hash, key, v, first);
-
+ if (t != null)
+ t.putTreeVal(this, tab, hash, key, v);
+ else {
+ // we need a copy of the first node since it's part of the StructuredArray
+                Node<K,V> firstCopy = new Node<>(first.hash, first.key, first.value, first.next);
+ tab.get(i).setContents(hash, key, v, firstCopy);
+ if (binCount >= TREEIFY_THRESHOLD - 1)
+ treeifyBin(tab, hash);
+ }
++modCount;
++size;
afterNodeInsertion(true);
@@ -1311,27 +1329,32 @@ public V merge(K key, V value,
if (remappingFunction == null)
throw new NullPointerException();
int hash = hash(key);
- NodeTable tab; Node first; int n, i;
+ NodeTable tab; Node first, e; int n, i;
int binCount = 0;
-// TreeNode t = null;
+ TreeNode t = null;
Node old = null;
if (size > threshold || (tab = saTable) == null ||
- (n = (int)tab.getLength()) == 0)
+ (n = (int)tab.getLength()) == 0)
n = (int)(tab = resize()).getLength();
if ((first = tab.get(i = (n - 1) & hash)).isSentinel() != true) {
-// if (first instanceof TreeNode)
-// old = (t = (TreeNode)first).getTreeNode(hash, key);
-// else {
- Node e = first; K k;
- do {
- if (e.hash == hash &&
+ K k;
+ if (first.hash == hash && // always check first node
+ ((k = first.key) == key || (key != null && key.equals(k))))
+ old = first;
+ else if ((e = first.next) != null) {
+            if (e instanceof TreeNode)
+                old = (t = (TreeNode<K,V>)e).getTreeNode(hash, key);
+ else {
+ do {
+ if (e.hash == hash &&
((k = e.key) == key || (key != null && key.equals(k)))) {
- old = e;
- break;
- }
- ++binCount;
- } while ((e = e.next) != null);
-// }
+ old = e;
+ break;
+ }
+ ++binCount;
+ } while ((e = e.next) != null);
+ }
+ }
}
if (old != null) {
V v;
@@ -1348,15 +1371,15 @@ public V merge(K key, V value,
return v;
}
if (value != null) {
-// if (t != null)
-// t.putTreeVal(this, tab, hash, key, value);
-// else {
-// tab[i] = newNode(hash, key, value, first);
-// if (binCount >= TREEIFY_THRESHOLD - 1)
-// treeifyBin(tab, hash);
-// }
- tab.get(i).setContents(hash, key, value, first);
-
+ if (t != null)
+ t.putTreeVal(this, tab, hash, key, value);
+ else {
+ // we need a copy of the first node since it's part of the StructuredArray
+                Node<K,V> firstCopy = new Node<>(first.hash, first.key, first.value, first.next);
+ tab.get(i).setContents(hash, key, value, firstCopy);
+ if (binCount >= TREEIFY_THRESHOLD - 1)
+ treeifyBin(tab, hash);
+ }
++modCount;
++size;
afterNodeInsertion(true);
@@ -1371,7 +1394,7 @@ public void forEach(BiConsumer super K, ? super V> action) {
throw new NullPointerException();
if (size > 0 && (tab = saTable) != null) {
int mc = modCount;
- for (int i = 0; i < tab.getLength(); ++i) {
+ for (int i = 0; i < (int)tab.getLength(); ++i) {
for (Node e = tab.get(i); ((e != null) && !e.isSentinel()); e = e.next)
action.accept(e.key, e.value);
}
@@ -1387,7 +1410,7 @@ public void replaceAll(BiFunction super K, ? super V, ? extends V> function) {
throw new NullPointerException();
if (size > 0 && (tab = saTable) != null) {
int mc = modCount;
- for (int i = 0; i < tab.getLength(); ++i) {
+ for (int i = 0; i < (int)tab.getLength(); ++i) {
for (Node e = tab.get(i); ((e != null) && !e.isSentinel()); e = e.next) {
e.value = function.apply(e.key, e.value);
}
@@ -1425,8 +1448,8 @@ public Object clone() {
final float loadFactor() { return loadFactor; }
final int capacity() {
return (saTable != null) ? (int)saTable.getLength() :
- (threshold > 0) ? threshold :
- DEFAULT_INITIAL_CAPACITY;
+ (threshold > 0) ? threshold :
+ DEFAULT_INITIAL_CAPACITY;
}
/**
@@ -1441,7 +1464,7 @@ final int capacity() {
* emitted in no particular order.
*/
private void writeObject(java.io.ObjectOutputStream s)
- throws IOException {
+ throws IOException {
int buckets = capacity();
// Write out the threshold, loadfactor, and any hidden stuff
s.defaultWriteObject();
@@ -1455,42 +1478,42 @@ private void writeObject(java.io.ObjectOutputStream s)
* deserialize it).
*/
private void readObject(java.io.ObjectInputStream s)
- throws IOException, ClassNotFoundException {
+ throws IOException, ClassNotFoundException {
// Read in the threshold (ignored), loadfactor, and any hidden stuff
s.defaultReadObject();
reinitialize();
if (loadFactor <= 0 || Float.isNaN(loadFactor))
throw new InvalidObjectException("Illegal load factor: " +
- loadFactor);
+ loadFactor);
s.readInt(); // Read and ignore number of buckets
int mappings = s.readInt(); // Read number of mappings (size)
if (mappings < 0)
throw new InvalidObjectException("Illegal mappings count: " +
- mappings);
+ mappings);
else if (mappings > 0) { // (if zero, use defaults)
// Size the table using given load factor only if within
// range of 0.25...4.0
float lf = Math.min(Math.max(0.25f, loadFactor), 4.0f);
float fc = (float)mappings / lf + 1.0f;
int cap = ((fc < DEFAULT_INITIAL_CAPACITY) ?
- DEFAULT_INITIAL_CAPACITY :
- (fc >= MAXIMUM_CAPACITY) ?
- MAXIMUM_CAPACITY :
- tableSizeFor((int)fc));
+ DEFAULT_INITIAL_CAPACITY :
+ (fc >= MAXIMUM_CAPACITY) ?
+ MAXIMUM_CAPACITY :
+ tableSizeFor((int)fc));
float ft = (float)cap * lf;
threshold = ((cap < MAXIMUM_CAPACITY && ft < MAXIMUM_CAPACITY) ?
- (int)ft : Integer.MAX_VALUE);
+ (int)ft : Integer.MAX_VALUE);
@SuppressWarnings({"rawtypes","unchecked"})
-// Node[] tab = (Node[])new Node[cap];
- NodeTable tab = NodeTable.newInstance(cap);
+// Node[] tab = (Node[])new Node[cap];
+ NodeTable tab = NodeTable.newInstance(cap);
saTable = tab;
// Read the keys and values, and put the mappings in the HashMap
for (int i = 0; i < mappings; i++) {
@SuppressWarnings("unchecked")
- K key = (K) s.readObject();
+ K key = (K) s.readObject();
@SuppressWarnings("unchecked")
- V value = (V) s.readObject();
+ V value = (V) s.readObject();
putVal(hash(key), key, value, false, false);
}
}
@@ -1511,12 +1534,12 @@ abstract class HashIterator {
current = next = null;
index = 0;
if (t != null && size > 0) { // advance to first entry
- do {} while (index < t.getLength() && (next = t.get(index++)) == null);
+ do {} while (index < (int)t.getLength() && (next = t.get(index++)).isSentinel());
}
}
public final boolean hasNext() {
- return next != null;
+ return next != null && !next.isSentinel();
}
final Node nextNode() {
@@ -1527,14 +1550,14 @@ final Node nextNode() {
if (e == null)
throw new NoSuchElementException();
if ((next = (current = e).next) == null && (t = saTable) != null) {
- do {} while (index < t.getLength() && (next = t.get(index++)) == null);
+ do {} while (index < (int)t.getLength() && (next = t.get(index++)).isSentinel());
}
return e;
}
public final void remove() {
Node p = current;
- if (p == null)
+ if (p == null || p.isSentinel())
throw new IllegalStateException();
if (modCount != expectedModCount)
throw new ConcurrentModificationException();
@@ -1546,17 +1569,17 @@ public final void remove() {
}
final class KeyIterator extends HashIterator
- implements Iterator {
+ implements Iterator {
public final K next() { return nextNode().key; }
}
final class ValueIterator extends HashIterator
- implements Iterator {
+ implements Iterator {
public final V next() { return nextNode().value; }
}
final class EntryIterator extends HashIterator
- implements Iterator> {
+ implements Iterator> {
public final Map.Entry next() { return nextNode(); }
}
@@ -1600,8 +1623,8 @@ public final long estimateSize() {
}
static final class KeySpliterator
- extends HashMapSpliterator
- implements Spliterator {
+ extends HashMapSpliterator
+ implements Spliterator {
KeySpliterator(SAHashMap m, int origin, int fence, int est,
int expectedModCount) {
super(m, origin, fence, est, expectedModCount);
@@ -1610,8 +1633,8 @@ static final class KeySpliterator
public KeySpliterator trySplit() {
int hi = getFence(), lo = index, mid = (lo + hi) >>> 1;
return (lo >= mid || ((current != null) && !current.isSentinel())) ? null :
- new KeySpliterator(map, lo, index = mid, est >>>= 1,
- expectedModCount);
+ new KeySpliterator<>(map, lo, index = mid, est >>>= 1,
+ expectedModCount);
}
public void forEachRemaining(Consumer super K> action) {
@@ -1626,18 +1649,18 @@ public void forEachRemaining(Consumer super K> action) {
}
else
mc = expectedModCount;
- if (tab != null && tab.getLength() >= hi &&
- (i = index) >= 0 && (i < (index = hi) || ((current != null) && !current.isSentinel()))) {
+ if (tab != null && (int)tab.getLength() >= hi &&
+ (i = index) >= 0 && (i < (index = hi) || ((current != null) && !current.isSentinel()))) {
Node p = current;
current = null;
do {
- if (p == null)
+ if (p == null || p.isSentinel())
p = tab.get(i++);
else {
action.accept(p.key);
p = p.next;
}
- } while (((p != null) && !p.isSentinel) || i < hi);
+ } while (((p != null) && !p.isSentinel()) || i < hi);
if (m.modCount != mc)
throw new ConcurrentModificationException();
}
@@ -1650,7 +1673,7 @@ public boolean tryAdvance(Consumer super K> action) {
NodeTable tab = map.saTable;
if (tab != null && tab.getLength() >= (hi = getFence()) && index >= 0) {
while (((current != null) && !current.isSentinel()) || index < hi) {
- if (current == null)
+ if (current == null || current.isSentinel())
current = tab.get(index++);
else {
K k = current.key;
@@ -1667,13 +1690,13 @@ public boolean tryAdvance(Consumer super K> action) {
public int characteristics() {
return (fence < 0 || est == map.size ? Spliterator.SIZED : 0) |
- Spliterator.DISTINCT;
+ Spliterator.DISTINCT;
}
}
static final class ValueSpliterator
- extends HashMapSpliterator
- implements Spliterator {
+ extends HashMapSpliterator
+ implements Spliterator {
ValueSpliterator(SAHashMap m, int origin, int fence, int est,
int expectedModCount) {
super(m, origin, fence, est, expectedModCount);
@@ -1682,8 +1705,8 @@ static final class ValueSpliterator
public ValueSpliterator trySplit() {
int hi = getFence(), lo = index, mid = (lo + hi) >>> 1;
return (lo >= mid || ((current != null) && !current.isSentinel())) ? null :
- new ValueSpliterator(map, lo, index = mid, est >>>= 1,
- expectedModCount);
+ new ValueSpliterator<>(map, lo, index = mid, est >>>= 1,
+ expectedModCount);
}
public void forEachRemaining(Consumer<? super V> action) {
@@ -1698,12 +1721,12 @@ public void forEachRemaining(Consumer<? super V> action) {
}
else
mc = expectedModCount;
- if (tab != null && tab.getLength() >= hi &&
- (i = index) >= 0 && (i < (index = hi) || ((current != null) && !current.isSentinel()))) {
+ if (tab != null && (int)tab.getLength() >= hi &&
+ (i = index) >= 0 && (i < (index = hi) || ((current != null) && !current.isSentinel()))) {
Node<K,V> p = current;
current = null;
do {
- if (p == null)
+ if (p == null || p.isSentinel())
p = tab.get(i++);
else {
action.accept(p.value);
@@ -1720,9 +1743,9 @@ public boolean tryAdvance(Consumer<? super V> action) {
if (action == null)
throw new NullPointerException();
NodeTable tab = map.saTable;
- if (tab != null && tab.getLength() >= (hi = getFence()) && index >= 0) {
+ if (tab != null && (int)tab.getLength() >= (hi = getFence()) && index >= 0) {
while (((current != null) && !current.isSentinel()) || index < hi) {
- if (current == null)
+ if (current == null || current.isSentinel())
current = tab.get(index++);
else {
V v = current.value;
@@ -1743,8 +1766,8 @@ public int characteristics() {
}
static final class EntrySpliterator<K,V>
- extends HashMapSpliterator<K,V>
- implements Spliterator<Map.Entry<K,V>> {
+ extends HashMapSpliterator<K,V>
+ implements Spliterator<Map.Entry<K,V>> {
EntrySpliterator(SAHashMap<K,V> m, int origin, int fence, int est,
int expectedModCount) {
super(m, origin, fence, est, expectedModCount);
}
@@ -1753,8 +1776,8 @@ static final class EntrySpliterator<K,V>
public EntrySpliterator<K,V> trySplit() {
int hi = getFence(), lo = index, mid = (lo + hi) >>> 1;
return (lo >= mid || ((current != null) && !current.isSentinel())) ? null :
- new EntrySpliterator(map, lo, index = mid, est >>>= 1,
- expectedModCount);
+ new EntrySpliterator<>(map, lo, index = mid, est >>>= 1,
+ expectedModCount);
}
public void forEachRemaining(Consumer<? super Map.Entry<K,V>> action) {
@@ -1769,12 +1792,12 @@ public void forEachRemaining(Consumer<? super Map.Entry<K,V>> action) {
}
else
mc = expectedModCount;
- if (tab != null && tab.getLength() >= hi &&
- (i = index) >= 0 && (i < (index = hi) || ((current != null) && !current.isSentinel()))) {
+ if (tab != null && (int)tab.getLength() >= hi &&
+ (i = index) >= 0 && (i < (index = hi) || ((current != null) && !current.isSentinel()))) {
Node<K,V> p = current;
current = null;
do {
- if (p == null)
+ if (p == null || p.isSentinel())
p = tab.get(i++);
else {
action.accept(p);
@@ -1791,9 +1814,9 @@ public boolean tryAdvance(Consumer<? super Map.Entry<K,V>> action) {
if (action == null)
throw new NullPointerException();
NodeTable tab = map.saTable;
- if (tab != null && tab.getLength() >= (hi = getFence()) && index >= 0) {
+ if (tab != null && (int)tab.getLength() >= (hi = getFence()) && index >= 0) {
while (((current != null) && !current.isSentinel()) || index < hi) {
- if (current == null)
+ if (current == null || current.isSentinel())
current = tab.get(index++);
else {
Node<K,V> e = current;
@@ -1810,10 +1833,11 @@ public boolean tryAdvance(Consumer super Map.Entry> action) {
public int characteristics() {
return (fence < 0 || est == map.size ? Spliterator.SIZED : 0) |
- Spliterator.DISTINCT;
+ Spliterator.DISTINCT;
}
}
+
/* ------------------------------------------------------------ */
// LinkedHashMap support
@@ -1828,23 +1852,23 @@ public int characteristics() {
// Create a regular (non-tree) node
Node<K,V> newNode(int hash, K key, V value, Node<K,V> next) {
- return new Node<K,V>(hash, key, value, next);
+ return new Node<>(hash, key, value, next);
}
// For conversion from TreeNodes to plain nodes
Node<K,V> replacementNode(Node<K,V> p, Node<K,V> next) {
- return new Node<K,V>(p.hash, p.key, p.value, next);
+ return new Node<>(p.hash, p.key, p.value, next);
+ }
+
+ // Create a tree bin node
+ TreeNode<K,V> newTreeNode(int hash, K key, V value, Node<K,V> next) {
+ return new TreeNode<>(hash, key, value, next);
}
-// // Create a tree bin node
-// TreeNode<K,V> newTreeNode(int hash, K key, V value, Node<K,V> next) {
-// return new TreeNode<K,V>(hash, key, value, next);
-// }
-//
-// // For treeifyBin
-// TreeNode<K,V> replacementTreeNode(Node<K,V> p, Node<K,V> next) {
-// return new TreeNode<K,V>(p.hash, p.key, p.value, next);
-// }
+ // For treeifyBin
+ TreeNode<K,V> replacementTreeNode(Node<K,V> p, Node<K,V> next) {
+ return new TreeNode<>(p.hash, p.key, p.value, next);
+ }
/**
* Reset to initial default state. Called by clone and readObject.
@@ -1868,8 +1892,8 @@ void afterNodeRemoval(Node p) { }
void internalWriteEntries(java.io.ObjectOutputStream s) throws IOException {
NodeTable tab;
if (size > 0 && (tab = saTable) != null) {
- for (int i = 0; i < tab.getLength(); ++i) {
- for (Node<K,V> e = tab.get(i); ((e != null) & !e.isSentinel()); e = e.next) {
+ for (int i = 0; i < (int)tab.getLength(); ++i) {
+ for (Node<K,V> e = tab.get(i); ((e != null) && !e.isSentinel()); e = e.next) {
s.writeObject(e.key);
s.writeObject(e.value);
}
@@ -1877,600 +1901,642 @@ void internalWriteEntries(java.io.ObjectOutputStream s) throws IOException {
}
}
-// /* ------------------------------------------------------------ */
-// // Tree bins
-//
-// /**
-// * Entry for Tree bins. Extends LinkedHashMap.Entry (which in turn
-// * extends Node) so can be used as extension of either regular or
-// * linked node.
-// */
-// static final class TreeNode extends SALinkedHashMap.Entry {
-// TreeNode parent; // red-black tree links
-// TreeNode left;
-// TreeNode right;
-// TreeNode prev; // needed to unlink next upon deletion
-// boolean red;
-// TreeNode(int hash, K key, V val, Node next) {
-// super(hash, key, val, next);
-// }
-//
-// /**
-// * Returns root of tree containing this node.
-// */
-// final TreeNode root() {
-// for (TreeNode r = this, p;;) {
-// if ((p = r.parent) == null)
-// return r;
-// r = p;
-// }
-// }
-//
-// /**
-// * Ensures that the given root is the first node of its bin.
-// */
-// static void moveRootToFront(Node[] tab, TreeNode root) {
-// int n;
-// if (root != null && tab != null && (n = tab.length) > 0) {
-// int index = (n - 1) & root.hash;
-// TreeNode first = (TreeNode)tab[index];
-// if (root != first) {
-// Node rn;
+
+ /*
+ * Implementation note. A previous version of this class was
+ * internally structured a little differently. Because superclass
+ * HashMap now uses trees for some of its nodes, class
+ * LinkedHashMap.Entry is now treated as intermediary node class
+ * that can also be converted to tree form. The name of this
+ * class, LinkedHashMap.Entry, is confusing in several ways in its
+ * current context, but cannot be changed. Otherwise, even though
+ * it is not exported outside this package, some existing source
+ * code is known to have relied on a symbol resolution corner case
+ * rule in calls to removeEldestEntry that suppressed compilation
+ * errors due to ambiguous usages. So, we keep the name to
+ * preserve unmodified compilability.
+ *
+ * The changes in node classes also require using two fields
+ * (head, tail) rather than a pointer to a header node to maintain
+ * the doubly-linked before/after list. This class also
+ * previously used a different style of callback methods upon
+ * access, insertion, and removal.
+ */
+
+ /**
+ * HashMap.Node subclass for normal LinkedHashMap entries.
+ */
+ static class Entry<K,V> extends Node<K,V> {
+ Entry<K,V> before, after;
+ Entry(int hash, K key, V value, Node<K,V> next) {
+ super(hash, key, value, next);
+ }
+ }
+
+ /* ------------------------------------------------------------ */
+ // Tree bins
+
+ /**
+ * Entry for Tree bins. Extends LinkedHashMap.Entry (which in turn
+ * extends Node) so can be used as extension of either regular or
+ * linked node.
+ */
+ static final class TreeNode<K,V> extends Entry<K,V> {
+ TreeNode<K,V> parent; // red-black tree links
+ TreeNode<K,V> left;
+ TreeNode<K,V> right;
+ TreeNode<K,V> prev; // needed to unlink next upon deletion
+ boolean red;
+ TreeNode(int hash, K key, V val, Node<K,V> next) {
+ super(hash, key, val, next);
+ }
+
+ /**
+ * Returns root of tree containing this node.
+ */
+ final TreeNode<K,V> root() {
+ for (TreeNode<K,V> r = this, p;;) {
+ if ((p = r.parent) == null)
+ return r;
+ r = p;
+ }
+ }
+
+ /**
+ * Ensures that the given root is the first node of its bin.
+ */
+ static <K,V> void moveRootToFront(NodeTable<K,V> tab, TreeNode<K,V> root) {
+ int n;
+ if (root != null && tab != null && (n = (int)tab.getLength()) > 0) {
+ int index = (n - 1) & root.hash;
+ TreeNode<K,V> first = (TreeNode<K,V>)tab.get(index).next;
+ tab.get(index).next = root;
+ if (root != first) {
+ Node