@@ -31,6 +31,7 @@ import scala.collection.generic.DefaultSerializable
  *  @define orderDependent
  *  @define orderDependentFold
  */
+@deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11")
class LinkedHashMap[K, V]
  extends AbstractMap[K, V]
    with SeqMap[K, V]
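The deprecation note above points existing subclassers toward `.withDefault`. A minimal sketch of that suggested pattern (illustrative usage only, not part of the patch):

```scala
import scala.collection.mutable

// Instead of subclassing LinkedHashMap to override `default`, wrap it;
// the wrapper delegates to the map, so insertion order is preserved.
val counts = mutable.LinkedHashMap.empty[String, Int].withDefaultValue(0)
counts("scala") += 1     // missing key falls back to 0, then 1 is stored
counts("scala") += 1
println(counts("scala")) // 2
println(counts("other")) // 0 (default only, nothing stored)
```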
@@ -44,17 +45,23 @@ class LinkedHashMap[K, V]

  // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper
  // would not return the elements in insertion order
+
  private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V]
+
  private[collection] def _firstEntry: Entry = firstEntry

  @transient protected var firstEntry: Entry = null

  @transient protected var lastEntry: Entry = null

-  @transient private[this] var table = new Array[Entry](tableSizeFor(LinkedHashMap.defaultinitialSize))
+  /* Uses the same implementation as mutable.HashMap. The hashtable holds the following invariant:
+   * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i.
+   * - Every bucket is sorted in ascendant hash order
+   * - The sum of the lengths of all buckets is equal to contentSize.
+   */
+  @transient private[this] var table = new Array[Entry](tableSizeFor(LinkedHashMap.defaultinitialSize))

  private[this] var threshold: Int = newThreshold(table.length)
-  private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashMap.defaultLoadFactor).toInt

  private[this] var contentSize = 0
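For orientation: the invariant above is keyed to `index(hash) = hash & (table.length - 1)`, and the two sizing helpers referenced here decide when the table grows. A standalone sketch of that arithmetic (the helpers are copied from this file, with the 0.75 load factor inlined; the wrapping object is just for the example):

```scala
object SizingSketch {
  // Copies of tableSizeFor / newThreshold, with defaultLoadFactor (0.75) inlined.
  def tableSizeFor(capacity: Int): Int =
    (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30)
  def newThreshold(size: Int): Int = (size.toDouble * 0.75).toInt

  def main(args: Array[String]): Unit = {
    val initial = tableSizeFor(16)            // 16: already a power of two
    println((initial, newThreshold(initial))) // (16,12): grow once contentSize + 1 >= 12
    println(tableSizeFor(17))                 // 32: rounded up to the next power of two
  }
}
```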
@@ -77,6 +84,7 @@ class LinkedHashMap[K, V]
  override def size = contentSize
  override def knownSize: Int = size
  override def isEmpty: Boolean = size == 0
+
  def get(key: K): Option[V] = {
    val e = findEntry(key)
    if (e == null) None
@@ -90,23 +98,16 @@ class LinkedHashMap[K, V]
      super.contains(key) // A subclass might override `get`, use the default implementation `contains`.
  }

-  override def put(key: K, value: V): Option[V] = {
-    put0(key, value, true) match {
+  override def put(key: K, value: V): Option[V] = put0(key, value, true) match {
    case null => None
    case sm => sm
  }
-  }
-
-  override def update(key: K, value: V): Unit = {
-    put0(key, value, false)

-  }
+  override def update(key: K, value: V): Unit = put0(key, value, false)

-  override def remove(key: K): Option[V] = {
-    removeEntry0(key) match {
+  override def remove(key: K): Option[V] = removeEntry0(key) match {
    case null => None
    case nd => Some(nd.value)
-  }
  }

  private[this] def removeEntry0(elem: K): Entry = removeEntry0(elem, computeHash(elem))
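These one-liners keep the usual mutable.Map contract; a quick illustrative check (not part of the patch):

```scala
import scala.collection.mutable

val m = mutable.LinkedHashMap("a" -> 1)
assert(m.put("a", 2) == Some(1))   // replacing a key returns the previous value
assert(m.put("b", 3) == None)      // new key: no previous binding
m.update("b", 4)                   // update returns Unit
assert(m.remove("b") == Some(4))
assert(m.remove("missing") == None)
```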
@@ -155,17 +156,23 @@ class LinkedHashMap[K, V]

  @`inline` private[this] def index(hash: Int) = hash & (table.length - 1)

-  @`inline` private[this] def findEntry(key: K): Entry = {
+  @`inline` private[this] def findEntry(key: K): Entry = {
    val hash = computeHash(key)
    table(index(hash)) match {
      case null => null
      case nd => nd.findEntry(key, hash)
    }
  }

-  def addOne(kv: (K, V)): this.type = { put(kv._1, kv._2); this }
+  def addOne(kv: (K, V)): this.type = {
+    put(kv._1, kv._2)
+    this
+  }

-  def subtractOne(key: K): this.type = { remove(key); this }
+  def subtractOne(key: K): this.type = {
+    remove(key)
+    this
+  }

  def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] {
    private[this] var cur = firstEntry
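Because the iterator walks the firstEntry/later chain, iteration is in insertion order regardless of hashing; a quick illustrative check (not part of the patch):

```scala
import scala.collection.mutable

val m = mutable.LinkedHashMap.empty[String, Int]
m += ("one" -> 1)
m += ("two" -> 2)
m += ("three" -> 3)
m -= "two"
println(m.toList) // List((one,1), (three,3)) -- insertion order, minus the removed key
```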
@@ -195,8 +202,8 @@ class LinkedHashMap[K, V]
    val hash = computeHash(key)
    val indexedHash = index(hash)

-    var foundEntry = null.asInstanceOf[Entry]
-    var previousEntry = null.asInstanceOf[Entry]
+    var foundEntry: Entry = null
+    var previousEntry: Entry = null
    table(indexedHash) match {
      case null =>
      case nd =>
@@ -273,36 +280,39 @@ class LinkedHashMap[K, V]
    lastEntry = null
  }

-  private[this] def tableSizeFor(capacity: Int) =
-    (Integer.highestOneBit((capacity- 1).max(4))* 2).min(1 << 30)
+  private[this] def tableSizeFor(capacity: Int) =
+    (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30)
+
+  private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashMap.defaultLoadFactor).toInt

  /* create a new entry. If table is empty(firstEntry is null), then the
   * new entry will be the firstEntry. If not, just set the new entry to
   * be the lastEntry.
   * */
-  private[this] def createNewEntry(key: K, hash: Int, value: V): Entry =
-  {
-    val e = new Entry(key, hash, value)
-    if (firstEntry eq null) firstEntry = e
-    else { lastEntry.later = e; e.earlier = lastEntry }
-    lastEntry = e
-    e
+  private[this] def createNewEntry(key: K, hash: Int, value: V): Entry = {
+    val e = new Entry(key, hash, value)
+    if (firstEntry eq null) firstEntry = e
+    else {
+      lastEntry.later = e
+      e.earlier = lastEntry
    }
+    lastEntry = e
+    e
+  }

-  /* delete the entry from the linkedhashmap. set its earlier entry's later entry
-   * and later entry's earlier entry correctly.and set its earlier and later to
-   * be null.*/
+  /** Delete the entry from the LinkedHashMap, set the `earlier` and `later` pointers correctly */
  private[this] def deleteEntry(e: Entry): Unit = {
    if (e.earlier eq null) firstEntry = e.later
    else e.earlier.later = e.later
    if (e.later eq null) lastEntry = e.earlier
    else e.later.earlier = e.earlier
-    e.earlier = null // Null references to prevent nepotism
+    e.earlier = null
    e.later = null
+    e.next = null
  }

  private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = {
-    if (contentSize + 1 >= threshold) growTable(table.length * 2)
+    if (contentSize + 1 >= threshold) growTable(table.length * 2)
    val hash = computeHash(key)
    val idx = index(hash)
    put0(key, value, getOld, hash, idx)
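To make the earlier/later bookkeeping in `createNewEntry` and `deleteEntry` easier to follow, here is a stripped-down standalone sketch of the same append/unlink operations on a doubly linked list (simplified names, not the library code itself):

```scala
// Minimal doubly linked list mirroring the earlier/later bookkeeping above.
final class Node[A](val value: A) {
  var earlier: Node[A] = null
  var later: Node[A] = null
}

final class InsertionOrder[A] {
  var first: Node[A] = null
  var last: Node[A] = null

  // Mirrors createNewEntry: append at the tail, or become the head if empty.
  def append(value: A): Node[A] = {
    val e = new Node(value)
    if (first eq null) first = e
    else { last.later = e; e.earlier = last }
    last = e
    e
  }

  // Mirrors deleteEntry: splice the node out and null its links.
  def unlink(e: Node[A]): Unit = {
    if (e.earlier eq null) first = e.later else e.earlier.later = e.later
    if (e.later eq null) last = e.earlier else e.later.earlier = e.earlier
    e.earlier = null
    e.later = null
  }
}
```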
@@ -311,29 +321,27 @@ class LinkedHashMap[K, V]
  private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = {
    table(idx) match {
      case null =>
-        val nnode = createNewEntry(key, hash, value)
-        nnode.next = null
-        table(idx) = nnode
+        table(idx) = createNewEntry(key, hash, value)
      case old =>
-        var prev = null.asInstanceOf[Entry]
+        var prev: Entry = null
        var n = old
-        while ((n ne null) && n.hash <= hash) {
-          if (n.hash == hash && key == n.key) {
+        while ((n ne null) && n.hash <= hash) {
+          if (n.hash == hash && key == n.key) {
            val old = n.value
            n.value = value
-            return if (getOld) Some(old) else null
+            return if (getOld) Some(old) else null
          }
          prev = n
          n = n.next
        }
        val nnode = createNewEntry(key, hash, value)
-        if (prev eq null) {
-          table(idx) = nnode
+        if (prev eq null) {
          nnode.next = old
-        }
-        else {
+          table(idx) = nnode
+        } else {
          nnode.next = prev.next
-          prev.next = nnode}
+          prev.next = nnode
+        }
    }
    contentSize += 1
    null
@@ -416,6 +424,7 @@ class LinkedHashMap[K, V]
      index += 1
    }
  }
+
  private def readObject(in: java.io.ObjectInputStream): Unit = {
    in.defaultReadObject()
    serializeFrom(in, (in.readObject().asInstanceOf[K], in.readObject().asInstanceOf[V]))
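Together with the serialization machinery this file already uses, the custom writeObject/readObject above means a round trip through Java serialization restores both the contents and the insertion order; a small illustrative check (not part of the patch):

```scala
import java.io._
import scala.collection.mutable

val m = mutable.LinkedHashMap("x" -> 1, "y" -> 2)

val bos = new ByteArrayOutputStream()
val oos = new ObjectOutputStream(bos)
oos.writeObject(m)
oos.close()

val ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray))
val copy = ois.readObject().asInstanceOf[mutable.LinkedHashMap[String, Int]]
assert(copy.toList == m.toList) // contents and insertion order survive the round trip
```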
@@ -444,8 +453,7 @@ object LinkedHashMap extends MapFactory[LinkedHashMap] {

  /** Class for the linked hash map entry, used internally.
    */
-  private[mutable] final class LinkedEntry[K, V](val key: K, val hash: Int, var value: V)
-  {
+  private[mutable] final class LinkedEntry[K, V](val key: K, val hash: Int, var value: V) {
    var earlier: LinkedEntry[K, V] = null
    var later: LinkedEntry[K, V] = null
    var next: LinkedEntry[K, V] = null
@@ -455,22 +463,11 @@ object LinkedHashMap extends MapFactory[LinkedHashMap] {
      if (h == hash && k == key) this
      else if ((next eq null) || (hash > h)) null
      else next.findEntry(k, h)
-
-    @tailrec
-    final def foreach[U](f: ((K, V)) => U): Unit = {
-      f((key, value))
-      if (next ne null) next.foreach(f)
-    }
-
-    @tailrec
-    final def foreachEntry[U](f: (K, V) => U): Unit = {
-      f(key, value)
-      if (next ne null) next.foreachEntry(f)
-    }
-
  }

-  private[collection] final def defaultLoadFactor: Double = 0.75 // corresponds to 75%
+  /** The default load factor for the hash table */
+  private[collection] final def defaultLoadFactor: Double = 0.75
+
  /** The default initial capacity for the hash table */
  private[collection] final def defaultinitialSize: Int = 16
}
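A closing note on the lookup strategy visible in `findEntry` above: because each bucket is kept sorted by ascending hash, the search can stop as soon as it passes the target hash. A stripped-down standalone sketch of that early-exit search (simplified names and types, not the library code):

```scala
// One bucket: a singly linked chain kept sorted by ascending hash.
final class Link(val key: String, val hash: Int, var next: Link)

object BucketSearch {
  @annotation.tailrec
  def find(n: Link, key: String, hash: Int): Link =
    if (n == null) null
    else if (n.hash == hash && n.key == key) n
    else if (n.hash > hash) null // sorted bucket: later nodes can only have larger hashes
    else find(n.next, key, hash)
}
```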