• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

bitfaster / BitFaster.Caching / 13093624836

01 Feb 2025 11:59PM UTC coverage: 99.113% (-0.1%) from 99.229%
13093624836

Pull #664

github

web-flow
Merge 661d8c11e into 01805a51b
Pull Request #664: Eagerly purged deleted items from internal LRU queues

1110 of 1134 branches covered (97.88%)

Branch coverage included in aggregate %.

46 of 49 new or added lines in 6 files covered. (93.88%)

5 existing lines in 2 files now uncovered.

4813 of 4842 relevant lines covered (99.4%)

54486246.33 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

99.36
/BitFaster.Caching/Lru/ConcurrentLruCore.cs
1
using System;
2
using System.Collections;
3
using System.Collections.Concurrent;
4
using System.Collections.Generic;
5
using System.Diagnostics;
6
using System.Diagnostics.CodeAnalysis;
7
using System.Linq;
8
using System.Runtime.CompilerServices;
9
using System.Threading;
10
using System.Threading.Tasks;
11

12
namespace BitFaster.Caching.Lru
13
{
14
    /// <summary>
15
    /// A pseudo LRU based on the TU-Q eviction policy. The LRU list is composed of 3 segments: hot, warm and cold. 
16
    /// Cost of maintaining segments is amortized across requests. Items are only cycled when capacity is exceeded. 
17
    /// Pure read does not cycle items if all segments are within capacity constraints. There are no global locks. 
18
    /// On cache miss, a new item is added. Tail items in each segment are dequeued, examined, and are either enqueued 
19
    /// or discarded.
20
    /// The TU-Q scheme of hot, warm and cold is similar to that used in MemCached (https://memcached.org/blog/modern-lru/)
21
    /// and OpenBSD (https://flak.tedunangst.com/post/2Q-buffer-cache-algorithm), but does not use a background thread
22
    /// to maintain the internal queues.
23
    /// </summary>
24
    /// <remarks>
25
    /// Each segment has a capacity. When segment capacity is exceeded, items are moved as follows:
26
    /// <list type="number">
27
    ///   <item><description>New items are added to hot, WasAccessed = false.</description></item>
28
    ///   <item><description>When items are accessed, update WasAccessed = true.</description></item>
29
    ///   <item><description>When items are moved WasAccessed is set to false.</description></item>
30
    ///   <item><description>When hot is full, hot tail is moved to either Warm or Cold depending on WasAccessed.</description></item>
31
    ///   <item><description>When warm is full, warm tail is moved to warm head or cold depending on WasAccessed.</description></item>
32
    ///   <item><description>When cold is full, cold tail is moved to warm head or removed from the dictionary, depending on WasAccessed.</description></item>
33
    ///</list>
34
    /// </remarks>
35
    public class ConcurrentLruCore<K, V, I, P, T> : ICacheExt<K, V>, IAsyncCacheExt<K, V>, IEnumerable<KeyValuePair<K, V>>
36
        where K : notnull
37
        where I : LruItem<K, V>
38
        where P : struct, IItemPolicy<K, V, I>
39
        where T : struct, ITelemetryPolicy<K, V>
40
    {
41
        private readonly ConcurrentDictionary<K, I> dictionary;
42

43
        private readonly ConcurrentQueue<I> hotQueue;
44
        private readonly ConcurrentQueue<I> warmQueue;
45
        private readonly ConcurrentQueue<I> coldQueue;
46

47
        // maintain count outside ConcurrentQueue, since ConcurrentQueue.Count holds a global lock
48
        private PaddedQueueCount counter;
49

50
        private readonly ICapacityPartition capacity;
51

52
        private readonly P itemPolicy;
53
        private bool isWarm = false;
2,009✔
54

55
        /// <summary>
56
        /// The telemetry policy.
57
        /// </summary>
58
        /// <remarks>
59
        /// Since T is a struct, making it readonly will force the runtime to make defensive copies
60
        /// if mutate methods are called. Therefore, field must be mutable to maintain count.
61
        /// </remarks>
62
        protected T telemetryPolicy;
63

64
        /// <summary>
        /// Initializes a new instance of the ConcurrentLruCore class with the specified concurrency level,
        /// capacity partition, key comparer, item policy and telemetry policy.
        /// </summary>
        /// <param name="concurrencyLevel">The concurrency level.</param>
        /// <param name="capacity">The hot/warm/cold capacity partition.</param>
        /// <param name="comparer">The equality comparer used for keys.</param>
        /// <param name="itemPolicy">The item policy.</param>
        /// <param name="telemetryPolicy">The telemetry policy.</param>
        /// <exception cref="ArgumentNullException"></exception>
        public ConcurrentLruCore(
            int concurrencyLevel,
            ICapacityPartition capacity,
            IEqualityComparer<K> comparer,
            P itemPolicy,
            T telemetryPolicy)
        {
            if (capacity is null)
                Throw.ArgNull(ExceptionArgument.capacity);

            if (comparer is null)
                Throw.ArgNull(ExceptionArgument.comparer);

            // validate the partition before storing any state
            capacity.Validate();
            this.capacity = capacity;

            this.hotQueue = new ConcurrentQueue<I>();
            this.warmQueue = new ConcurrentQueue<I>();
            this.coldQueue = new ConcurrentQueue<I>();

            // pre-size the dictionary from total capacity to reduce rehashing while the cache fills
            int estimatedSize = ConcurrentDictionarySize.Estimate(this.Capacity);
            this.dictionary = new ConcurrentDictionary<K, I>(concurrencyLevel, estimatedSize, comparer);

            this.itemPolicy = itemPolicy;
            this.telemetryPolicy = telemetryPolicy;
            this.telemetryPolicy.SetEventSource(this);
        }
100

101
        // No lock count: https://arbel.net/2013/02/03/best-practices-for-using-concurrentdictionary/
        ///<inheritdoc/>
        /// <remarks>Excludes items the policy marks discardable but that are still queued. O(n) scan.</remarks>
        public int Count => this.dictionary.Count(i => !itemPolicy.ShouldDiscard(i.Value));

        ///<inheritdoc/>
        public int Capacity => this.capacity.Hot + this.capacity.Warm + this.capacity.Cold;

        ///<inheritdoc/>
        public Optional<ICacheMetrics> Metrics => CreateMetrics(this);

        ///<inheritdoc/>
        public Optional<ICacheEvents<K, V>> Events => CreateEvents(this);

        ///<inheritdoc/>
        public CachePolicy Policy => CreatePolicy(this);

        /// <summary>
        /// Gets the number of hot items.
        /// </summary>
        public int HotCount => Volatile.Read(ref this.counter.hot);

        /// <summary>
        /// Gets the number of warm items.
        /// </summary>
        public int WarmCount => Volatile.Read(ref this.counter.warm);

        /// <summary>
        /// Gets the number of cold items.
        /// </summary>
        public int ColdCount => Volatile.Read(ref this.counter.cold);

        /// <summary>
        /// Gets a collection containing the keys in the cache.
        /// </summary>
        public ICollection<K> Keys => this.dictionary.Keys;
136

137
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The enumerator returned from the cache is safe to use concurrently with
        /// reads and writes, however it does not represent a moment-in-time snapshot.
        /// The contents exposed through the enumerator may contain modifications
        /// made after <see cref="GetEnumerator"/> was called.
        /// </remarks>
        public IEnumerator<KeyValuePair<K, V>> GetEnumerator()
        {
            foreach (var pair in this.dictionary)
            {
                // hide entries the policy has already expired
                if (itemPolicy.ShouldDiscard(pair.Value))
                {
                    continue;
                }

                yield return new KeyValuePair<K, V>(pair.Key, pair.Value.Value);
            }
        }
155

156
        ///<inheritdoc/>
        public bool TryGet(K key, [MaybeNullWhen(false)] out V value)
        {
            // miss: key not present at all
            if (!dictionary.TryGetValue(key, out var item))
            {
                value = default;
                this.telemetryPolicy.IncrementMiss();
                return false;
            }

            // present; the item policy decides whether it is still alive
            return GetOrDiscard(item, out value);
        }
168

169
        // AggressiveInlining forces the JIT to inline policy.ShouldDiscard(). For LRU policy 
        // the first branch is completely eliminated due to JIT time constant propagation.
        /// <summary>
        /// Resolve a dictionary hit: either return the live value, or eagerly evict an expired item
        /// and report a miss so the caller re-creates the value.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool GetOrDiscard(I item, [MaybeNullWhen(false)] out V value)
        {
            if (this.itemPolicy.ShouldDiscard(item))
            {
                // expired: remove immediately and count as a miss
                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Evicted);
                this.telemetryPolicy.IncrementMiss();
                value = default;
                return false;
            }

            value = item.Value;

            // mark as accessed so cycling later routes this item toward warm
            this.itemPolicy.Touch(item);
            this.telemetryPolicy.IncrementHit();
            return true;
        }
188

189
        /// <summary>
        /// Attempts to insert the key/value pair as a new hot item.
        /// </summary>
        /// <returns>true if added; false if another writer inserted the key first.</returns>
        private bool TryAdd(K key, V value)
        {
            var candidate = this.itemPolicy.CreateItem(key, value);

            if (!this.dictionary.TryAdd(key, candidate))
            {
                // lost the insert race: dispose the speculatively created value
                Disposer<V>.Dispose(candidate.Value);
                return false;
            }

            this.hotQueue.Enqueue(candidate);
            Cycle(Interlocked.Increment(ref counter.hot));
            return true;
        }
203

204
        ///<inheritdoc/>
        public V GetOrAdd(K key, Func<K, V> valueFactory)
        {
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = valueFactory(key);

                if (TryAdd(key, created))
                {
                    return created;
                }

                // lost the add race; loop and fetch the winner's value
            }
        }
223

224
        /// <summary>
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
        /// existing value if the key already exists.
        /// </summary>
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
        /// <param name="key">The key of the element to add.</param>
        /// <param name="valueFactory">The factory function used to generate a value for the key.</param>
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
        /// <returns>The value for the key. This will be either the existing value for the key if the key is already 
        /// in the cache, or the new value if the key was not in the cache.</returns>
        public V GetOrAdd<TArg>(K key, Func<K, TArg, V> valueFactory, TArg factoryArgument)
        {
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = valueFactory(key, factoryArgument);

                if (TryAdd(key, created))
                {
                    return created;
                }

                // lost the add race; loop and fetch the winner's value
            }
        }
252

253
        ///<inheritdoc/>
        public async ValueTask<V> GetOrAddAsync(K key, Func<K, Task<V>> valueFactory)
        {
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                // This is identical logic in ConcurrentDictionary.GetOrAdd method.
                var created = await valueFactory(key).ConfigureAwait(false);

                if (TryAdd(key, created))
                {
                    return created;
                }

                // lost the add race; loop and fetch the winner's value
            }
        }
273

274
        /// <summary>
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
        /// existing value if the key already exists.
        /// </summary>
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
        /// <param name="key">The key of the element to add.</param>
        /// <param name="valueFactory">The factory function used to asynchronously generate a value for the key.</param>
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
        /// <returns>A task that represents the asynchronous GetOrAdd operation.</returns>
        public async ValueTask<V> GetOrAddAsync<TArg>(K key, Func<K, TArg, Task<V>> valueFactory, TArg factoryArgument)
        {
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = await valueFactory(key, factoryArgument).ConfigureAwait(false);

                if (TryAdd(key, created))
                {
                    return created;
                }

                // lost the add race; loop and fetch the winner's value
            }
        }
301

302
        /// <summary>
        /// Attempts to remove the specified key value pair.
        /// </summary>
        /// <param name="item">The item to remove.</param>
        /// <returns>true if the item was removed successfully; otherwise, false.</returns>
        /// <remarks>
        /// Removes only when the key exists AND the stored value equals item.Value per the default comparer.
        /// </remarks>
        public bool TryRemove(KeyValuePair<K, V> item)
        {
            if (this.dictionary.TryGetValue(item.Key, out var existing))
            {
                // the item lock serializes against TryUpdate and OnRemove, which mutate/dispose under the same lock
                lock (existing)
                {
                    if (EqualityComparer<V>.Default.Equals(existing.Value, item.Value))
                    {
                        var kvp = new KeyValuePair<K, I>(item.Key, existing);
#if NET6_0_OR_GREATER
                    if (this.dictionary.TryRemove(kvp))
#else
                        // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                        if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                        {
                            OnRemove(item.Key, kvp.Value, ItemRemovedReason.Removed);
                            return true;
                        }
                    }
                }

                // it existed, but we couldn't remove - this means value was replaced after the TryGetValue (a race)
            }

            return false;
        }
334

335
        /// <summary>
        /// Attempts to remove and return the value that has the specified key.
        /// </summary>
        /// <param name="key">The key of the element to remove.</param>
        /// <param name="value">When this method returns, contains the object removed, or the default value of the value type if key does not exist.</param>
        /// <returns>true if the object was removed successfully; otherwise, false.</returns>
        public bool TryRemove(K key, [MaybeNullWhen(false)] out V value)
        {
            if (!this.dictionary.TryRemove(key, out var removed))
            {
                value = default;
                return false;
            }

            // queue entries are purged lazily; OnRemove marks the item so cycling can drop it later
            OnRemove(key, removed, ItemRemovedReason.Removed);
            value = removed.Value;
            return true;
        }
353

354
        ///<inheritdoc/>
        public bool TryRemove(K key) => TryRemove(key, out _);
359

360
        /// <summary>
        /// Common cleanup after an item is removed from the dictionary: mark the queued entry dead,
        /// notify telemetry, then dispose the value.
        /// </summary>
        /// <param name="key">The key of the removed item.</param>
        /// <param name="item">The removed queue entry.</param>
        /// <param name="reason">The reason reported to the telemetry policy.</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void OnRemove(K key, I item, ItemRemovedReason reason)
        {
            // Mark as not accessed, it will later be cycled out of the queues because it can never be fetched 
            // from the dictionary. Note: Hot/Warm/Cold count will reflect the removed item until it is cycled 
            // from the queue.
            item.WasAccessed = false;
            item.WasRemoved = true;

            this.telemetryPolicy.OnItemRemoved(key, item.Value, reason);

            // serialize dispose (common case dispose not thread safe)
            lock (item)
            {
                Disposer<V>.Dispose(item.Value);
            }
        }
377

378
        ///<inheritdoc/>
        ///<remarks>Note: Calling this method does not affect LRU order.</remarks>
        public bool TryUpdate(K key, V value)
        {
            if (this.dictionary.TryGetValue(key, out var existing))
            {
                // item lock pairs with TryRemove/OnRemove so we never update a value being disposed
                lock (existing)
                {
                    // WasRemoved guard: a concurrently removed item must not be resurrected or re-disposed
                    if (!existing.WasRemoved)
                    {
                        V oldValue = existing.Value;

                        existing.Value = value;

                        this.itemPolicy.Update(existing);
// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
                        this.telemetryPolicy.OnItemUpdated(existing.Key, oldValue, existing.Value);
#endif
                        // the replaced value is disposed here; the new value is now owned by the cache
                        Disposer<V>.Dispose(oldValue);

                        return true;
                    }
                }
            }

            return false;
        }
406

407
        ///<inheritdoc/>
        ///<remarks>Note: Updates to existing items do not affect LRU order. Added items are at the top of the LRU.</remarks>
        public void AddOrUpdate(K key, V value)
        {
            for (; ; )
            {
                // first, try to update an existing entry in place
                if (this.TryUpdate(key, value))
                {
                    return;
                }

                // not present: attempt to insert as a new hot item
                var inserted = this.itemPolicy.CreateItem(key, value);

                if (this.dictionary.TryAdd(key, inserted))
                {
                    this.hotQueue.Enqueue(inserted);
                    Cycle(Interlocked.Increment(ref counter.hot));
                    return;
                }

                // both update and add lost a race; retry. The abandoned item wraps the caller's
                // value, which is reused on the next attempt, so it is not disposed here.
            }
        }
432

433
        ///<inheritdoc/>
        public void Clear()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // Use the queue counters rather than Count: this sweeps everything still queued,
                // including entries already removed from the dictionary, and avoids the O(n) scan.
                int queued = this.HotCount + this.WarmCount + this.ColdCount;
                this.TrimLiveItems(itemsRemoved: 0, queued, ItemRemovedReason.Cleared);
            }
        }
445

446
        /// <summary>
        /// Trim the specified number of items from the cache. Removes all discardable items per IItemPolicy.ShouldDiscard(), then 
        /// itemCount-discarded items in LRU order, if any.
        /// </summary>
        /// <param name="itemCount">The number of items to remove.</param>
        /// <returns>The number of items removed from the cache.</returns>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is less than 0./</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is greater than capacity./</exception>
        /// <remarks>
        /// Note: Trim affects LRU order. Calling Trim resets the internal accessed status of items.
        /// </remarks>
        public void Trim(int itemCount)
        {
            int totalCapacity = this.Capacity;

            if (itemCount < 1 || itemCount > totalCapacity)
                Throw.ArgOutOfRange(nameof(itemCount), "itemCount must be greater than or equal to one, and less than the capacity of the cache.");

            // clamp itemCount to number of items actually in the cache
            itemCount = Math.Min(itemCount, this.HotCount + this.WarmCount + this.ColdCount);

            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // first scan each queue for discardable items and remove them immediately. Note this can remove > itemCount items.
                int removedSoFar = this.itemPolicy.CanDiscard() ? TrimAllDiscardedItems() : 0;

                TrimLiveItems(removedSoFar, itemCount, ItemRemovedReason.Trimmed);
            }
        }
476

477
        /// <summary>
        /// Purge expired items from all queues, if the item policy supports discarding.
        /// </summary>
        private void TrimExpired()
        {
            if (!this.itemPolicy.CanDiscard())
            {
                return;
            }

            this.TrimAllDiscardedItems();
        }
484

485
        /// <summary>
        /// Trim discarded items from all queues.
        /// </summary>
        /// <returns>The number of items removed.</returns>
        // backcompat: make internal
        protected int TrimAllDiscardedItems()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // Rotates a queue once: dequeues up to the observed count, removes dead/expired
                // entries and re-enqueues survivors, keeping the queue's relative order.
                int RemoveDiscardableItems(ConcurrentQueue<I> q, ref int queueCounter)
                {
                    int itemsRemoved = 0;
                    // snapshot bounds the scan; concurrent adds after this point are not examined
                    int localCount = queueCounter;

                    for (int i = 0; i < localCount; i++)
                    {
                        if (q.TryDequeue(out var item))
                        {
                            // non-short-circuit '|': both ShouldDiscard and WasRemoved are always evaluated
                            if (this.itemPolicy.ShouldDiscard(item) | item.WasRemoved)
                            {
                                Interlocked.Decrement(ref queueCounter);
                                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Trimmed);
                                itemsRemoved++;
                            }
                            else
                            {
                                q.Enqueue(item);
                            }
                        }
                    }

                    return itemsRemoved;
                }

                int coldRem = RemoveDiscardableItems(coldQueue, ref this.counter.cold);
                int warmRem = RemoveDiscardableItems(warmQueue, ref this.counter.warm);
                int hotRem = RemoveDiscardableItems(hotQueue, ref this.counter.hot);

                // warm shrank below capacity, so re-enter warmup to refill it
                if (warmRem > 0)
                {
                    Volatile.Write(ref this.isWarm, false);
                }

                return coldRem + warmRem + hotRem;
            }
        }
532

533
        /// <summary>
        /// Remove up to itemCount live items in LRU order by repeatedly draining cold and
        /// cycling warm/hot items toward cold.
        /// </summary>
        /// <param name="itemsRemoved">The number of items already removed by the caller.</param>
        /// <param name="itemCount">The target total number of items to remove.</param>
        /// <param name="reason">The removal reason reported for each removed item.</param>
        private void TrimLiveItems(int itemsRemoved, int itemCount, ItemRemovedReason reason)
        {
            // When items are touched, they are moved to warm by cycling. Therefore, to guarantee 
            // that we can remove itemCount items, we must cycle (2 * capacity.Warm) + capacity.Hot times.
            // If clear is called during trimming, it would be possible to get stuck in an infinite
            // loop here. The warm + hot limit also guards against this case.
            int trimWarmAttempts = 0;
            int maxWarmHotAttempts = (this.capacity.Warm * 2) + this.capacity.Hot;

            while (itemsRemoved < itemCount && trimWarmAttempts < maxWarmHotAttempts)
            {
                if (Volatile.Read(ref this.counter.cold) > 0)
                {
                    // (Remove, 0) signals the cold tail was actually removed rather than recycled
                    if (TryRemoveCold(reason) == (ItemDestination.Remove, 0))
                    {
                        itemsRemoved++;
                        // progress was made, so reset the attempt budget
                        trimWarmAttempts = 0;
                    }

                    TrimWarmOrHot(reason);
                }
                else
                {
                    // cold is empty: cycle warm/hot to refill it, counting attempts to bound the loop
                    TrimWarmOrHot(reason);
                    trimWarmAttempts++;
                }
            }

            // warm dropped below capacity; re-enter warmup so it can refill
            if (Volatile.Read(ref this.counter.warm) < this.capacity.Warm)
            {
                Volatile.Write(ref this.isWarm, false);
            }
        }
566

567
        /// <summary>
        /// Cycle one item out of warm when warm is non-empty; otherwise cycle one out of hot.
        /// </summary>
        /// <param name="reason">The removal reason reported if the cycled item is discarded.</param>
        private void TrimWarmOrHot(ItemRemovedReason reason)
        {
            // warm is drained first; hot is only touched once warm is empty
            if (Volatile.Read(ref this.counter.warm) > 0)
            {
                CycleWarmUnchecked(reason);
                return;
            }

            if (Volatile.Read(ref this.counter.hot) > 0)
            {
                CycleHotUnchecked(reason);
            }
        }
578

579
        /// <summary>
        /// Cycle items between the hot/warm/cold queues after an add, doing a bounded amount of
        /// work per call.
        /// </summary>
        /// <param name="hotCount">The hot count observed when the new item was enqueued.</param>
        private void Cycle(int hotCount)
        {
            if (isWarm)
            {
                (var dest, var count) = CycleHot(hotCount);

                // Follow the displaced item through at most 3 further moves; a Remove result means
                // an item left the queues and the remaining queues are within bounds.
                int cycles = 0;
                while (cycles++ < 3 && dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm)
                    {
                        (dest, count) = CycleWarm(count);
                    }
                    else if (dest == ItemDestination.Cold)
                    {
                        (dest, count) = CycleCold(count);
                    }
                }

                // If nothing was removed yet, constrain the size of warm and cold by discarding the coldest item.
                if (dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm && count > this.capacity.Warm)
                    {
                        count = LastWarmToCold();
                    }

                    ConstrainCold(count, ItemRemovedReason.Evicted);
                }
            }
            else
            {
                // fill up the warm queue with new items until warm is full.
                // else during warmup the cache will only use the hot + cold queues until any item is requested twice.
                CycleDuringWarmup(hotCount);
            }
        }
616

617
        /// <summary>
        /// Warmup-phase cycling: overflow from hot goes straight to warm until warm reaches
        /// capacity, at which point the cache switches to the steady-state (isWarm) path.
        /// </summary>
        /// <param name="hotCount">The hot count observed when the new item was enqueued.</param>
        [MethodImpl(MethodImplOptions.NoInlining)]
        private void CycleDuringWarmup(int hotCount)
        {
            // do nothing until hot is full
            if (hotCount > this.capacity.Hot)
            {
                // decrement optimistically before dequeue; compensated below if the dequeue fails
                Interlocked.Decrement(ref this.counter.hot);

                if (this.hotQueue.TryDequeue(out var item))
                {
                    int count = this.Move(item, ItemDestination.Warm, ItemRemovedReason.Evicted);

                    // if warm is now full, overflow to cold and mark as warm
                    if (count > this.capacity.Warm)
                    {
                        Volatile.Write(ref this.isWarm, true);
                        count = LastWarmToCold();
                        ConstrainCold(count, ItemRemovedReason.Evicted);
                    }
                }
                else
                {
                    // queue was empty (raced with another cycler): undo the optimistic decrement
                    Interlocked.Increment(ref this.counter.hot);
                }
            }
        }
643

644
        /// <summary>
        /// Cycle one item out of hot, but only when hot exceeds its capacity.
        /// </summary>
        /// <param name="hotCount">The current hot count.</param>
        /// <returns>The destination of the moved item and the destination queue's new count.</returns>
        private (ItemDestination, int) CycleHot(int hotCount)
        {
            // within capacity: (Remove, 0) tells the caller no further cycling is needed
            return hotCount > this.capacity.Hot
                ? CycleHotUnchecked(ItemRemovedReason.Evicted)
                : (ItemDestination.Remove, 0);
        }
653

654
        /// <summary>
        /// Dequeue the tail item from the hot queue and move it to the destination
        /// chosen by the item policy. Assumes the caller already verified hot is over
        /// capacity.
        /// </summary>
        /// <param name="removedReason">The reason reported if the item is removed.</param>
        /// <returns>The destination the item was moved to, and the resulting count of that queue.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleHotUnchecked(ItemRemovedReason removedReason)
        {
            // Optimistically decrement before dequeuing; restored below if the queue
            // turns out to be empty.
            Interlocked.Decrement(ref this.counter.hot);

            if (this.hotQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteHot(item);
                return (where, this.Move(item, where, removedReason));
            }
            else
            {
                // Queue was empty: undo the optimistic decrement and report nothing moved.
                Interlocked.Increment(ref this.counter.hot);
                return (ItemDestination.Remove, 0);
            }
        }
670

671
        /// <summary>
        /// Cycle one item out of the warm queue if warm exceeds capacity.
        /// </summary>
        /// <param name="count">The current warm queue count, observed by the caller.</param>
        /// <returns>The destination the item was moved to, and the resulting count of that queue.</returns>
        private (ItemDestination, int) CycleWarm(int count)
        {
            // Warm is within capacity: nothing to cycle.
            if (count <= this.capacity.Warm)
            {
                return (ItemDestination.Remove, 0);
            }

            return CycleWarmUnchecked(ItemRemovedReason.Evicted);
        }
680

681
        /// <summary>
        /// Dequeue the tail item from the warm queue and either re-enqueue it to warm
        /// (when the policy routes it there and warm has space) or demote it to cold.
        /// Assumes the caller already verified warm is over capacity.
        /// </summary>
        /// <param name="removedReason">The reason reported if the item is removed.</param>
        /// <returns>The destination the item was moved to, and the resulting count of that queue.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleWarmUnchecked(ItemRemovedReason removedReason)
        {
            // Optimistically decrement before dequeuing; restored below if the queue
            // turns out to be empty.
            int wc = Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteWarm(item);

                // When the warm queue is full, we allow an overflow of 1 item before redirecting warm items to cold.
                // This only happens when hit rate is high, in which case we can consider all items relatively equal in
                // terms of which was least recently used.
                if (where == ItemDestination.Warm && wc <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    return (ItemDestination.Cold, this.Move(item, ItemDestination.Cold, removedReason));
                }
            }
            else
            {
                // Queue was empty: undo the optimistic decrement and report nothing moved.
                Interlocked.Increment(ref this.counter.warm);
                return (ItemDestination.Remove, 0);
            }
        }
708

709
        /// <summary>
        /// Cycle one item out of the cold queue if cold exceeds capacity.
        /// </summary>
        /// <param name="count">The current cold queue count, observed by the caller.</param>
        /// <returns>The destination the item was moved to, and the resulting count of that queue.</returns>
        private (ItemDestination, int) CycleCold(int count)
        {
            // Cold is within capacity: nothing to cycle.
            if (count <= this.capacity.Cold)
            {
                return (ItemDestination.Remove, 0);
            }

            return TryRemoveCold(ItemRemovedReason.Evicted);
        }
718

719
        /// <summary>
        /// Dequeue the tail item from the cold queue and either promote it to warm
        /// (when the policy routes it there and warm is within capacity) or remove it
        /// from the cache.
        /// </summary>
        /// <param name="removedReason">The reason reported if the item is removed.</param>
        /// <returns>The destination the item was moved to, and the resulting count of that queue.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) TryRemoveCold(ItemRemovedReason removedReason)
        {
            // Optimistically decrement before dequeuing; restored below if the queue
            // turns out to be empty.
            Interlocked.Decrement(ref this.counter.cold);

            if (this.coldQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteCold(item);

                // Only promote back to warm if warm currently has space; otherwise remove.
                if (where == ItemDestination.Warm && Volatile.Read(ref this.counter.warm) <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    this.Move(item, ItemDestination.Remove, removedReason);
                    return (ItemDestination.Remove, 0);
                }
            }
            else
            {
                // Queue was empty: undo the optimistic decrement.
                return (ItemDestination.Cold, Interlocked.Increment(ref this.counter.cold));
            }
        }
743

744
        /// <summary>
        /// Demote the tail item of the warm queue to the cold queue.
        /// </summary>
        /// <returns>The resulting cold queue count, or 0 when the warm queue was empty.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int LastWarmToCold()
        {
            // Optimistically decrement before dequeuing; restored below if the queue
            // turns out to be empty.
            Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                return this.Move(item, ItemDestination.Cold, ItemRemovedReason.Evicted);
            }
            else
            {
                // Queue was empty: undo the optimistic decrement.
                Interlocked.Increment(ref this.counter.warm);
                return 0;
            }
        }
759

760
        /// <summary>
        /// If cold exceeds capacity, dequeue one item from the cold queue and remove it
        /// from the cache.
        /// </summary>
        /// <param name="coldCount">The current cold queue count, observed by the caller.</param>
        /// <param name="removedReason">The reason reported for the removal.</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void ConstrainCold(int coldCount, ItemRemovedReason removedReason)
        {
            if (coldCount > this.capacity.Cold && this.coldQueue.TryDequeue(out var item))
            {
                Interlocked.Decrement(ref this.counter.cold);
                this.Move(item, ItemDestination.Remove, removedReason);
            }
        }
769

770
        /// <summary>
        /// Move an item to the given destination queue, or remove it from the cache.
        /// Clears the item's accessed flag, so it must be touched again to survive a
        /// subsequent cycle.
        /// </summary>
        /// <param name="item">The item to move.</param>
        /// <param name="where">The destination queue, or Remove to evict the item.</param>
        /// <param name="removedReason">The reason reported if the item is removed.</param>
        /// <returns>The resulting count of the destination queue, or 0 when the item was removed.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int Move(I item, ItemDestination where, ItemRemovedReason removedReason)
        {
            item.WasAccessed = false;

            switch (where)
            {
                case ItemDestination.Warm:
                    this.warmQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.warm);
                case ItemDestination.Cold:
                    this.coldQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.cold);
                case ItemDestination.Remove:

                    // Remove the exact key + item pair, so a different item stored under
                    // the same key by a concurrent writer is not removed by mistake.
                    var kvp = new KeyValuePair<K, I>(item.Key, item);

#if NET6_0_OR_GREATER
                    if (this.dictionary.TryRemove(kvp))
#else
                    // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                    if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                    {
                        OnRemove(item.Key, item, removedReason);
                    }
                    break;
            }

            return 0;
        }
801

802
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The enumerator is safe to use concurrently with reads and writes, but it is
        /// not a moment-in-time snapshot: modifications made after
        /// <see cref="GetEnumerator"/> is called may be visible through it.
        /// </remarks>
        IEnumerator IEnumerable.GetEnumerator() =>
            ((ConcurrentLruCore<K, V, I, P, T>)this).GetEnumerator();
814

815
        /// <summary>
        /// Build the <see cref="CachePolicy"/> exposed by this cache, based on the
        /// compile-time item policy <typeparamref name="P"/>.
        /// </summary>
        private static CachePolicy CreatePolicy(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            var proxy = new Proxy(lru);
            var bounded = new Optional<IBoundedPolicy>(proxy);

            // After-access expiry: expose the proxy as the sliding-expiry time policy.
            if (typeof(P) == typeof(AfterAccessPolicy<K, V>))
            {
                return new CachePolicy(bounded, Optional<ITimePolicy>.None(), new Optional<ITimePolicy>(proxy), Optional<IDiscreteTimePolicy>.None());
            }

            // IsAssignableFrom is a jit intrinsic https://github.com/dotnet/runtime/issues/4920
            if (typeof(IDiscreteItemPolicy<K, V>).IsAssignableFrom(typeof(P)))
            {
                return new CachePolicy(bounded, Optional<ITimePolicy>.None(), Optional<ITimePolicy>.None(), new Optional<IDiscreteTimePolicy>(new DiscreteExpiryProxy(lru)));
            }

            // Default: expose an absolute expiry policy only when the item policy can discard items.
            var expireAfterWrite = lru.itemPolicy.CanDiscard()
                ? new Optional<ITimePolicy>(proxy)
                : Optional<ITimePolicy>.None();

            return new CachePolicy(bounded, expireAfterWrite);
        }
832

833
        /// <summary>
        /// Expose cache metrics via a proxy, unless telemetry is disabled for this cache.
        /// </summary>
        private static Optional<ICacheMetrics> CreateMetrics(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            // With NoTelemetryPolicy there are no counters to report.
            bool telemetryDisabled = typeof(T) == typeof(NoTelemetryPolicy<K, V>);

            return telemetryDisabled
                ? Optional<ICacheMetrics>.None()
                : new Optional<ICacheMetrics>(new Proxy(lru));
        }
842

843
        /// <summary>
        /// Expose cache events via a proxy, unless telemetry is disabled for this cache.
        /// </summary>
        private static Optional<ICacheEvents<K, V>> CreateEvents(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            // With NoTelemetryPolicy there is nothing to raise events from.
            bool telemetryDisabled = typeof(T) == typeof(NoTelemetryPolicy<K, V>);

            return telemetryDisabled
                ? Optional<ICacheEvents<K, V>>.None()
                : new Optional<ICacheEvents<K, V>>(new Proxy(lru));
        }
852

853
        // To get JIT optimizations, policies must be structs.
854
        // If the structs are returned directly via properties, they will be copied. Since  
855
        // telemetryPolicy is a mutable struct, copy is bad. One workaround is to store the 
856
        // state within the struct in an object. Since the struct points to the same object
857
        // it becomes immutable. However, this object is then somewhere else on the 
858
        // heap, which slows down the policies with hit counter logic in benchmarks. Likely
859
        // this approach keeps the structs data members in the same CPU cache line as the LRU.
860
        // backcompat: remove conditional compile
861
#if NETCOREAPP3_0_OR_GREATER
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Upd = {Updated}, Evict = {Evicted}")]
#else
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Evict = {Evicted}")]
#endif
        /// <summary>
        /// Adapts the cache's telemetry and item policies to the public metrics, events
        /// and policy interfaces, forwarding every member to the owning LRU instance.
        /// </summary>
        private class Proxy : ICacheMetrics, ICacheEvents<K, V>, IBoundedPolicy, ITimePolicy
        {
            // The cache instance all members delegate to.
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public Proxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            public double HitRatio => lru.telemetryPolicy.HitRatio;

            public long Total => lru.telemetryPolicy.Total;

            public long Hits => lru.telemetryPolicy.Hits;

            public long Misses => lru.telemetryPolicy.Misses;

            public long Evicted => lru.telemetryPolicy.Evicted;

// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public long Updated => lru.telemetryPolicy.Updated;
#endif
            public int Capacity => lru.Capacity;

            public TimeSpan TimeToLive => lru.itemPolicy.TimeToLive;

            // Forward event subscriptions to the telemetry policy, which raises them.
            public event EventHandler<ItemRemovedEventArgs<K, V>> ItemRemoved
            {
                add { this.lru.telemetryPolicy.ItemRemoved += value; }
                remove { this.lru.telemetryPolicy.ItemRemoved -= value; }
            }

// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public event EventHandler<ItemUpdatedEventArgs<K, V>> ItemUpdated
            {
                add { this.lru.telemetryPolicy.ItemUpdated += value; }
                remove { this.lru.telemetryPolicy.ItemUpdated -= value; }
            }
#endif
            public void Trim(int itemCount)
            {
                lru.Trim(itemCount);
            }

            public void TrimExpired()
            {
                lru.TrimExpired();
            }
        }
917

918
        /// <summary>
        /// Adapts a cache using per-item discrete expiry to the public
        /// <see cref="IDiscreteTimePolicy"/> interface.
        /// </summary>
        private class DiscreteExpiryProxy : IDiscreteTimePolicy
        {
            // The cache instance all members delegate to.
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public DiscreteExpiryProxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            /// <summary>Remove all expired items from the cache.</summary>
            public void TrimExpired()
            {
                lru.TrimExpired();
            }

            /// <summary>
            /// Try to get the remaining time to live for the item stored under
            /// <paramref name="key"/>.
            /// </summary>
            /// <param name="key">The key to look up; must be of the cache's key type.</param>
            /// <param name="timeToLive">The remaining time to live, when found.</param>
            /// <returns>True when the key exists and carries a tick count; otherwise false.</returns>
            public bool TryGetTimeToExpire<TKey>(TKey key, out TimeSpan timeToLive)
            {
                // Use a type pattern instead of an `as` cast followed by the
                // null-forgiving operator: the old code would throw
                // NullReferenceException if the stored item were ever not a
                // LongTickCountLruItem. This proxy is only created for
                // IDiscreteItemPolicy caches (see CreatePolicy), so the pattern is
                // expected to always match.
                if (key is K k
                    && lru.dictionary.TryGetValue(k, out var item)
                    && item is LongTickCountLruItem<K, V> tickItem)
                {
                    // Remaining TTL = stored absolute expiry tick minus now.
                    timeToLive = (new Duration(tickItem.TickCount) - Duration.SinceEpoch()).ToTimeSpan();
                    return true;
                }

                timeToLive = default;
                return false;
            }
        }
945
    }
946
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc