• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

bitfaster / BitFaster.Caching / 15950375526

29 Jun 2025 01:57AM UTC coverage: 99.086% (-0.02%) from 99.102%
15950375526

Pull #692

github

web-flow
Merge 624cae9e0 into 28e46925e
Pull Request #692: add net9 target and tests

1124 of 1148 branches covered (97.91%)

Branch coverage included in aggregate %.

1 of 1 new or added line in 1 file covered. (100.0%)

3 existing lines in 1 file now uncovered.

4839 of 4870 relevant lines covered (99.36%)

66690702.98 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

99.4
/BitFaster.Caching/Lru/ConcurrentLruCore.cs
1
using System;
2
using System.Collections;
3
using System.Collections.Concurrent;
4
using System.Collections.Generic;
5
using System.Diagnostics;
6
using System.Diagnostics.CodeAnalysis;
7
using System.Linq;
8
using System.Runtime.CompilerServices;
9
using System.Threading;
10
using System.Threading.Tasks;
11

12
namespace BitFaster.Caching.Lru
13
{
14
    /// <summary>
15
    /// A pseudo LRU based on the TU-Q eviction policy. The LRU list is composed of 3 segments: hot, warm and cold. 
16
    /// Cost of maintaining segments is amortized across requests. Items are only cycled when capacity is exceeded. 
17
    /// Pure read does not cycle items if all segments are within capacity constraints. There are no global locks. 
18
    /// On cache miss, a new item is added. Tail items in each segment are dequeued, examined, and are either enqueued 
19
    /// or discarded.
20
    /// The TU-Q scheme of hot, warm and cold is similar to that used in MemCached (https://memcached.org/blog/modern-lru/)
21
    /// and OpenBSD (https://flak.tedunangst.com/post/2Q-buffer-cache-algorithm), but does not use a background thread
22
    /// to maintain the internal queues.
23
    /// </summary>
24
    /// <remarks>
25
    /// Each segment has a capacity. When segment capacity is exceeded, items are moved as follows:
26
    /// <list type="number">
27
    ///   <item><description>New items are added to hot, WasAccessed = false.</description></item>
28
    ///   <item><description>When items are accessed, update WasAccessed = true.</description></item>
29
    ///   <item><description>When items are moved WasAccessed is set to false.</description></item>
30
    ///   <item><description>When hot is full, hot tail is moved to either Warm or Cold depending on WasAccessed.</description></item>
31
    ///   <item><description>When warm is full, warm tail is moved to warm head or cold depending on WasAccessed.</description></item>
32
    ///   <item><description>When cold is full, cold tail is moved to warm head or removed from dictionary depending on WasAccessed.</description></item>
33
    ///</list>
34
    /// </remarks>
35
    public class ConcurrentLruCore<K, V, I, P, T> : ICacheExt<K, V>, IAsyncCacheExt<K, V>, IEnumerable<KeyValuePair<K, V>>
36
        where K : notnull
37
        where I : LruItem<K, V>
38
        where P : struct, IItemPolicy<K, V, I>
39
        where T : struct, ITelemetryPolicy<K, V>
40
    {
41
        private readonly ConcurrentDictionary<K, I> dictionary;
42

43
        private readonly ConcurrentQueue<I> hotQueue;
44
        private readonly ConcurrentQueue<I> warmQueue;
45
        private readonly ConcurrentQueue<I> coldQueue;
46

47
        // maintain count outside ConcurrentQueue, since ConcurrentQueue.Count holds a global lock
48
        private PaddedQueueCount counter;
49

50
        private readonly ICapacityPartition capacity;
51

52
        private readonly P itemPolicy;
53
        private bool isWarm = false;
2,546✔
54

55
        /// <summary>
56
        /// The telemetry policy.
57
        /// </summary>
58
        /// <remarks>
59
        /// Since T is a struct, making it readonly will force the runtime to make defensive copies
60
        /// if mutate methods are called. Therefore, field must be mutable to maintain count.
61
        /// </remarks>
62
        protected T telemetryPolicy;
63

64
        /// <summary>
65
        /// Initializes a new instance of the ConcurrentLruCore class with the specified concurrencyLevel, capacity, equality comparer, item policy and telemetry policy.
66
        /// </summary>
67
        /// <param name="concurrencyLevel">The concurrency level.</param>
68
        /// <param name="capacity">The capacity.</param>
69
        /// <param name="comparer">The equality comparer.</param>
70
        /// <param name="itemPolicy">The item policy.</param>
71
        /// <param name="telemetryPolicy">The telemetry policy.</param>
72
        /// <exception cref="ArgumentNullException"></exception>
73
        public ConcurrentLruCore(
2,546✔
74
            int concurrencyLevel,
2,546✔
75
            ICapacityPartition capacity,
2,546✔
76
            IEqualityComparer<K> comparer,
2,546✔
77
            P itemPolicy,
2,546✔
78
            T telemetryPolicy)
2,546✔
79
        {
2,546✔
80
            if (capacity == null)
2,546✔
81
                Throw.ArgNull(ExceptionArgument.capacity);
5✔
82

83
            if (comparer == null)
2,541✔
84
                Throw.ArgNull(ExceptionArgument.comparer);
5✔
85

86
            capacity.Validate();
2,536✔
87
            this.capacity = capacity;
2,531✔
88

89
            this.hotQueue = new ConcurrentQueue<I>();
2,531✔
90
            this.warmQueue = new ConcurrentQueue<I>();
2,531✔
91
            this.coldQueue = new ConcurrentQueue<I>();
2,531✔
92

93
            int dictionaryCapacity = ConcurrentDictionarySize.Estimate(this.Capacity);
2,531✔
94

95
            this.dictionary = new ConcurrentDictionary<K, I>(concurrencyLevel, dictionaryCapacity, comparer);
2,531✔
96
            this.itemPolicy = itemPolicy;
2,521✔
97
            this.telemetryPolicy = telemetryPolicy;
2,521✔
98
            this.telemetryPolicy.SetEventSource(this);
2,521✔
99
        }
2,521✔
100

101
        // No lock count: https://arbel.net/2013/02/03/best-practices-for-using-concurrentdictionary/
102
        ///<inheritdoc/>
103
        public int Count => this.dictionary.Where(i => !itemPolicy.ShouldDiscard(i.Value)).Count();
17,458,782✔
104

105
        ///<inheritdoc/>
106
        public int Capacity => this.capacity.Hot + this.capacity.Warm + this.capacity.Cold;
2,000,003,966✔
107

108
        ///<inheritdoc/>
109
        public Optional<ICacheMetrics> Metrics => CreateMetrics(this);
165✔
110

111
        ///<inheritdoc/>
112
        public Optional<ICacheEvents<K, V>> Events => CreateEvents(this);
1,308✔
113

114
        ///<inheritdoc/>
115
        public CachePolicy Policy => CreatePolicy(this);
346✔
116

117
        /// <summary>
118
        /// Gets the number of hot items.
119
        /// </summary>
120
        public int HotCount => Volatile.Read(ref this.counter.hot);
1,586,115✔
121

122
        /// <summary>
123
        /// Gets the number of warm items.
124
        /// </summary>
125
        public int WarmCount => Volatile.Read(ref this.counter.warm);
1,586,115✔
126

127
        /// <summary>
128
        /// Gets the number of cold items.
129
        /// </summary>
130
        public int ColdCount => Volatile.Read(ref this.counter.cold);
1,586,120✔
131

132
        /// <summary>
133
        /// Gets a collection containing the keys in the cache.
134
        /// </summary>
135
        public ICollection<K> Keys => this.dictionary.Keys;
1,000✔
136

137
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
138
        /// <returns>An enumerator for the cache.</returns>
139
        /// <remarks>
140
        /// The enumerator returned from the cache is safe to use concurrently with
141
        /// reads and writes, however it does not represent a moment-in-time snapshot.  
142
        /// The contents exposed through the enumerator may contain modifications
143
        /// made after <see cref="GetEnumerator"/> was called.
144
        /// </remarks>
145
        public IEnumerator<KeyValuePair<K, V>> GetEnumerator()
146
        {
315✔
147
            foreach (var kvp in this.dictionary)
1,620✔
148
            {
340✔
149
                if (!itemPolicy.ShouldDiscard(kvp.Value))
340✔
150
                { 
325✔
151
                    yield return new KeyValuePair<K, V>(kvp.Key, kvp.Value.Value); 
325✔
152
                }
320✔
153
            }
335✔
154
        }
310✔
155

156
        ///<inheritdoc/>
157
        public bool TryGet(K key, [MaybeNullWhen(false)] out V value)
158
        {
2,248,171,495✔
159
            if (dictionary.TryGetValue(key, out var item))
2,248,171,495✔
160
            {
2,027,025,877✔
161
                return GetOrDiscard(item, out value);
2,027,025,877✔
162
            }
163

164
            value = default;
221,145,618✔
165
            this.telemetryPolicy.IncrementMiss();
221,145,618✔
166
            return false;
221,145,618✔
167
        }
2,248,171,495✔
168

169
        // AggressiveInlining forces the JIT to inline policy.ShouldDiscard(). For LRU policy 
170
        // the first branch is completely eliminated due to JIT time constant propagation.
171
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
172
        private bool GetOrDiscard(I item, [MaybeNullWhen(false)] out V value)
173
        {
2,027,025,877✔
174
            if (this.itemPolicy.ShouldDiscard(item))
2,027,025,877✔
175
            {
4,595,657✔
176
                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Evicted);
4,595,657✔
177
                this.telemetryPolicy.IncrementMiss();
4,595,657✔
178
                value = default;
4,595,657✔
179
                return false;
4,595,657✔
180
            }
181

182
            value = item.Value;
2,022,430,220✔
183

184
            this.itemPolicy.Touch(item);
2,022,430,220✔
185
            this.telemetryPolicy.IncrementHit();
2,022,430,220✔
186
            return true;
2,022,430,220✔
187
        }
2,027,025,877✔
188

189
        private bool TryAdd(K key, V value)
190
        {
225,740,916✔
191
            var newItem = this.itemPolicy.CreateItem(key, value);
225,740,916✔
192

193
            if (this.dictionary.TryAdd(key, newItem))
225,740,916✔
194
            {
214,292,247✔
195
                this.hotQueue.Enqueue(newItem);
214,292,247✔
196
                Cycle(Interlocked.Increment(ref counter.hot));
214,292,247✔
197
                return true;
214,292,247✔
198
            }
199

200
            Disposer<V>.Dispose(newItem.Value);
11,448,669✔
201
            return false;
11,448,669✔
202
        }
225,740,916✔
203

204
        ///<inheritdoc/>
205
        public V GetOrAdd(K key, Func<K, V> valueFactory)
206
        {
2,171,221,015✔
207
            while (true)
2,179,901,347✔
208
            {
2,179,901,347✔
209
                if (this.TryGet(key, out var value))
2,179,901,347✔
210
                {
2,009,819,487✔
211
                    return value;
2,009,819,487✔
212
                }
213

214
                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
215
                value = valueFactory(key);
170,081,860✔
216

217
                if (TryAdd(key, value))
170,081,860✔
218
                {
161,401,528✔
219
                    return value;
161,401,528✔
220
                }
221
            }
8,680,332✔
222
        }
2,171,221,015✔
223

224
        /// <summary>
225
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
226
        /// existing value if the key already exists.
227
        /// </summary>
228
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
229
        /// <param name="key">The key of the element to add.</param>
230
        /// <param name="valueFactory">The factory function used to generate a value for the key.</param>
231
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
232
        /// <returns>The value for the key. This will be either the existing value for the key if the key is already 
233
        /// in the cache, or the new value if the key was not in the cache.</returns>
234
        public V GetOrAdd<TArg>(K key, Func<K, TArg, V> valueFactory, TArg factoryArgument)
235
        {
20,000,015✔
236
            while (true)
21,077,161✔
237
            {
21,077,161✔
238
                if (this.TryGet(key, out var value))
21,077,161✔
239
                {
2,810,907✔
240
                    return value;
2,810,907✔
241
                }
242

243
                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
244
                value = valueFactory(key, factoryArgument);
18,266,254✔
245

246
                if (TryAdd(key, value))
18,266,254✔
247
                {
17,189,108✔
248
                    return value;
17,189,108✔
249
                }
250
            }
1,077,146✔
251
        }
20,000,015✔
252

253
        ///<inheritdoc/>
254
        public async ValueTask<V> GetOrAddAsync(K key, Func<K, Task<V>> valueFactory)
255
        {
20,000,380✔
256
            while (true)
20,645,450✔
257
            {
20,645,450✔
258
                if (this.TryGet(key, out var value))
20,645,450✔
259
                {
1,706,485✔
260
                    return value;
1,706,485✔
261
                }
262

263
                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
264
                // This is identical logic in ConcurrentDictionary.GetOrAdd method.
265
                value = await valueFactory(key).ConfigureAwait(false);
18,938,965✔
266

267
                if (TryAdd(key, value))
18,938,950✔
268
                {
18,293,880✔
269
                    return value;
18,293,880✔
270
                }
271
            }
645,070✔
272
        }
20,000,365✔
273

274
        /// <summary>
275
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
276
        /// existing value if the key already exists.
277
        /// </summary>
278
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
279
        /// <param name="key">The key of the element to add.</param>
280
        /// <param name="valueFactory">The factory function used to asynchronously generate a value for the key.</param>
281
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
282
        /// <returns>A task that represents the asynchronous GetOrAdd operation.</returns>
283
        public async ValueTask<V> GetOrAddAsync<TArg>(K key, Func<K, TArg, Task<V>> valueFactory, TArg factoryArgument)
284
        {
20,000,345✔
285
            while (true)
21,046,466✔
286
            {
21,046,466✔
287
                if (this.TryGet(key, out var value))
21,046,466✔
288
                {
2,592,614✔
289
                    return value;
2,592,614✔
290
                }
291

292
                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
293
                value = await valueFactory(key, factoryArgument).ConfigureAwait(false);
18,453,852✔
294

295
                if (TryAdd(key, value))
18,453,852✔
296
                {
17,407,731✔
297
                    return value;
17,407,731✔
298
                }
299
            }
1,046,121✔
300
        }
20,000,345✔
301

302
        /// <summary>
303
        /// Attempts to remove the specified key value pair.
304
        /// </summary>
305
        /// <param name="item">The item to remove.</param>
306
        /// <returns>true if the item was removed successfully; otherwise, false.</returns>
307
        public bool TryRemove(KeyValuePair<K, V> item)
308
        {
23,997,919✔
309
            if (this.dictionary.TryGetValue(item.Key, out var existing))
23,997,919✔
310
            {
2,634,832✔
311
                lock (existing)
2,634,832✔
312
                {
2,634,832✔
313
                    if (EqualityComparer<V>.Default.Equals(existing.Value, item.Value))
2,634,832✔
314
                    {
296,773✔
315
                        var kvp = new KeyValuePair<K, I>(item.Key, existing);
296,773✔
316
#if NET6_0_OR_GREATER
317
                    if (this.dictionary.TryRemove(kvp))
240,054✔
318
#else
319
                        // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
320
                        if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
56,719✔
321
#endif
322
                        {
286,859✔
323
                            OnRemove(item.Key, kvp.Value, ItemRemovedReason.Removed);
286,859✔
324
                            return true;
286,859✔
325
                        }
326
                    }
9,914✔
327
                }
2,347,973✔
328

329
                // it existed, but we couldn't remove - this means value was replaced after the TryGetValue (a race)
330
            }
2,347,973✔
331

332
            return false;
23,711,060✔
333
        }
23,997,919✔
334

335
        /// <summary>
336
        /// Attempts to remove and return the value that has the specified key.
337
        /// </summary>
338
        /// <param name="key">The key of the element to remove.</param>
339
        /// <param name="value">When this method returns, contains the object removed, or the default value of the value type if key does not exist.</param>
340
        /// <returns>true if the object was removed successfully; otherwise, false.</returns>
341
        public bool TryRemove(K key, [MaybeNullWhen(false)] out V value)
342
        {
20,000,298✔
343
            if (this.dictionary.TryRemove(key, out var item))
20,000,298✔
344
            {
41,730✔
345
                OnRemove(key, item, ItemRemovedReason.Removed);
41,730✔
346
                value = item.Value;
41,730✔
347
                return true;
41,730✔
348
            }
349

350
            value = default;
19,958,568✔
351
            return false;
19,958,568✔
352
        }
20,000,298✔
353

354
        ///<inheritdoc/>
355
        public bool TryRemove(K key)
356
        {
20,000,220✔
357
            return TryRemove(key, out _);
20,000,220✔
358
        }
20,000,220✔
359

360
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
361
        private void OnRemove(K key, I item, ItemRemovedReason reason)
362
        {
231,361,633✔
363
            // Mark as not accessed, it will later be cycled out of the queues because it can never be fetched 
364
            // from the dictionary. Note: Hot/Warm/Cold count will reflect the removed item until it is cycled 
365
            // from the queue.
366
            item.WasAccessed = false;
231,361,633✔
367
            item.WasRemoved = true;
231,361,633✔
368

369
            this.telemetryPolicy.OnItemRemoved(key, item.Value, reason);
231,361,633✔
370

371
            // serialize dispose (common case dispose not thread safe)
372
            lock (item)
231,361,633✔
373
            {
231,361,633✔
374
                Disposer<V>.Dispose(item.Value);
231,361,633✔
375
            }
231,361,633✔
376
        }
231,361,633✔
377

378
        ///<inheritdoc/>
379
        ///<remarks>Note: Calling this method does not affect LRU order.</remarks>
380
        public bool TryUpdate(K key, V value)
381
        {
63,311,048✔
382
            if (this.dictionary.TryGetValue(key, out var existing))
63,311,048✔
383
            {
9,299,177✔
384
                lock (existing)
9,299,177✔
385
                {
9,299,177✔
386
                    if (!existing.WasRemoved)
9,299,177✔
387
                    {
9,058,561✔
388
                        V oldValue = existing.Value;
9,058,561✔
389

390
                        existing.Value = value;
9,058,561✔
391

392
                        this.itemPolicy.Update(existing);
9,058,561✔
393
// backcompat: remove conditional compile
394
#if NETCOREAPP3_0_OR_GREATER
395
                        this.telemetryPolicy.OnItemUpdated(existing.Key, oldValue, existing.Value);
9,058,561✔
396
#endif
397
                        Disposer<V>.Dispose(oldValue);
9,058,561✔
398

399
                        return true;
9,058,561✔
400
                    }
401
                }
240,616✔
402
            }
240,616✔
403

404
            return false;
54,252,487✔
405
        }
63,311,048✔
406

407
        ///<inheritdoc/>
408
        ///<remarks>Note: Updates to existing items do not affect LRU order. Added items are at the top of the LRU.</remarks>
409
        public void AddOrUpdate(K key, V value)
410
        {
23,077,329✔
411
            while (true)
23,310,922✔
412
            {
23,310,922✔
413
                // first, try to update
414
                if (this.TryUpdate(key, value))
23,310,922✔
415
                {
5,974,089✔
416
                    return;
5,974,089✔
417
                }
418

419
                // then try add
420
                var newItem = this.itemPolicy.CreateItem(key, value);
17,336,833✔
421

422
                if (this.dictionary.TryAdd(key, newItem))
17,336,833✔
423
                {
17,103,240✔
424
                    this.hotQueue.Enqueue(newItem);
17,103,240✔
425
                    Cycle(Interlocked.Increment(ref counter.hot));
17,103,240✔
426
                    return;
17,103,240✔
427
                }
428

429
                // if both update and add failed there was a race, try again
430
            }
233,593✔
431
        }
23,077,329✔
432

433
        ///<inheritdoc/>
434
        public void Clear()
435
        {
1,562,660✔
436
            // don't overlap Clear/Trim/TrimExpired
437
            lock (this.dictionary)
1,562,660✔
438
            {
1,562,660✔
439
                // evaluate queue count, remove everything including items removed from the dictionary but
440
                // not the queues. This also avoids the expensive o(n) no lock count, or locking the dictionary.
441
                int queueCount = this.HotCount + this.WarmCount + this.ColdCount;
1,562,660✔
442
                this.TrimLiveItems(itemsRemoved: 0, queueCount, ItemRemovedReason.Cleared);
1,562,660✔
443
            }
1,562,660✔
444
        }
1,562,660✔
445

446
        /// <summary>
447
        /// Trim the specified number of items from the cache. Removes all discardable items per IItemPolicy.ShouldDiscard(), then 
448
        /// itemCount-discarded items in LRU order, if any.
449
        /// </summary>
450
        /// <param name="itemCount">The number of items to remove.</param>
451
        /// <returns>The number of items removed from the cache.</returns>
452
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is less than 0.</exception>
453
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is greater than capacity.</exception>
454
        /// <remarks>
455
        /// Note: Trim affects LRU order. Calling Trim resets the internal accessed status of items.
456
        /// </remarks>
457
        public void Trim(int itemCount)
458
        {
305✔
459
            int capacity = this.Capacity;
305✔
460

461
            if (itemCount < 1 || itemCount > capacity)
305✔
462
                Throw.ArgOutOfRange(nameof(itemCount), "itemCount must be greater than or equal to one, and less than the capacity of the cache.");
10✔
463

464
            // clamp itemCount to number of items actually in the cache
465
            itemCount = Math.Min(itemCount, this.HotCount + this.WarmCount + this.ColdCount);
295✔
466

467
            // don't overlap Clear/Trim/TrimExpired
468
            lock (this.dictionary)
295✔
469
            {
295✔
470
                // first scan each queue for discardable items and remove them immediately. Note this can remove > itemCount items.
471
                int itemsRemoved = TrimAllDiscardedItems();
295✔
472

473
                TrimLiveItems(itemsRemoved, itemCount, ItemRemovedReason.Trimmed);
295✔
474
            }
295✔
475
        }
295✔
476

477
        private void TrimExpired()
478
        {
46✔
479
            if (this.itemPolicy.CanDiscard())
46✔
480
            {
46✔
481
                lock (this.dictionary)
46✔
482
                {
46✔
483
                    this.TrimAllDiscardedItems();
46✔
484
                } 
46✔
485
            }
46✔
486
        }
46✔
487

488
        /// <summary>
489
        /// Trim discarded items from all queues.
490
        /// </summary>
491
        /// <returns>The number of items removed.</returns>
492
        // backcompat: make internal
493
        protected int TrimAllDiscardedItems()
494
        {
341✔
495
            // don't overlap Clear/Trim/TrimExpired
496
            lock (this.dictionary)
341✔
497
            {
341✔
498
                int RemoveDiscardableItems(ConcurrentQueue<I> q, ref int queueCounter)
499
                {
1,023✔
500
                    int itemsRemoved = 0;
1,023✔
501
                    int localCount = queueCounter;
1,023✔
502

503
                    for (int i = 0; i < localCount; i++)
6,234✔
504
                    {
2,094✔
505
                        if (q.TryDequeue(out var item))
2,094✔
506
                        {
2,094✔
507
                            if (this.itemPolicy.ShouldDiscard(item))
2,094✔
508
                            {
289✔
509
                                Interlocked.Decrement(ref queueCounter);
289✔
510
                                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Trimmed);
289✔
511
                                itemsRemoved++;
289✔
512
                            }
289✔
513
                            else if (item.WasRemoved)
1,805✔
514
                            {
30✔
515
                                Interlocked.Decrement(ref queueCounter);
30✔
516
                            }
30✔
517
                            else
518
                            {
1,775✔
519
                                q.Enqueue(item);
1,775✔
520
                            }
1,775✔
521
                        }
2,094✔
522
                    }
2,094✔
523

524
                    return itemsRemoved;
1,023✔
525
                }
1,023✔
526

527
                int coldRem = RemoveDiscardableItems(coldQueue, ref this.counter.cold);
341✔
528
                int warmRem = RemoveDiscardableItems(warmQueue, ref this.counter.warm);
341✔
529
                int hotRem = RemoveDiscardableItems(hotQueue, ref this.counter.hot);
341✔
530

531
                if (warmRem > 0)
341✔
532
                {
41✔
533
                    Volatile.Write(ref this.isWarm, false);
41✔
534
                }
41✔
535

536
                return coldRem + warmRem + hotRem;
341✔
537
            }
538
        }
341✔
539

540
        private void TrimLiveItems(int itemsRemoved, int itemCount, ItemRemovedReason reason)
541
        {
1,562,955✔
542
            // When items are touched, they are moved to warm by cycling. Therefore, to guarantee 
543
            // that we can remove itemCount items, we must cycle (2 * capacity.Warm) + capacity.Hot times.
544
            // If clear is called during trimming, it would be possible to get stuck in an infinite
545
            // loop here. The warm + hot limit also guards against this case.
546
            int trimWarmAttempts = 0;
1,562,955✔
547
            int maxWarmHotAttempts = (this.capacity.Warm * 2) + this.capacity.Hot;
1,562,955✔
548

549
            while (itemsRemoved < itemCount && trimWarmAttempts < maxWarmHotAttempts)
15,878,969✔
550
            {
14,316,014✔
551
                if (Volatile.Read(ref this.counter.cold) > 0)
14,316,014✔
552
                {
11,030,769✔
553
                    if (TryRemoveCold(reason) == (ItemDestination.Remove, 0))
11,030,769✔
554
                    {
11,030,295✔
555
                        itemsRemoved++;
11,030,295✔
556
                        trimWarmAttempts = 0;
11,030,295✔
557
                    }
11,030,295✔
558
                    else
559
                    {
474✔
560
                        TrimWarmOrHot(reason);
474✔
561
                    }
474✔
562
                }
11,030,769✔
563
                else
564
                {
3,285,245✔
565
                    TrimWarmOrHot(reason);
3,285,245✔
566
                    trimWarmAttempts++;
3,285,245✔
567
                }
3,285,245✔
568
            }
14,316,014✔
569

570
            if (Volatile.Read(ref this.counter.warm) < this.capacity.Warm)
1,562,955✔
571
            {
840,388✔
572
                Volatile.Write(ref this.isWarm, false);
840,388✔
573
            }
840,388✔
574
        }
1,562,955✔
575

576
        private void TrimWarmOrHot(ItemRemovedReason reason)
577
        {
3,285,719✔
578
            if (Volatile.Read(ref this.counter.warm) > 0)
3,285,719✔
579
            {
1,073,408✔
580
                CycleWarmUnchecked(reason);
1,073,408✔
581
            }
1,073,408✔
582
            else if (Volatile.Read(ref this.counter.hot) > 0)
2,212,311✔
583
            {
2,211,952✔
584
                CycleHotUnchecked(reason);
2,211,952✔
585
            }
2,211,952✔
586
        }
3,285,719✔
587

588
        private void Cycle(int hotCount)
589
        {
231,395,487✔
590
            if (isWarm)
231,395,487✔
591
            {
227,669,084✔
592
                (var dest, var count) = CycleHot(hotCount);
227,669,084✔
593

594
                int cycles = 0;
227,669,084✔
595
                while (cycles++ < 3 && dest != ItemDestination.Remove)
493,574,352✔
596
                {
265,905,268✔
597
                    if (dest == ItemDestination.Warm)
265,905,268✔
598
                    {
42,228,802✔
599
                        (dest, count) = CycleWarm(count);
42,228,802✔
600
                    }
42,228,802✔
601
                    else if (dest == ItemDestination.Cold)
223,676,466✔
602
                    {
223,676,466✔
603
                        (dest, count) = CycleCold(count);
223,676,466✔
604
                    }
223,676,466✔
605
                }
265,905,268✔
606

607
                // If nothing was removed yet, constrain the size of warm and cold by discarding the coldest item.
608
                if (dest != ItemDestination.Remove)
227,669,084✔
609
                {
5,036,055✔
610
                    if (dest == ItemDestination.Warm && count > this.capacity.Warm)
5,036,055✔
611
                    {
3,324,819✔
612
                        count = LastWarmToCold();
3,324,819✔
613
                    }
3,324,819✔
614

615
                    ConstrainCold(count, ItemRemovedReason.Evicted);
5,036,055✔
616
                }
5,036,055✔
617
            }
227,669,084✔
618
            else
619
            {
3,726,403✔
620
                // fill up the warm queue with new items until warm is full.
621
                // else during warmup the cache will only use the hot + cold queues until any item is requested twice.
622
                CycleDuringWarmup(hotCount);
3,726,403✔
623
            }
3,726,403✔
624
        }
231,395,487✔
625

626
        // Cycle one item out of hot while the cache is still warming up. Unlike the steady-state
        // path, overflow from hot goes straight to warm (not routed by the item policy), so warm
        // fills up before cold is used. Once warm first overflows, isWarm is latched to true and
        // the cache switches to the steady-state cycle path.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private void CycleDuringWarmup(int hotCount)
        {
            // do nothing until hot is full
            if (hotCount > this.capacity.Hot)
            {
                // speculatively decrement; undone below if the dequeue loses a race
                Interlocked.Decrement(ref this.counter.hot);

                if (this.hotQueue.TryDequeue(out var item))
                {
                    // special case: removed during warmup
                    if (item.WasRemoved)
                    {
                        return;
                    }

                    int count = this.Move(item, ItemDestination.Warm, ItemRemovedReason.Evicted);

                    // if warm is now full, overflow to cold and mark as warm
                    if (count > this.capacity.Warm)
                    {
                        Volatile.Write(ref this.isWarm, true);
                        count = LastWarmToCold();
                        ConstrainCold(count, ItemRemovedReason.Evicted);
                    }
                }
                else
                {
                    // another thread dequeued the item first; restore the counter
                    Interlocked.Increment(ref this.counter.hot);
                }
            }
        }
658

659
        // Cycle a single item out of the hot queue when hot exceeds capacity.
        // Returns the destination the dequeued item was routed to and the resulting
        // size of the destination queue, or (Remove, 0) when no cycling was needed.
        private (ItemDestination, int) CycleHot(int hotCount)
        {
            // Within capacity: nothing to cycle out of hot.
            if (hotCount <= this.capacity.Hot)
            {
                return (ItemDestination.Remove, 0);
            }

            return CycleHotUnchecked(ItemRemovedReason.Evicted);
        }
668

669
        // Dequeue one item from hot without checking capacity, and move it to wherever the
        // item policy routes it. Returns the destination and the destination queue's new count.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleHotUnchecked(ItemRemovedReason removedReason)
        {
            // speculatively decrement; undone below if the dequeue loses a race
            Interlocked.Decrement(ref this.counter.hot);

            if (this.hotQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteHot(item);
                return (where, this.Move(item, where, removedReason));
            }
            else
            {
                // another thread dequeued the item first; restore the counter
                Interlocked.Increment(ref this.counter.hot);
                return (ItemDestination.Remove, 0);
            }
        }
685

686
        // Cycle a single item out of the warm queue when warm exceeds capacity.
        // Returns the destination the dequeued item was routed to and the resulting
        // size of the destination queue, or (Remove, 0) when no cycling was needed.
        private (ItemDestination, int) CycleWarm(int count)
        {
            // Within capacity: nothing to cycle out of warm.
            if (count <= this.capacity.Warm)
            {
                return (ItemDestination.Remove, 0);
            }

            return CycleWarmUnchecked(ItemRemovedReason.Evicted);
        }
695

696
        // Dequeue one item from warm without checking capacity. Accessed items may re-enter
        // warm (subject to the overflow rule below); everything else is demoted to cold.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleWarmUnchecked(ItemRemovedReason removedReason)
        {
            // speculatively decrement; undone below if the dequeue loses a race
            int wc = Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                // already removed from the dictionary; just drop the queue entry
                if (item.WasRemoved)
                {
                    return (ItemDestination.Remove, 0);
                }

                var where = this.itemPolicy.RouteWarm(item);

                // When the warm queue is full, we allow an overflow of 1 item before redirecting warm items to cold.
                // This only happens when hit rate is high, in which case we can consider all items relatively equal in
                // terms of which was least recently used.
                if (where == ItemDestination.Warm && wc <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    return (ItemDestination.Cold, this.Move(item, ItemDestination.Cold, removedReason));
                }
            }
            else
            {
                // another thread dequeued the item first; restore the counter
                Interlocked.Increment(ref this.counter.warm);
                return (ItemDestination.Remove, 0);
            }
        }
728

729
        // Evict a single item from the cold queue when cold exceeds capacity.
        // Returns the destination the dequeued item was routed to and the resulting
        // size of the destination queue, or (Remove, 0) when no eviction was needed.
        private (ItemDestination, int) CycleCold(int count)
        {
            // Within capacity: nothing to evict from cold.
            if (count <= this.capacity.Cold)
            {
                return (ItemDestination.Remove, 0);
            }

            return TryRemoveCold(ItemRemovedReason.Evicted);
        }
738

739
        // Dequeue one item from cold. Accessed items may be promoted back to warm if warm
        // has space; everything else is removed from the cache entirely.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) TryRemoveCold(ItemRemovedReason removedReason)
        {
            // speculatively decrement; undone below if the dequeue loses a race
            Interlocked.Decrement(ref this.counter.cold);

            if (this.coldQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteCold(item);

                // promote to warm only if warm currently has capacity
                if (where == ItemDestination.Warm && Volatile.Read(ref this.counter.warm) <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    this.Move(item, ItemDestination.Remove, removedReason);
                    return (ItemDestination.Remove, 0);
                }
            }
            else
            {
                // another thread dequeued the item first; restore and report the cold count
                return (ItemDestination.Cold, Interlocked.Increment(ref this.counter.cold));
            }
        }
763

764
        // Demote the least recently queued warm item to cold (or discard it if it was
        // already removed). Returns the new count of the destination queue, or 0.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int LastWarmToCold()
        {
            // speculatively decrement; undone below if the dequeue loses a race
            Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                var destination = item.WasRemoved ? ItemDestination.Remove : ItemDestination.Cold;
                return this.Move(item, destination, ItemRemovedReason.Evicted);
            }
            else
            {
                // another thread dequeued the item first; restore the counter
                Interlocked.Increment(ref this.counter.warm);
                return 0;
            }
        }
780

781
        // If cold exceeds capacity, evict a single item from the cold queue.
        // No-op when cold is within capacity or the dequeue races with another thread.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void ConstrainCold(int coldCount, ItemRemovedReason removedReason)
        {
            if (coldCount > this.capacity.Cold && this.coldQueue.TryDequeue(out var item))
            {
                Interlocked.Decrement(ref this.counter.cold);
                this.Move(item, ItemDestination.Remove, removedReason);
            }
        }
790

791
        // Move an item to the given destination: enqueue on warm/cold (returning the
        // destination queue's new count), or atomically remove it from the dictionary
        // and fire the removal notification. Clears the accessed flag so the item must
        // be touched again to earn promotion on the next cycle.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int Move(I item, ItemDestination where, ItemRemovedReason removedReason)
        {
            item.WasAccessed = false;

            switch (where)
            {
                case ItemDestination.Warm:
                    this.warmQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.warm);
                case ItemDestination.Cold:
                    this.coldQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.cold);
                case ItemDestination.Remove:

                    // remove key only if it still maps to this exact item, so a concurrent
                    // re-add of the same key is not clobbered
                    var kvp = new KeyValuePair<K, I>(item.Key, item);

#if NET6_0_OR_GREATER
                    if (this.dictionary.TryRemove(kvp))
#else
                    // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                    if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                    {
                        OnRemove(item.Key, item, removedReason);
                    }
                    break;
            }

            // Remove (and any unhandled destination) contributes no queue count
            return 0;
        }
822

823
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The enumerator returned from the cache is safe to use concurrently with
        /// reads and writes, however it does not represent a moment-in-time snapshot.  
        /// The contents exposed through the enumerator may contain modifications
        /// made after <see cref="GetEnumerator"/> was called.
        /// </remarks>
        IEnumerator IEnumerable.GetEnumerator()
        {
            // forward to the strongly typed GetEnumerator via a cast
            return ((ConcurrentLruCore<K, V, I, P, T>)this).GetEnumerator();
        }
835

836
#if DEBUG
        /// <summary>
        /// Format the LRU as a string by converting all the keys to strings.
        /// </summary>
        /// <returns>The LRU formatted as a string.</returns>
        internal string FormatLruString()
        {
            // Snapshot each queue's keys as comma separated lists, then compose the
            // final string in one interpolation. Output matches "Hot [...] Warm [...] Cold [...]".
            var hot = string.Join(",", this.hotQueue.Select(n => n.Key.ToString()));
            var warm = string.Join(",", this.warmQueue.Select(n => n.Key.ToString()));
            var cold = string.Join(",", this.coldQueue.Select(n => n.Key.ToString()));

            return $"Hot [{hot}] Warm [{warm}] Cold [{cold}]";
        }
#endif
856

857
        // Build the CachePolicy facade for this LRU based on the compile-time policy type P.
        // The typeof(P) checks are resolved by the JIT for struct policies, so the branches
        // have no runtime cost. Check order matters: AfterAccessPolicy is matched exactly
        // before the broader IDiscreteItemPolicy interface check.
        private static CachePolicy CreatePolicy(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            var p = new Proxy(lru);

            // time-based expiry after access: expose the sliding-expiry time policy
            if (typeof(P) == typeof(AfterAccessPolicy<K, V>))
            {
                return new CachePolicy(new Optional<IBoundedPolicy>(p), Optional<ITimePolicy>.None(), new Optional<ITimePolicy>(p), Optional<IDiscreteTimePolicy>.None());
            }

            // IsAssignableFrom is a jit intrinsic https://github.com/dotnet/runtime/issues/4920
            if (typeof(IDiscreteItemPolicy<K, V>).IsAssignableFrom(typeof(P)))
            {
                return new CachePolicy(new Optional<IBoundedPolicy>(p), Optional<ITimePolicy>.None(), Optional<ITimePolicy>.None(), new Optional<IDiscreteTimePolicy>(new DiscreteExpiryProxy(lru)));
            }

            // default: expose an expire-after-write policy only when items can expire
            return new CachePolicy(new Optional<IBoundedPolicy>(p), lru.itemPolicy.CanDiscard() ? new Optional<ITimePolicy>(p) : Optional<ITimePolicy>.None());
        }
874

875
        // Expose cache metrics only when a telemetry policy is configured; with
        // NoTelemetryPolicy nothing is ever recorded, so no metrics are exposed.
        private static Optional<ICacheMetrics> CreateMetrics(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheMetrics>.None()
                : new Optional<ICacheMetrics>(new Proxy(lru));
        }
884

885
        // Expose cache events only when a telemetry policy is configured; with
        // NoTelemetryPolicy no events are ever raised, so none are exposed.
        private static Optional<ICacheEvents<K, V>> CreateEvents(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheEvents<K, V>>.None()
                : new Optional<ICacheEvents<K, V>>(new Proxy(lru));
        }
894

895
        // To get JIT optimizations, policies must be structs.
        // If the structs are returned directly via properties, they will be copied. Since
        // telemetryPolicy is a mutable struct, copy is bad. One workaround is to store the
        // state within the struct in an object. Since the struct points to the same object
        // it becomes immutable. However, this object is then somewhere else on the
        // heap, which slows down the policies with hit counter logic in benchmarks. Likely
        // this approach keeps the structs data members in the same CPU cache line as the LRU.
        // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Upd = {Updated}, Evict = {Evicted}")]
#else
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Evict = {Evicted}")]
#endif
        /// <summary>
        /// Forwards metrics, events and policy operations to the owning LRU instance,
        /// so the mutable telemetry/item policy structs are never copied (see note above
        /// about JIT optimization and struct copies).
        /// </summary>
        private class Proxy : ICacheMetrics, ICacheEvents<K, V>, IBoundedPolicy, ITimePolicy
        {
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public Proxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            // metrics forwarded to the telemetry policy
            public double HitRatio => lru.telemetryPolicy.HitRatio;

            public long Total => lru.telemetryPolicy.Total;

            public long Hits => lru.telemetryPolicy.Hits;

            public long Misses => lru.telemetryPolicy.Misses;

            public long Evicted => lru.telemetryPolicy.Evicted;

// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public long Updated => lru.telemetryPolicy.Updated;
#endif
            public int Capacity => lru.Capacity;

            public TimeSpan TimeToLive => lru.itemPolicy.TimeToLive;

            // events are forwarded so subscribers attach to the LRU's own telemetry policy
            public event EventHandler<ItemRemovedEventArgs<K, V>> ItemRemoved
            {
                add { this.lru.telemetryPolicy.ItemRemoved += value; }
                remove { this.lru.telemetryPolicy.ItemRemoved -= value; }
            }

// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public event EventHandler<ItemUpdatedEventArgs<K, V>> ItemUpdated
            {
                add { this.lru.telemetryPolicy.ItemUpdated += value; }
                remove { this.lru.telemetryPolicy.ItemUpdated -= value; }
            }
#endif
            public void Trim(int itemCount)
            {
                lru.Trim(itemCount);
            }

            public void TrimExpired()
            {
                lru.TrimExpired();
            }
        }
959

960
        /// <summary>
        /// Exposes per-item discrete expiry operations by forwarding to the owning LRU.
        /// Only created when P implements IDiscreteItemPolicy, so stored items are
        /// expected to carry a tick count.
        /// </summary>
        private class DiscreteExpiryProxy : IDiscreteTimePolicy
        {
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public DiscreteExpiryProxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            public void TrimExpired()
            {
                lru.TrimExpired();
            }

            /// <summary>
            /// Gets the remaining time until the item stored against <paramref name="key"/> expires.
            /// </summary>
            /// <returns>True if the key exists and maps to an item with a tick count; otherwise false.</returns>
            public bool TryGetTimeToExpire<TKey>(TKey key, out TimeSpan timeToLive)
            {
                // Pattern match instead of 'as' + null-forgiving '!': the original would throw
                // NullReferenceException if the stored item were not a LongTickCountLruItem.
                // With a discrete policy this should not happen, but returning false is safer.
                if (key is K k
                    && lru.dictionary.TryGetValue(k, out var item)
                    && item is LongTickCountLruItem<K, V> tickItem)
                {
                    timeToLive = (new Duration(tickItem.TickCount) - Duration.SinceEpoch()).ToTimeSpan();
                    return true;
                }

                timeToLive = default;
                return false;
            }
        }
987
    }
988
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc