• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

bitfaster / BitFaster.Caching / 9834817549

08 Jul 2024 06:44AM UTC coverage: 98.949% (-0.3%) from 99.209%
9834817549

Pull #612

github

web-flow
Merge 81cff3ede into f9563f881
Pull Request #612: Bump xunit.runner.visualstudio from 2.4.5 to 2.8.1

1070 of 1104 branches covered (96.92%)

Branch coverage included in aggregate %.

4674 of 4701 relevant lines covered (99.43%)

38625079.35 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

98.72
/BitFaster.Caching/Lru/ConcurrentLruCore.cs
1
using System;
2
using System.Collections;
3
using System.Collections.Concurrent;
4
using System.Collections.Generic;
5
using System.Diagnostics;
6
using System.Diagnostics.CodeAnalysis;
7
using System.Linq;
8
using System.Runtime.CompilerServices;
9
using System.Threading;
10
using System.Threading.Tasks;
11

12
namespace BitFaster.Caching.Lru
13
{
14
    /// <summary>
15
    /// A pseudo LRU based on the TU-Q eviction policy. The LRU list is composed of 3 segments: hot, warm and cold. 
16
    /// Cost of maintaining segments is amortized across requests. Items are only cycled when capacity is exceeded. 
17
    /// Pure read does not cycle items if all segments are within capacity constraints. There are no global locks. 
18
    /// On cache miss, a new item is added. Tail items in each segment are dequeued, examined, and are either enqueued 
19
    /// or discarded.
20
    /// The TU-Q scheme of hot, warm and cold is similar to that used in MemCached (https://memcached.org/blog/modern-lru/)
21
    /// and OpenBSD (https://flak.tedunangst.com/post/2Q-buffer-cache-algorithm), but does not use a background thread
22
    /// to maintain the internal queues.
23
    /// </summary>
24
    /// <remarks>
25
    /// Each segment has a capacity. When segment capacity is exceeded, items are moved as follows:
26
    /// <list type="number">
27
    ///   <item><description>New items are added to hot, WasAccessed = false.</description></item>
28
    ///   <item><description>When items are accessed, update WasAccessed = true.</description></item>
29
    ///   <item><description>When items are moved WasAccessed is set to false.</description></item>
30
    ///   <item><description>When hot is full, hot tail is moved to either Warm or Cold depending on WasAccessed.</description></item>
31
    ///   <item><description>When warm is full, warm tail is moved to warm head or cold depending on WasAccessed.</description></item>
32
    ///   <item><description>When cold is full, cold tail is moved to warm head or removed from dictionary depending on WasAccessed.</description></item>
33
    ///</list>
34
    /// </remarks>
35
    public class ConcurrentLruCore<K, V, I, P, T> : ICacheExt<K, V>, IAsyncCacheExt<K, V>, IEnumerable<KeyValuePair<K, V>>
36
        where K : notnull
37
        where I : LruItem<K, V>
38
        where P : struct, IItemPolicy<K, V, I>
39
        where T : struct, ITelemetryPolicy<K, V>
40
    {
41
        // Backing store for all cached items; lookups are lock free via ConcurrentDictionary.
        private readonly ConcurrentDictionary<K, I> dictionary;

        // The three LRU segments. New items enter hot; cycling moves them between
        // segments based on the item policy's routing decisions.
        private readonly ConcurrentQueue<I> hotQueue;
        private readonly ConcurrentQueue<I> warmQueue;
        private readonly ConcurrentQueue<I> coldQueue;

        // maintain count outside ConcurrentQueue, since ConcurrentQueue.Count holds a global lock
        private PaddedQueueCount counter;

        // Partition of total capacity across the hot/warm/cold segments.
        private readonly ICapacityPartition capacity;

        private readonly P itemPolicy;

        // False until the warm queue has filled once; cycling behaves differently during warmup.
        private bool isWarm = false;

        /// <summary>
        /// The telemetry policy.
        /// </summary>
        /// <remarks>
        /// Since T is a struct, making it readonly will force the runtime to make defensive copies
        /// if mutate methods are called. Therefore, field must be mutable to maintain count.
        /// </remarks>
        protected T telemetryPolicy;
63

64
        /// <summary>
        /// Initializes a new instance of the ConcurrentLruCore class with the specified concurrencyLevel, capacity, equality comparer, item policy and telemetry policy.
        /// </summary>
        /// <param name="concurrencyLevel">The concurrency level.</param>
        /// <param name="capacity">The capacity.</param>
        /// <param name="comparer">The equality comparer.</param>
        /// <param name="itemPolicy">The item policy.</param>
        /// <param name="telemetryPolicy">The telemetry policy.</param>
        /// <exception cref="ArgumentNullException"></exception>
        public ConcurrentLruCore(
            int concurrencyLevel,
            ICapacityPartition capacity,
            IEqualityComparer<K> comparer,
            P itemPolicy,
            T telemetryPolicy)
        {
            if (capacity == null)
                Throw.ArgNull(ExceptionArgument.capacity);

            if (comparer == null)
                Throw.ArgNull(ExceptionArgument.comparer);

            capacity.Validate();
            this.capacity = capacity;

            this.hotQueue = new ConcurrentQueue<I>();
            this.warmQueue = new ConcurrentQueue<I>();
            this.coldQueue = new ConcurrentQueue<I>();

            // Pre-size the dictionary so it does not resize while the cache fills to capacity.
            int dictionaryCapacity = ConcurrentDictionarySize.Estimate(this.Capacity);

            this.dictionary = new ConcurrentDictionary<K, I>(concurrencyLevel, dictionaryCapacity, comparer);
            this.itemPolicy = itemPolicy;
            this.telemetryPolicy = telemetryPolicy;
            this.telemetryPolicy.SetEventSource(this);
        }
100

101
        // No lock count: https://arbel.net/2013/02/03/best-practices-for-using-concurrentdictionary/
        ///<inheritdoc/>
        ///<remarks>O(n): enumerates the dictionary, skipping items the policy marks discardable.</remarks>
        public int Count => this.dictionary.Where(i => !itemPolicy.ShouldDiscard(i.Value)).Count();

        ///<inheritdoc/>
        public int Capacity => this.capacity.Hot + this.capacity.Warm + this.capacity.Cold;

        ///<inheritdoc/>
        public Optional<ICacheMetrics> Metrics => CreateMetrics(this);

        ///<inheritdoc/>
        public Optional<ICacheEvents<K, V>> Events => CreateEvents(this);

        ///<inheritdoc/>
        public CachePolicy Policy => CreatePolicy(this);

        /// <summary>
        /// Gets the number of hot items.
        /// </summary>
        /// <remarks>Volatile read of the queue counter; may briefly include removed items not yet cycled out of the queue.</remarks>
        public int HotCount => Volatile.Read(ref this.counter.hot);

        /// <summary>
        /// Gets the number of warm items.
        /// </summary>
        public int WarmCount => Volatile.Read(ref this.counter.warm);

        /// <summary>
        /// Gets the number of cold items.
        /// </summary>
        public int ColdCount => Volatile.Read(ref this.counter.cold);

        /// <summary>
        /// Gets a collection containing the keys in the cache.
        /// </summary>
        public ICollection<K> Keys => this.dictionary.Keys;
136

137
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The enumerator returned from the cache is safe to use concurrently with
        /// reads and writes, however it does not represent a moment-in-time snapshot.  
        /// The contents exposed through the enumerator may contain modifications
        /// made after <see cref="GetEnumerator"/> was called.
        /// </remarks>
        public IEnumerator<KeyValuePair<K, V>> GetEnumerator()
        {
            foreach (var kvp in this.dictionary)
            {
                // Skip discardable (e.g. expired) items so enumeration agrees with Count.
                if (!itemPolicy.ShouldDiscard(kvp.Value))
                { 
                    yield return new KeyValuePair<K, V>(kvp.Key, kvp.Value.Value); 
                }
            }
        }
155

156
        ///<inheritdoc/>
        public bool TryGet(K key, [MaybeNullWhen(false)] out V value)
        {
            if (dictionary.TryGetValue(key, out var item))
            {
                // Found: either touch and return the item, or treat a discardable item as a miss.
                return GetOrDiscard(item, out value);
            }

            value = default;
            this.telemetryPolicy.IncrementMiss();
            return false;
        }
168

169
        // AggressiveInlining forces the JIT to inline policy.ShouldDiscard(). For LRU policy 
        // the first branch is completely eliminated due to JIT time constant propagation.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool GetOrDiscard(I item, [MaybeNullWhen(false)] out V value)
        {
            if (this.itemPolicy.ShouldDiscard(item))
            {
                // Discardable (e.g. expired): remove it and report a miss rather than returning a stale value.
                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Evicted);
                this.telemetryPolicy.IncrementMiss();
                value = default;
                return false;
            }

            value = item.Value;

            // Mark the item accessed so cycling favors keeping it.
            this.itemPolicy.Touch(item);
            this.telemetryPolicy.IncrementHit();
            return true;
        }
188

189
        // Adds a new item to the dictionary and hot queue. Returns false if another thread
        // won the race to insert the same key.
        private bool TryAdd(K key, V value)
        {
            var newItem = this.itemPolicy.CreateItem(key, value);

            if (this.dictionary.TryAdd(key, newItem))
            {
                this.hotQueue.Enqueue(newItem);
                // Adding to hot may push the segments over capacity; cycle to rebalance.
                Cycle(Interlocked.Increment(ref counter.hot));
                return true;
            }

            // Lost the race: this value was never published, so dispose it here.
            Disposer<V>.Dispose(newItem.Value);
            return false;
        }
203

204
        ///<inheritdoc/>
        public V GetOrAdd(K key, Func<K, V> valueFactory)
        {
            // Loop until either an existing value is read, or this thread's new value wins the add race.
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = valueFactory(key);

                if (this.TryAdd(key, created))
                {
                    return created;
                }
            }
        }
223

224
        /// <summary>
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
        /// existing value if the key already exists.
        /// </summary>
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
        /// <param name="key">The key of the element to add.</param>
        /// <param name="valueFactory">The factory function used to generate a value for the key.</param>
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
        /// <returns>The value for the key. This will be either the existing value for the key if the key is already 
        /// in the cache, or the new value if the key was not in the cache.</returns>
        public V GetOrAdd<TArg>(K key, Func<K, TArg, V> valueFactory, TArg factoryArgument)
        {
            // Loop until either an existing value is read, or this thread's new value wins the add race.
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = valueFactory(key, factoryArgument);

                if (this.TryAdd(key, created))
                {
                    return created;
                }
            }
        }
252

253
        ///<inheritdoc/>
        public async ValueTask<V> GetOrAddAsync(K key, Func<K, Task<V>> valueFactory)
        {
            // Loop until either an existing value is read, or this thread's new value wins the add race.
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                // This is identical logic in ConcurrentDictionary.GetOrAdd method.
                var created = await valueFactory(key).ConfigureAwait(false);

                if (this.TryAdd(key, created))
                {
                    return created;
                }
            }
        }
273

274
        /// <summary>
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
        /// existing value if the key already exists.
        /// </summary>
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
        /// <param name="key">The key of the element to add.</param>
        /// <param name="valueFactory">The factory function used to asynchronously generate a value for the key.</param>
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
        /// <returns>A task that represents the asynchronous GetOrAdd operation.</returns>
        public async ValueTask<V> GetOrAddAsync<TArg>(K key, Func<K, TArg, Task<V>> valueFactory, TArg factoryArgument)
        {
            // Loop until either an existing value is read, or this thread's new value wins the add race.
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = await valueFactory(key, factoryArgument).ConfigureAwait(false);

                if (this.TryAdd(key, created))
                {
                    return created;
                }
            }
        }
301

302
        /// <summary>
        /// Attempts to remove the specified key value pair.
        /// </summary>
        /// <param name="item">The item to remove.</param>
        /// <returns>true if the item was removed successfully; otherwise, false.</returns>
        public bool TryRemove(KeyValuePair<K, V> item)
        {
            if (this.dictionary.TryGetValue(item.Key, out var existing))
            {
                // Only remove if the stored value still matches the caller's value.
                if (EqualityComparer<V>.Default.Equals(existing.Value, item.Value))
                {
                    var kvp = new KeyValuePair<K, I>(item.Key, existing);
#if NET6_0_OR_GREATER
                    if (this.dictionary.TryRemove(kvp))
#else
                    // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                    if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                    {
                        OnRemove(item.Key, kvp.Value, ItemRemovedReason.Removed);
                        return true;
                    }
                }

                // it existed, but we couldn't remove - this means value was replaced after the TryGetValue (a race)
            }

            return false;
        }
331

332
        /// <summary>
        /// Attempts to remove and return the value that has the specified key.
        /// </summary>
        /// <param name="key">The key of the element to remove.</param>
        /// <param name="value">When this method returns, contains the object removed, or the default value of the value type if key does not exist.</param>
        /// <returns>true if the object was removed successfully; otherwise, false.</returns>
        public bool TryRemove(K key, [MaybeNullWhen(false)] out V value)
        {
            if (!this.dictionary.TryRemove(key, out var removed))
            {
                value = default;
                return false;
            }

            // Removal bookkeeping happens before reading the value, matching the established call order.
            OnRemove(key, removed, ItemRemovedReason.Removed);
            value = removed.Value;
            return true;
        }
350

351
        ///<inheritdoc/>
        ///<remarks>Delegates to the overload that also returns the removed value, discarding it.</remarks>
        public bool TryRemove(K key) => TryRemove(key, out _);
356

357
        // Common removal bookkeeping: flag the item, raise telemetry, then dispose the value.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void OnRemove(K key, I item, ItemRemovedReason reason)
        {
            // Mark as not accessed, it will later be cycled out of the queues because it can never be fetched 
            // from the dictionary. Note: Hot/Warm/Cold count will reflect the removed item until it is cycled 
            // from the queue.
            item.WasAccessed = false;
            item.WasRemoved = true;

            this.telemetryPolicy.OnItemRemoved(key, item.Value, reason);

            // serialize dispose (common case dispose not thread safe)
            lock (item)
            {
                Disposer<V>.Dispose(item.Value);
            }
        }
374

375
        ///<inheritdoc/>
        ///<remarks>Note: Calling this method does not affect LRU order.</remarks>
        public bool TryUpdate(K key, V value)
        {
            if (this.dictionary.TryGetValue(key, out var existing))
            {
                // Lock the item to serialize against a concurrent remove/dispose of the same item
                // (OnRemove disposes under the same lock).
                lock (existing)
                {
                    if (!existing.WasRemoved)
                    {
                        V oldValue = existing.Value;

                        existing.Value = value;

                        this.itemPolicy.Update(existing);
// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
                        this.telemetryPolicy.OnItemUpdated(existing.Key, oldValue, existing.Value);
#endif
                        // The new value is published; dispose the value it replaced.
                        Disposer<V>.Dispose(oldValue);

                        return true;
                    }
                }
            }

            return false;
        }
403

404
        ///<inheritdoc/>
        ///<remarks>Note: Updates to existing items do not affect LRU order. Added items are at the top of the LRU.</remarks>
        public void AddOrUpdate(K key, V value)
        {
            while (true)
            {
                // first, try to update
                if (this.TryUpdate(key, value))
                {
                    return;
                }

                // then try add
                var newItem = this.itemPolicy.CreateItem(key, value);

                if (this.dictionary.TryAdd(key, newItem))
                {
                    this.hotQueue.Enqueue(newItem);
                    // Adding to hot may push segments over capacity; cycle to rebalance.
                    Cycle(Interlocked.Increment(ref counter.hot));
                    return;
                }

                // if both update and add failed there was a race, try again
            }
        }
429

430
        ///<inheritdoc/>
        public void Clear()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // evaluate queue count, remove everything including items removed from the dictionary but
                // not the queues. This also avoids the expensive o(n) no lock count, or locking the dictionary.
                int queueCount = this.HotCount + this.WarmCount + this.ColdCount;
                this.TrimLiveItems(itemsRemoved: 0, queueCount, ItemRemovedReason.Cleared);
            }
        }
442

443
        /// <summary>
        /// Trim the specified number of items from the cache. Removes all discardable items per IItemPolicy.ShouldDiscard(), then 
        /// itemCount-discarded items in LRU order, if any.
        /// </summary>
        /// <param name="itemCount">The number of items to remove.</param>
        /// <returns>The number of items removed from the cache.</returns>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is less than 0./</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is greater than capacity./</exception>
        /// <remarks>
        /// Note: Trim affects LRU order. Calling Trim resets the internal accessed status of items.
        /// </remarks>
        public void Trim(int itemCount)
        {
            int capacity = this.Capacity;

            if (itemCount < 1 || itemCount > capacity)
                Throw.ArgOutOfRange(nameof(itemCount), "itemCount must be greater than or equal to one, and less than the capacity of the cache.");

            // clamp itemCount to number of items actually in the cache
            itemCount = Math.Min(itemCount, this.HotCount + this.WarmCount + this.ColdCount);

            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // first scan each queue for discardable items and remove them immediately. Note this can remove > itemCount items.
                int itemsRemoved = this.itemPolicy.CanDiscard() ? TrimAllDiscardedItems() : 0;

                TrimLiveItems(itemsRemoved, itemCount, ItemRemovedReason.Trimmed);
            }
        }
473

474
        // Removes all discardable items, but only when the item policy supports discarding.
        private void TrimExpired()
        {
            if (!this.itemPolicy.CanDiscard())
            {
                return;
            }

            this.TrimAllDiscardedItems();
        }
481

482
        /// <summary>
        /// Trim discarded items from all queues.
        /// </summary>
        /// <returns>The number of items removed.</returns>
        // backcompat: make internal
        protected int TrimAllDiscardedItems()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // Scans one queue once: discardable items are removed; live items are re-enqueued.
                int RemoveDiscardableItems(ConcurrentQueue<I> q, ref int queueCounter)
                {
                    int itemsRemoved = 0;
                    // Snapshot the count so the loop is bounded even as items are re-enqueued.
                    int localCount = queueCounter;

                    for (int i = 0; i < localCount; i++)
                    {
                        if (q.TryDequeue(out var item))
                        {
                            if (this.itemPolicy.ShouldDiscard(item))
                            {
                                Interlocked.Decrement(ref queueCounter);
                                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Trimmed);
                                itemsRemoved++;
                            }
                            else
                            {
                                // Still live: put it back at the head-to-tail cycle position.
                                q.Enqueue(item);
                            }
                        }
                    }

                    return itemsRemoved;
                }

                int coldRem = RemoveDiscardableItems(coldQueue, ref this.counter.cold);
                int warmRem = RemoveDiscardableItems(warmQueue, ref this.counter.warm);
                int hotRem = RemoveDiscardableItems(hotQueue, ref this.counter.hot);

                // If warm shrank, drop back to warmup-style cycling until warm fills again.
                if (warmRem > 0)
                {
                    Volatile.Write(ref this.isWarm, false);
                }

                return coldRem + warmRem + hotRem;
            }
        }
529

530
        // Removes up to itemCount live items, draining cold first and refilling it from warm/hot.
        private void TrimLiveItems(int itemsRemoved, int itemCount, ItemRemovedReason reason)
        {
            // When items are touched, they are moved to warm by cycling. Therefore, to guarantee 
            // that we can remove itemCount items, we must cycle (2 * capacity.Warm) + capacity.Hot times.
            // If clear is called during trimming, it would be possible to get stuck in an infinite
            // loop here. The warm + hot limit also guards against this case.
            int trimWarmAttempts = 0;
            int maxWarmHotAttempts = (this.capacity.Warm * 2) + this.capacity.Hot;

            while (itemsRemoved < itemCount && trimWarmAttempts < maxWarmHotAttempts)
            {
                if (Volatile.Read(ref this.counter.cold) > 0)
                {
                    // (Remove, 0) signals the cold tail was actually removed, not recycled.
                    if (TryRemoveCold(reason) == (ItemDestination.Remove, 0))
                    {
                        itemsRemoved++;
                        trimWarmAttempts = 0;
                    }

                    TrimWarmOrHot(reason);
                }
                else
                {
                    TrimWarmOrHot(reason);
                    trimWarmAttempts++;
                }
            }

            // If warm is no longer full, revert to warmup cycling until it refills.
            if (Volatile.Read(ref this.counter.warm) < this.capacity.Warm)
            {
                Volatile.Write(ref this.isWarm, false);
            }
        }
563

564
        // Cycles one item out of warm, or out of hot if warm is empty; the item policy
        // routes the dequeued item to its destination.
        private void TrimWarmOrHot(ItemRemovedReason reason)
        {
            if (Volatile.Read(ref this.counter.warm) > 0)
            {
                CycleWarmUnchecked(reason);
            }
            else if (Volatile.Read(ref this.counter.hot) > 0)
            {
                CycleHotUnchecked(reason);
            }
        }
575

576
        // Rebalances the hot/warm/cold queues after an item was added to hot.
        private void Cycle(int hotCount)
        {
            if (isWarm)
            {
                (var dest, var count) = CycleHot(hotCount);

                // Bound the work done per add: cycle at most 3 times, stopping early once an item is removed.
                int cycles = 0;
                while (cycles++ < 3 && dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm)
                    {
                        (dest, count) = CycleWarm(count);
                    }
                    else if (dest == ItemDestination.Cold)
                    {
                        (dest, count) = CycleCold(count);
                    }
                }

                // If nothing was removed yet, constrain the size of warm and cold by discarding the coldest item.
                if (dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm && count > this.capacity.Warm)
                    {
                        count = LastWarmToCold();
                    }

                    ConstrainCold(count, ItemRemovedReason.Evicted);
                }
            }
            else
            {
                // fill up the warm queue with new items until warm is full.
                // else during warmup the cache will only use the hot + cold queues until any item is requested twice.
                CycleDuringWarmup(hotCount);
            }
        }
613

614
        // Warmup variant of Cycle: hot overflow goes straight to warm until warm fills once.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private void CycleDuringWarmup(int hotCount)
        {
            // do nothing until hot is full
            if (hotCount > this.capacity.Hot)
            {
                Interlocked.Decrement(ref this.counter.hot);

                if (this.hotQueue.TryDequeue(out var item))
                {
                    int count = this.Move(item, ItemDestination.Warm, ItemRemovedReason.Evicted);

                    // if warm is now full, overflow to cold and mark as warm
                    if (count > this.capacity.Warm)
                    {
                        Volatile.Write(ref this.isWarm, true);
                        count = LastWarmToCold();
                        ConstrainCold(count, ItemRemovedReason.Evicted);
                    }
                }
                else
                {
                    // Dequeue raced empty: undo the optimistic decrement above.
                    Interlocked.Increment(ref this.counter.hot);
                }
            }
        }
640

641
        // Cycles the hot tail only when hot exceeds capacity; otherwise reports nothing moved.
        private (ItemDestination, int) CycleHot(int hotCount)
        {
            return hotCount <= this.capacity.Hot
                ? (ItemDestination.Remove, 0)
                : CycleHotUnchecked(ItemRemovedReason.Evicted);
        }
650

651
        // Dequeues the hot tail and moves it to the destination chosen by the item policy.
        // Returns the destination and the destination queue's resulting count.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleHotUnchecked(ItemRemovedReason removedReason)
        {
            // Optimistically decrement before the dequeue; restored below if the queue raced empty.
            Interlocked.Decrement(ref this.counter.hot);

            if (this.hotQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteHot(item);
                return (where, this.Move(item, where, removedReason));
            }
            else
            {
                // Queue was empty (lost a race): undo the count change and report nothing moved.
                Interlocked.Increment(ref this.counter.hot);
                return (ItemDestination.Remove, 0);
            }
        }
667

668
        /// <summary>
        /// Cycle one item out of the warm queue, but only when warm has exceeded its capacity.
        /// </summary>
        /// <param name="count">The current count of items in the warm queue.</param>
        /// <returns>The destination of the cycled item and the resulting destination queue count.</returns>
        private (ItemDestination, int) CycleWarm(int count)
        {
            // Within capacity: nothing to cycle.
            if (count <= this.capacity.Warm)
            {
                return (ItemDestination.Remove, 0);
            }

            return CycleWarmUnchecked(ItemRemovedReason.Evicted);
        }
677

678
        /// <summary>
        /// Dequeue one item from warm; accessed items may be re-queued to warm (bounded by an
        /// overflow allowance of 1), otherwise the item is demoted to cold.
        /// Caller has already verified warm is over capacity.
        /// </summary>
        /// <param name="removedReason">The reason to report if the item is removed.</param>
        /// <returns>The destination the item was routed to and the resulting destination queue count.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleWarmUnchecked(ItemRemovedReason removedReason)
        {
            // wc is the post-decrement warm count, used for the overflow check below.
            int wc = Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteWarm(item);

                // When the warm queue is full, we allow an overflow of 1 item before redirecting warm items to cold.
                // This only happens when hit rate is high, in which case we can consider all items relatively equal in
                // terms of which was least recently used.
                if (where == ItemDestination.Warm && wc <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    // Either the policy routed away from warm, or warm is still over capacity:
                    // demote to cold regardless of the policy's routing.
                    return (ItemDestination.Cold, this.Move(item, ItemDestination.Cold, removedReason));
                }
            }
            else
            {
                // Queue was drained concurrently: undo the decrement, nothing moved.
                Interlocked.Increment(ref this.counter.warm);
                return (ItemDestination.Remove, 0);
            }
        }
705

706
        /// <summary>
        /// Cycle one item out of the cold queue, but only when cold has exceeded its capacity.
        /// </summary>
        /// <param name="count">The current count of items in the cold queue.</param>
        /// <returns>The destination of the cycled item and the resulting destination queue count.</returns>
        private (ItemDestination, int) CycleCold(int count)
        {
            // Within capacity: nothing to cycle.
            if (count <= this.capacity.Cold)
            {
                return (ItemDestination.Remove, 0);
            }

            return TryRemoveCold(ItemRemovedReason.Evicted);
        }
715

716
        /// <summary>
        /// Dequeue one item from cold; accessed items may be resurrected to warm (if warm has
        /// room), otherwise the item is removed from the cache entirely.
        /// Caller has already verified cold is over capacity.
        /// </summary>
        /// <param name="removedReason">The reason to report if the item is removed.</param>
        /// <returns>The destination the item was routed to and the resulting destination queue count.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) TryRemoveCold(ItemRemovedReason removedReason)
        {
            // Decrement first; restored below if the dequeue loses a race.
            Interlocked.Decrement(ref this.counter.cold);

            if (this.coldQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteCold(item);

                // Only promote back to warm when the policy says so AND warm has spare capacity;
                // otherwise the item is evicted.
                if (where == ItemDestination.Warm && Volatile.Read(ref this.counter.warm) <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    this.Move(item, ItemDestination.Remove, removedReason);
                    return (ItemDestination.Remove, 0);
                }
            }
            else
            {
                // Queue was drained concurrently: undo the decrement and report the restored count.
                return (ItemDestination.Cold, Interlocked.Increment(ref this.counter.cold));
            }
        }
740

741
        /// <summary>
        /// Demote one item from the tail of the warm queue to cold, unconditionally.
        /// Used during warmup when warm first overflows.
        /// </summary>
        /// <returns>The resulting cold queue count, or 0 if no item could be dequeued.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int LastWarmToCold()
        {
            // Decrement first; restored below if the dequeue loses a race.
            Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                return this.Move(item, ItemDestination.Cold, ItemRemovedReason.Evicted);
            }
            else
            {
                // Queue was drained concurrently: undo the decrement, nothing moved.
                Interlocked.Increment(ref this.counter.warm);
                return 0;
            }
        }
756

757
        /// <summary>
        /// If cold is over capacity, evict one item from the tail of the cold queue.
        /// </summary>
        /// <param name="coldCount">The current count of items in the cold queue.</param>
        /// <param name="removedReason">The reason to report for the removal.</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void ConstrainCold(int coldCount, ItemRemovedReason removedReason)
        {
            // Unlike the Cycle* methods, the decrement happens only after a successful dequeue.
            if (coldCount > this.capacity.Cold && this.coldQueue.TryDequeue(out var item))
            {
                Interlocked.Decrement(ref this.counter.cold);
                this.Move(item, ItemDestination.Remove, removedReason);
            }
        }
766

767
        /// <summary>
        /// Move an item to the given destination queue (clearing its accessed flag), or remove
        /// it from the cache when the destination is Remove.
        /// </summary>
        /// <param name="item">The item to move.</param>
        /// <param name="where">The destination queue, or Remove to evict.</param>
        /// <param name="removedReason">The reason to report if the item is removed.</param>
        /// <returns>The destination queue count after the move, or 0 for Remove.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int Move(I item, ItemDestination where, ItemRemovedReason removedReason)
        {
            // Reset the accessed flag so the item must be touched again to survive the next cycle.
            item.WasAccessed = false;

            switch (where)
            {
                case ItemDestination.Warm:
                    this.warmQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.warm);
                case ItemDestination.Cold:
                    this.coldQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.cold);
                case ItemDestination.Remove:

                    // Remove key AND value as a pair so a concurrent update (which replaces the
                    // value for the same key) is not clobbered; if the pair no longer matches,
                    // the removal is a no-op and OnRemove is not raised.
                    var kvp = new KeyValuePair<K, I>(item.Key, item);

#if NET6_0_OR_GREATER
                    if (this.dictionary.TryRemove(kvp))
#else
                    // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                    if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                    {
                        OnRemove(item.Key, item, removedReason);
                    }
                    break;
            }

            return 0;
        }
798

799
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The returned enumerator may be used concurrently with reads and writes, but it is
        /// not a moment-in-time snapshot: modifications made after <see cref="GetEnumerator"/>
        /// is called may be observed during enumeration.
        /// </remarks>
        IEnumerator IEnumerable.GetEnumerator() => ((ConcurrentLruCore<K, V, I, P, T>)this).GetEnumerator();
811

812
        /// <summary>
        /// Build the <see cref="CachePolicy"/> for this cache, exposing the time policy slot
        /// that matches the item policy type P.
        /// </summary>
        /// <param name="lru">The cache to create the policy for.</param>
        private static CachePolicy CreatePolicy(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            var proxy = new Proxy(lru);
            var bounded = new Optional<IBoundedPolicy>(proxy);

            // After-access expiry: expose the proxy via the after-access time policy slot.
            if (typeof(P) == typeof(AfterAccessPolicy<K, V>))
            {
                return new CachePolicy(bounded, Optional<ITimePolicy>.None(), new Optional<ITimePolicy>(proxy), Optional<IDiscreteTimePolicy>.None());
            }

            // IsAssignableFrom is a jit intrinsic https://github.com/dotnet/runtime/issues/4920
            if (typeof(IDiscreteItemPolicy<K, V>).IsAssignableFrom(typeof(P)))
            {
                return new CachePolicy(bounded, Optional<ITimePolicy>.None(), Optional<ITimePolicy>.None(), new Optional<IDiscreteTimePolicy>(new DiscreteExpiryProxy(lru)));
            }

            var timePolicy = lru.itemPolicy.CanDiscard()
                ? new Optional<ITimePolicy>(proxy)
                : Optional<ITimePolicy>.None();

            return new CachePolicy(bounded, timePolicy);
        }
829

830
        /// <summary>
        /// Create the metrics accessor; metrics are unavailable when telemetry is disabled
        /// (T is NoTelemetryPolicy).
        /// </summary>
        /// <param name="lru">The cache to create metrics for.</param>
        private static Optional<ICacheMetrics> CreateMetrics(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheMetrics>.None()
                : new Optional<ICacheMetrics>(new Proxy(lru));
        }
839

840
        /// <summary>
        /// Create the events accessor; events are unavailable when telemetry is disabled
        /// (T is NoTelemetryPolicy).
        /// </summary>
        /// <param name="lru">The cache to create events for.</param>
        private static Optional<ICacheEvents<K, V>> CreateEvents(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheEvents<K, V>>.None()
                : new Optional<ICacheEvents<K, V>>(new Proxy(lru));
        }
849

850
        // To get JIT optimizations, policies must be structs.
        // If the structs are returned directly via properties, they will be copied. Since  
        // telemetryPolicy is a mutable struct, copy is bad. One workaround is to store the 
        // state within the struct in an object. Since the struct points to the same object
        // it becomes immutable. However, this object is then somewhere else on the 
        // heap, which slows down the policies with hit counter logic in benchmarks. Likely
        // this approach keeps the structs data members in the same CPU cache line as the LRU.
        // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Upd = {Updated}, Evict = {Evicted}")]
#else
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Evict = {Evicted}")]
#endif
        private class Proxy : ICacheMetrics, ICacheEvents<K, V>, IBoundedPolicy, ITimePolicy
        {
            // The owning cache; all members delegate to it so the proxy holds no state of its own.
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public Proxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            public double HitRatio => lru.telemetryPolicy.HitRatio;

            public long Total => lru.telemetryPolicy.Total;

            public long Hits => lru.telemetryPolicy.Hits;

            public long Misses => lru.telemetryPolicy.Misses;

            public long Evicted => lru.telemetryPolicy.Evicted;

// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public long Updated => lru.telemetryPolicy.Updated;
#endif
            public int Capacity => lru.Capacity;

            public TimeSpan TimeToLive => lru.itemPolicy.TimeToLive;

            // Event add/remove forward to the telemetry policy so subscribers attach to the
            // cache's own event source.
            public event EventHandler<ItemRemovedEventArgs<K, V>> ItemRemoved
            {
                add { this.lru.telemetryPolicy.ItemRemoved += value; }
                remove { this.lru.telemetryPolicy.ItemRemoved -= value; }
            }

// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public event EventHandler<ItemUpdatedEventArgs<K, V>> ItemUpdated
            {
                add { this.lru.telemetryPolicy.ItemUpdated += value; }
                remove { this.lru.telemetryPolicy.ItemUpdated -= value; }
            }
#endif
            public void Trim(int itemCount)
            {
                lru.Trim(itemCount);
            }

            public void TrimExpired()
            {
                lru.TrimExpired();
            }
        }
914

915
        /// <summary>
        /// Time policy proxy used when P implements IDiscreteItemPolicy (per-item expiry).
        /// </summary>
        private class DiscreteExpiryProxy : IDiscreteTimePolicy
        {
            // The owning cache; all members delegate to it.
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public DiscreteExpiryProxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            public void TrimExpired()
            {
                lru.TrimExpired();
            }

            public bool TryGetTimeToExpire<TKey>(TKey key, out TimeSpan timeToLive)
            {
                if (key is K k && lru.dictionary.TryGetValue(k, out var item))
                {
                    // NOTE(review): the null-forgiving cast assumes every item stored under a
                    // discrete policy is a LongTickCountLruItem — verify against the factory that
                    // constructs items for IDiscreteItemPolicy; a mismatch would throw NRE here.
                    LongTickCountLruItem<K, V>? tickItem = item as LongTickCountLruItem<K, V>;
                    // Remaining lifetime = absolute expiry tick minus the current time.
                    timeToLive = (new Duration(tickItem!.TickCount) - Duration.SinceEpoch()).ToTimeSpan();
                    return true;
                }

                // Key is not of type K, or not present in the cache.
                timeToLive = default;
                return false;
            }
        }
942
    }
943
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc