• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

bitfaster / BitFaster.Caching / 8210641684

09 Mar 2024 12:23AM UTC coverage: 99.086% (-0.08%) from 99.164%
8210641684

push

github

web-flow
Bump CsvHelper from 31.0.0 to 31.0.2 (#560)

Bumps [CsvHelper](https://github.com/JoshClose/CsvHelper) from 31.0.0 to 31.0.2.
- [Commits](https://github.com/JoshClose/CsvHelper/commits)

---
updated-dependencies:
- dependency-name: CsvHelper
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

955 of 977 branches covered (97.75%)

Branch coverage included in aggregate %.

4141 of 4166 relevant lines covered (99.4%)

50143853.0 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

98.72
/BitFaster.Caching/Lru/ConcurrentLruCore.cs
1
using System;
2
using System.Collections;
3
using System.Collections.Concurrent;
4
using System.Collections.Generic;
5
using System.Diagnostics;
6
using System.Diagnostics.CodeAnalysis;
7
using System.Linq;
8
using System.Runtime.CompilerServices;
9
using System.Threading;
10
using System.Threading.Tasks;
11

12
namespace BitFaster.Caching.Lru
13
{
14
    /// <summary>
15
    /// A pseudo LRU based on the TU-Q eviction policy. The LRU list is composed of 3 segments: hot, warm and cold. 
16
    /// Cost of maintaining segments is amortized across requests. Items are only cycled when capacity is exceeded. 
17
    /// Pure read does not cycle items if all segments are within capacity constraints. There are no global locks. 
18
    /// On cache miss, a new item is added. Tail items in each segment are dequeued, examined, and are either enqueued 
19
    /// or discarded.
20
    /// The TU-Q scheme of hot, warm and cold is similar to that used in MemCached (https://memcached.org/blog/modern-lru/)
21
    /// and OpenBSD (https://flak.tedunangst.com/post/2Q-buffer-cache-algorithm), but does not use a background thread
22
    /// to maintain the internal queues.
23
    /// </summary>
24
    /// <remarks>
25
    /// Each segment has a capacity. When segment capacity is exceeded, items are moved as follows:
26
    /// <list type="number">
27
    ///   <item><description>New items are added to hot, WasAccessed = false.</description></item>
28
    ///   <item><description>When items are accessed, update WasAccessed = true.</description></item>
29
    ///   <item><description>When items are moved WasAccessed is set to false.</description></item>
30
    ///   <item><description>When hot is full, hot tail is moved to either Warm or Cold depending on WasAccessed.</description></item>
31
    ///   <item><description>When warm is full, warm tail is moved to warm head or cold depending on WasAccessed.</description></item>
32
    ///   <item><description>When cold is full, cold tail is moved to warm head or removed from dictionary depending on WasAccessed.</description></item>
33
    ///</list>
34
    /// </remarks>
35
    public class ConcurrentLruCore<K, V, I, P, T> : ICache<K, V>, IAsyncCache<K, V>, IEnumerable<KeyValuePair<K, V>>
36
        where K : notnull
37
        where I : LruItem<K, V>
38
        where P : struct, IItemPolicy<K, V, I>
39
        where T : struct, ITelemetryPolicy<K, V>
40
    {
41
        // Backing store: key -> item lookup. All reads and writes go through this dictionary.
        private readonly ConcurrentDictionary<K, I> dictionary;

        // The three TU-Q segments. New items enter hot; cycling moves them to warm/cold or out.
        private readonly ConcurrentQueue<I> hotQueue;
        private readonly ConcurrentQueue<I> warmQueue;
        private readonly ConcurrentQueue<I> coldQueue;

        // maintain count outside ConcurrentQueue, since ConcurrentQueue.Count holds a global lock
        private PaddedQueueCount counter;

        // Per-segment capacity partition (Hot/Warm/Cold).
        private readonly ICapacityPartition capacity;

        private readonly P itemPolicy;

        // false until the warm queue has filled once; Cycle uses a different scheme during warmup.
        private bool isWarm = false;

        /// <summary>
        /// The telemetry policy.
        /// </summary>
        /// <remarks>
        /// Since T is a struct, making it readonly will force the runtime to make defensive copies
        /// if mutate methods are called. Therefore, field must be mutable to maintain count.
        /// </remarks>
        protected T telemetryPolicy;
63

64
        /// <summary>
65
        /// Initializes a new instance of the ConcurrentLruCore class with the specified concurrencyLevel, capacity, equality comparer, item policy and telemetry policy.
66
        /// </summary>
67
        /// <param name="concurrencyLevel">The concurrency level.</param>
68
        /// <param name="capacity">The capacity.</param>
69
        /// <param name="comparer">The equality comparer.</param>
70
        /// <param name="itemPolicy">The item policy.</param>
71
        /// <param name="telemetryPolicy">The telemetry policy.</param>
72
        /// <exception cref="ArgumentNullException"></exception>
73
        public ConcurrentLruCore(
            int concurrencyLevel,
            ICapacityPartition capacity,
            IEqualityComparer<K> comparer,
            P itemPolicy,
            T telemetryPolicy)
        {
            if (capacity == null)
                Throw.ArgNull(ExceptionArgument.capacity);

            if (comparer == null)
                Throw.ArgNull(ExceptionArgument.comparer);

            // rejects invalid partitions (e.g. non-positive segment sizes) before any allocation
            capacity.Validate();
            this.capacity = capacity;

            this.hotQueue = new ConcurrentQueue<I>();
            this.warmQueue = new ConcurrentQueue<I>();
            this.coldQueue = new ConcurrentQueue<I>();

            // presize the dictionary so it does not need to resize while the cache fills to capacity
            int dictionaryCapacity = ConcurrentDictionarySize.Estimate(this.Capacity);

            this.dictionary = new ConcurrentDictionary<K, I>(concurrencyLevel, dictionaryCapacity, comparer);
            this.itemPolicy = itemPolicy;
            this.telemetryPolicy = telemetryPolicy;
            this.telemetryPolicy.SetEventSource(this);
        }
100

101
        // No lock count: https://arbel.net/2013/02/03/best-practices-for-using-concurrentdictionary/
        ///<inheritdoc/>
        // O(n): enumerates the dictionary so that discardable (e.g. expired) items are not counted.
        public int Count => this.dictionary.Count(i => !itemPolicy.ShouldDiscard(i.Value));
104

105
        ///<inheritdoc/>
        public int Capacity => this.capacity.Hot + this.capacity.Warm + this.capacity.Cold;

        ///<inheritdoc/>
        public Optional<ICacheMetrics> Metrics => CreateMetrics(this);

        ///<inheritdoc/>
        public Optional<ICacheEvents<K, V>> Events => CreateEvents(this);

        ///<inheritdoc/>
        public CachePolicy Policy => CreatePolicy(this);

        /// <summary>
        /// Gets the number of hot items.
        /// </summary>
        /// <remarks>May transiently include items already removed from the dictionary but not yet cycled out of the queue.</remarks>
        public int HotCount => Volatile.Read(ref this.counter.hot);

        /// <summary>
        /// Gets the number of warm items.
        /// </summary>
        /// <remarks>May transiently include items already removed from the dictionary but not yet cycled out of the queue.</remarks>
        public int WarmCount => Volatile.Read(ref this.counter.warm);

        /// <summary>
        /// Gets the number of cold items.
        /// </summary>
        /// <remarks>May transiently include items already removed from the dictionary but not yet cycled out of the queue.</remarks>
        public int ColdCount => Volatile.Read(ref this.counter.cold);

        /// <summary>
        /// Gets a collection containing the keys in the cache.
        /// </summary>
        public ICollection<K> Keys => this.dictionary.Keys;
136

137
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
138
        /// <returns>An enumerator for the cache.</returns>
139
        /// <remarks>
140
        /// The enumerator returned from the cache is safe to use concurrently with
141
        /// reads and writes, however it does not represent a moment-in-time snapshot.  
142
        /// The contents exposed through the enumerator may contain modifications
143
        /// made after <see cref="GetEnumerator"/> was called.
144
        /// </remarks>
145
        public IEnumerator<KeyValuePair<K, V>> GetEnumerator()
        {
            // Skip items the policy would discard (e.g. expired) so callers never
            // observe a value that TryGet would report as a miss.
            foreach (var kvp in this.dictionary)
            {
                if (itemPolicy.ShouldDiscard(kvp.Value))
                {
                    continue;
                }

                yield return new KeyValuePair<K, V>(kvp.Key, kvp.Value.Value);
            }
        }
155

156
        ///<inheritdoc/>
        public bool TryGet(K key, [MaybeNullWhen(false)] out V value)
        {
            // Fast miss: key not present at all.
            if (!dictionary.TryGetValue(key, out var item))
            {
                this.telemetryPolicy.IncrementMiss();
                value = default;
                return false;
            }

            // Present in the dictionary, but the policy may still treat it as a miss (discard).
            return GetOrDiscard(item, out value);
        }
168

169
        // AggressiveInlining forces the JIT to inline policy.ShouldDiscard(). For LRU policy 
        // the first branch is completely eliminated due to JIT time constant propagation.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool GetOrDiscard(I item, [MaybeNullWhen(false)] out V value)
        {
            if (this.itemPolicy.ShouldDiscard(item))
            {
                // policy rejected the item (e.g. expired): evict it now and report a miss
                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Evicted);
                this.telemetryPolicy.IncrementMiss();
                value = default;
                return false;
            }

            value = item.Value;
            // mark the item as accessed so cycling favors keeping it
            this.itemPolicy.Touch(item);
            this.telemetryPolicy.IncrementHit();
            return true;
        }
187

188
        // Attempt to insert a new item into the dictionary and the hot queue.
        // Returns false when the key is already present; in that case the freshly
        // created value never entered the cache and is disposed here.
        private bool TryAdd(K key, V value)
        {
            var candidate = this.itemPolicy.CreateItem(key, value);

            if (!this.dictionary.TryAdd(key, candidate))
            {
                // lost the race to another writer
                Disposer<V>.Dispose(candidate.Value);
                return false;
            }

            this.hotQueue.Enqueue(candidate);
            Cycle(Interlocked.Increment(ref counter.hot));
            return true;
        }
202

203
        ///<inheritdoc/>
        public V GetOrAdd(K key, Func<K, V> valueFactory)
        {
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = valueFactory(key);

                if (TryAdd(key, created))
                {
                    return created;
                }

                // add raced with another writer; loop and fetch the winning value
            }
        }
222

223
        /// <summary>
224
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
225
        /// existing value if the key already exists.
226
        /// </summary>
227
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
228
        /// <param name="key">The key of the element to add.</param>
229
        /// <param name="valueFactory">The factory function used to generate a value for the key.</param>
230
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
231
        /// <returns>The value for the key. This will be either the existing value for the key if the key is already 
232
        /// in the cache, or the new value if the key was not in the cache.</returns>
233
        public V GetOrAdd<TArg>(K key, Func<K, TArg, V> valueFactory, TArg factoryArgument)
        {
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = valueFactory(key, factoryArgument);

                if (TryAdd(key, created))
                {
                    return created;
                }

                // add raced with another writer; loop and fetch the winning value
            }
        }
251

252
        ///<inheritdoc/>
        public async ValueTask<V> GetOrAddAsync(K key, Func<K, Task<V>> valueFactory)
        {
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                // This is identical logic in ConcurrentDictionary.GetOrAdd method.
                var created = await valueFactory(key).ConfigureAwait(false);

                if (TryAdd(key, created))
                {
                    return created;
                }

                // add raced with another writer; loop and fetch the winning value
            }
        }
272

273
        /// <summary>
274
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
275
        /// existing value if the key already exists.
276
        /// </summary>
277
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
278
        /// <param name="key">The key of the element to add.</param>
279
        /// <param name="valueFactory">The factory function used to asynchronously generate a value for the key.</param>
280
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
281
        /// <returns>A task that represents the asynchronous GetOrAdd operation.</returns>
282
        public async ValueTask<V> GetOrAddAsync<TArg>(K key, Func<K, TArg, Task<V>> valueFactory, TArg factoryArgument)
        {
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = await valueFactory(key, factoryArgument).ConfigureAwait(false);

                if (TryAdd(key, created))
                {
                    return created;
                }

                // add raced with another writer; loop and fetch the winning value
            }
        }
300

301
        /// <summary>
302
        /// Attempts to remove the specified key value pair.
303
        /// </summary>
304
        /// <param name="item">The item to remove.</param>
305
        /// <returns>true if the item was removed successfully; otherwise, false.</returns>
306
        public bool TryRemove(KeyValuePair<K, V> item)
        {
            if (this.dictionary.TryGetValue(item.Key, out var existing))
            {
                // remove only when the stored value still equals the caller's value
                if (EqualityComparer<V>.Default.Equals(existing.Value, item.Value))
                {
                    var kvp = new KeyValuePair<K, I>(item.Key, existing);
#if NET6_0_OR_GREATER
                    // atomic conditional remove: succeeds only if the pair still matches
                    if (this.dictionary.TryRemove(kvp))
#else
                    // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                    if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                    {
                        OnRemove(item.Key, kvp.Value, ItemRemovedReason.Removed);
                        return true;
                    }
                }

                // it existed, but we couldn't remove - this means value was replaced after the TryGetValue (a race)
            }

            return false;
        }
330

331
        /// <summary>
332
        /// Attempts to remove and return the value that has the specified key.
333
        /// </summary>
334
        /// <param name="key">The key of the element to remove.</param>
335
        /// <param name="value">When this method returns, contains the object removed, or the default value of the value type if key does not exist.</param>
336
        /// <returns>true if the object was removed successfully; otherwise, false.</returns>
337
        public bool TryRemove(K key, [MaybeNullWhen(false)] out V value)
        {
            if (!this.dictionary.TryRemove(key, out var removed))
            {
                value = default;
                return false;
            }

            // mark removed / raise telemetry / dispose, then surface the value to the caller
            OnRemove(key, removed, ItemRemovedReason.Removed);
            value = removed.Value;
            return true;
        }
349

350
        ///<inheritdoc/>
        // Convenience overload: delegates to the out-param overload and discards the value.
        public bool TryRemove(K key) => TryRemove(key, out _);
355

356
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void OnRemove(K key, I item, ItemRemovedReason reason)
        {
            // Mark as not accessed, it will later be cycled out of the queues because it can never be fetched 
            // from the dictionary. Note: Hot/Warm/Cold count will reflect the removed item until it is cycled 
            // from the queue.
            item.WasAccessed = false;
            item.WasRemoved = true;

            this.telemetryPolicy.OnItemRemoved(key, item.Value, reason);

            // serialize dispose (common case dispose not thread safe)
            // TryUpdate also locks the item before replacing Value, so a concurrent
            // update cannot interleave with this dispose.
            lock (item)
            {
                Disposer<V>.Dispose(item.Value);
            }
        }
373

374
        ///<inheritdoc/>
        ///<remarks>Note: Calling this method does not affect LRU order.</remarks>
        public bool TryUpdate(K key, V value)
        {
            if (this.dictionary.TryGetValue(key, out var existing))
            {
                // lock the item to serialize with OnRemove, which disposes Value under the same lock
                lock (existing)
                {
                    if (!existing.WasRemoved)
                    {
                        V oldValue = existing.Value;
                        existing.Value = value;
                        this.itemPolicy.Update(existing);
// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
                        this.telemetryPolicy.OnItemUpdated(existing.Key, oldValue, existing.Value);
#endif
                        // the replaced value is owned by the cache; dispose it now
                        Disposer<V>.Dispose(oldValue);

                        return true;
                    }
                }
            }

            // not found, or found but already removed (lost a race with a concurrent remove)
            return false;
        }
400

401
        ///<inheritdoc/>
        ///<remarks>Note: Updates to existing items do not affect LRU order. Added items are at the top of the LRU.</remarks>
        public void AddOrUpdate(K key, V value)
        {
            for (; ; )
            {
                // first, try to update an existing item in place
                if (this.TryUpdate(key, value))
                {
                    return;
                }

                // not present: attempt to insert a new item into hot
                var newItem = this.itemPolicy.CreateItem(key, value);

                if (this.dictionary.TryAdd(key, newItem))
                {
                    this.hotQueue.Enqueue(newItem);
                    Cycle(Interlocked.Increment(ref counter.hot));
                    return;
                }

                // if both update and add failed there was a race, try again
            }
        }
426

427
        ///<inheritdoc/>
        public void Clear()
        {
            // serialize with Trim/TrimExpired, which take the same lock
            lock (this.dictionary)
            {
                // Use the queue counters rather than Count: this covers items removed from
                // the dictionary but still in the queues, and avoids the O(n) no-lock count.
                int queued = this.HotCount + this.WarmCount + this.ColdCount;
                this.TrimLiveItems(itemsRemoved: 0, queued, ItemRemovedReason.Cleared);
            }
        }
439

440
        /// <summary>
441
        /// Trim the specified number of items from the cache. Removes all discardable items per IItemPolicy.ShouldDiscard(), then 
442
        /// itemCount-discarded items in LRU order, if any.
443
        /// </summary>
444
        /// <param name="itemCount">The number of items to remove.</param>
445
        /// <returns>The number of items removed from the cache.</returns>
446
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is less than 0./</exception>
447
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is greater than capacity./</exception>
448
        /// <remarks>
449
        /// Note: Trim affects LRU order. Calling Trim resets the internal accessed status of items.
450
        /// </remarks>
451
        public void Trim(int itemCount)
        {
            int capacity = this.Capacity;

            if (itemCount < 1 || itemCount > capacity)
                Throw.ArgOutOfRange(nameof(itemCount), "itemCount must be greater than or equal to one, and less than the capacity of the cache.");

            // clamp itemCount to number of items actually in the cache
            itemCount = Math.Min(itemCount, this.HotCount + this.WarmCount + this.ColdCount);

            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // first scan each queue for discardable items and remove them immediately. Note this can remove > itemCount items.
                int itemsRemoved = this.itemPolicy.CanDiscard() ? TrimAllDiscardedItems() : 0;

                // then remove live items in LRU order until itemCount total have been removed
                TrimLiveItems(itemsRemoved, itemCount, ItemRemovedReason.Trimmed);
            }
        }
470

471
        // Remove every item the policy reports as discardable; a no-op for policies
        // that can never discard (CanDiscard() == false).
        private void TrimExpired()
        {
            if (!this.itemPolicy.CanDiscard())
            {
                return;
            }

            this.TrimAllDiscardedItems();
        }
478

479
        /// <summary>
480
        /// Trim discarded items from all queues.
481
        /// </summary>
482
        /// <returns>The number of items removed.</returns>
483
        // backcompat: make internal
484
        protected int TrimAllDiscardedItems()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // Scan one queue exactly once: dequeue up to the count observed at the start,
                // remove discardable items and re-enqueue the rest (relative order preserved).
                int RemoveDiscardableItems(ConcurrentQueue<I> q, ref int queueCounter)
                {
                    int itemsRemoved = 0;
                    int localCount = queueCounter;

                    for (int i = 0; i < localCount; i++)
                    {
                        if (q.TryDequeue(out var item))
                        {
                            if (this.itemPolicy.ShouldDiscard(item))
                            {
                                Interlocked.Decrement(ref queueCounter);
                                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Trimmed);
                                itemsRemoved++;
                            }
                            else
                            {
                                // keep live items by cycling them back to the queue head
                                q.Enqueue(item);
                            }
                        }
                    }

                    return itemsRemoved;
                }

                int coldRem = RemoveDiscardableItems(coldQueue, ref this.counter.cold);
                int warmRem = RemoveDiscardableItems(warmQueue, ref this.counter.warm);
                int hotRem = RemoveDiscardableItems(hotQueue, ref this.counter.hot);

                // warm lost items: revert to warmup cycling until warm refills
                if (warmRem > 0)
                {
                    Volatile.Write(ref this.isWarm, false);
                }

                return coldRem + warmRem + hotRem;
            }
        }
526

527
        private void TrimLiveItems(int itemsRemoved, int itemCount, ItemRemovedReason reason)
        {
            // When items are touched, they are moved to warm by cycling. Therefore, to guarantee 
            // that we can remove itemCount items, we must cycle (2 * capacity.Warm) + capacity.Hot times.
            // If clear is called during trimming, it would be possible to get stuck in an infinite
            // loop here. The warm + hot limit also guards against this case.
            int trimWarmAttempts = 0;
            int maxWarmHotAttempts = (this.capacity.Warm * 2) + this.capacity.Hot;

            while (itemsRemoved < itemCount && trimWarmAttempts < maxWarmHotAttempts)
            {
                if (Volatile.Read(ref this.counter.cold) > 0)
                {
                    // (Remove, 0) indicates the cold tail was actually removed rather than recycled
                    if (TryRemoveCold(reason) == (ItemDestination.Remove, 0))
                    {
                        itemsRemoved++;
                        // progress was made, so reset the stall guard
                        trimWarmAttempts = 0;
                    }

                    TrimWarmOrHot(reason);
                }
                else
                {
                    // cold is empty: drain warm/hot, bounded by maxWarmHotAttempts
                    TrimWarmOrHot(reason);
                    trimWarmAttempts++;
                }
            }

            // if warm fell below capacity, revert to warmup cycling until it refills
            if (Volatile.Read(ref this.counter.warm) < this.capacity.Warm)
            {
                Volatile.Write(ref this.isWarm, false);
            }
        }
560

561
        // Cycle one item out of warm if it has any; otherwise cycle one out of hot.
        // Warm is drained first so hot retains the most recently added items.
        private void TrimWarmOrHot(ItemRemovedReason reason)
        {
            if (Volatile.Read(ref this.counter.warm) > 0)
            {
                CycleWarmUnchecked(reason);
                return;
            }

            if (Volatile.Read(ref this.counter.hot) > 0)
            {
                CycleHotUnchecked(reason);
            }
        }
572

573
        private void Cycle(int hotCount)
        {
            if (isWarm)
            {
                (var dest, var count) = CycleHot(hotCount);

                // Follow the moved item through at most 3 queue transitions
                // (hot -> warm -> cold), stopping early once something is removed.
                int cycles = 0;
                while (cycles++ < 3 && dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm)
                    {
                        (dest, count) = CycleWarm(count);
                    }
                    else if (dest == ItemDestination.Cold)
                    {
                        (dest, count) = CycleCold(count);
                    }
                }

                // If nothing was removed yet, constrain the size of warm and cold by discarding the coldest item.
                if (dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm && count > this.capacity.Warm)
                    {
                        count = LastWarmToCold();
                    }

                    ConstrainCold(count, ItemRemovedReason.Evicted);
                }
            }
            else
            {
                // fill up the warm queue with new items until warm is full.
                // else during warmup the cache will only use the hot + cold queues until any item is requested twice.
                CycleDuringWarmup(hotCount);
            }
        }
610

611
        [MethodImpl(MethodImplOptions.NoInlining)]
        private void CycleDuringWarmup(int hotCount)
        {
            // do nothing until hot is full
            if (hotCount > this.capacity.Hot)
            {
                // speculatively decrement; compensated below if the dequeue fails
                Interlocked.Decrement(ref this.counter.hot);

                if (this.hotQueue.TryDequeue(out var item))
                {
                    // during warmup everything dequeued from hot is promoted to warm
                    int count = this.Move(item, ItemDestination.Warm, ItemRemovedReason.Evicted);

                    // if warm is now full, overflow to cold and mark as warm
                    if (count > this.capacity.Warm)
                    {
                        Volatile.Write(ref this.isWarm, true);
                        count = LastWarmToCold();
                        ConstrainCold(count, ItemRemovedReason.Evicted);
                    }
                }
                else
                {
                    // dequeue lost a race with another thread; undo the speculative decrement
                    Interlocked.Increment(ref this.counter.hot);
                }
            }
        }
637

638
        // Cycle the hot tail only when hot exceeds capacity; otherwise report
        // (Remove, 0), meaning nothing was moved.
        private (ItemDestination, int) CycleHot(int hotCount)
        {
            return hotCount > this.capacity.Hot
                ? CycleHotUnchecked(ItemRemovedReason.Evicted)
                : (ItemDestination.Remove, 0);
        }
647

648
        // Dequeues the tail hot item and routes it per the item policy.
        // Returns the destination and the new count of the destination queue.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleHotUnchecked(ItemRemovedReason removedReason)
        {
            // Speculatively decrement; undone below if the dequeue loses a race.
            Interlocked.Decrement(ref this.counter.hot);

            if (!this.hotQueue.TryDequeue(out var dequeued))
            {
                Interlocked.Increment(ref this.counter.hot);
                return (ItemDestination.Remove, 0);
            }

            var destination = this.itemPolicy.RouteHot(dequeued);
            return (destination, this.Move(dequeued, destination, removedReason));
        }
664

665
        // Cycles one item out of the warm queue when it is over capacity; otherwise a no-op.
        private (ItemDestination, int) CycleWarm(int count)
        {
            return count > this.capacity.Warm
                ? CycleWarmUnchecked(ItemRemovedReason.Evicted)
                : (ItemDestination.Remove, 0);
        }
674

675
        // Dequeues the tail warm item; accessed items stay warm (subject to capacity),
        // everything else is demoted to cold.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleWarmUnchecked(ItemRemovedReason removedReason)
        {
            int warmCount = Interlocked.Decrement(ref this.counter.warm);

            if (!this.warmQueue.TryDequeue(out var dequeued))
            {
                // Lost the race with another thread; undo the speculative decrement.
                Interlocked.Increment(ref this.counter.warm);
                return (ItemDestination.Remove, 0);
            }

            var destination = this.itemPolicy.RouteWarm(dequeued);

            // When the warm queue is full, we allow an overflow of 1 item before redirecting warm
            // items to cold. This only happens when hit rate is high, in which case we can consider
            // all items relatively equal in terms of which was least recently used.
            if (destination == ItemDestination.Warm && warmCount <= this.capacity.Warm)
            {
                return (ItemDestination.Warm, this.Move(dequeued, destination, removedReason));
            }

            return (ItemDestination.Cold, this.Move(dequeued, ItemDestination.Cold, removedReason));
        }
702

703
        // Cycles one item out of the cold queue when it is over capacity; otherwise a no-op.
        private (ItemDestination, int) CycleCold(int count)
        {
            return count > this.capacity.Cold
                ? TryRemoveCold(ItemRemovedReason.Evicted)
                : (ItemDestination.Remove, 0);
        }
712

713
        // Dequeues the tail cold item; accessed items are promoted back to warm when warm
        // has room, otherwise the item is removed from the cache entirely.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) TryRemoveCold(ItemRemovedReason removedReason)
        {
            Interlocked.Decrement(ref this.counter.cold);

            if (!this.coldQueue.TryDequeue(out var dequeued))
            {
                // Lost the race with another thread; undo the speculative decrement.
                return (ItemDestination.Cold, Interlocked.Increment(ref this.counter.cold));
            }

            var destination = this.itemPolicy.RouteCold(dequeued);

            if (destination == ItemDestination.Warm && Volatile.Read(ref this.counter.warm) <= this.capacity.Warm)
            {
                return (ItemDestination.Warm, this.Move(dequeued, destination, removedReason));
            }

            this.Move(dequeued, ItemDestination.Remove, removedReason);
            return (ItemDestination.Remove, 0);
        }
737

738
        // Demotes the oldest warm item to cold; returns the resulting cold queue count
        // (0 if the dequeue lost a race with another thread).
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int LastWarmToCold()
        {
            Interlocked.Decrement(ref this.counter.warm);

            if (!this.warmQueue.TryDequeue(out var dequeued))
            {
                // Lost the race with another thread; undo the speculative decrement.
                Interlocked.Increment(ref this.counter.warm);
                return 0;
            }

            return this.Move(dequeued, ItemDestination.Cold, ItemRemovedReason.Evicted);
        }
753

754
        // Evicts a single tail item from the cold queue (and the cache) when cold is over capacity.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void ConstrainCold(int coldCount, ItemRemovedReason removedReason)
        {
            if (coldCount <= this.capacity.Cold)
            {
                return;
            }

            if (this.coldQueue.TryDequeue(out var dequeued))
            {
                Interlocked.Decrement(ref this.counter.cold);
                this.Move(dequeued, ItemDestination.Remove, removedReason);
            }
        }
763

764
        /// <summary>
        /// Moves an item to its destination queue, or removes it from the cache.
        /// Returns the new count of the destination queue, or 0 for removal.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int Move(I item, ItemDestination where, ItemRemovedReason removedReason)
        {
            // Clear the access bit so the item must be touched again to earn its next promotion.
            item.WasAccessed = false;

            switch (where)
            {
                case ItemDestination.Warm:
                    this.warmQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.warm);
                case ItemDestination.Cold:
                    this.coldQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.cold);
                case ItemDestination.Remove:

                    var kvp = new KeyValuePair<K, I>(item.Key, item);

#if NET6_0_OR_GREATER
                    // Atomic conditional removal: only removes when the key still maps to this
                    // exact item, so a concurrent replacement under the same key is not lost.
                    if (this.dictionary.TryRemove(kvp))
#else
                    // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                    if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                    {
                        // Only the thread that actually removed the item raises the notification.
                        OnRemove(item.Key, item, removedReason);
                    }
                    break;
            }

            // Removal (and any unhandled destination) contributes no queue count.
            return 0;
        }
795

796
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The enumerator is safe to use concurrently with reads and writes, but it is not
        /// a moment-in-time snapshot: modifications made after <see cref="GetEnumerator"/>
        /// was called may be visible while enumerating.
        /// </remarks>
        IEnumerator IEnumerable.GetEnumerator() => ((ConcurrentLruCore<K, V, I, P, T>)this).GetEnumerator();
808

809
        // Builds the CachePolicy facade for the given LRU, selecting which time policies
        // are exposed based on the compile-time item policy P.
        private static CachePolicy CreatePolicy(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            var proxy = new Proxy(lru);
            var bounded = new Optional<IBoundedPolicy>(proxy);

            // After-access expiry exposes a sliding-expiration time policy.
            if (typeof(P) == typeof(AfterAccessPolicy<K, V>))
            {
                return new CachePolicy(bounded, Optional<ITimePolicy>.None(), new Optional<ITimePolicy>(proxy), Optional<IDiscreteTimePolicy>.None());
            }

            // IsAssignableFrom is a jit intrinsic https://github.com/dotnet/runtime/issues/4920
            if (typeof(IDiscreteItemPolicy<K, V>).IsAssignableFrom(typeof(P)))
            {
                return new CachePolicy(bounded, Optional<ITimePolicy>.None(), Optional<ITimePolicy>.None(), new Optional<IDiscreteTimePolicy>(new DiscreteExpiryProxy(lru)));
            }

            var expireAfterWrite = lru.itemPolicy.CanDiscard()
                ? new Optional<ITimePolicy>(proxy)
                : Optional<ITimePolicy>.None();

            return new CachePolicy(bounded, expireAfterWrite);
        }
826

827
        // Exposes metrics via a Proxy unless telemetry is compiled out via NoTelemetryPolicy.
        private static Optional<ICacheMetrics> CreateMetrics(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheMetrics>.None()
                : new Optional<ICacheMetrics>(new Proxy(lru));
        }
836

837
        // Exposes cache events via a Proxy unless telemetry is compiled out via NoTelemetryPolicy.
        private static Optional<ICacheEvents<K, V>> CreateEvents(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheEvents<K, V>>.None()
                : new Optional<ICacheEvents<K, V>>(new Proxy(lru));
        }
846

847
        // To get JIT optimizations, policies must be structs.
        // If the structs are returned directly via properties, they will be copied. Since  
        // telemetryPolicy is a mutable struct, copy is bad. One workaround is to store the 
        // state within the struct in an object. Since the struct points to the same object
        // it becomes immutable. However, this object is then somewhere else on the 
        // heap, which slows down the policies with hit counter logic in benchmarks. Likely
        // this approach keeps the structs data members in the same CPU cache line as the LRU.
        // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Upd = {Updated}, Evict = {Evicted}")]
#else
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Evict = {Evicted}")]
#endif
        // Forwards metrics, event subscriptions, and policy operations to the owning LRU
        // instance, so the mutable telemetryPolicy struct is never copied out (see note above).
        private class Proxy : ICacheMetrics, ICacheEvents<K, V>, IBoundedPolicy, ITimePolicy
        {
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public Proxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            // ICacheMetrics: all counters are read straight from the LRU's telemetry policy.
            public double HitRatio => lru.telemetryPolicy.HitRatio;

            public long Total => lru.telemetryPolicy.Total;

            public long Hits => lru.telemetryPolicy.Hits;

            public long Misses => lru.telemetryPolicy.Misses;

            public long Evicted => lru.telemetryPolicy.Evicted;

// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public long Updated => lru.telemetryPolicy.Updated;
#endif
            public int Capacity => lru.Capacity;

            public TimeSpan TimeToLive => lru.itemPolicy.TimeToLive;

            // ICacheEvents: subscriptions are forwarded to the telemetry policy's events.
            public event EventHandler<ItemRemovedEventArgs<K, V>> ItemRemoved
            {
                add { this.lru.telemetryPolicy.ItemRemoved += value; }
                remove { this.lru.telemetryPolicy.ItemRemoved -= value; }
            }

// backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public event EventHandler<ItemUpdatedEventArgs<K, V>> ItemUpdated
            {
                add { this.lru.telemetryPolicy.ItemUpdated += value; }
                remove { this.lru.telemetryPolicy.ItemUpdated -= value; }
            }
#endif
            // IBoundedPolicy: trim the cache down by the requested number of items.
            public void Trim(int itemCount)
            {
                lru.Trim(itemCount);
            }

            // ITimePolicy: remove all expired items.
            public void TrimExpired()
            {
                lru.TrimExpired();
            }
        }

912
        // Adapts an LRU configured with a per-item discrete expiry policy to the
        // IDiscreteTimePolicy interface (only created by CreatePolicy when P
        // implements IDiscreteItemPolicy).
        private class DiscreteExpiryProxy : IDiscreteTimePolicy
        {
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public DiscreteExpiryProxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            /// <summary>Removes all expired items from the underlying cache.</summary>
            public void TrimExpired()
            {
                lru.TrimExpired();
            }

            /// <summary>
            /// Gets the remaining time until the item with the given key expires.
            /// Returns false when the key is not of type K or is not present.
            /// </summary>
            public bool TryGetTimeToExpire<TKey>(TKey key, out TimeSpan timeToLive)
            {
                if (key is K k && lru.dictionary.TryGetValue(k, out var item))
                {
                    // This proxy is only created when items carry a tick count (see CreatePolicy).
                    // A direct cast fails loudly with InvalidCastException on a mismatch, instead of
                    // the original 'as' + null-forgiving '!' which would throw NullReferenceException.
                    var tickItem = (LongTickCountLruItem<K, V>)(object)item;
                    timeToLive = (new Duration(tickItem.TickCount) - Duration.SinceEpoch()).ToTimeSpan();
                    return true;
                }

                timeToLive = default;
                return false;
            }
        }
    }
940
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc