• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

bitfaster / BitFaster.Caching / 24416298600

14 Apr 2026 06:33PM UTC coverage: 99.034% (+0.03%) from 99.004%
24416298600

push

github

web-flow
Improved error message for soak failure (#764)

1270 of 1304 branches covered (97.39%)

Branch coverage included in aggregate %.

5391 of 5422 relevant lines covered (99.43%)

58372741.05 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

99.4
/BitFaster.Caching/Lru/ConcurrentLruCore.cs
1
using System;
2
using System.Collections;
3
using System.Collections.Concurrent;
4
using System.Collections.Generic;
5
using System.Diagnostics;
6
using System.Diagnostics.CodeAnalysis;
7
using System.Linq;
8
using System.Runtime.CompilerServices;
9
using System.Threading;
10
using System.Threading.Tasks;
11

12
namespace BitFaster.Caching.Lru
13
{
14
    /// <summary>
15
    /// A pseudo LRU based on the TU-Q eviction policy. The LRU list is composed of 3 segments: hot, warm and cold. 
16
    /// Cost of maintaining segments is amortized across requests. Items are only cycled when capacity is exceeded. 
17
    /// Pure read does not cycle items if all segments are within capacity constraints. There are no global locks. 
18
    /// On cache miss, a new item is added. Tail items in each segment are dequeued, examined, and are either enqueued 
19
    /// or discarded.
20
    /// The TU-Q scheme of hot, warm and cold is similar to that used in MemCached (https://memcached.org/blog/modern-lru/)
21
    /// and OpenBSD (https://flak.tedunangst.com/post/2Q-buffer-cache-algorithm), but does not use a background thread
22
    /// to maintain the internal queues.
23
    /// </summary>
24
    /// <remarks>
25
    /// Each segment has a capacity. When segment capacity is exceeded, items are moved as follows:
26
    /// <list type="number">
27
    ///   <item><description>New items are added to hot, WasAccessed = false.</description></item>
28
    ///   <item><description>When items are accessed, update WasAccessed = true.</description></item>
29
    ///   <item><description>When items are moved WasAccessed is set to false.</description></item>
30
    ///   <item><description>When hot is full, hot tail is moved to either Warm or Cold depending on WasAccessed.</description></item>
31
    ///   <item><description>When warm is full, warm tail is moved to warm head or cold depending on WasAccessed.</description></item>
32
    ///   <item><description>When cold is full, cold tail is moved to warm head or removed from dictionary depending on WasAccessed.</description></item>
33
    ///</list>
34
    /// </remarks>
35
    public class ConcurrentLruCore<K, V, I, P, T> : ICacheExt<K, V>, IAsyncCacheExt<K, V>, IEnumerable<KeyValuePair<K, V>>
36
        where K : notnull
37
        where I : LruItem<K, V>
38
        where P : struct, IItemPolicy<K, V, I>
39
        where T : struct, ITelemetryPolicy<K, V>
40
    {
41
        private readonly ConcurrentDictionary<K, I> dictionary;
42

43
        private readonly ConcurrentQueue<I> hotQueue;
44
        private readonly ConcurrentQueue<I> warmQueue;
45
        private readonly ConcurrentQueue<I> coldQueue;
46

47
        // maintain count outside ConcurrentQueue, since ConcurrentQueue.Count holds a global lock
48
        private PaddedQueueCount counter;
49

50
        private readonly ICapacityPartition capacity;
51

52
        private readonly P itemPolicy;
53
        private bool isWarm = false;
2,593✔
54

55
        /// <summary>
56
        /// The telemetry policy.
57
        /// </summary>
58
        /// <remarks>
59
        /// Since T is a struct, making it readonly will force the runtime to make defensive copies
60
        /// if mutate methods are called. Therefore, field must be mutable to maintain count.
61
        /// </remarks>
62
        protected T telemetryPolicy;
63

64
        /// <summary>
65
        /// Initializes a new instance of the ConcurrentLruCore class with the specified concurrencyLevel, capacity, equality comparer, item policy and telemetry policy.
66
        /// </summary>
67
        /// <param name="concurrencyLevel">The concurrency level.</param>
68
        /// <param name="capacity">The capacity.</param>
69
        /// <param name="comparer">The equality comparer.</param>
70
        /// <param name="itemPolicy">The item policy.</param>
71
        /// <param name="telemetryPolicy">The telemetry policy.</param>
72
        /// <exception cref="ArgumentNullException"></exception>
73
        public ConcurrentLruCore(
            int concurrencyLevel,
            ICapacityPartition capacity,
            IEqualityComparer<K> comparer,
            P itemPolicy,
            T telemetryPolicy)
        {
            if (capacity == null)
                Throw.ArgNull(ExceptionArgument.capacity);

            if (comparer == null)
                Throw.ArgNull(ExceptionArgument.comparer);

            // Validate before storing: an invalid partition must not leave a partially constructed cache.
            capacity.Validate();
            this.capacity = capacity;

            this.hotQueue = new ConcurrentQueue<I>();
            this.warmQueue = new ConcurrentQueue<I>();
            this.coldQueue = new ConcurrentQueue<I>();

            // Pre-size the dictionary for total capacity to reduce resize cost as the cache fills.
            int dictionaryCapacity = ConcurrentDictionarySize.Estimate(this.Capacity);

            this.dictionary = new ConcurrentDictionary<K, I>(concurrencyLevel, dictionaryCapacity, comparer);
            this.itemPolicy = itemPolicy;
            this.telemetryPolicy = telemetryPolicy;
            this.telemetryPolicy.SetEventSource(this);
        }
100

101
        // No lock count: https://arbel.net/2013/02/03/best-practices-for-using-concurrentdictionary/
102
        ///<inheritdoc/>
103
        // Items the policy flags as discardable (e.g. expired but not yet evicted) are excluded,
        // so Count agrees with what TryGet would report. O(n) scan, but avoids the global lock
        // taken by ConcurrentDictionary.Count.
        public int Count => this.dictionary.Count(i => !itemPolicy.ShouldDiscard(i.Value));
104

105
        ///<inheritdoc/>
106
        ///<inheritdoc/>
        public int Capacity
        {
            // Total capacity is the sum of the three queue partitions.
            get { return this.capacity.Hot + this.capacity.Warm + this.capacity.Cold; }
        }
107

108
        ///<inheritdoc/>
109
        public Optional<ICacheMetrics> Metrics
        {
            // Delegates to the factory method; may be empty depending on the telemetry policy.
            get { return CreateMetrics(this); }
        }
110

111
        ///<inheritdoc/>
112
        public Optional<ICacheEvents<K, V>> Events
        {
            // Delegates to the factory method; may be empty depending on the telemetry policy.
            get { return CreateEvents(this); }
        }
113

114
        ///<inheritdoc/>
115
        public CachePolicy Policy
        {
            // Delegates to the factory method that assembles the policy view of this cache.
            get { return CreatePolicy(this); }
        }
116

117
        /// <summary>
118
        /// Gets the number of hot items.
119
        /// </summary>
120
        // Volatile read of the queue-local counter; counts are maintained outside
        // ConcurrentQueue because ConcurrentQueue.Count takes a global lock.
        public int HotCount => Volatile.Read(ref this.counter.hot);
121

122
        /// <summary>
123
        /// Gets the number of warm items.
124
        /// </summary>
125
        // Volatile read of the queue-local counter; counts are maintained outside
        // ConcurrentQueue because ConcurrentQueue.Count takes a global lock.
        public int WarmCount => Volatile.Read(ref this.counter.warm);
126

127
        /// <summary>
128
        /// Gets the number of cold items.
129
        /// </summary>
130
        // Volatile read of the queue-local counter; counts are maintained outside
        // ConcurrentQueue because ConcurrentQueue.Count takes a global lock.
        public int ColdCount => Volatile.Read(ref this.counter.cold);
131

132
        /// <summary>
133
        /// Gets a collection containing the keys in the cache.
134
        /// </summary>
135
        public ICollection<K> Keys
        {
            // Snapshot of the backing dictionary's keys; includes items pending discard.
            get { return this.dictionary.Keys; }
        }
136

137
#if NET9_0_OR_GREATER
138
        /// <inheritdoc/>
139
        // Exposes the comparer the backing dictionary was constructed with.
        public IEqualityComparer<K> Comparer => this.dictionary.Comparer;
140
#endif
141

142
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
143
        /// <returns>An enumerator for the cache.</returns>
144
        /// <remarks>
145
        /// The enumerator returned from the cache is safe to use concurrently with
146
        /// reads and writes, however it does not represent a moment-in-time snapshot.  
147
        /// The contents exposed through the enumerator may contain modifications
148
        /// made after <see cref="GetEnumerator"/> was called.
149
        /// </remarks>
150
        public IEnumerator<KeyValuePair<K, V>> GetEnumerator()
        {
            foreach (var kvp in this.dictionary)
            {
                // Skip items the policy would discard (e.g. expired), so enumeration
                // agrees with what TryGet would return. Unlike TryGet, enumeration does
                // not evict the discardable item.
                if (!itemPolicy.ShouldDiscard(kvp.Value))
                {
                    yield return new KeyValuePair<K, V>(kvp.Key, kvp.Value.Value);
                }
            }
        }
160

161
        ///<inheritdoc/>
162
        public bool TryGet(K key, [MaybeNullWhen(false)] out V value)
        {
            if (dictionary.TryGetValue(key, out var item))
            {
                // Found in the dictionary; the policy decides whether the item is still
                // valid or must be discarded (counted as a miss).
                return GetOrDiscard(item, out value);
            }

            // Not present: record a miss.
            value = default;
            this.telemetryPolicy.IncrementMiss();
            return false;
        }
173

174
        // AggressiveInlining forces the JIT to inline policy.ShouldDiscard(). For LRU policy 
        // the first branch is completely eliminated due to JIT time constant propagation.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool GetOrDiscard(I item, [MaybeNullWhen(false)] out V value)
        {
            if (this.itemPolicy.ShouldDiscard(item))
            {
                // Item is stale per policy: evict it now and report the lookup as a miss.
                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Evicted);
                this.telemetryPolicy.IncrementMiss();
                value = default;
                return false;
            }

            value = item.Value;

            // Touch updates the item's accessed state per the item policy, influencing
            // where the item is routed when queues are cycled.
            this.itemPolicy.Touch(item);
            this.telemetryPolicy.IncrementHit();
            return true;
        }
193

194
        // Attempts to insert a new item; new items always enter the hot queue.
        // Returns false if the key was concurrently added by another thread.
        private bool TryAdd(K key, V value)
        {
            var newItem = this.itemPolicy.CreateItem(key, value);

            if (this.dictionary.TryAdd(key, newItem))
            {
                this.hotQueue.Enqueue(newItem);
                // Increment hot count and cycle queues if capacity is now exceeded.
                Cycle(Interlocked.Increment(ref counter.hot));
                return true;
            }

            // Lost the race to another writer: dispose the value we created but never published.
            Disposer<V>.Dispose(newItem.Value);
            return false;
        }
208

209
        ///<inheritdoc/>
210
        public V GetOrAdd(K key, Func<K, V> valueFactory)
        {
            // Retry until we either observe an existing value or win the race to insert ours.
            while (true)
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key,
                // but the first write to the dictionary wins.
                var created = valueFactory(key);

                if (TryAdd(key, created))
                {
                    return created;
                }
            }
        }
228

229
        /// <summary>
230
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
231
        /// existing value if the key already exists.
232
        /// </summary>
233
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
234
        /// <param name="key">The key of the element to add.</param>
235
        /// <param name="valueFactory">The factory function used to generate a value for the key.</param>
236
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
237
        /// <returns>The value for the key. This will be either the existing value for the key if the key is already 
238
        /// in the cache, or the new value if the key was not in the cache.</returns>
239
        public V GetOrAdd<TArg>(K key, Func<K, TArg, V> valueFactory, TArg factoryArgument)
        {
            // Retry until we either observe an existing value or win the race to insert ours.
            while (true)
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key,
                // but the first write to the dictionary wins.
                var created = valueFactory(key, factoryArgument);

                if (TryAdd(key, created))
                {
                    return created;
                }
            }
        }
257

258
        ///<inheritdoc/>
259
        public async ValueTask<V> GetOrAddAsync(K key, Func<K, Task<V>> valueFactory)
        {
            // Retry until we either observe an existing value or win the race to insert ours.
            while (true)
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the
                // first write to the dictionary wins. This is identical logic to the
                // ConcurrentDictionary.GetOrAdd method.
                var created = await valueFactory(key).ConfigureAwait(false);

                if (TryAdd(key, created))
                {
                    return created;
                }
            }
        }
278

279
        /// <summary>
280
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
281
        /// existing value if the key already exists.
282
        /// </summary>
283
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
284
        /// <param name="key">The key of the element to add.</param>
285
        /// <param name="valueFactory">The factory function used to asynchronously generate a value for the key.</param>
286
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
287
        /// <returns>A task that represents the asynchronous GetOrAdd operation.</returns>
288
        public async ValueTask<V> GetOrAddAsync<TArg>(K key, Func<K, TArg, Task<V>> valueFactory, TArg factoryArgument)
        {
            // Retry until we either observe an existing value or win the race to insert ours.
            while (true)
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key,
                // but the first write to the dictionary wins.
                var created = await valueFactory(key, factoryArgument).ConfigureAwait(false);

                if (TryAdd(key, created))
                {
                    return created;
                }
            }
        }
306

307
        /// <summary>
308
        /// Attempts to remove the specified key value pair.
309
        /// </summary>
310
        /// <param name="item">The item to remove.</param>
311
        /// <returns>true if the item was removed successfully; otherwise, false.</returns>
312
        public bool TryRemove(KeyValuePair<K, V> item)
        {
            if (this.dictionary.TryGetValue(item.Key, out var existing))
            {
                // Lock the item to serialize with value updates (TryUpdateValue takes the
                // same lock), so the value comparison and conditional removal below see a
                // consistent value.
                lock (existing)
                {
                    if (EqualityComparer<V>.Default.Equals(existing.Value, item.Value))
                    {
                        var kvp = new KeyValuePair<K, I>(item.Key, existing);
#if NET6_0_OR_GREATER
                        if (this.dictionary.TryRemove(kvp))
#else
                        // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                        if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                        {
                            OnRemove(item.Key, kvp.Value, ItemRemovedReason.Removed);
                            return true;
                        }
                    }
                }

                // it existed, but we couldn't remove - this means value was replaced after the TryGetValue (a race)
            }

            return false;
        }
339

340
        /// <summary>
341
        /// Attempts to remove and return the value that has the specified key.
342
        /// </summary>
343
        /// <param name="key">The key of the element to remove.</param>
344
        /// <param name="value">When this method returns, contains the object removed, or the default value of the value type if key does not exist.</param>
345
        /// <returns>true if the object was removed successfully; otherwise, false.</returns>
346
        public bool TryRemove(K key, [MaybeNullWhen(false)] out V value)
        {
            // Guard clause: key absent, nothing to do.
            if (!this.dictionary.TryRemove(key, out var removed))
            {
                value = default;
                return false;
            }

            // Removed from the dictionary; mark the item and dispose its value.
            OnRemove(key, removed, ItemRemovedReason.Removed);
            value = removed.Value;
            return true;
        }
358

359
        ///<inheritdoc/>
360
        // Convenience overload: remove without returning the value.
        public bool TryRemove(K key) => TryRemove(key, out _);
364

365
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void OnRemove(K key, I item, ItemRemovedReason reason)
        {
            // Mark as not accessed, it will later be cycled out of the queues because it can never be fetched 
            // from the dictionary. Note: Hot/Warm/Cold count will reflect the removed item until it is cycled 
            // from the queue.
            item.WasAccessed = false;
            item.WasRemoved = true;

            this.telemetryPolicy.OnItemRemoved(key, item.Value, reason);

            // serialize dispose (common case dispose not thread safe); the same item lock
            // is taken by the update path, so dispose cannot race a value swap
            lock (item)
            {
                Disposer<V>.Dispose(item.Value);
            }
        }
382

383
        ///<inheritdoc/>
384
        ///<remarks>Note: Calling this method does not affect LRU order.</remarks>
385
        public bool TryUpdate(K key, V value)
        {
            // Succeeds only when the key is present and the item has not already been removed.
            return this.dictionary.TryGetValue(key, out var existing)
                && this.TryUpdateValue(existing, value);
        }
394

395
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool TryUpdateValue(I existing, V value)
        {
            // Lock the item to serialize with remove/dispose (see OnRemove), so we never
            // swap the value of an item whose value is concurrently being disposed.
            lock (existing)
            {
                if (!existing.WasRemoved)
                {
                    V oldValue = existing.Value;

                    existing.Value = value;

                    this.itemPolicy.Update(existing);
                    // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
                    this.telemetryPolicy.OnItemUpdated(existing.Key, oldValue, existing.Value);
#endif
                    // dispose the replaced value while still holding the item lock
                    Disposer<V>.Dispose(oldValue);

                    return true;
                }
            }

            // Item was removed before we acquired the lock; caller should treat as not found.
            return false;
        }
419

420
        ///<inheritdoc/>
421
        ///<remarks>Note: Updates to existing items do not affect LRU order. Added items are at the top of the LRU.</remarks>
422
        public void AddOrUpdate(K key, V value)
        {
            while (true)
            {
                // first, try to update
                if (this.TryUpdate(key, value))
                {
                    return;
                }

                // then try add
                var newItem = this.itemPolicy.CreateItem(key, value);

                if (this.dictionary.TryAdd(key, newItem))
                {
                    // newly added items always start in the hot queue
                    this.hotQueue.Enqueue(newItem);
                    Cycle(Interlocked.Increment(ref counter.hot));
                    return;
                }

                // if both update and add failed there was a race, try again
                // NOTE(review): unlike TryAdd, the caller-supplied value is not disposed on a
                // failed add here because it is retried (and may be stored) next iteration.
            }
        }
445

446
        ///<inheritdoc/>
447
        public void Clear()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // evaluate queue count, remove everything including items removed from the dictionary but
                // not the queues. This also avoids the expensive o(n) no lock count, or locking the dictionary.
                // Queue counts may exceed the live item count because removed items linger in the
                // queues until cycled out.
                int queueCount = this.HotCount + this.WarmCount + this.ColdCount;
                this.TrimLiveItems(itemsRemoved: 0, queueCount, ItemRemovedReason.Cleared);
            }
        }
458

459
        /// <summary>
        /// Trim the specified number of items from the cache. Removes all discardable items per IItemPolicy.ShouldDiscard(), then 
        /// itemCount-discarded items in LRU order, if any.
        /// </summary>
        /// <param name="itemCount">The number of items to remove.</param>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is less than 1.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is greater than the capacity of the cache.</exception>
        /// <remarks>
        /// Note: Trim affects LRU order. Calling Trim resets the internal accessed status of items.
        /// </remarks>
        public void Trim(int itemCount)
        {
            int capacity = this.Capacity;

            if (itemCount < 1 || itemCount > capacity)
                Throw.ArgOutOfRange(nameof(itemCount), "itemCount must be greater than or equal to one, and less than the capacity of the cache.");

            // clamp itemCount to number of items actually in the cache
            itemCount = Math.Min(itemCount, this.HotCount + this.WarmCount + this.ColdCount);

            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // first scan each queue for discardable items and remove them immediately. Note this can remove > itemCount items.
                int itemsRemoved = TrimAllDiscardedItems();

                TrimLiveItems(itemsRemoved, itemCount, ItemRemovedReason.Trimmed);
            }
        }
489

490
        // Removes discardable (e.g. expired) items from all queues, when the item policy
        // supports discarding.
        private void TrimExpired()
        {
            if (this.itemPolicy.CanDiscard())
            {
                // don't overlap Clear/Trim/TrimExpired
                lock (this.dictionary)
                {
                    this.TrimAllDiscardedItems();
                }
            }
        }
500

501
        /// <summary>
502
        /// Trim discarded items from all queues.
503
        /// </summary>
504
        /// <returns>The number of items removed.</returns>
505
        // backcompat: make internal
506
        protected int TrimAllDiscardedItems()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // Makes one pass over a queue (bounded by the count observed at entry) and
                // removes every item the policy says to discard. Live items are re-enqueued
                // at the tail; items already removed from the dictionary are dropped.
                int RemoveDiscardableItems(ConcurrentQueue<I> q, ref int queueCounter)
                {
                    int itemsRemoved = 0;
                    int localCount = queueCounter;

                    for (int i = 0; i < localCount; i++)
                    {
                        if (q.TryDequeue(out var item))
                        {
                            if (this.itemPolicy.ShouldDiscard(item))
                            {
                                Interlocked.Decrement(ref queueCounter);
                                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Trimmed);
                                itemsRemoved++;
                            }
                            else if (item.WasRemoved)
                            {
                                // already removed from the dictionary; just reconcile the queue count
                                Interlocked.Decrement(ref queueCounter);
                            }
                            else
                            {
                                // still live: re-enqueue at the tail
                                q.Enqueue(item);
                            }
                        }
                    }

                    return itemsRemoved;
                }

                int coldRem = RemoveDiscardableItems(coldQueue, ref this.counter.cold);
                int warmRem = RemoveDiscardableItems(warmQueue, ref this.counter.warm);
                int hotRem = RemoveDiscardableItems(hotQueue, ref this.counter.hot);

                // warm is no longer full: re-enter the warmup fill path so new items can
                // populate warm again
                if (warmRem > 0)
                {
                    Volatile.Write(ref this.isWarm, false);
                }

                return coldRem + warmRem + hotRem;
            }
        }
552

553
        private void TrimLiveItems(int itemsRemoved, int itemCount, ItemRemovedReason reason)
        {
            // When items are touched, they are moved to warm by cycling. Therefore, to guarantee 
            // that we can remove itemCount items, we must cycle (2 * capacity.Warm) + capacity.Hot times.
            // If clear is called during trimming, it would be possible to get stuck in an infinite
            // loop here. The warm + hot limit also guards against this case.
            int trimWarmAttempts = 0;
            int maxWarmHotAttempts = (this.capacity.Warm * 2) + this.capacity.Hot;

            while (itemsRemoved < itemCount && trimWarmAttempts < maxWarmHotAttempts)
            {
                if (Volatile.Read(ref this.counter.cold) > 0)
                {
                    // NOTE(review): (ItemDestination.Remove, 0) appears to signal a successful
                    // removal from cold — TryRemoveCold is defined elsewhere; confirm there.
                    if (TryRemoveCold(reason) == (ItemDestination.Remove, 0))
                    {
                        itemsRemoved++;
                        trimWarmAttempts = 0;
                    }
                    else
                    {
                        // the cold item was routed elsewhere; fall back to trimming warm/hot
                        TrimWarmOrHot(reason);
                    }
                }
                else
                {
                    TrimWarmOrHot(reason);
                    trimWarmAttempts++;
                }
            }

            // warm is no longer full, so exit the warm state and resume filling warm with
            // new items during cycling
            if (Volatile.Read(ref this.counter.warm) < this.capacity.Warm)
            {
                Volatile.Write(ref this.isWarm, false);
            }
        }
588

589
        private void TrimWarmOrHot(ItemRemovedReason reason)
        {
            // Prefer draining warm before hot; hot is only cycled once warm is empty.
            if (Volatile.Read(ref this.counter.warm) > 0)
            {
                CycleWarmUnchecked(reason);
            }
            else if (Volatile.Read(ref this.counter.hot) > 0)
            {
                CycleHotUnchecked(reason);
            }
        }
600

601
        // Called after each insert with the new hot count; moves tail items between the
        // hot/warm/cold segments to keep each within capacity.
        private void Cycle(int hotCount)
        {
            if (isWarm)
            {
                (var dest, var count) = CycleHot(hotCount);

                // Follow the displaced item through at most 3 moves (hot -> warm -> cold -> out);
                // stop early once something is removed.
                int cycles = 0;
                while (cycles++ < 3 && dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm)
                    {
                        (dest, count) = CycleWarm(count);
                    }
                    else if (dest == ItemDestination.Cold)
                    {
                        (dest, count) = CycleCold(count);
                    }
                }

                // If nothing was removed yet, constrain the size of warm and cold by discarding the coldest item.
                if (dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm && count > this.capacity.Warm)
                    {
                        count = LastWarmToCold();
                    }

                    ConstrainCold(count, ItemRemovedReason.Evicted);
                }
            }
            else
            {
                // fill up the warm queue with new items until warm is full.
                // else during warmup the cache will only use the hot + cold queues until any item is requested twice.
                CycleDuringWarmup(hotCount);
            }
        }
638

639
        [MethodImpl(MethodImplOptions.NoInlining)]
        private void CycleDuringWarmup(int hotCount)
        {
            // do nothing until hot is full
            if (hotCount > this.capacity.Hot)
            {
                // speculative decrement; restored below if the dequeue races to empty
                Interlocked.Decrement(ref this.counter.hot);

                if (this.hotQueue.TryDequeue(out var item))
                {
                    // special case: removed during warmup
                    if (item.WasRemoved)
                    {
                        return;
                    }

                    // during warmup everything cycled out of hot moves to warm
                    int count = this.Move(item, ItemDestination.Warm, ItemRemovedReason.Evicted);

                    // if warm is now full, overflow to cold and mark as warm
                    if (count > this.capacity.Warm)
                    {
                        Volatile.Write(ref this.isWarm, true);
                        count = LastWarmToCold();
                        ConstrainCold(count, ItemRemovedReason.Evicted);
                    }
                }
                else
                {
                    // queue was empty: undo the speculative decrement
                    Interlocked.Increment(ref this.counter.hot);
                }
            }
        }
671

672
        /// <summary>
        /// Cycle one item out of the hot queue, but only when hot exceeds its capacity.
        /// </summary>
        /// <param name="hotCount">The current number of items in the hot queue.</param>
        /// <returns>The destination the cycled item was moved to and the resulting count of that destination queue; (Remove, 0) when hot was within capacity.</returns>
        private (ItemDestination, int) CycleHot(int hotCount)
        {
            // Within capacity: nothing moves.
            return hotCount <= this.capacity.Hot
                ? (ItemDestination.Remove, 0)
                : CycleHotUnchecked(ItemRemovedReason.Evicted);
        }
681

682
        /// <summary>
        /// Dequeue one item from hot and move it to the destination chosen by the item policy,
        /// without checking whether hot is over capacity.
        /// </summary>
        /// <param name="removedReason">The reason reported if the item ends up being removed.</param>
        /// <returns>The destination the item was moved to and the resulting count of that destination queue.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleHotUnchecked(ItemRemovedReason removedReason)
        {
            // Optimistically decrement before dequeue; restored below if the queue is empty.
            Interlocked.Decrement(ref this.counter.hot);

            if (this.hotQueue.TryDequeue(out var item))
            {
                // The policy decides whether the item is promoted to warm, demoted to cold, or removed.
                var where = this.itemPolicy.RouteHot(item);
                return (where, this.Move(item, where, removedReason));
            }
            else
            {
                // Dequeue raced with another thread; undo the optimistic decrement.
                Interlocked.Increment(ref this.counter.hot);
                return (ItemDestination.Remove, 0);
            }
        }
698

699
        /// <summary>
        /// Cycle one item out of the warm queue, but only when warm exceeds its capacity.
        /// </summary>
        /// <param name="count">The current number of items in the warm queue.</param>
        /// <returns>The destination the cycled item was moved to and the resulting count of that destination queue; (Remove, 0) when warm was within capacity.</returns>
        private (ItemDestination, int) CycleWarm(int count)
        {
            // Within capacity: nothing moves.
            return count <= this.capacity.Warm
                ? (ItemDestination.Remove, 0)
                : CycleWarmUnchecked(ItemRemovedReason.Evicted);
        }
708

709
        /// <summary>
        /// Dequeue one item from warm and either re-enqueue it to warm (if accessed and warm
        /// has space) or demote it to cold, without checking whether warm is over capacity.
        /// </summary>
        /// <param name="removedReason">The reason reported if the item ends up being removed.</param>
        /// <returns>The destination the item was moved to and the resulting count of that destination queue.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleWarmUnchecked(ItemRemovedReason removedReason)
        {
            // Optimistically decrement before dequeue; restored below if the queue is empty.
            int wc = Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                // Items already removed from the dictionary are simply discarded from the queue.
                if (item.WasRemoved)
                {
                    return (ItemDestination.Remove, 0);
                }

                var where = this.itemPolicy.RouteWarm(item);

                // When the warm queue is full, we allow an overflow of 1 item before redirecting warm items to cold.
                // This only happens when hit rate is high, in which case we can consider all items relatively equal in
                // terms of which was least recently used.
                if (where == ItemDestination.Warm && wc <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    return (ItemDestination.Cold, this.Move(item, ItemDestination.Cold, removedReason));
                }
            }
            else
            {
                // Dequeue raced with another thread; undo the optimistic decrement.
                Interlocked.Increment(ref this.counter.warm);
                return (ItemDestination.Remove, 0);
            }
        }
741

742
        /// <summary>
        /// Cycle one item out of the cold queue, but only when cold exceeds its capacity.
        /// </summary>
        /// <param name="count">The current number of items in the cold queue.</param>
        /// <returns>The destination the cycled item was moved to and the resulting count of that destination queue; (Remove, 0) when cold was within capacity.</returns>
        private (ItemDestination, int) CycleCold(int count)
        {
            // Within capacity: nothing moves.
            return count <= this.capacity.Cold
                ? (ItemDestination.Remove, 0)
                : TryRemoveCold(ItemRemovedReason.Evicted);
        }
751

752
        /// <summary>
        /// Dequeue one item from cold and either promote it to warm (if the policy routes it
        /// there and warm has space) or remove it from the cache.
        /// </summary>
        /// <param name="removedReason">The reason reported if the item is removed.</param>
        /// <returns>The destination the item was moved to and the resulting count of that destination queue.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) TryRemoveCold(ItemRemovedReason removedReason)
        {
            // Optimistically decrement before dequeue; restored below if the queue is empty.
            Interlocked.Decrement(ref this.counter.cold);

            if (this.coldQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteCold(item);

                // Promote back to warm only when warm currently has spare capacity.
                if (where == ItemDestination.Warm && Volatile.Read(ref this.counter.warm) <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    this.Move(item, ItemDestination.Remove, removedReason);
                    return (ItemDestination.Remove, 0);
                }
            }
            else
            {
                // Dequeue raced with another thread; undo the decrement and report the restored count.
                return (ItemDestination.Cold, Interlocked.Increment(ref this.counter.cold));
            }
        }
776

777
        /// <summary>
        /// Demote the item at the tail of the warm queue to cold (or discard it if it was
        /// already removed from the dictionary).
        /// </summary>
        /// <returns>The resulting count of the destination queue, or 0 when the item was discarded or warm was empty.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int LastWarmToCold()
        {
            // Optimistically decrement before dequeue; restored below if the queue is empty.
            Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                // Already-removed items are discarded instead of being moved to cold.
                var destination = item.WasRemoved ? ItemDestination.Remove : ItemDestination.Cold;
                return this.Move(item, destination, ItemRemovedReason.Evicted);
            }
            else
            {
                // Dequeue raced with another thread; undo the optimistic decrement.
                Interlocked.Increment(ref this.counter.warm);
                return 0;
            }
        }
793

794
        /// <summary>
        /// If the cold queue exceeds its capacity, remove one item from the tail of cold.
        /// </summary>
        /// <param name="coldCount">The current number of items in the cold queue.</param>
        /// <param name="removedReason">The reason reported for the removal.</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void ConstrainCold(int coldCount, ItemRemovedReason removedReason)
        {
            // Only decrement the counter after a successful dequeue, since the removal is conditional.
            if (coldCount > this.capacity.Cold && this.coldQueue.TryDequeue(out var item))
            {
                Interlocked.Decrement(ref this.counter.cold);
                this.Move(item, ItemDestination.Remove, removedReason);
            }
        }
803

804
        /// <summary>
        /// Move an item to the given destination: enqueue it to warm or cold (incrementing the
        /// matching counter), or remove it from the cache dictionary.
        /// </summary>
        /// <param name="item">The item to move.</param>
        /// <param name="where">The destination queue, or Remove to evict the item.</param>
        /// <param name="removedReason">The reason reported when the item is removed.</param>
        /// <returns>The new count of the destination queue, or 0 when the item was removed.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int Move(I item, ItemDestination where, ItemRemovedReason removedReason)
        {
            // Reset the access bit so the item must be touched again to survive the next cycle.
            item.WasAccessed = false;

            switch (where)
            {
                case ItemDestination.Warm:
                    this.warmQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.warm);
                case ItemDestination.Cold:
                    this.coldQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.cold);
                case ItemDestination.Remove:

                    // Remove the key only if it still maps to this exact item instance, so a
                    // concurrent update/re-add of the same key is not clobbered.
                    var kvp = new KeyValuePair<K, I>(item.Key, item);

#if NET6_0_OR_GREATER
                    if (this.dictionary.TryRemove(kvp))
#else
                    // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                    if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                    {
                        // Only the thread that wins the removal raises the removal notification.
                        OnRemove(item.Key, item, removedReason);
                    }
                    break;
            }

            return 0;
        }
835

836
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The enumerator is safe to use concurrently with reads and writes, but it is not a
        /// moment-in-time snapshot: modifications made after it is obtained may be observed.
        /// </remarks>
        IEnumerator IEnumerable.GetEnumerator() => ((ConcurrentLruCore<K, V, I, P, T>)this).GetEnumerator();
848

849
#if DEBUG
        /// <summary>
        /// Format the LRU as a string by converting all the keys to strings.
        /// </summary>
        /// <returns>The LRU formatted as a string.</returns>
        internal string FormatLruString()
        {
            // Render each segment's keys in queue order, comma separated.
            string hot = string.Join(",", this.hotQueue.Select(n => n.Key.ToString()));
            string warm = string.Join(",", this.warmQueue.Select(n => n.Key.ToString()));
            string cold = string.Join(",", this.coldQueue.Select(n => n.Key.ToString()));

            return $"Hot [{hot}] Warm [{warm}] Cold [{cold}]";
        }
#endif
869

870
        /// <summary>
        /// Build the <see cref="CachePolicy"/> facade for the LRU based on the compile-time
        /// item policy type <typeparamref name="P"/>.
        /// </summary>
        /// <param name="lru">The cache instance the policy proxies delegate to.</param>
        /// <returns>A CachePolicy exposing the bounded policy, and whichever time policies apply to P.</returns>
        private static CachePolicy CreatePolicy(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            var p = new Proxy(lru);

            // After-access expiry: expose the sliding-expiry time policy slot.
            if (typeof(P) == typeof(AfterAccessPolicy<K, V>))
            {
                return new CachePolicy(new Optional<IBoundedPolicy>(p), Optional<ITimePolicy>.None(), new Optional<ITimePolicy>(p), Optional<IDiscreteTimePolicy>.None());
            }

            // IsAssignableFrom is a jit intrinsic https://github.com/dotnet/runtime/issues/4920
            if (typeof(IDiscreteItemPolicy<K, V>).IsAssignableFrom(typeof(P)))
            {
                return new CachePolicy(new Optional<IBoundedPolicy>(p), Optional<ITimePolicy>.None(), Optional<ITimePolicy>.None(), new Optional<IDiscreteTimePolicy>(new DiscreteExpiryProxy(lru)));
            }

            // Default: expose a TTL policy only when the item policy supports discarding.
            return new CachePolicy(new Optional<IBoundedPolicy>(p), lru.itemPolicy.CanDiscard() ? new Optional<ITimePolicy>(p) : Optional<ITimePolicy>.None());
        }
887

888
        /// <summary>
        /// Create the optional metrics facade; metrics are absent when telemetry is disabled.
        /// </summary>
        /// <param name="lru">The cache instance the metrics proxy delegates to.</param>
        /// <returns>None when T is NoTelemetryPolicy; otherwise a metrics proxy.</returns>
        private static Optional<ICacheMetrics> CreateMetrics(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheMetrics>.None()
                : new Optional<ICacheMetrics>(new Proxy(lru));
        }
897

898
        /// <summary>
        /// Create the optional cache-events facade; events are absent when telemetry is disabled.
        /// </summary>
        /// <param name="lru">The cache instance the events proxy delegates to.</param>
        /// <returns>None when T is NoTelemetryPolicy; otherwise an events proxy.</returns>
        private static Optional<ICacheEvents<K, V>> CreateEvents(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheEvents<K, V>>.None()
                : new Optional<ICacheEvents<K, V>>(new Proxy(lru));
        }
907

908
#if NET9_0_OR_GREATER
909
        ///<inheritdoc/>
        public IAlternateLookup<TAlternateKey, K, V> GetAlternateLookup<TAlternateKey>()
            where TAlternateKey : notnull, allows ref struct
        {
            // Fail fast when the dictionary's comparer does not support lookup by TAlternateKey.
            if (!this.dictionary.IsCompatibleKey<TAlternateKey, K, I>())
            {
                Throw.IncompatibleComparer();
            }

            return new AlternateLookup<TAlternateKey>(this);
        }
920

921
        ///<inheritdoc/>
        public bool TryGetAlternateLookup<TAlternateKey>([MaybeNullWhen(false)] out IAlternateLookup<TAlternateKey, K, V> lookup)
            where TAlternateKey : notnull, allows ref struct
        {
            // Guard: the dictionary's comparer must support lookup by TAlternateKey.
            if (!this.dictionary.IsCompatibleKey<TAlternateKey, K, I>())
            {
                lookup = default;
                return false;
            }

            lookup = new AlternateLookup<TAlternateKey>(this);
            return true;
        }
934

935
        ///<inheritdoc/>
        public IAsyncAlternateLookup<TAlternateKey, K, V> GetAsyncAlternateLookup<TAlternateKey>()
            where TAlternateKey : notnull, allows ref struct
        {
            // Fail fast when the dictionary's comparer does not support lookup by TAlternateKey.
            if (!this.dictionary.IsCompatibleKey<TAlternateKey, K, I>())
            {
                Throw.IncompatibleComparer();
            }

            return new AlternateLookup<TAlternateKey>(this);
        }
946

947
        ///<inheritdoc/>
        public bool TryGetAsyncAlternateLookup<TAlternateKey>([MaybeNullWhen(false)] out IAsyncAlternateLookup<TAlternateKey, K, V> lookup)
            where TAlternateKey : notnull, allows ref struct
        {
            // Guard: the dictionary's comparer must support lookup by TAlternateKey.
            if (!this.dictionary.IsCompatibleKey<TAlternateKey, K, I>())
            {
                lookup = default;
                return false;
            }

            lookup = new AlternateLookup<TAlternateKey>(this);
            return true;
        }
960

961
        /// <summary>
        /// Provides cache operations keyed by an alternate key type, delegating storage to the
        /// underlying ConcurrentDictionary alternate lookup and LRU bookkeeping to the owning cache.
        /// </summary>
        internal readonly struct AlternateLookup<TAlternateKey> : IAlternateLookup<TAlternateKey, K, V>, IAsyncAlternateLookup<TAlternateKey, K, V>
            where TAlternateKey : notnull, allows ref struct
        {
            internal AlternateLookup(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                // Callers are expected to have validated comparer compatibility already.
                Debug.Assert(lru is not null);
                Debug.Assert(lru.dictionary.IsCompatibleKey<TAlternateKey, K, I>());
                this.Lru = lru;
                this.Alternate = lru.dictionary.GetAlternateLookup<TAlternateKey>();
            }

            // The owning cache; used for LRU bookkeeping (telemetry, removal callbacks, add/get).
            internal ConcurrentLruCore<K, V, I, P, T> Lru { get; }

            // The dictionary's alternate-key view; used for key lookups without allocating K.
            internal ConcurrentDictionary<K, I>.AlternateLookup<TAlternateKey> Alternate { get; }

            /// <summary>
            /// Attempt to get a value by alternate key. Records a miss when absent.
            /// </summary>
            public bool TryGet(TAlternateKey key, [MaybeNullWhen(false)] out V value)
            {
                if (this.Alternate.TryGetValue(key, out var item))
                {
                    // GetOrDiscard applies the cache's hit/expiry handling to the found item.
                    return this.Lru.GetOrDiscard(item, out value);
                }

                value = default;
                this.Lru.telemetryPolicy.IncrementMiss();
                return false;
            }

            /// <summary>
            /// Attempt to remove a value by alternate key, returning the actual stored key and value.
            /// </summary>
            public bool TryRemove(TAlternateKey key, [MaybeNullWhen(false)] out K actualKey, [MaybeNullWhen(false)] out V value)
            {
                if (this.Alternate.TryRemove(key, out actualKey, out var item))
                {
                    // Fire the removal notification with reason Removed (explicit removal, not eviction).
                    this.Lru.OnRemove(actualKey, item, ItemRemovedReason.Removed);
                    value = item.Value;
                    return true;
                }

                actualKey = default;
                value = default;
                return false;
            }

            /// <summary>
            /// Attempt to update an existing entry by alternate key; returns false when the key is absent.
            /// </summary>
            public bool TryUpdate(TAlternateKey key, V value)
            {
                if (this.Alternate.TryGetValue(key, out var existing))
                {
                    return this.Lru.TryUpdateValue(existing, value);
                }

                return false;
            }

            /// <summary>
            /// Add a value, or update it when the key already exists. Retries until one of the
            /// two operations succeeds, so a concurrent remove/add cannot cause the call to be lost.
            /// </summary>
            public void AddOrUpdate(TAlternateKey key, V value)
            {
                K actualKey = default!;
                bool hasActualKey = false;

                while (true)
                {
                    if (this.TryUpdate(key, value))
                    {
                        return;
                    }

                    // Materialize the real key at most once across retries (key creation may allocate).
                    if (!hasActualKey)
                    {
                        actualKey = this.Lru.dictionary.GetAlternateComparer<TAlternateKey, K, I>().Create(key);
                        hasActualKey = true;
                    }

                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return;
                    }
                }
            }

            /// <summary>
            /// Get the value for the alternate key, or invoke the factory and add the result.
            /// Retries when a concurrent add wins the race; the factory may run more than once.
            /// </summary>
            public V GetOrAdd(TAlternateKey key, Func<K, V> valueFactory)
            {
                while (true)
                {
                    if (this.TryGet(key, out var value))
                    {
                        return value;
                    }

                    // Materialize the real key so the factory and the add see the stored key type.
                    K actualKey = this.Lru.dictionary.GetAlternateComparer<TAlternateKey, K, I>().Create(key);

                    value = valueFactory(actualKey);
                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return value;
                    }
                }
            }

            /// <summary>
            /// Get the value for the alternate key, or invoke the factory (with the extra argument)
            /// and add the result. Retries when a concurrent add wins the race.
            /// </summary>
            public V GetOrAdd<TArg>(TAlternateKey key, Func<K, TArg, V> valueFactory, TArg factoryArgument)
            {
                while (true)
                {
                    if (this.TryGet(key, out var value))
                    {
                        return value;
                    }

                    K actualKey = this.Lru.dictionary.GetAlternateComparer<TAlternateKey, K, I>().Create(key);

                    value = valueFactory(actualKey, factoryArgument);
                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return value;
                    }
                }
            }

            /// <summary>
            /// Asynchronously get the value for the alternate key, or invoke the async factory
            /// and add the result. Completes synchronously on a cache hit.
            /// </summary>
            public ValueTask<V> GetOrAddAsync(TAlternateKey key, Func<K, Task<V>> valueFactory)
            {
                if (this.TryGet(key, out var value))
                {
                    return new ValueTask<V>(value);
                }

                // The factory is started here (not in the async helper) because TAlternateKey
                // may be a ref struct and cannot cross an await boundary.
                K actualKey = this.Lru.dictionary.GetAlternateComparer<TAlternateKey, K, I>().Create(key);
                Task<V> task = valueFactory(actualKey);

                return GetOrAddAsyncSlow(actualKey, task);
            }

            /// <summary>
            /// Asynchronously get the value for the alternate key, or invoke the async factory
            /// (with the extra argument) and add the result. Completes synchronously on a cache hit.
            /// </summary>
            public ValueTask<V> GetOrAddAsync<TArg>(TAlternateKey key, Func<K, TArg, Task<V>> valueFactory, TArg factoryArgument)
            {
                if (this.TryGet(key, out var value))
                {
                    return new ValueTask<V>(value);
                }

                K actualKey = this.Lru.dictionary.GetAlternateComparer<TAlternateKey, K, I>().Create(key);
                Task<V> task = valueFactory(actualKey, factoryArgument);

                return GetOrAddAsyncSlow(actualKey, task);
            }

            // Since TAlternateKey can be a ref struct, we can't use async/await in the public GetOrAddAsync methods,
            // so we delegate to this private async method after the value factory is invoked.
            private async ValueTask<V> GetOrAddAsyncSlow(K actualKey, Task<V> task)
            {
                V value = await task.ConfigureAwait(false);

                while (true)
                {
                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return value;
                    }

                    // Another thread added a value for this key first, retrieve it.
                    if (this.Lru.TryGet(actualKey, out V? existing))
                    {
                        return existing;
                    }
                }
            }
        }
1122
#endif
1123

1124
        // To get JIT optimizations, policies must be structs.
1125
        // If the structs are returned directly via properties, they will be copied. Since  
1126
        // telemetryPolicy is a mutable struct, copy is bad. One workaround is to store the 
1127
        // state within the struct in an object. Since the struct points to the same object
1128
        // it becomes immutable. However, this object is then somewhere else on the 
1129
        // heap, which slows down the policies with hit counter logic in benchmarks. Likely
1130
        // this approach keeps the structs data members in the same CPU cache line as the LRU.
1131
        // backcompat: remove conditional compile
1132
#if NETCOREAPP3_0_OR_GREATER
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Upd = {Updated}, Evict = {Evicted}")]
#else
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Evict = {Evicted}")]
#endif
        /// <summary>
        /// Adapter that exposes the cache's metrics, events, bounded policy and time policy
        /// through a single object, delegating every member to the owning LRU.
        /// </summary>
        private class Proxy : ICacheMetrics, ICacheEvents<K, V>, IBoundedPolicy, ITimePolicy
        {
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public Proxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            // Metrics are read straight from the telemetry policy.
            public double HitRatio => lru.telemetryPolicy.HitRatio;

            public long Total => lru.telemetryPolicy.Total;

            public long Hits => lru.telemetryPolicy.Hits;

            public long Misses => lru.telemetryPolicy.Misses;

            public long Evicted => lru.telemetryPolicy.Evicted;

            // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public long Updated => lru.telemetryPolicy.Updated;
#endif
            public int Capacity => lru.Capacity;

            public TimeSpan TimeToLive => lru.itemPolicy.TimeToLive;

            // Event subscriptions forward to the telemetry policy's event.
            public event EventHandler<ItemRemovedEventArgs<K, V>> ItemRemoved
            {
                add { this.lru.telemetryPolicy.ItemRemoved += value; }
                remove { this.lru.telemetryPolicy.ItemRemoved -= value; }
            }

            // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public event EventHandler<ItemUpdatedEventArgs<K, V>> ItemUpdated
            {
                add { this.lru.telemetryPolicy.ItemUpdated += value; }
                remove { this.lru.telemetryPolicy.ItemUpdated -= value; }
            }
#endif
            public void Trim(int itemCount)
            {
                lru.Trim(itemCount);
            }

            public void TrimExpired()
            {
                lru.TrimExpired();
            }
        }
1188

1189
        /// <summary>
        /// Adapter that exposes per-item discrete expiry information for caches configured with
        /// a discrete item policy, delegating to the owning LRU.
        /// </summary>
        private class DiscreteExpiryProxy : IDiscreteTimePolicy
        {
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public DiscreteExpiryProxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            public void TrimExpired()
            {
                lru.TrimExpired();
            }

            /// <summary>
            /// Attempt to get the remaining time until the entry for the given key expires.
            /// </summary>
            /// <returns>True when the key matches type K and an entry exists; otherwise false.</returns>
            public bool TryGetTimeToExpire<TKey>(TKey key, out TimeSpan timeToLive)
            {
                if (key is K k && lru.dictionary.TryGetValue(k, out var item))
                {
                    // NOTE(review): the null-forgiving '!' below assumes every stored item is a
                    // LongTickCountLruItem under a discrete expiry policy; a failed cast would
                    // surface as a NullReferenceException here rather than an InvalidCastException
                    // — confirm that invariant is guaranteed by construction.
                    LongTickCountLruItem<K, V>? tickItem = item as LongTickCountLruItem<K, V>;
                    timeToLive = (new Duration(tickItem!.TickCount) - Duration.SinceEpoch()).ToTimeSpan();
                    return true;
                }

                timeToLive = default;
                return false;
            }
        }
1216
    }
1217
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc