• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

bitfaster / BitFaster.Caching / 25272120698

03 May 2026 06:37AM UTC coverage: 99.156% (-0.05%) from 99.21%
25272120698

Pull #801

github

web-flow
Merge 863cb1091 into 43a882278
Pull Request #801: Update gate workflow actions for Node.js 24

1386 of 1418 branches covered (97.74%)

Branch coverage included in aggregate %.

5895 of 5925 relevant lines covered (99.49%)

87431941.04 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

99.4
/BitFaster.Caching/Lru/ConcurrentLruCore.cs
1
using System;
2
using System.Collections;
3
using System.Collections.Concurrent;
4
using System.Collections.Generic;
5
using System.Diagnostics;
6
using System.Diagnostics.CodeAnalysis;
7
using System.Linq;
8
using System.Runtime.CompilerServices;
9
using System.Threading;
10
using System.Threading.Tasks;
11

12
namespace BitFaster.Caching.Lru
13
{
14
    /// <summary>
15
    /// A pseudo LRU based on the TU-Q eviction policy. The LRU list is composed of 3 segments: hot, warm and cold. 
16
    /// Cost of maintaining segments is amortized across requests. Items are only cycled when capacity is exceeded. 
17
    /// Pure read does not cycle items if all segments are within capacity constraints. There are no global locks. 
18
    /// On cache miss, a new item is added. Tail items in each segment are dequeued, examined, and are either enqueued 
19
    /// or discarded.
20
    /// The TU-Q scheme of hot, warm and cold is similar to that used in MemCached (https://memcached.org/blog/modern-lru/)
21
    /// and OpenBSD (https://flak.tedunangst.com/post/2Q-buffer-cache-algorithm), but does not use a background thread
22
    /// to maintain the internal queues.
23
    /// </summary>
24
    /// <remarks>
25
    /// Each segment has a capacity. When segment capacity is exceeded, items are moved as follows:
26
    /// <list type="number">
27
    ///   <item><description>New items are added to hot, WasAccessed = false.</description></item>
28
    ///   <item><description>When items are accessed, update WasAccessed = true.</description></item>
29
    ///   <item><description>When items are moved WasAccessed is set to false.</description></item>
30
    ///   <item><description>When hot is full, hot tail is moved to either Warm or Cold depending on WasAccessed.</description></item>
31
    ///   <item><description>When warm is full, warm tail is moved to warm head or cold depending on WasAccessed.</description></item>
32
    ///   <item><description>When cold is full, cold tail is moved to warm head or removed from dictionary depending on WasAccessed.</description></item>
33
    ///</list>
34
    /// </remarks>
35
    public class ConcurrentLruCore<K, V, I, P, T> : ICacheExt<K, V>, IAsyncCacheExt<K, V>, IEnumerable<KeyValuePair<K, V>>
36
        where K : notnull
37
        where I : LruItem<K, V>
38
        where P : struct, IItemPolicy<K, V, I>
39
        where T : struct, ITelemetryPolicy<K, V>
40
    {
41
        private readonly ConcurrentDictionary<K, I> dictionary;
42

43
        private readonly ConcurrentQueue<I> hotQueue;
44
        private readonly ConcurrentQueue<I> warmQueue;
45
        private readonly ConcurrentQueue<I> coldQueue;
46

47
        // maintain count outside ConcurrentQueue, since ConcurrentQueue.Count holds a global lock
48
        private PaddedQueueCount counter;
49

50
        private readonly ICapacityPartition capacity;
51

52
        private readonly P itemPolicy;
53
        private bool isWarm = false;
4,200✔
54

55
        /// <summary>
56
        /// The telemetry policy.
57
        /// </summary>
58
        /// <remarks>
59
        /// Since T is a struct, making it readonly will force the runtime to make defensive copies
60
        /// if mutate methods are called. Therefore, field must be mutable to maintain count.
61
        /// </remarks>
62
        protected T telemetryPolicy;
63

64
        /// <summary>
        /// Initializes a new instance of the ConcurrentLruCore class with the specified concurrencyLevel, capacity, equality comparer, item policy and telemetry policy.
        /// </summary>
        /// <param name="concurrencyLevel">The concurrency level.</param>
        /// <param name="capacity">The hot/warm/cold capacity partition.</param>
        /// <param name="comparer">The equality comparer used for keys.</param>
        /// <param name="itemPolicy">The item policy.</param>
        /// <param name="telemetryPolicy">The telemetry policy.</param>
        /// <exception cref="ArgumentNullException"></exception>
        public ConcurrentLruCore(
            int concurrencyLevel,
            ICapacityPartition capacity,
            IEqualityComparer<K> comparer,
            P itemPolicy,
            T telemetryPolicy)
        {
            if (capacity == null)
                Throw.ArgNull(ExceptionArgument.capacity);

            if (comparer == null)
                Throw.ArgNull(ExceptionArgument.comparer);

            capacity.Validate();

            this.capacity = capacity;
            this.itemPolicy = itemPolicy;
            this.telemetryPolicy = telemetryPolicy;

            this.hotQueue = new ConcurrentQueue<I>();
            this.warmQueue = new ConcurrentQueue<I>();
            this.coldQueue = new ConcurrentQueue<I>();

            // presize the dictionary based on total capacity to reduce resize churn while the cache fills
            int estimatedBuckets = ConcurrentDictionarySize.Estimate(this.Capacity);
            this.dictionary = new ConcurrentDictionary<K, I>(concurrencyLevel, estimatedBuckets, comparer);

            this.telemetryPolicy.SetEventSource(this);
        }
100

101
        // No lock count: https://arbel.net/2013/02/03/best-practices-for-using-concurrentdictionary/
        ///<inheritdoc/>
        // Counts only items the policy still considers live. Uses Count(predicate) directly
        // instead of Where(predicate).Count() to avoid the intermediate iterator (CA1827).
        public int Count => this.dictionary.Count(i => !itemPolicy.ShouldDiscard(i.Value));
104

105
        ///<inheritdoc/>
106
        public int Capacity => this.capacity.Hot + this.capacity.Warm + this.capacity.Cold;
2,800,006,338✔
107

108
        ///<inheritdoc/>
109
        public Optional<ICacheMetrics> Metrics => CreateMetrics(this);
231✔
110

111
        ///<inheritdoc/>
112
        public Optional<ICacheEvents<K, V>> Events => CreateEvents(this);
2,624✔
113

114
        ///<inheritdoc/>
115
        public CachePolicy Policy => CreatePolicy(this);
485✔
116

117
        /// <summary>
118
        /// Gets the number of hot items.
119
        /// </summary>
120
        public int HotCount => Volatile.Read(ref this.counter.hot);
2,220,861✔
121

122
        /// <summary>
123
        /// Gets the number of warm items.
124
        /// </summary>
125
        public int WarmCount => Volatile.Read(ref this.counter.warm);
2,220,861✔
126

127
        /// <summary>
128
        /// Gets the number of cold items.
129
        /// </summary>
130
        public int ColdCount => Volatile.Read(ref this.counter.cold);
2,220,868✔
131

132
        /// <summary>
133
        /// Gets a collection containing the keys in the cache.
134
        /// </summary>
135
        public ICollection<K> Keys => this.dictionary.Keys;
1,550✔
136

137
#if NET9_0_OR_GREATER
138
        /// <inheritdoc/>
139
        public IEqualityComparer<K> Comparer => this.dictionary.Comparer;
276✔
140
#endif
141

142
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The enumerator returned from the cache is safe to use concurrently with
        /// reads and writes, however it does not represent a moment-in-time snapshot.  
        /// The contents exposed through the enumerator may contain modifications
        /// made after <see cref="GetEnumerator"/> was called.
        /// </remarks>
        public IEnumerator<KeyValuePair<K, V>> GetEnumerator()
        {
            foreach (var pair in this.dictionary)
            {
                // skip items that the policy marks as discardable
                if (itemPolicy.ShouldDiscard(pair.Value))
                {
                    continue;
                }

                yield return new KeyValuePair<K, V>(pair.Key, pair.Value.Value);
            }
        }
160

161
        ///<inheritdoc/>
        public bool TryGet(K key, [MaybeNullWhen(false)] out V value)
        {
            // a dictionary miss is final; a hit must still be validated by GetOrDiscard
            if (!dictionary.TryGetValue(key, out var item))
            {
                value = default;
                this.telemetryPolicy.IncrementMiss();
                return false;
            }

            return GetOrDiscard(item, out value);
        }
173

174
        // AggressiveInlining forces the JIT to inline policy.ShouldDiscard(). For LRU policy 
        // the first branch is completely eliminated due to JIT time constant propagation.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool GetOrDiscard(I item, [MaybeNullWhen(false)] out V value)
        {
            if (this.itemPolicy.ShouldDiscard(item))
            {
                // expired per policy: evict the item and report a miss
                value = default;
                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Evicted);
                this.telemetryPolicy.IncrementMiss();
                return false;
            }

            // live item: mark accessed and report a hit
            value = item.Value;
            this.itemPolicy.Touch(item);
            this.telemetryPolicy.IncrementHit();
            return true;
        }
193

194
        // Attempt to insert a brand-new item; on success the item enters the hot queue.
        private bool TryAdd(K key, V value)
        {
            var candidate = this.itemPolicy.CreateItem(key, value);

            if (!this.dictionary.TryAdd(key, candidate))
            {
                // lost the race to another writer: dispose the value we created and report failure
                Disposer<V>.Dispose(candidate.Value);
                return false;
            }

            // new items always start in hot; cycle to restore capacity constraints
            this.hotQueue.Enqueue(candidate);
            Cycle(Interlocked.Increment(ref counter.hot));
            return true;
        }
208

209
        ///<inheritdoc/>
        public V GetOrAdd(K key, Func<K, V> valueFactory)
        {
            while (true)
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = valueFactory(key);

                if (TryAdd(key, created))
                {
                    return created;
                }
            }
        }
228

229
        /// <summary>
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
        /// existing value if the key already exists.
        /// </summary>
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
        /// <param name="key">The key of the element to add.</param>
        /// <param name="valueFactory">The factory function used to generate a value for the key.</param>
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
        /// <returns>The value for the key. This will be either the existing value for the key if the key is already 
        /// in the cache, or the new value if the key was not in the cache.</returns>
        public V GetOrAdd<TArg>(K key, Func<K, TArg, V> valueFactory, TArg factoryArgument)
#if NET9_0_OR_GREATER
            where TArg : allows ref struct
#endif
        {
            while (true)
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = valueFactory(key, factoryArgument);

                if (TryAdd(key, created))
                {
                    return created;
                }
            }
        }
260

261
        ///<inheritdoc/>
        public async ValueTask<V> GetOrAddAsync(K key, Func<K, Task<V>> valueFactory)
        {
            while (true)
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                // This is identical logic in ConcurrentDictionary.GetOrAdd method.
                var created = await valueFactory(key).ConfigureAwait(false);

                if (TryAdd(key, created))
                {
                    return created;
                }
            }
        }
281

282
        /// <summary>
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
        /// existing value if the key already exists.
        /// </summary>
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
        /// <param name="key">The key of the element to add.</param>
        /// <param name="valueFactory">The factory function used to asynchronously generate a value for the key.</param>
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
        /// <returns>A task that represents the asynchronous GetOrAdd operation.</returns>
        public async ValueTask<V> GetOrAddAsync<TArg>(K key, Func<K, TArg, Task<V>> valueFactory, TArg factoryArgument)
        {
            while (true)
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = await valueFactory(key, factoryArgument).ConfigureAwait(false);

                if (TryAdd(key, created))
                {
                    return created;
                }
            }
        }
309

310
        /// <summary>
        /// Attempts to remove the specified key value pair.
        /// </summary>
        /// <param name="item">The item to remove.</param>
        /// <returns>true if the item was removed successfully; otherwise, false.</returns>
        /// <remarks>
        /// Removal is conditional: the entry is removed only if the cached value still equals
        /// <paramref name="item"/>.Value at the moment of removal.
        /// </remarks>
        public bool TryRemove(KeyValuePair<K, V> item)
        {
            if (this.dictionary.TryGetValue(item.Key, out var existing))
            {
                // lock the item to serialize against concurrent value update/dispose
                lock (existing)
                {
                    if (EqualityComparer<V>.Default.Equals(existing.Value, item.Value))
                    {
                        var kvp = new KeyValuePair<K, I>(item.Key, existing);
#if NET6_0_OR_GREATER
                    if (this.dictionary.TryRemove(kvp))
#else
                        // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                        if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                        {
                            OnRemove(item.Key, kvp.Value, ItemRemovedReason.Removed);
                            return true;
                        }
                    }
                }

                // it existed, but we couldn't remove - this means value was replaced after the TryGetValue (a race)
            }

            return false;
        }
342

343
        /// <summary>
        /// Attempts to remove and return the value that has the specified key.
        /// </summary>
        /// <param name="key">The key of the element to remove.</param>
        /// <param name="value">When this method returns, contains the object removed, or the default value of the value type if key does not exist.</param>
        /// <returns>true if the object was removed successfully; otherwise, false.</returns>
        public bool TryRemove(K key, [MaybeNullWhen(false)] out V value)
        {
            if (!this.dictionary.TryRemove(key, out var item))
            {
                value = default;
                return false;
            }

            OnRemove(key, item, ItemRemovedReason.Removed);
            value = item.Value;
            return true;
        }
361

362
        ///<inheritdoc/>
        public bool TryRemove(K key) => TryRemove(key, out _);
367

368
        /// <summary>
        /// Common cleanup after an item has left the dictionary: flag the item so the queues
        /// will drop it during cycling, raise the removal event, then dispose the value.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void OnRemove(K key, I item, ItemRemovedReason reason)
        {
            // Mark as not accessed, it will later be cycled out of the queues because it can never be fetched 
            // from the dictionary. Note: Hot/Warm/Cold count will reflect the removed item until it is cycled 
            // from the queue.
            item.WasAccessed = false;
            item.WasRemoved = true;

            // raise the removal event before the value is disposed
            this.telemetryPolicy.OnItemRemoved(key, item.Value, reason);

            // serialize dispose (common case dispose not thread safe)
            lock (item)
            {
                Disposer<V>.Dispose(item.Value);
            }
        }
385

386
        ///<inheritdoc/>
        ///<remarks>Note: Calling this method does not affect LRU order.</remarks>
        public bool TryUpdate(K key, V value)
        {
            // only update if the key is currently present; TryUpdateValue handles the race with removal
            return this.dictionary.TryGetValue(key, out var existing)
                && this.TryUpdateValue(existing, value);
        }
397

398
        /// <summary>
        /// Replaces the value of an existing item under the item lock, disposing the old value.
        /// Fails if the item has already been removed (WasRemoved set by OnRemove).
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool TryUpdateValue(I existing, V value)
        {
            // lock serializes against OnRemove's dispose and TryRemove's value comparison
            lock (existing)
            {
                if (!existing.WasRemoved)
                {
                    V oldValue = existing.Value;

                    existing.Value = value;

                    this.itemPolicy.Update(existing);
                    // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
                    this.telemetryPolicy.OnItemUpdated(existing.Key, oldValue, existing.Value);
#endif
                    // dispose the replaced value while still holding the item lock
                    Disposer<V>.Dispose(oldValue);

                    return true;
                }
            }

            return false;
        }
422

423
        ///<inheritdoc/>
        ///<remarks>Note: Updates to existing items do not affect LRU order. Added items are at the top of the LRU.</remarks>
        public void AddOrUpdate(K key, V value)
        {
            // retry loop: update and add can each lose a race with a concurrent remove/add
            while (true)
            {
                // first, try to update
                if (this.TryUpdate(key, value))
                {
                    return;
                }

                // then try add
                var newItem = this.itemPolicy.CreateItem(key, value);

                if (this.dictionary.TryAdd(key, newItem))
                {
                    // new items enter the hot queue; cycle to restore capacity constraints
                    this.hotQueue.Enqueue(newItem);
                    Cycle(Interlocked.Increment(ref counter.hot));
                    return;
                }

                // if both update and add failed there was a race, try again
            }
        }
448

449
        ///<inheritdoc/>
        public void Clear()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // Use the queue counters rather than the O(n) Count property. This also sweeps
                // items that were removed from the dictionary but are still present in the queues.
                int totalQueued = this.HotCount + this.WarmCount + this.ColdCount;
                this.TrimLiveItems(itemsRemoved: 0, totalQueued, ItemRemovedReason.Cleared);
            }
        }
461

462
        /// <summary>
        /// Trim the specified number of items from the cache. Removes all discardable items per IItemPolicy.ShouldDiscard(), then 
        /// itemCount-discarded items in LRU order, if any.
        /// </summary>
        /// <param name="itemCount">The number of items to remove.</param>
        /// <returns>The number of items removed from the cache.</returns>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is less than 1.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is greater than capacity.</exception>
        /// <remarks>
        /// Note: Trim affects LRU order. Calling Trim resets the internal accessed status of items.
        /// </remarks>
        public void Trim(int itemCount)
        {
            int totalCapacity = this.Capacity;

            if (itemCount < 1 || itemCount > totalCapacity)
                Throw.ArgOutOfRange(nameof(itemCount), "itemCount must be greater than or equal to one, and less than the capacity of the cache.");

            // clamp itemCount to number of items actually in the cache
            itemCount = Math.Min(itemCount, this.HotCount + this.WarmCount + this.ColdCount);

            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // first scan each queue for discardable items and remove them immediately. Note this can remove > itemCount items.
                int alreadyRemoved = TrimAllDiscardedItems();

                TrimLiveItems(alreadyRemoved, itemCount, ItemRemovedReason.Trimmed);
            }
        }
492

493
        // Sweep expired items from all queues, but only when the policy supports discarding.
        private void TrimExpired()
        {
            if (!this.itemPolicy.CanDiscard())
            {
                return;
            }

            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                this.TrimAllDiscardedItems();
            }
        }
503

504
        /// <summary>
        /// Trim discarded items from all queues.
        /// </summary>
        /// <returns>The number of items removed.</returns>
        // backcompat: make internal
        protected int TrimAllDiscardedItems()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // Dequeue up to the current queue count; discardable items are removed,
                // already-removed items are simply dropped, live items are re-enqueued.
                int RemoveDiscardableItems(ConcurrentQueue<I> q, ref int queueCounter)
                {
                    int itemsRemoved = 0;
                    // snapshot the counter so concurrent re-enqueues don't extend the loop
                    int localCount = queueCounter;

                    for (int i = 0; i < localCount; i++)
                    {
                        if (q.TryDequeue(out var item))
                        {
                            if (this.itemPolicy.ShouldDiscard(item))
                            {
                                Interlocked.Decrement(ref queueCounter);
                                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Trimmed);
                                itemsRemoved++;
                            }
                            else if (item.WasRemoved)
                            {
                                // already removed from the dictionary; drop from the queue without counting
                                Interlocked.Decrement(ref queueCounter);
                            }
                            else
                            {
                                // still live: put it back at the head side of the queue
                                q.Enqueue(item);
                            }
                        }
                    }

                    return itemsRemoved;
                }

                int coldRem = RemoveDiscardableItems(coldQueue, ref this.counter.cold);
                int warmRem = RemoveDiscardableItems(warmQueue, ref this.counter.warm);
                int hotRem = RemoveDiscardableItems(hotQueue, ref this.counter.hot);

                // removing from warm leaves spare warm capacity, so leave warmup mode
                if (warmRem > 0)
                {
                    Volatile.Write(ref this.isWarm, false);
                }

                return coldRem + warmRem + hotRem;
            }
        }
555

556
        /// <summary>
        /// Removes live items in LRU order (cold first, then warm/hot via cycling) until
        /// itemCount items have been removed or the attempt limit is reached.
        /// </summary>
        private void TrimLiveItems(int itemsRemoved, int itemCount, ItemRemovedReason reason)
        {
            // When items are touched, they are moved to warm by cycling. Therefore, to guarantee 
            // that we can remove itemCount items, we must cycle (2 * capacity.Warm) + capacity.Hot times.
            // If clear is called during trimming, it would be possible to get stuck in an infinite
            // loop here. The warm + hot limit also guards against this case.
            int trimWarmAttempts = 0;
            int maxWarmHotAttempts = (this.capacity.Warm * 2) + this.capacity.Hot;

            while (itemsRemoved < itemCount && trimWarmAttempts < maxWarmHotAttempts)
            {
                if (Volatile.Read(ref this.counter.cold) > 0)
                {
                    // a cold removal succeeded; reset the attempt budget
                    if (TryRemoveCold(reason) == (ItemDestination.Remove, 0))
                    {
                        itemsRemoved++;
                        trimWarmAttempts = 0;
                    }
                    else
                    {
                        TrimWarmOrHot(reason);
                    }
                }
                else
                {
                    // cold is empty: push warm/hot items toward cold, counting against the budget
                    TrimWarmOrHot(reason);
                    trimWarmAttempts++;
                }
            }

            // if warm dropped below capacity, fall back to warmup mode so new items can refill it
            if (Volatile.Read(ref this.counter.warm) < this.capacity.Warm)
            {
                Volatile.Write(ref this.isWarm, false);
            }
        }
591

592
        // Cycle one item out of warm if warm is non-empty, otherwise out of hot.
        private void TrimWarmOrHot(ItemRemovedReason reason)
        {
            if (Volatile.Read(ref this.counter.warm) > 0)
            {
                CycleWarmUnchecked(reason);
                return;
            }

            if (Volatile.Read(ref this.counter.hot) > 0)
            {
                CycleHotUnchecked(reason);
            }
        }
603

604
        /// <summary>
        /// Restores queue capacity constraints after an item was added to hot. Once the cache
        /// is warm, cycles hot -> warm/cold up to 3 times; during warmup, delegates to
        /// CycleDuringWarmup instead.
        /// </summary>
        private void Cycle(int hotCount)
        {
            if (isWarm)
            {
                (var dest, var count) = CycleHot(hotCount);

                // follow the displaced item through at most 3 queue moves, stopping once something is removed
                int cycles = 0;
                while (cycles++ < 3 && dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm)
                    {
                        (dest, count) = CycleWarm(count);
                    }
                    else if (dest == ItemDestination.Cold)
                    {
                        (dest, count) = CycleCold(count);
                    }
                }

                // If nothing was removed yet, constrain the size of warm and cold by discarding the coldest item.
                if (dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm && count > this.capacity.Warm)
                    {
                        count = LastWarmToCold();
                    }

                    ConstrainCold(count, ItemRemovedReason.Evicted);
                }
            }
            else
            {
                // fill up the warm queue with new items until warm is full.
                // else during warmup the cache will only use the hot + cold queues until any item is requested twice.
                CycleDuringWarmup(hotCount);
            }
        }
641

642
        /// <summary>
        /// Warmup-mode cycling: once hot overflows, move the hot tail directly into warm
        /// until warm is full, then switch isWarm on and overflow into cold.
        /// </summary>
        [MethodImpl(MethodImplOptions.NoInlining)]
        private void CycleDuringWarmup(int hotCount)
        {
            // do nothing until hot is full
            if (hotCount > this.capacity.Hot)
            {
                Interlocked.Decrement(ref this.counter.hot);

                if (this.hotQueue.TryDequeue(out var item))
                {
                    // special case: removed during warmup
                    if (item.WasRemoved)
                    {
                        return;
                    }

                    int count = this.Move(item, ItemDestination.Warm, ItemRemovedReason.Evicted);

                    // if warm is now full, overflow to cold and mark as warm
                    if (count > this.capacity.Warm)
                    {
                        Volatile.Write(ref this.isWarm, true);
                        count = LastWarmToCold();
                        ConstrainCold(count, ItemRemovedReason.Evicted);
                    }
                }
                else
                {
                    // dequeue raced with another consumer; undo the counter decrement
                    Interlocked.Increment(ref this.counter.hot);
                }
            }
        }
674

675
        // Cycle a single item out of the hot queue, but only when hot has exceeded its
        // configured capacity; otherwise report that nothing was moved.
        private (ItemDestination, int) CycleHot(int hotCount)
        {
            return hotCount > this.capacity.Hot
                ? CycleHotUnchecked(ItemRemovedReason.Evicted)
                : (ItemDestination.Remove, 0);
        }
684

685
        // Dequeue one item from hot and move it to the destination chosen by the item policy.
        // The caller has already verified that hot exceeds its capacity.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleHotUnchecked(ItemRemovedReason removedReason)
        {
            // decrement optimistically before dequeue; restored below if the dequeue loses a race
            Interlocked.Decrement(ref this.counter.hot);

            if (this.hotQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteHot(item);
                // return the chosen destination and the updated count of that queue (0 when removed)
                return (where, this.Move(item, where, removedReason));
            }
            else
            {
                // another thread emptied the hot queue first; undo the optimistic decrement
                Interlocked.Increment(ref this.counter.hot);
                return (ItemDestination.Remove, 0);
            }
        }
701

702
        // Cycle a single item out of the warm queue, but only when warm has exceeded its
        // configured capacity; otherwise report that nothing was moved.
        private (ItemDestination, int) CycleWarm(int count)
        {
            return count > this.capacity.Warm
                ? CycleWarmUnchecked(ItemRemovedReason.Evicted)
                : (ItemDestination.Remove, 0);
        }
711

712
        // Dequeue one item from warm and either re-enqueue it into warm (policy routed it to warm
        // and warm is within capacity) or demote it to cold. The caller has already verified warm
        // exceeds its capacity.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleWarmUnchecked(ItemRemovedReason removedReason)
        {
            // wc is the warm count after the optimistic decrement; used for the overflow check below
            int wc = Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                // items flagged WasRemoved are simply dropped from the queue
                if (item.WasRemoved)
                {
                    return (ItemDestination.Remove, 0);
                }

                var where = this.itemPolicy.RouteWarm(item);

                // When the warm queue is full, we allow an overflow of 1 item before redirecting warm items to cold.
                // This only happens when hit rate is high, in which case we can consider all items relatively equal in
                // terms of which was least recently used.
                if (where == ItemDestination.Warm && wc <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    return (ItemDestination.Cold, this.Move(item, ItemDestination.Cold, removedReason));
                }
            }
            else
            {
                // another thread emptied the warm queue first; undo the optimistic decrement
                Interlocked.Increment(ref this.counter.warm);
                return (ItemDestination.Remove, 0);
            }
        }
744

745
        // Cycle a single item out of the cold queue, but only when cold has exceeded its
        // configured capacity; otherwise report that nothing was moved.
        private (ItemDestination, int) CycleCold(int count)
        {
            return count > this.capacity.Cold
                ? TryRemoveCold(ItemRemovedReason.Evicted)
                : (ItemDestination.Remove, 0);
        }
754

755
        // Dequeue one item from cold and either promote it to warm (when the policy routes it
        // there and warm is within capacity) or remove it from the cache entirely.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) TryRemoveCold(ItemRemovedReason removedReason)
        {
            // decrement optimistically before dequeue; restored below if the dequeue loses a race
            Interlocked.Decrement(ref this.counter.cold);

            if (this.coldQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteCold(item);

                // only promote to warm when warm has not exceeded capacity; otherwise evict
                if (where == ItemDestination.Warm && Volatile.Read(ref this.counter.warm) <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    this.Move(item, ItemDestination.Remove, removedReason);
                    return (ItemDestination.Remove, 0);
                }
            }
            else
            {
                // another thread emptied the cold queue first; undo the decrement and
                // report the restored cold count
                return (ItemDestination.Cold, Interlocked.Increment(ref this.counter.cold));
            }
        }
779

780
        // Demote the next item from the warm queue into cold to bring warm back within capacity.
        // Returns the updated destination queue count, or 0 when nothing was enqueued.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int LastWarmToCold()
        {
            // decrement optimistically before dequeue; restored below if the dequeue loses a race
            Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                // items flagged WasRemoved are routed to Remove instead of being demoted to cold
                var destination = item.WasRemoved ? ItemDestination.Remove : ItemDestination.Cold;
                return this.Move(item, destination, ItemRemovedReason.Evicted);
            }
            else
            {
                // another thread emptied the warm queue first; undo the optimistic decrement
                Interlocked.Increment(ref this.counter.warm);
                return 0;
            }
        }
796

797
        // Discard the coldest item when the cold queue has exceeded its configured capacity.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void ConstrainCold(int coldCount, ItemRemovedReason removedReason)
        {
            // nothing to do while cold is within capacity
            if (coldCount <= this.capacity.Cold)
            {
                return;
            }

            if (this.coldQueue.TryDequeue(out var victim))
            {
                Interlocked.Decrement(ref this.counter.cold);
                this.Move(victim, ItemDestination.Remove, removedReason);
            }
        }
806

807
        // Enqueue the item into the destination queue and return that queue's updated count,
        // or remove the item from the dictionary (returning 0) when the destination is Remove.
        // Clears WasAccessed so the item must be touched again before its next promotion.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int Move(I item, ItemDestination where, ItemRemovedReason removedReason)
        {
            item.WasAccessed = false;

            switch (where)
            {
                case ItemDestination.Warm:
                    this.warmQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.warm);
                case ItemDestination.Cold:
                    this.coldQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.cold);
                case ItemDestination.Remove:

                    // atomic conditional removal: only removes when the key still maps to this
                    // exact item instance, so a concurrent replacement for the same key survives
                    var kvp = new KeyValuePair<K, I>(item.Key, item);

#if NET6_0_OR_GREATER
                    if (this.dictionary.TryRemove(kvp))
#else
                    // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                    if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                    {
                        // only notify removal when this thread actually performed the removal
                        OnRemove(item.Key, item, removedReason);
                    }
                    break;
            }

            return 0;
        }
838

839
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The enumerator returned from the cache is safe to use concurrently with
        /// reads and writes, however it does not represent a moment-in-time snapshot.  
        /// The contents exposed through the enumerator may contain modifications
        /// made after <see cref="GetEnumerator"/> was called.
        /// </remarks>
        IEnumerator IEnumerable.GetEnumerator() => ((ConcurrentLruCore<K, V, I, P, T>)this).GetEnumerator();
851

852
#if DEBUG
853
        /// <summary>
        /// Format the LRU as a string by converting all the keys to strings.
        /// </summary>
        /// <returns>The LRU formatted as a string.</returns>
        internal string FormatLruString()
        {
            string hot = string.Join(",", this.hotQueue.Select(n => n.Key.ToString()));
            string warm = string.Join(",", this.warmQueue.Select(n => n.Key.ToString()));
            string cold = string.Join(",", this.coldQueue.Select(n => n.Key.ToString()));

            return $"Hot [{hot}] Warm [{warm}] Cold [{cold}]";
        }
871
#endif
872

873
        // Build the CachePolicy describing this cache's capabilities, based on the
        // compile-time item policy type P.
        private static CachePolicy CreatePolicy(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            var proxy = new Proxy(lru);
            var bounded = new Optional<IBoundedPolicy>(proxy);

            if (typeof(P) == typeof(AfterAccessPolicy<K, V>))
            {
                return new CachePolicy(bounded, Optional<ITimePolicy>.None(), new Optional<ITimePolicy>(proxy), Optional<IDiscreteTimePolicy>.None());
            }

            // IsAssignableFrom is a jit intrinsic https://github.com/dotnet/runtime/issues/4920
            if (typeof(IDiscreteItemPolicy<K, V>).IsAssignableFrom(typeof(P)))
            {
                return new CachePolicy(bounded, Optional<ITimePolicy>.None(), Optional<ITimePolicy>.None(), new Optional<IDiscreteTimePolicy>(new DiscreteExpiryProxy(lru)));
            }

            var afterWrite = lru.itemPolicy.CanDiscard() ? new Optional<ITimePolicy>(proxy) : Optional<ITimePolicy>.None();
            return new CachePolicy(bounded, afterWrite);
        }
890

891
        // Metrics are only available when a telemetry policy other than NoTelemetryPolicy is in use.
        private static Optional<ICacheMetrics> CreateMetrics(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheMetrics>.None()
                : new Optional<ICacheMetrics>(new Proxy(lru));
        }
900

901
        // Events are only available when a telemetry policy other than NoTelemetryPolicy is in use.
        private static Optional<ICacheEvents<K, V>> CreateEvents(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheEvents<K, V>>.None()
                : new Optional<ICacheEvents<K, V>>(new Proxy(lru));
        }
910

911
#if NET9_0_OR_GREATER
912
        ///<inheritdoc/>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public IAlternateLookup<TAlternateKey, K, V> GetAlternateLookup<TAlternateKey>()
            where TAlternateKey : notnull, allows ref struct
        {
            // the dictionary's comparer must support comparing TAlternateKey against K; throw otherwise
            if (!this.dictionary.IsCompatibleKey<TAlternateKey, K, I>())
            {
                Throw.IncompatibleComparer();
            }

            return new AlternateLookup<TAlternateKey>(this);
        }
924

925
        ///<inheritdoc/>
        public bool TryGetAlternateLookup<TAlternateKey>([MaybeNullWhen(false)] out IAlternateLookup<TAlternateKey, K, V> lookup)
            where TAlternateKey : notnull, allows ref struct
        {
            // fail when the dictionary's comparer cannot compare TAlternateKey against K
            if (!this.dictionary.IsCompatibleKey<TAlternateKey, K, I>())
            {
                lookup = default;
                return false;
            }

            lookup = new AlternateLookup<TAlternateKey>(this);
            return true;
        }
938

939
        ///<inheritdoc/>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public IAsyncAlternateLookup<TAlternateKey, K, V> GetAsyncAlternateLookup<TAlternateKey>()
            where TAlternateKey : notnull, allows ref struct
        {
            // the dictionary's comparer must support comparing TAlternateKey against K; throw otherwise
            if (!this.dictionary.IsCompatibleKey<TAlternateKey, K, I>())
            {
                Throw.IncompatibleComparer();
            }

            return new AlternateLookup<TAlternateKey>(this);
        }
951

952
        ///<inheritdoc/>
        public bool TryGetAsyncAlternateLookup<TAlternateKey>([MaybeNullWhen(false)] out IAsyncAlternateLookup<TAlternateKey, K, V> lookup)
            where TAlternateKey : notnull, allows ref struct
        {
            // fail when the dictionary's comparer cannot compare TAlternateKey against K
            if (!this.dictionary.IsCompatibleKey<TAlternateKey, K, I>())
            {
                lookup = default;
                return false;
            }

            lookup = new AlternateLookup<TAlternateKey>(this);
            return true;
        }
965

966
        // Wraps the cache together with the underlying ConcurrentDictionary's alternate lookup,
        // allowing the cache to be queried by an alternate key type without first materializing
        // a key of type K. A K is only created (via Comparer.Create) when a write is required.
        internal readonly struct AlternateLookup<TAlternateKey> : IAlternateLookup<TAlternateKey, K, V>, IAsyncAlternateLookup<TAlternateKey, K, V>
            where TAlternateKey : notnull, allows ref struct
        {
            internal AlternateLookup(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                // callers have already validated comparer compatibility; assert in debug builds
                Debug.Assert(lru is not null);
                Debug.Assert(lru.dictionary.IsCompatibleKey<TAlternateKey, K, I>());
                this.Lru = lru;
                this.Alternate = lru.dictionary.GetAlternateLookup<TAlternateKey>();
                this.Comparer = lru.dictionary.GetAlternateComparer<TAlternateKey, K, I>();
            }

            // the owning cache
            internal ConcurrentLruCore<K, V, I, P, T> Lru { get; }

            // the dictionary's alternate-key view, used for lookups without creating a K
            internal ConcurrentDictionary<K, I>.AlternateLookup<TAlternateKey> Alternate { get; }

            // converts a TAlternateKey to a K when an actual key must be stored
            internal IAlternateEqualityComparer<TAlternateKey, K> Comparer { get; }

            // Try to get the value for the alternate key. Hit/expiry handling is delegated to
            // GetOrDiscard; a miss is counted when the key is not present.
            public bool TryGet(TAlternateKey key, [MaybeNullWhen(false)] out V value)
            {
                if (this.Alternate.TryGetValue(key, out var item))
                {
                    return this.Lru.GetOrDiscard(item, out value);
                }

                value = default;
                this.Lru.telemetryPolicy.IncrementMiss();
                return false;
            }

            // Remove the item for the alternate key, returning the stored key and value on success.
            public bool TryRemove(TAlternateKey key, [MaybeNullWhen(false)] out K actualKey, [MaybeNullWhen(false)] out V value)
            {
                if (this.Alternate.TryRemove(key, out actualKey, out var item))
                {
                    this.Lru.OnRemove(actualKey, item, ItemRemovedReason.Removed);
                    value = item.Value;
                    return true;
                }

                actualKey = default;
                value = default;
                return false;
            }

            // Update the value for an existing key; returns false when the key is not present.
            public bool TryUpdate(TAlternateKey key, V value)
            {
                if (this.Alternate.TryGetValue(key, out var existing))
                {
                    return this.Lru.TryUpdateValue(existing, value);
                }

                return false;
            }

            // Add the key/value, or update the value if the key already exists. Loops because a
            // concurrent add/remove can make both TryUpdate and TryAdd fail in a single pass.
            public void AddOrUpdate(TAlternateKey key, V value)
            {
                K actualKey = default!;
                bool hasActualKey = false;

                while (true)
                {
                    if (this.TryUpdate(key, value))
                    {
                        return;
                    }

                    // lazily create the actual key, at most once across retries
                    if (!hasActualKey)
                    {
                        actualKey = this.Comparer.Create(key);
                        hasActualKey = true;
                    }

                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return;
                    }
                }
            }

            // Get the value for the key, or invoke the factory and add the result. The factory may
            // run more than once if TryAdd loses a race; only the winning value is returned.
            public V GetOrAdd(TAlternateKey key, Func<K, V> valueFactory)
            {
                while (true)
                {
                    if (this.TryGet(key, out var value))
                    {
                        return value;
                    }

                    K actualKey = this.Comparer.Create(key);

                    value = valueFactory(actualKey);
                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return value;
                    }
                }
            }

            // Same as GetOrAdd, with a caller-supplied factory argument to avoid closure allocation.
            public V GetOrAdd<TArg>(TAlternateKey key, Func<K, TArg, V> valueFactory, TArg factoryArgument)
                where TArg : allows ref struct
            {
                while (true)
                {
                    if (this.TryGet(key, out var value))
                    {
                        return value;
                    }

                    K actualKey = this.Comparer.Create(key);

                    value = valueFactory(actualKey, factoryArgument);
                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return value;
                    }
                }
            }

            // Async variant: returns synchronously on a hit, otherwise starts the factory task and
            // completes in GetOrAddAsyncSlow.
            public ValueTask<V> GetOrAddAsync(TAlternateKey key, Func<K, Task<V>> valueFactory)
            {
                if (this.TryGet(key, out var value))
                {
                    return new ValueTask<V>(value);
                }

                K actualKey = this.Comparer.Create(key);
                Task<V> task = valueFactory(actualKey);

                return GetOrAddAsyncSlow(actualKey, task);
            }

            // Async variant with a caller-supplied factory argument to avoid closure allocation.
            public ValueTask<V> GetOrAddAsync<TArg>(TAlternateKey key, Func<K, TArg, Task<V>> valueFactory, TArg factoryArgument)
            {
                if (this.TryGet(key, out var value))
                {
                    return new ValueTask<V>(value);
                }

                K actualKey = this.Comparer.Create(key);
                Task<V> task = valueFactory(actualKey, factoryArgument);

                return GetOrAddAsyncSlow(actualKey, task);
            }

            // Since TAlternateKey can be a ref struct, we can't use async/await in the public GetOrAddAsync methods,
            // so we delegate to this private async method after the value factory is invoked.
            private async ValueTask<V> GetOrAddAsyncSlow(K actualKey, Task<V> task)
            {
                V value = await task.ConfigureAwait(false);

                while (true)
                {
                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return value;
                    }

                    // Another thread added a value for this key first, retrieve it.
                    if (this.Lru.TryGet(actualKey, out V? existing))
                    {
                        return existing;
                    }
                }
            }
        }
1131
#endif
1132

1133
        // To get JIT optimizations, policies must be structs.
1134
        // If the structs are returned directly via properties, they will be copied. Since  
1135
        // telemetryPolicy is a mutable struct, copy is bad. One workaround is to store the 
1136
        // state within the struct in an object. Since the struct points to the same object
1137
        // it becomes immutable. However, this object is then somewhere else on the 
1138
        // heap, which slows down the policies with hit counter logic in benchmarks. Likely
1139
        // this approach keeps the structs data members in the same CPU cache line as the LRU.
1140
        // backcompat: remove conditional compile
1141
#if NETCOREAPP3_0_OR_GREATER
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Upd = {Updated}, Evict = {Evicted}")]
#else
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Evict = {Evicted}")]
#endif
        // Adapts the cache to the ICacheMetrics, ICacheEvents, IBoundedPolicy and ITimePolicy
        // interfaces by delegating each member to the LRU's telemetry and item policy in place.
        private class Proxy : ICacheMetrics, ICacheEvents<K, V>, IBoundedPolicy, ITimePolicy
        {
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public Proxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            // metrics are served directly from the telemetry policy
            public double HitRatio => lru.telemetryPolicy.HitRatio;

            public long Total => lru.telemetryPolicy.Total;

            public long Hits => lru.telemetryPolicy.Hits;

            public long Misses => lru.telemetryPolicy.Misses;

            public long Evicted => lru.telemetryPolicy.Evicted;

            // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public long Updated => lru.telemetryPolicy.Updated;
#endif
            public int Capacity => lru.Capacity;

            public TimeSpan TimeToLive => lru.itemPolicy.TimeToLive;

            // event subscriptions are forwarded to the telemetry policy
            public event EventHandler<ItemRemovedEventArgs<K, V>> ItemRemoved
            {
                add { this.lru.telemetryPolicy.ItemRemoved += value; }
                remove { this.lru.telemetryPolicy.ItemRemoved -= value; }
            }

            // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public event EventHandler<ItemUpdatedEventArgs<K, V>> ItemUpdated
            {
                add { this.lru.telemetryPolicy.ItemUpdated += value; }
                remove { this.lru.telemetryPolicy.ItemUpdated -= value; }
            }
#endif
            public void Trim(int itemCount)
            {
                lru.Trim(itemCount);
            }

            public void TrimExpired()
            {
                lru.TrimExpired();
            }
        }
1197

1198
        // Exposes discrete (per-item) expiry information for caches configured with an
        // IDiscreteItemPolicy, whose items are expected to carry a tick count.
        private class DiscreteExpiryProxy : IDiscreteTimePolicy
        {
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public DiscreteExpiryProxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            public void TrimExpired()
            {
                lru.TrimExpired();
            }

            // Gets the remaining time until the item with the specified key expires.
            // Returns false when the key is not of type K, is not present, or the stored
            // item does not carry a tick count.
            public bool TryGetTimeToExpire<TKey>(TKey key, out TimeSpan timeToLive)
            {
                // Use a type pattern instead of 'as' + null-forgiving dereference: the previous
                // code threw NullReferenceException if the item was not a LongTickCountLruItem.
                if (key is K k
                    && lru.dictionary.TryGetValue(k, out var item)
                    && item is LongTickCountLruItem<K, V> tickItem)
                {
                    timeToLive = (new Duration(tickItem.TickCount) - Duration.SinceEpoch()).ToTimeSpan();
                    return true;
                }

                timeToLive = default;
                return false;
            }
        }
1225
    }
1226
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc