• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

bitfaster / BitFaster.Caching / 23520908738

25 Mar 2026 01:45AM UTC coverage: 98.975% (-0.02%) from 98.99%
23520908738

Pull #739

github

web-flow
Merge 023c08509 into fc6739c4d
Pull Request #739: Extract net9 alternate-key cache support for ConcurrentLru

1156 of 1188 branches covered (97.31%)

Branch coverage included in aggregate %.

113 of 113 new or added lines in 3 files covered. (100.0%)

3 existing lines in 1 file now uncovered.

4930 of 4961 relevant lines covered (99.38%)

62637104.65 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

98.57
/BitFaster.Caching/Lru/ConcurrentLruCore.cs
1
using System;
2
using System.Collections;
3
using System.Collections.Concurrent;
4
using System.Collections.Generic;
5
using System.Diagnostics;
6
using System.Diagnostics.CodeAnalysis;
7
using System.Linq;
8
using System.Runtime.CompilerServices;
9
using System.Threading;
10
using System.Threading.Tasks;
11

12
namespace BitFaster.Caching.Lru
13
{
14
    /// <summary>
15
    /// A pseudo LRU based on the TU-Q eviction policy. The LRU list is composed of 3 segments: hot, warm and cold. 
16
    /// Cost of maintaining segments is amortized across requests. Items are only cycled when capacity is exceeded. 
17
    /// Pure read does not cycle items if all segments are within capacity constraints. There are no global locks. 
18
    /// On cache miss, a new item is added. Tail items in each segment are dequeued, examined, and are either enqueued 
19
    /// or discarded.
20
    /// The TU-Q scheme of hot, warm and cold is similar to that used in MemCached (https://memcached.org/blog/modern-lru/)
21
    /// and OpenBSD (https://flak.tedunangst.com/post/2Q-buffer-cache-algorithm), but does not use a background thread
22
    /// to maintain the internal queues.
23
    /// </summary>
24
    /// <remarks>
25
    /// Each segment has a capacity. When segment capacity is exceeded, items are moved as follows:
26
    /// <list type="number">
27
    ///   <item><description>New items are added to hot, WasAccessed = false.</description></item>
28
    ///   <item><description>When items are accessed, update WasAccessed = true.</description></item>
29
    ///   <item><description>When items are moved WasAccessed is set to false.</description></item>
30
    ///   <item><description>When hot is full, hot tail is moved to either Warm or Cold depending on WasAccessed.</description></item>
31
    ///   <item><description>When warm is full, warm tail is moved to warm head or cold depending on WasAccessed.</description></item>
32
    ///   <item><description>When cold is full, cold tail is moved to warm head or removed from dictionary depending on WasAccessed.</description></item>
33
    ///</list>
34
    /// </remarks>
35
    public class ConcurrentLruCore<K, V, I, P, T> : ICacheExt<K, V>, IAsyncCacheExt<K, V>, IEnumerable<KeyValuePair<K, V>>
36
        where K : notnull
37
        where I : LruItem<K, V>
38
        where P : struct, IItemPolicy<K, V, I>
39
        where T : struct, ITelemetryPolicy<K, V>
40
    {
41
        // Backing store: key -> queue item. Items removed from the dictionary may linger
        // in the queues until they are cycled out (see OnRemove).
        private readonly ConcurrentDictionary<K, I> dictionary;

        // The three LRU segments. New items enter hot; accessed items graduate toward warm,
        // unaccessed items sink toward cold and are eventually evicted.
        private readonly ConcurrentQueue<I> hotQueue;
        private readonly ConcurrentQueue<I> warmQueue;
        private readonly ConcurrentQueue<I> coldQueue;

        // maintain count outside ConcurrentQueue, since ConcurrentQueue.Count holds a global lock
        private PaddedQueueCount counter;

        private readonly ICapacityPartition capacity;

        private readonly P itemPolicy;

        // false until the warm queue has filled once; Cycle uses a different (warmup) path
        // until then so warm can fill with new items.
        private bool isWarm = false;

        /// <summary>
        /// The telemetry policy.
        /// </summary>
        /// <remarks>
        /// Since T is a struct, making it readonly will force the runtime to make defensive copies
        /// if mutate methods are called. Therefore, field must be mutable to maintain count.
        /// </remarks>
        protected T telemetryPolicy;
63

64
        /// <summary>
        /// Initializes a new instance of the ConcurrentLruCore class with the specified concurrencyLevel, capacity, equality comparer, item policy and telemetry policy.
        /// </summary>
        /// <param name="concurrencyLevel">The concurrency level.</param>
        /// <param name="capacity">The capacity.</param>
        /// <param name="comparer">The equality comparer.</param>
        /// <param name="itemPolicy">The item policy.</param>
        /// <param name="telemetryPolicy">The telemetry policy.</param>
        /// <exception cref="ArgumentNullException"></exception>
        public ConcurrentLruCore(
            int concurrencyLevel,
            ICapacityPartition capacity,
            IEqualityComparer<K> comparer,
            P itemPolicy,
            T telemetryPolicy)
        {
            if (capacity == null)
                Throw.ArgNull(ExceptionArgument.capacity);

            if (comparer == null)
                Throw.ArgNull(ExceptionArgument.comparer);

            capacity.Validate();
            this.capacity = capacity;

            this.hotQueue = new ConcurrentQueue<I>();
            this.warmQueue = new ConcurrentQueue<I>();
            this.coldQueue = new ConcurrentQueue<I>();

            // pre-size the dictionary for total capacity to reduce resizing as the cache fills
            int dictionaryCapacity = ConcurrentDictionarySize.Estimate(this.Capacity);

            this.dictionary = new ConcurrentDictionary<K, I>(concurrencyLevel, dictionaryCapacity, comparer);
            this.itemPolicy = itemPolicy;
            this.telemetryPolicy = telemetryPolicy;
            this.telemetryPolicy.SetEventSource(this);
        }
100

101
        // No lock count: https://arbel.net/2013/02/03/best-practices-for-using-concurrentdictionary/
        ///<inheritdoc/>
        // Note: O(n) scan that excludes items the policy would discard (e.g. expired).
        public int Count => this.dictionary.Where(i => !itemPolicy.ShouldDiscard(i.Value)).Count();

        ///<inheritdoc/>
        public int Capacity => this.capacity.Hot + this.capacity.Warm + this.capacity.Cold;

        ///<inheritdoc/>
        public Optional<ICacheMetrics> Metrics => CreateMetrics(this);

        ///<inheritdoc/>
        public Optional<ICacheEvents<K, V>> Events => CreateEvents(this);

        ///<inheritdoc/>
        public CachePolicy Policy => CreatePolicy(this);

        /// <summary>
        /// Gets the number of hot items.
        /// </summary>
        public int HotCount => Volatile.Read(ref this.counter.hot);

        /// <summary>
        /// Gets the number of warm items.
        /// </summary>
        public int WarmCount => Volatile.Read(ref this.counter.warm);

        /// <summary>
        /// Gets the number of cold items.
        /// </summary>
        public int ColdCount => Volatile.Read(ref this.counter.cold);

        /// <summary>
        /// Gets a collection containing the keys in the cache.
        /// </summary>
        public ICollection<K> Keys => this.dictionary.Keys;
136

137
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The enumerator returned from the cache is safe to use concurrently with
        /// reads and writes, however it does not represent a moment-in-time snapshot.  
        /// The contents exposed through the enumerator may contain modifications
        /// made after <see cref="GetEnumerator"/> was called.
        /// </remarks>
        public IEnumerator<KeyValuePair<K, V>> GetEnumerator()
        {
            foreach (var pair in this.dictionary)
            {
                // skip items that the policy considers discardable so they are never observed
                if (itemPolicy.ShouldDiscard(pair.Value))
                {
                    continue;
                }

                yield return new KeyValuePair<K, V>(pair.Key, pair.Value.Value);
            }
        }
155

156
        ///<inheritdoc/>
        public bool TryGet(K key, [MaybeNullWhen(false)] out V value)
        {
            if (dictionary.TryGetValue(key, out var item))
            {
                // present in dictionary; the policy decides whether it is still valid
                return GetOrDiscard(item, out value);
            }

            value = default;
            this.telemetryPolicy.IncrementMiss();
            return false;
        }
168

169
        // AggressiveInlining forces the JIT to inline policy.ShouldDiscard(). For LRU policy 
        // the first branch is completely eliminated due to JIT time constant propagation.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool GetOrDiscard(I item, [MaybeNullWhen(false)] out V value)
        {
            if (this.itemPolicy.ShouldDiscard(item))
            {
                // stale item: evict it and report a miss to the caller
                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Evicted);
                this.telemetryPolicy.IncrementMiss();
                value = default;
                return false;
            }

            value = item.Value;

            // mark the item accessed so it is favored when the queues are next cycled
            this.itemPolicy.Touch(item);
            this.telemetryPolicy.IncrementHit();
            return true;
        }
188

189
        // Attempts to insert a new item into the dictionary and hot queue. Returns false when
        // another thread added the same key first; the losing value is disposed.
        private bool TryAdd(K key, V value)
        {
            var newItem = this.itemPolicy.CreateItem(key, value);

            if (this.dictionary.TryAdd(key, newItem))
            {
                this.hotQueue.Enqueue(newItem);
                // bump hot count and rebalance the segments if capacity was exceeded
                Cycle(Interlocked.Increment(ref counter.hot));
                return true;
            }

            // lost the insert race: dispose the orphaned value that will never be cached
            Disposer<V>.Dispose(newItem.Value);
            return false;
        }
203

204
        ///<inheritdoc/>
        public V GetOrAdd(K key, Func<K, V> valueFactory)
        {
            // Retry until the key is either observed or successfully inserted.
            for (; ; )
            {
                if (this.TryGet(key, out var existing))
                {
                    return existing;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                var created = valueFactory(key);

                if (this.TryAdd(key, created))
                {
                    return created;
                }
            }
        }
223

224
        /// <summary>
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
        /// existing value if the key already exists.
        /// </summary>
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
        /// <param name="key">The key of the element to add.</param>
        /// <param name="valueFactory">The factory function used to generate a value for the key.</param>
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
        /// <returns>The value for the key. This will be either the existing value for the key if the key is already 
        /// in the cache, or the new value if the key was not in the cache.</returns>
        public V GetOrAdd<TArg>(K key, Func<K, TArg, V> valueFactory, TArg factoryArgument)
        {
            // retry loop: if TryAdd loses a race, the next TryGet observes the winner's value
            while (true)
            {
                if (this.TryGet(key, out var value))
                {
                    return value;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                value = valueFactory(key, factoryArgument);

                if (TryAdd(key, value))
                {
                    return value;
                }
            }
        }
252

253
        ///<inheritdoc/>
        public async ValueTask<V> GetOrAddAsync(K key, Func<K, Task<V>> valueFactory)
        {
            // retry loop: if TryAdd loses a race, the next TryGet observes the winner's value
            while (true)
            {
                if (this.TryGet(key, out var value))
                {
                    return value;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                // This is identical logic in ConcurrentDictionary.GetOrAdd method.
                value = await valueFactory(key).ConfigureAwait(false);

                if (TryAdd(key, value))
                {
                    return value;
                }
            }
        }
273

274
        /// <summary>
        /// Adds a key/value pair to the cache if the key does not already exist. Returns the new value, or the 
        /// existing value if the key already exists.
        /// </summary>
        /// <typeparam name="TArg">The type of an argument to pass into valueFactory.</typeparam>
        /// <param name="key">The key of the element to add.</param>
        /// <param name="valueFactory">The factory function used to asynchronously generate a value for the key.</param>
        /// <param name="factoryArgument">An argument value to pass into valueFactory.</param>
        /// <returns>A task that represents the asynchronous GetOrAdd operation.</returns>
        public async ValueTask<V> GetOrAddAsync<TArg>(K key, Func<K, TArg, Task<V>> valueFactory, TArg factoryArgument)
        {
            // retry loop: if TryAdd loses a race, the next TryGet observes the winner's value
            while (true)
            {
                if (this.TryGet(key, out var value))
                {
                    return value;
                }

                // The value factory may be called concurrently for the same key, but the first write to the dictionary wins.
                value = await valueFactory(key, factoryArgument).ConfigureAwait(false);

                if (TryAdd(key, value))
                {
                    return value;
                }
            }
        }
301

302
        /// <summary>
        /// Attempts to remove the specified key value pair.
        /// </summary>
        /// <param name="item">The item to remove.</param>
        /// <returns>true if the item was removed successfully; otherwise, false.</returns>
        public bool TryRemove(KeyValuePair<K, V> item)
        {
            if (this.dictionary.TryGetValue(item.Key, out var existing))
            {
                // lock the item to serialize with concurrent value updates and disposal
                lock (existing)
                {
                    if (EqualityComparer<V>.Default.Equals(existing.Value, item.Value))
                    {
                        var kvp = new KeyValuePair<K, I>(item.Key, existing);
#if NET6_0_OR_GREATER
                        if (this.dictionary.TryRemove(kvp))
#else
                        // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                        if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                        {
                            OnRemove(item.Key, kvp.Value, ItemRemovedReason.Removed);
                            return true;
                        }
                    }
                }

                // it existed, but we couldn't remove - this means value was replaced after the TryGetValue (a race)
            }

            return false;
        }
334

335
        /// <summary>
        /// Attempts to remove and return the value that has the specified key.
        /// </summary>
        /// <param name="key">The key of the element to remove.</param>
        /// <param name="value">When this method returns, contains the object removed, or the default value of the value type if key does not exist.</param>
        /// <returns>true if the object was removed successfully; otherwise, false.</returns>
        public bool TryRemove(K key, [MaybeNullWhen(false)] out V value)
        {
            if (this.dictionary.TryRemove(key, out var item))
            {
                // mark removed and dispose; queue counts reconcile lazily when the item cycles out
                OnRemove(key, item, ItemRemovedReason.Removed);
                value = item.Value;
                return true;
            }

            value = default;
            return false;
        }
353

354
        ///<inheritdoc/>
        // Convenience overload that discards the removed value.
        public bool TryRemove(K key) => TryRemove(key, out _);
359

360
        // Common removal bookkeeping: flags the item, raises telemetry, and disposes the value.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void OnRemove(K key, I item, ItemRemovedReason reason)
        {
            // Mark as not accessed, it will later be cycled out of the queues because it can never be fetched 
            // from the dictionary. Note: Hot/Warm/Cold count will reflect the removed item until it is cycled 
            // from the queue.
            item.WasAccessed = false;
            item.WasRemoved = true;

            this.telemetryPolicy.OnItemRemoved(key, item.Value, reason);

            // serialize dispose (common case dispose not thread safe)
            lock (item)
            {
                Disposer<V>.Dispose(item.Value);
            }
        }
377

378
        ///<inheritdoc/>
        ///<remarks>Note: Calling this method does not affect LRU order.</remarks>
        public bool TryUpdate(K key, V value)
        {
            // Succeeds only when the key is present and the item has not been concurrently removed.
            return this.dictionary.TryGetValue(key, out var existing)
                && this.TryUpdateValue(existing, value);
        }
389

390
        // Replaces the item's value under the item lock; fails if the item was already removed.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private bool TryUpdateValue(I existing, V value)
        {
            // lock serializes with removal/disposal of the same item (see OnRemove/TryRemove)
            lock (existing)
            {
                if (!existing.WasRemoved)
                {
                    V oldValue = existing.Value;

                    existing.Value = value;

                    this.itemPolicy.Update(existing);
                    // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
                    this.telemetryPolicy.OnItemUpdated(existing.Key, oldValue, existing.Value);
#endif
                    // dispose the replaced value while still holding the item lock
                    Disposer<V>.Dispose(oldValue);

                    return true;
                }
            }

            // item was concurrently removed; caller may retry as an add
            return false;
        }
414

415
        ///<inheritdoc/>
        ///<remarks>Note: Updates to existing items do not affect LRU order. Added items are at the top of the LRU.</remarks>
        public void AddOrUpdate(K key, V value)
        {
            while (true)
            {
                // first, try to update
                if (this.TryUpdate(key, value))
                {
                    return;
                }

                // then try add
                var newItem = this.itemPolicy.CreateItem(key, value);

                if (this.dictionary.TryAdd(key, newItem))
                {
                    this.hotQueue.Enqueue(newItem);
                    // bump hot count and rebalance the segments if capacity was exceeded
                    Cycle(Interlocked.Increment(ref counter.hot));
                    return;
                }

                // if both update and add failed there was a race, try again
            }
        }
440

441
        ///<inheritdoc/>
        public void Clear()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // evaluate queue count, remove everything including items removed from the dictionary but
                // not the queues. This also avoids the expensive o(n) no lock count, or locking the dictionary.
                int queueCount = this.HotCount + this.WarmCount + this.ColdCount;
                this.TrimLiveItems(itemsRemoved: 0, queueCount, ItemRemovedReason.Cleared);
            }
        }
453

454
        /// <summary>
        /// Trim the specified number of items from the cache. Removes all discardable items per IItemPolicy.ShouldDiscard(), then 
        /// itemCount-discarded items in LRU order, if any.
        /// </summary>
        /// <param name="itemCount">The number of items to remove.</param>
        /// <returns>The number of items removed from the cache.</returns>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is less than 0./</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="itemCount"/> is greater than capacity./</exception>
        /// <remarks>
        /// Note: Trim affects LRU order. Calling Trim resets the internal accessed status of items.
        /// </remarks>
        public void Trim(int itemCount)
        {
            int capacity = this.Capacity;

            if (itemCount < 1 || itemCount > capacity)
                Throw.ArgOutOfRange(nameof(itemCount), "itemCount must be greater than or equal to one, and less than the capacity of the cache.");

            // clamp itemCount to number of items actually in the cache
            itemCount = Math.Min(itemCount, this.HotCount + this.WarmCount + this.ColdCount);

            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // first scan each queue for discardable items and remove them immediately. Note this can remove > itemCount items.
                int itemsRemoved = TrimAllDiscardedItems();

                TrimLiveItems(itemsRemoved, itemCount, ItemRemovedReason.Trimmed);
            }
        }
484

485
        // Removes discardable (e.g. expired) items from all queues, when the policy supports discard.
        private void TrimExpired()
        {
            if (!this.itemPolicy.CanDiscard())
            {
                return;
            }

            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                this.TrimAllDiscardedItems();
            }
        }
495

496
        /// <summary>
        /// Trim discarded items from all queues.
        /// </summary>
        /// <returns>The number of items removed.</returns>
        // backcompat: make internal
        protected int TrimAllDiscardedItems()
        {
            // don't overlap Clear/Trim/TrimExpired
            lock (this.dictionary)
            {
                // Rotates a queue once (bounded by the count observed at the start): discardable
                // items are removed, items already removed from the dictionary just have the
                // counter reconciled, and live items are re-enqueued preserving relative order.
                int RemoveDiscardableItems(ConcurrentQueue<I> q, ref int queueCounter)
                {
                    int itemsRemoved = 0;
                    int localCount = queueCounter;

                    for (int i = 0; i < localCount; i++)
                    {
                        if (q.TryDequeue(out var item))
                        {
                            if (this.itemPolicy.ShouldDiscard(item))
                            {
                                Interlocked.Decrement(ref queueCounter);
                                this.Move(item, ItemDestination.Remove, ItemRemovedReason.Trimmed);
                                itemsRemoved++;
                            }
                            else if (item.WasRemoved)
                            {
                                // already removed from the dictionary; just fix up the count
                                Interlocked.Decrement(ref queueCounter);
                            }
                            else
                            {
                                // still live: put it back at the head-end of the rotation
                                q.Enqueue(item);
                            }
                        }
                    }

                    return itemsRemoved;
                }

                int coldRem = RemoveDiscardableItems(coldQueue, ref this.counter.cold);
                int warmRem = RemoveDiscardableItems(warmQueue, ref this.counter.warm);
                int hotRem = RemoveDiscardableItems(hotQueue, ref this.counter.hot);

                // warm lost items, so drop back to warmup mode to let warm refill
                if (warmRem > 0)
                {
                    Volatile.Write(ref this.isWarm, false);
                }

                return coldRem + warmRem + hotRem;
            }
        }
547

548
        // Removes up to itemCount live items in LRU order by repeatedly evicting the cold tail,
        // cycling warm/hot items toward cold as needed.
        private void TrimLiveItems(int itemsRemoved, int itemCount, ItemRemovedReason reason)
        {
            // When items are touched, they are moved to warm by cycling. Therefore, to guarantee 
            // that we can remove itemCount items, we must cycle (2 * capacity.Warm) + capacity.Hot times.
            // If clear is called during trimming, it would be possible to get stuck in an infinite
            // loop here. The warm + hot limit also guards against this case.
            int trimWarmAttempts = 0;
            int maxWarmHotAttempts = (this.capacity.Warm * 2) + this.capacity.Hot;

            while (itemsRemoved < itemCount && trimWarmAttempts < maxWarmHotAttempts)
            {
                if (Volatile.Read(ref this.counter.cold) > 0)
                {
                    // (Remove, 0) signals the cold tail was actually discarded
                    if (TryRemoveCold(reason) == (ItemDestination.Remove, 0))
                    {
                        itemsRemoved++;
                        trimWarmAttempts = 0;
                    }
                    else
                    {
                        TrimWarmOrHot(reason);
                    }
                }
                else
                {
                    TrimWarmOrHot(reason);
                    trimWarmAttempts++;
                }
            }

            // if warm is no longer full, re-enter warmup mode so it can refill
            if (Volatile.Read(ref this.counter.warm) < this.capacity.Warm)
            {
                Volatile.Write(ref this.isWarm, false);
            }
        }
583

584
        // Cycles one item out of warm (preferred) or hot; used when cold is empty.
        private void TrimWarmOrHot(ItemRemovedReason reason)
        {
            if (Volatile.Read(ref this.counter.warm) > 0)
            {
                CycleWarmUnchecked(reason);
            }
            else if (Volatile.Read(ref this.counter.hot) > 0)
            {
                CycleHotUnchecked(reason);
            }
        }
595

596
        // Rebalances the queues after an item was enqueued to hot. hotCount is the hot count
        // observed immediately after the increment.
        private void Cycle(int hotCount)
        {
            if (isWarm)
            {
                (var dest, var count) = CycleHot(hotCount);

                // follow the moved item through at most 3 segment transitions, or until something is removed
                int cycles = 0;
                while (cycles++ < 3 && dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm)
                    {
                        (dest, count) = CycleWarm(count);
                    }
                    else if (dest == ItemDestination.Cold)
                    {
                        (dest, count) = CycleCold(count);
                    }
                }

                // If nothing was removed yet, constrain the size of warm and cold by discarding the coldest item.
                if (dest != ItemDestination.Remove)
                {
                    if (dest == ItemDestination.Warm && count > this.capacity.Warm)
                    {
                        count = LastWarmToCold();
                    }

                    ConstrainCold(count, ItemRemovedReason.Evicted);
                }
            }
            else
            {
                // fill up the warm queue with new items until warm is full.
                // else during warmup the cache will only use the hot + cold queues until any item is requested twice.
                CycleDuringWarmup(hotCount);
            }
        }
633

634
        // Warmup variant of Cycle: hot overflow goes straight to warm until warm fills once,
        // at which point isWarm is latched and normal cycling takes over.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private void CycleDuringWarmup(int hotCount)
        {
            // do nothing until hot is full
            if (hotCount > this.capacity.Hot)
            {
                Interlocked.Decrement(ref this.counter.hot);

                if (this.hotQueue.TryDequeue(out var item))
                {
                    // special case: removed during warmup
                    if (item.WasRemoved)
                    {
                        return;
                    }

                    int count = this.Move(item, ItemDestination.Warm, ItemRemovedReason.Evicted);

                    // if warm is now full, overflow to cold and mark as warm
                    if (count > this.capacity.Warm)
                    {
                        Volatile.Write(ref this.isWarm, true);
                        count = LastWarmToCold();
                        ConstrainCold(count, ItemRemovedReason.Evicted);
                    }
                }
                else
                {
                    // queue was drained by a concurrent dequeue; undo the optimistic decrement
                    Interlocked.Increment(ref this.counter.hot);
                }
            }
        }
666

667
        // Evicts the tail item from the hot queue when hot exceeds capacity.
        // Returns the destination the cycled item was routed to and the resulting
        // destination queue count, or (Remove, 0) when nothing was cycled.
        private (ItemDestination, int) CycleHot(int hotCount)
        {
            if (hotCount <= this.capacity.Hot)
            {
                // Hot is within bounds; nothing to do.
                return (ItemDestination.Remove, 0);
            }

            return CycleHotUnchecked(ItemRemovedReason.Evicted);
        }
676

677
        // Dequeues the tail item from hot and routes it via the item policy (to warm when
        // accessed, otherwise cold/remove). Caller has already verified hot is over capacity.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleHotUnchecked(ItemRemovedReason removedReason)
        {
            // Speculative decrement; compensated below if the dequeue loses a race.
            Interlocked.Decrement(ref this.counter.hot);

            if (this.hotQueue.TryDequeue(out var item))
            {
                // The policy decides the item's next queue based on its state.
                var where = this.itemPolicy.RouteHot(item);
                return (where, this.Move(item, where, removedReason));
            }
            else
            {
                // A concurrent cycle drained the queue first: undo the speculative decrement.
                Interlocked.Increment(ref this.counter.hot);
                return (ItemDestination.Remove, 0);
            }
        }
693

694
        // Evicts the tail item from the warm queue when warm exceeds capacity.
        // Returns the destination the cycled item was routed to and the resulting
        // destination queue count, or (Remove, 0) when nothing was cycled.
        private (ItemDestination, int) CycleWarm(int count)
        {
            if (count <= this.capacity.Warm)
            {
                // Warm is within bounds; nothing to do.
                return (ItemDestination.Remove, 0);
            }

            return CycleWarmUnchecked(ItemRemovedReason.Evicted);
        }
703

704
        // Dequeues the tail item from warm and either re-enqueues it to warm (when the
        // policy says it was accessed and warm has room) or demotes it to cold.
        // Caller has already verified warm is over capacity.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) CycleWarmUnchecked(ItemRemovedReason removedReason)
        {
            // Speculative decrement; wc is the post-decrement warm count used below.
            int wc = Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                // Item was explicitly removed while still queued; drop it without moving.
                if (item.WasRemoved)
                {
                    return (ItemDestination.Remove, 0);
                }

                var where = this.itemPolicy.RouteWarm(item);

                // When the warm queue is full, we allow an overflow of 1 item before redirecting warm items to cold.
                // This only happens when hit rate is high, in which case we can consider all items relatively equal in
                // terms of which was least recently used.
                if (where == ItemDestination.Warm && wc <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    return (ItemDestination.Cold, this.Move(item, ItemDestination.Cold, removedReason));
                }
            }
            else
            {
                // A concurrent cycle drained the queue first: undo the speculative decrement.
                Interlocked.Increment(ref this.counter.warm);
                return (ItemDestination.Remove, 0);
            }
        }
736

737
        // Evicts the tail item from the cold queue when cold exceeds capacity.
        // Returns the destination the cycled item was routed to and the resulting
        // destination queue count, or (Remove, 0) when nothing was cycled.
        private (ItemDestination, int) CycleCold(int count)
        {
            if (count <= this.capacity.Cold)
            {
                // Cold is within bounds; nothing to do.
                return (ItemDestination.Remove, 0);
            }

            return TryRemoveCold(ItemRemovedReason.Evicted);
        }
746

747
        // Dequeues the tail item from cold and either promotes it back to warm (when the
        // policy says it was accessed and warm has spare capacity) or removes it from
        // the cache entirely.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private (ItemDestination, int) TryRemoveCold(ItemRemovedReason removedReason)
        {
            // Speculative decrement; compensated below if the dequeue loses a race.
            Interlocked.Decrement(ref this.counter.cold);

            if (this.coldQueue.TryDequeue(out var item))
            {
                var where = this.itemPolicy.RouteCold(item);

                // Promote only when warm has room; otherwise the item is evicted.
                if (where == ItemDestination.Warm && Volatile.Read(ref this.counter.warm) <= this.capacity.Warm)
                {
                    return (ItemDestination.Warm, this.Move(item, where, removedReason));
                }
                else
                {
                    this.Move(item, ItemDestination.Remove, removedReason);
                    return (ItemDestination.Remove, 0);
                }
            }
            else
            {
                // A concurrent cycle drained the queue first: undo the speculative
                // decrement and report the restored cold count.
                return (ItemDestination.Cold, Interlocked.Increment(ref this.counter.cold));
            }
        }
771

772
        // Demotes the tail item of the warm queue to cold (or discards it when already
        // removed), returning the resulting destination queue count (0 when nothing moved).
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int LastWarmToCold()
        {
            // Speculative decrement; compensated below if the dequeue loses a race.
            Interlocked.Decrement(ref this.counter.warm);

            if (this.warmQueue.TryDequeue(out var item))
            {
                // Items already removed from the dictionary are dropped instead of demoted.
                var destination = item.WasRemoved ? ItemDestination.Remove : ItemDestination.Cold;
                return this.Move(item, destination, ItemRemovedReason.Evicted);
            }
            else
            {
                // A concurrent cycle drained the queue first: undo the speculative decrement.
                Interlocked.Increment(ref this.counter.warm);
                return 0;
            }
        }
788

789
        // Discards the coldest queued item when the cold queue exceeds capacity.
        // Note: unlike the cycle methods, the counter is decremented only after a
        // successful dequeue, so no compensating increment path is needed here.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private void ConstrainCold(int coldCount, ItemRemovedReason removedReason)
        {
            if (coldCount > this.capacity.Cold && this.coldQueue.TryDequeue(out var item))
            {
                Interlocked.Decrement(ref this.counter.cold);
                this.Move(item, ItemDestination.Remove, removedReason);
            }
        }
798

799
        // Moves a dequeued item to its destination queue, or removes it from the cache.
        // Returns the destination queue's new count, or 0 for removals/no-ops.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int Move(I item, ItemDestination where, ItemRemovedReason removedReason)
        {
            // Clear the access bit so the item must be touched again to survive the next cycle.
            item.WasAccessed = false;

            switch (where)
            {
                case ItemDestination.Warm:
                    this.warmQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.warm);
                case ItemDestination.Cold:
                    this.coldQueue.Enqueue(item);
                    return Interlocked.Increment(ref this.counter.cold);
                case ItemDestination.Remove:

                    // Remove only if the dictionary still maps the key to this exact item,
                    // so a concurrent update/re-add of the same key is not clobbered.
                    var kvp = new KeyValuePair<K, I>(item.Key, item);

#if NET6_0_OR_GREATER
                    if (this.dictionary.TryRemove(kvp))
#else
                    // https://devblogs.microsoft.com/pfxteam/little-known-gems-atomic-conditional-removals-from-concurrentdictionary/
                    if (((ICollection<KeyValuePair<K, I>>)this.dictionary).Remove(kvp))
#endif
                    {
                        OnRemove(item.Key, item, removedReason);
                    }
                    break;
            }

            return 0;
        }
830

831
        /// <summary>Returns an enumerator that iterates through the cache.</summary>
        /// <returns>An enumerator for the cache.</returns>
        /// <remarks>
        /// The enumerator returned from the cache is safe to use concurrently with
        /// reads and writes, however it does not represent a moment-in-time snapshot.
        /// The contents exposed through the enumerator may contain modifications
        /// made after <see cref="GetEnumerator"/> was called.
        /// </remarks>
        IEnumerator IEnumerable.GetEnumerator()
        {
            // Delegate to the strongly typed enumerator defined on this class.
            var self = (ConcurrentLruCore<K, V, I, P, T>)this;
            return self.GetEnumerator();
        }
843

844
#if DEBUG
845
        /// <summary>
        /// Format the LRU as a string by converting all the keys to strings.
        /// </summary>
        /// <returns>The LRU formatted as a string.</returns>
        internal string FormatLruString()
        {
            // Renders a queue's keys as a comma separated list, oldest first.
            string Keys(IEnumerable<I> queue) => string.Join(",", queue.Select(n => n.Key.ToString()));

            var sb = new System.Text.StringBuilder();
            sb.Append("Hot [").Append(Keys(this.hotQueue));
            sb.Append("] Warm [").Append(Keys(this.warmQueue));
            sb.Append("] Cold [").Append(Keys(this.coldQueue));
            sb.Append(']');

            return sb.ToString();
        }
863
#endif
864

865
        // Builds the CachePolicy surface for this cache, exposing only the policy
        // interfaces that the configured item policy P actually supports.
        private static CachePolicy CreatePolicy(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            var proxy = new Proxy(lru);
            var bounded = new Optional<IBoundedPolicy>(proxy);

            if (typeof(P) == typeof(AfterAccessPolicy<K, V>))
            {
                // Sliding expiry: expose the expire-after-access time policy.
                return new CachePolicy(bounded, Optional<ITimePolicy>.None(), new Optional<ITimePolicy>(proxy), Optional<IDiscreteTimePolicy>.None());
            }

            // IsAssignableFrom is a jit intrinsic https://github.com/dotnet/runtime/issues/4920
            if (typeof(IDiscreteItemPolicy<K, V>).IsAssignableFrom(typeof(P)))
            {
                // Per-item expiry: expose the discrete time policy.
                return new CachePolicy(bounded, Optional<ITimePolicy>.None(), Optional<ITimePolicy>.None(), new Optional<IDiscreteTimePolicy>(new DiscreteExpiryProxy(lru)));
            }

            var expireAfterWrite = lru.itemPolicy.CanDiscard() ? new Optional<ITimePolicy>(proxy) : Optional<ITimePolicy>.None();
            return new CachePolicy(bounded, expireAfterWrite);
        }
882

883
        // Metrics are only published when a telemetry policy is configured.
        private static Optional<ICacheMetrics> CreateMetrics(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheMetrics>.None()
                : new Optional<ICacheMetrics>(new Proxy(lru));
        }
892

893
        // Events are only published when a telemetry policy is configured.
        private static Optional<ICacheEvents<K, V>> CreateEvents(ConcurrentLruCore<K, V, I, P, T> lru)
        {
            return typeof(T) == typeof(NoTelemetryPolicy<K, V>)
                ? Optional<ICacheEvents<K, V>>.None()
                : new Optional<ICacheEvents<K, V>>(new Proxy(lru));
        }
902

903
#if NET9_0_OR_GREATER
904
        /// <summary>
        /// Gets an alternate lookup that can use an alternate key type with the configured comparer.
        /// </summary>
        /// <typeparam name="TAlternateKey">The alternate key type.</typeparam>
        /// <returns>An alternate lookup.</returns>
        /// <exception cref="InvalidOperationException">The configured comparer does not support <typeparamref name="TAlternateKey" />.</exception>
        public IAlternateLookup<TAlternateKey, K, V> GetAlternateLookup<TAlternateKey>()
            where TAlternateKey : notnull, allows ref struct
        {
            bool isCompatible = this.dictionary.IsCompatibleKey<TAlternateKey, K, I>();

            if (!isCompatible)
            {
                Throw.IncompatibleComparer();
            }

            return new AlternateLookup<TAlternateKey>(this);
        }
920

921
        /// <summary>
        /// Attempts to get an alternate lookup that can use an alternate key type with the configured comparer.
        /// </summary>
        /// <typeparam name="TAlternateKey">The alternate key type.</typeparam>
        /// <param name="lookup">The alternate lookup when available.</param>
        /// <returns><see langword="true" /> when the configured comparer supports <typeparamref name="TAlternateKey" />; otherwise, <see langword="false" />.</returns>
        public bool TryGetAlternateLookup<TAlternateKey>([MaybeNullWhen(false)] out IAlternateLookup<TAlternateKey, K, V> lookup)
            where TAlternateKey : notnull, allows ref struct
        {
            if (!this.dictionary.IsCompatibleKey<TAlternateKey, K, I>())
            {
                lookup = default;
                return false;
            }

            lookup = new AlternateLookup<TAlternateKey>(this);
            return true;
        }
939

940
        // Wraps ConcurrentDictionary's net9 AlternateLookup so callers can query the cache
        // with an alternate key type (e.g. ReadOnlySpan<char> for string keys) without
        // allocating the canonical key on the hit path.
        internal readonly struct AlternateLookup<TAlternateKey> : IAlternateLookup<TAlternateKey, K, V>
            where TAlternateKey : notnull, allows ref struct
        {
            internal AlternateLookup(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                // Callers (GetAlternateLookup/TryGetAlternateLookup) verify compatibility first.
                Debug.Assert(lru is not null);
                Debug.Assert(lru.dictionary.IsCompatibleKey<TAlternateKey, K, I>());
                this.Lru = lru;
                this.Alternate = lru.dictionary.GetAlternateLookup<TAlternateKey>();
            }

            // The owning cache.
            internal ConcurrentLruCore<K, V, I, P, T> Lru { get; }

            // The dictionary's alternate-key view over the same entries.
            internal ConcurrentDictionary<K, I>.AlternateLookup<TAlternateKey> Alternate { get; }

            public bool TryGet(TAlternateKey key, [MaybeNullWhen(false)] out V value)
            {
                if (this.Alternate.TryGetValue(key, out var item))
                {
                    // Defer to the cache's normal hit path (which may discard the item).
                    return this.Lru.GetOrDiscard(item, out value);
                }

                value = default;
                this.Lru.telemetryPolicy.IncrementMiss();
                return false;
            }

            public bool TryRemove(TAlternateKey key, [MaybeNullWhen(false)] out K actualKey, [MaybeNullWhen(false)] out V value)
            {
                if (this.Alternate.TryRemove(key, out actualKey, out var item))
                {
                    // Raise removal telemetry with the canonical key recovered from the dictionary.
                    this.Lru.OnRemove(actualKey, item, ItemRemovedReason.Removed);
                    value = item.Value;
                    return true;
                }

                actualKey = default;
                value = default;
                return false;
            }

            public bool TryUpdate(TAlternateKey key, V value)
            {
                if (this.Alternate.TryGetValue(key, out var existing))
                {
                    return this.Lru.TryUpdateValue(existing, value);
                }

                return false;
            }

            public void AddOrUpdate(TAlternateKey key, V value)
            {
                K actualKey = default!;
                bool hasActualKey = false;

                // Retry loop: update and add can each lose a race with concurrent
                // removal/insertion, so alternate between them until one succeeds.
                while (true)
                {
                    if (this.TryUpdate(key, value))
                    {
                        return;
                    }

                    // Materialize the canonical key at most once across retries.
                    if (!hasActualKey)
                    {
                        actualKey = this.Lru.dictionary.GetAlternateComparer<TAlternateKey, K, I>().Create(key);
                        hasActualKey = true;
                    }

                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return;
                    }
                }
            }

            public V GetOrAdd(TAlternateKey key, Func<TAlternateKey, V> valueFactory)
            {
                // Retry loop: get and add can each lose a race, so loop until one wins.
                // NOTE(review): valueFactory may be invoked more than once under contention,
                // and its result discarded if another thread added first.
                while (true)
                {
                    if (this.TryGet(key, out var value))
                    {
                        return value;
                    }

                    K actualKey = this.Lru.dictionary.GetAlternateComparer<TAlternateKey, K, I>().Create(key);

                    value = valueFactory(key);
                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return value;
                    }
                }
            }

            public V GetOrAdd<TArg>(TAlternateKey key, Func<TAlternateKey, TArg, V> valueFactory, TArg factoryArgument)
            {
                // Same retry semantics as GetOrAdd above, with a caller-supplied factory
                // argument to avoid closure allocation.
                while (true)
                {
                    if (this.TryGet(key, out var value))
                    {
                        return value;
                    }

                    K actualKey = this.Lru.dictionary.GetAlternateComparer<TAlternateKey, K, I>().Create(key);

                    value = valueFactory(key, factoryArgument);
                    if (this.Lru.TryAdd(actualKey, value))
                    {
                        return value;
                    }
                }
            }
        }
1054
#endif
1055

1056
        // To get JIT optimizations, policies must be structs.
1057
        // If the structs are returned directly via properties, they will be copied. Since  
1058
        // telemetryPolicy is a mutable struct, copy is bad. One workaround is to store the 
1059
        // state within the struct in an object. Since the struct points to the same object
1060
        // it becomes immutable. However, this object is then somewhere else on the 
1061
        // heap, which slows down the policies with hit counter logic in benchmarks. Likely
1062
        // this approach keeps the structs data members in the same CPU cache line as the LRU.
1063
        // backcompat: remove conditional compile
1064
#if NETCOREAPP3_0_OR_GREATER
1065
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Upd = {Updated}, Evict = {Evicted}")]
1066
#else
1067
        [DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Evict = {Evicted}")]
1068
#endif
1069
        // Adapter exposing the LRU's metrics, events and bounded/time policy operations.
        // Kept as a reference type so the mutable telemetry policy struct is read in place
        // and never copied (see the comment above on struct policies).
        private class Proxy : ICacheMetrics, ICacheEvents<K, V>, IBoundedPolicy, ITimePolicy
        {
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public Proxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            public double HitRatio
            {
                get { return lru.telemetryPolicy.HitRatio; }
            }

            public long Total
            {
                get { return lru.telemetryPolicy.Total; }
            }

            public long Hits
            {
                get { return lru.telemetryPolicy.Hits; }
            }

            public long Misses
            {
                get { return lru.telemetryPolicy.Misses; }
            }

            public long Evicted
            {
                get { return lru.telemetryPolicy.Evicted; }
            }

            // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public long Updated
            {
                get { return lru.telemetryPolicy.Updated; }
            }
#endif
            public int Capacity
            {
                get { return lru.Capacity; }
            }

            public TimeSpan TimeToLive
            {
                get { return lru.itemPolicy.TimeToLive; }
            }

            public event EventHandler<ItemRemovedEventArgs<K, V>> ItemRemoved
            {
                add => this.lru.telemetryPolicy.ItemRemoved += value;
                remove => this.lru.telemetryPolicy.ItemRemoved -= value;
            }

            // backcompat: remove conditional compile
#if NETCOREAPP3_0_OR_GREATER
            public event EventHandler<ItemUpdatedEventArgs<K, V>> ItemUpdated
            {
                add => this.lru.telemetryPolicy.ItemUpdated += value;
                remove => this.lru.telemetryPolicy.ItemUpdated -= value;
            }
#endif
            public void Trim(int itemCount) => lru.Trim(itemCount);

            public void TrimExpired() => lru.TrimExpired();
        }
1120

1121
        // Exposes per-item (discrete) expiry operations for caches configured with a
        // discrete item policy.
        private class DiscreteExpiryProxy : IDiscreteTimePolicy
        {
            private readonly ConcurrentLruCore<K, V, I, P, T> lru;

            public DiscreteExpiryProxy(ConcurrentLruCore<K, V, I, P, T> lru)
            {
                this.lru = lru;
            }

            public void TrimExpired() => lru.TrimExpired();

            public bool TryGetTimeToExpire<TKey>(TKey key, out TimeSpan timeToLive)
            {
                if (key is not K typedKey || !lru.dictionary.TryGetValue(typedKey, out var item))
                {
                    timeToLive = default;
                    return false;
                }

                // Presumably items stored under a discrete policy are always
                // LongTickCountLruItem instances — the original relied on this too (via !).
                var tickItem = item as LongTickCountLruItem<K, V>;
                var remaining = new Duration(tickItem!.TickCount) - Duration.SinceEpoch();
                timeToLive = remaining.ToTimeSpan();
                return true;
            }
        }
1148
    }
1149
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc