SHAdd0WTAka / Zen-Ai-Pentest · build 23112413498 (push, via GitHub)

15 Mar 2026 02:22PM UTC · coverage: 4.72% (down 6.7 points from 11.431%)

Commit (by root): "Cleanup: Remove broken test files and old scripts"

- Remove test_cache_corrected.py, test_false_positive_corrected.py
- Remove test_memory_cve_registry_corrected.py
- Remove run_batch2.sh, run_corrected.sh
- Keep only working tests: test_batch3_final.py, test_cache_real_api.py

13 of 1234 branches covered (1.05%); branch coverage is included in the aggregate percentage.

599 of 11731 relevant lines covered (5.11%)

0.05 hits per line

Source file: /core/cache.py (0.0% covered)

"""
Enhanced Caching Strategy - Redis/Memory/SQLite backends with LRU and TTL support

Optimizations:
- LRU eviction for memory cache
- Async batch operations
- Cache warming support
- Hit/miss statistics
- Size-based eviction
"""

import asyncio
import hashlib
import json
import logging
import pickle
import sys
import time
from collections import OrderedDict
from dataclasses import dataclass
from datetime import datetime, timedelta
from functools import wraps
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, TypeVar, Union

logger = logging.getLogger(__name__)

T = TypeVar("T")

# Redis availability check
try:
    import redis.asyncio as redis

    REDIS_AVAILABLE = True
except ImportError:
    REDIS_AVAILABLE = False

try:
    import aiosqlite

    SQLITE_AVAILABLE = True
except ImportError:
    SQLITE_AVAILABLE = False


@dataclass
class CacheStats:
    """Cache performance statistics"""

    hits: int = 0
    misses: int = 0
    evictions: int = 0
    total_gets: int = 0
    total_sets: int = 0
    total_deletes: int = 0
    bytes_stored: int = 0

    @property
    def hit_rate(self) -> float:
        """Cache hit rate (0-1)"""
        if self.total_gets == 0:
            return 0.0
        return self.hits / self.total_gets

    @property
    def miss_rate(self) -> float:
        """Cache miss rate (0-1)"""
        if self.total_gets == 0:
            return 0.0
        return self.misses / self.total_gets

    def to_dict(self) -> Dict[str, Any]:
        return {
            "hits": self.hits,
            "misses": self.misses,
            "evictions": self.evictions,
            "hit_rate": f"{self.hit_rate:.2%}",
            "miss_rate": f"{self.miss_rate:.2%}",
            "total_gets": self.total_gets,
            "total_sets": self.total_sets,
            "total_deletes": self.total_deletes,
            "bytes_stored": self.bytes_stored,
        }


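# Illustrative check, not part of the original file: the rate properties guard
# against division by zero before any gets have been recorded.
assert CacheStats().hit_rate == 0.0
assert CacheStats(hits=3, total_gets=4).hit_rate == 0.75

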
class CacheBackend:
    """Abstract cache backend interface with stats support"""

    def __init__(self):
        self.stats = CacheStats()

    async def get(self, key: str) -> Optional[Any]:
        raise NotImplementedError()

    async def set(
        self, key: str, value: Any, ttl: Optional[int] = None
    ) -> bool:
        raise NotImplementedError()

    async def delete(self, key: str) -> bool:
        raise NotImplementedError()

    async def exists(self, key: str) -> bool:
        raise NotImplementedError()

    async def clear(self) -> bool:
        raise NotImplementedError()

    async def mget(self, keys: List[str]) -> Dict[str, Any]:
        """Batch get operation"""
        result = {}
        for key in keys:
            value = await self.get(key)
            if value is not None:
                result[key] = value
        return result

    async def mset(
        self, items: Dict[str, Any], ttl: Optional[int] = None
    ) -> bool:
        """Batch set operation"""
        success = True
        for key, value in items.items():
            if not await self.set(key, value, ttl):
                success = False
        return success

    async def close(self):
        pass

    def get_stats(self) -> CacheStats:
        """Get cache statistics"""
        return self.stats


class MemoryCache(CacheBackend):
    """
    Enhanced in-memory cache with LRU eviction and TTL support.

    Optimizations:
    - O(1) LRU using OrderedDict
    - Size-based eviction
    - Hit/miss tracking
    - Thread-safe operations
    """

    def __init__(
        self,
        max_size: int = 1000,
        max_memory_mb: float = 100.0,
        default_ttl: Optional[int] = None,
    ):
        super().__init__()
        self._max_size = max_size
        self._max_memory_bytes = max_memory_mb * 1024 * 1024
        self._default_ttl = default_ttl

        # Use OrderedDict for O(1) LRU operations
        self._cache: OrderedDict[str, Any] = OrderedDict()
        self._expiry: Dict[str, float] = {}
        self._sizes: Dict[str, int] = {}
        self._lock = asyncio.Lock()
        self._current_memory = 0

    async def get(self, key: str) -> Optional[Any]:
        async with self._lock:
            self.stats.total_gets += 1

            # Check if key exists
            if key not in self._cache:
                self.stats.misses += 1
                return None

            # Check expiry
            if key in self._expiry and time.time() > self._expiry[key]:
                self._remove(key)
                self.stats.misses += 1
                return None

            # Move to end (most recently used)
            self._cache.move_to_end(key)
            self.stats.hits += 1
            return self._cache[key]

    async def set(
        self, key: str, value: Any, ttl: Optional[int] = None
    ) -> bool:
        async with self._lock:
            return await self._set_locked(key, value, ttl)

    async def _set_locked(
        self, key: str, value: Any, ttl: Optional[int] = None
    ) -> bool:
        """Internal set with lock held"""
        self.stats.total_sets += 1

        # Calculate value size
        try:
            value_size = len(pickle.dumps(value))
        except (pickle.PicklingError, TypeError):
            value_size = sys.getsizeof(value)

        # If updating existing key, adjust memory
        if key in self._cache:
            self._current_memory -= self._sizes.get(key, 0)

        # Check if value is too large
        if value_size > self._max_memory_bytes * 0.5:
            logger.warning(
                f"Value for key {key} is too large ({value_size} bytes), skipping"
            )
            return False

        # Evict entries if needed
        while (
            len(self._cache) >= self._max_size
            or self._current_memory + value_size > self._max_memory_bytes
        ) and self._cache:
            self._evict_lru()

        # Store value
        self._cache[key] = value
        self._cache.move_to_end(key)
        self._sizes[key] = value_size
        self._current_memory += value_size
        self.stats.bytes_stored = self._current_memory

        # Set expiry
        effective_ttl = ttl or self._default_ttl
        if effective_ttl:
            self._expiry[key] = time.time() + effective_ttl

        return True

    def _remove(self, key: str):
        """Remove key from cache (assumes lock held)"""
        if key in self._cache:
            self._current_memory -= self._sizes.get(key, 0)
            del self._cache[key]
            self._expiry.pop(key, None)
            self._sizes.pop(key, None)

    def _evict_lru(self):
        """Evict least recently used entry (assumes lock held)"""
        if not self._cache:
            return

        # Remove expired entries first
        now = time.time()
        expired = [k for k, exp in self._expiry.items() if exp and now > exp]
        for k in expired:
            self._remove(k)
            self.stats.evictions += 1
        if expired:
            # Expired entries may have freed enough space; the caller's
            # eviction loop re-checks and calls again if not.
            return

        # Still need space: remove the LRU entry
        if self._cache:
            oldest_key = next(iter(self._cache))
            self._remove(oldest_key)
            self.stats.evictions += 1

    async def delete(self, key: str) -> bool:
        async with self._lock:
            self.stats.total_deletes += 1
            if key in self._cache:
                self._remove(key)
                return True
            return False

    async def exists(self, key: str) -> bool:
        value = await self.get(key)
        return value is not None

    async def clear(self) -> bool:
        async with self._lock:
            self._cache.clear()
            self._expiry.clear()
            self._sizes.clear()
            self._current_memory = 0
            self.stats.bytes_stored = 0
            return True

    async def mget(self, keys: List[str]) -> Dict[str, Any]:
        """Optimized batch get"""
        result = {}
        async with self._lock:
            for key in keys:
                self.stats.total_gets += 1
                if key in self._cache:
                    if key in self._expiry and time.time() > self._expiry[key]:
                        self._remove(key)
                        self.stats.misses += 1
                    else:
                        self._cache.move_to_end(key)
                        result[key] = self._cache[key]
                        self.stats.hits += 1
                else:
                    self.stats.misses += 1
        return result

    async def mset(
        self, items: Dict[str, Any], ttl: Optional[int] = None
    ) -> bool:
        """Optimized batch set"""
        async with self._lock:
            for key, value in items.items():
                await self._set_locked(key, value, ttl)
        return True

    def get_stats(self) -> CacheStats:
        """Get detailed cache statistics"""
        stats = super().get_stats()
        stats.bytes_stored = self._current_memory
        return stats


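# Illustrative usage sketch, not part of the original file: a minimal check of
# the LRU eviction documented above; run it via asyncio.run(_demo_memory_cache()).
async def _demo_memory_cache() -> None:
    cache = MemoryCache(max_size=2)
    await cache.set("a", 1)
    await cache.set("b", 2)
    await cache.get("a")  # "a" becomes the most recently used entry
    await cache.set("c", 3)  # capacity reached, evicts the LRU entry "b"
    assert await cache.get("b") is None
    assert await cache.get("a") == 1
    print(cache.get_stats().to_dict())

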
class SQLiteCache(CacheBackend):
    """
    SQLite-based persistent cache with async support.

    Optimizations:
    - Connection pooling
    - Prepared statements
    - Batch operations
    - Automatic cleanup
    """

    def __init__(
        self,
        db_path: Optional[Path] = None,
        pool_size: int = 5,
        cleanup_interval: int = 3600,
    ):
        super().__init__()
        self.db_path = (
            db_path or Path.home() / ".cache" / "zen-ai-pentest" / "cache.db"
        )
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self._pool_size = pool_size
        self._cleanup_interval = cleanup_interval
        self._db: Optional[aiosqlite.Connection] = None
        self._lock = asyncio.Lock()
        self._last_cleanup = 0

    async def _get_db(self) -> aiosqlite.Connection:
        if self._db is None:
            self._db = await aiosqlite.connect(self.db_path)
            await self._db.execute("PRAGMA journal_mode=WAL")
            await self._db.execute("PRAGMA synchronous=NORMAL")
            await self._db.execute(
                """
                CREATE TABLE IF NOT EXISTS cache (
                    key TEXT PRIMARY KEY,
                    value BLOB,
                    expires TIMESTAMP,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """
            )
            await self._db.execute(
                """
                CREATE INDEX IF NOT EXISTS idx_expires ON cache(expires)
            """
            )
            await self._db.commit()
        return self._db

    async def _maybe_cleanup(self):
        """Periodically clean up expired entries"""
        now = time.time()
        if now - self._last_cleanup > self._cleanup_interval:
            await self.cleanup_expired()
            self._last_cleanup = now

    async def get(self, key: str) -> Optional[Any]:
        self.stats.total_gets += 1
        await self._maybe_cleanup()

        async with self._lock:
            db = await self._get_db()
            cursor = await db.execute(
                "SELECT value, expires FROM cache WHERE key = ?",
                (key,),
            )
            row = await cursor.fetchone()

            if row is None:
                self.stats.misses += 1
                return None

            value, expires = row

            # Check expiry; delete inline rather than via self.delete(),
            # which would deadlock re-acquiring the non-reentrant lock
            if expires and datetime.utcnow() > datetime.fromisoformat(expires):
                await db.execute("DELETE FROM cache WHERE key = ?", (key,))
                await db.commit()
                self.stats.misses += 1
                return None

            try:
                result = pickle.loads(value)  # nosec B301
            except (pickle.PickleError, EOFError):
                # Corrupt entry: drop it inline for the same locking reason
                await db.execute("DELETE FROM cache WHERE key = ?", (key,))
                await db.commit()
                self.stats.misses += 1
                return None
            self.stats.hits += 1
            return result

    async def set(
        self, key: str, value: Any, ttl: Optional[int] = None
    ) -> bool:
        async with self._lock:
            return await self._set_locked(key, value, ttl)

    async def _set_locked(
        self, key: str, value: Any, ttl: Optional[int] = None
    ) -> bool:
        self.stats.total_sets += 1

        try:
            db = await self._get_db()
            serialized = pickle.dumps(value)

            expires = None
            if ttl:
                expires = (
                    datetime.utcnow() + timedelta(seconds=ttl)
                ).isoformat()

            await db.execute(
                """
                INSERT OR REPLACE INTO cache (key, value, expires)
                VALUES (?, ?, ?)
            """,
                (key, serialized, expires),
            )
            await db.commit()
            return True
        except Exception as e:
            logger.error(f"SQLite cache set error: {e}")
            return False

    async def delete(self, key: str) -> bool:
        async with self._lock:
            self.stats.total_deletes += 1
            try:
                db = await self._get_db()
                await db.execute("DELETE FROM cache WHERE key = ?", (key,))
                await db.commit()
                return True
            except Exception as e:
                logger.error(f"SQLite cache delete error: {e}")
                return False

    async def exists(self, key: str) -> bool:
        return await self.get(key) is not None

    async def clear(self) -> bool:
        async with self._lock:
            try:
                db = await self._get_db()
                await db.execute("DELETE FROM cache")
                await db.commit()
                return True
            except Exception as e:
                logger.error(f"SQLite cache clear error: {e}")
                return False

    async def mget(self, keys: List[str]) -> Dict[str, Any]:
        """Optimized batch get with single query"""
        if not keys:
            return {}

        await self._maybe_cleanup()

        placeholders = ",".join("?" * len(keys))
        async with self._lock:
            self.stats.total_gets += len(keys)
            try:
                db = await self._get_db()
                cursor = await db.execute(
                    f"SELECT key, value, expires FROM cache WHERE key IN ({placeholders})",
                    keys,
                )
                rows = await cursor.fetchall()

                result = {}
                now = datetime.utcnow()
                expired_keys = []

                for row in rows:
                    key, value, expires = row
                    if expires and now > datetime.fromisoformat(expires):
                        expired_keys.append(key)
                        self.stats.misses += 1
                    else:
                        try:
                            result[key] = pickle.loads(value)  # nosec B301
                            self.stats.hits += 1
                        except (pickle.PickleError, EOFError):
                            expired_keys.append(key)
                            self.stats.misses += 1

                # Clean up expired keys
                if expired_keys:
                    placeholders = ",".join("?" * len(expired_keys))
                    await db.execute(
                        f"DELETE FROM cache WHERE key IN ({placeholders})",
                        expired_keys,
                    )
                    await db.commit()

                return result
            except Exception as e:
                logger.error(f"SQLite cache mget error: {e}")
                return {}

    async def mset(
        self, items: Dict[str, Any], ttl: Optional[int] = None
    ) -> bool:
        """Optimized batch set with transaction"""
        if not items:
            return True

        async with self._lock:
            self.stats.total_sets += len(items)
            try:
                db = await self._get_db()
                expires = None
                if ttl:
                    expires = (
                        datetime.utcnow() + timedelta(seconds=ttl)
                    ).isoformat()

                await db.execute("BEGIN")
                for key, value in items.items():
                    serialized = pickle.dumps(value)
                    await db.execute(
                        "INSERT OR REPLACE INTO cache (key, value, expires) VALUES (?, ?, ?)",
                        (key, serialized, expires),
                    )
                await db.commit()
                return True
            except Exception as e:
                logger.error(f"SQLite cache mset error: {e}")
                # db may never have been bound if the connection itself failed
                if self._db is not None:
                    await self._db.rollback()
                return False

    async def cleanup_expired(self):
        """Remove expired entries"""
        async with self._lock:
            try:
                db = await self._get_db()
                cursor = await db.execute(
                    "DELETE FROM cache WHERE expires < ?",
                    (datetime.utcnow().isoformat(),),
                )
                await db.commit()
                deleted = cursor.rowcount
                self.stats.evictions += deleted
                logger.debug(f"Cleaned up {deleted} expired cache entries")
            except Exception as e:
                logger.error(f"SQLite cache cleanup error: {e}")

    async def close(self):
        if self._db:
            await self._db.close()
            self._db = None


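# Illustrative usage sketch, not part of the original file: values written via
# the SQLite backend survive process restarts; the demo path is arbitrary.
async def _demo_sqlite_cache() -> None:
    cache = SQLiteCache(db_path=Path("/tmp/zen_cache_demo.db"))
    await cache.set("scan:example.com", {"open_ports": [22, 443]}, ttl=3600)
    assert await cache.get("scan:example.com") == {"open_ports": [22, 443]}
    await cache.close()

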
class RedisCache(CacheBackend):
    """Redis cache backend with connection pooling"""

    def __init__(
        self,
        host: str = "localhost",
        port: int = 6379,
        db: int = 0,
        password: Optional[str] = None,
        max_connections: int = 10,
    ):
        super().__init__()
        if not REDIS_AVAILABLE:
            raise ImportError("redis not installed: pip install redis")

        self.host = host
        self.port = port
        self.db = db
        self.password = password
        self.max_connections = max_connections
        self._pool = None

    async def _get_pool(self):
        if self._pool is None:
            self._pool = redis.Redis(
                host=self.host,
                port=self.port,
                db=self.db,
                password=self.password,
                decode_responses=False,
                max_connections=self.max_connections,
                socket_keepalive=True,
                socket_keepalive_options={},
            )
        return self._pool

    async def get(self, key: str) -> Optional[Any]:
        self.stats.total_gets += 1
        try:
            pool = await self._get_pool()
            value = await pool.get(key)
            if value:
                self.stats.hits += 1
                return pickle.loads(value)  # nosec B301
            self.stats.misses += 1
            return None
        except Exception as e:
            logger.error(f"Redis get error: {e}")
            self.stats.misses += 1
            return None

    async def set(
        self, key: str, value: Any, ttl: Optional[int] = None
    ) -> bool:
        try:
            pool = await self._get_pool()
            serialized = pickle.dumps(value)
            await pool.set(key, serialized, ex=ttl)
            self.stats.total_sets += 1
            return True
        except Exception as e:
            logger.error(f"Redis set error: {e}")
            return False

    async def delete(self, key: str) -> bool:
        try:
            pool = await self._get_pool()
            await pool.delete(key)
            self.stats.total_deletes += 1
            return True
        except Exception as e:
            logger.error(f"Redis delete error: {e}")
            return False

    async def exists(self, key: str) -> bool:
        try:
            pool = await self._get_pool()
            return await pool.exists(key) > 0
        except Exception as e:
            logger.error(f"Redis exists error: {e}")
            return False

    async def clear(self) -> bool:
        try:
            pool = await self._get_pool()
            await pool.flushdb()
            return True
        except Exception as e:
            logger.error(f"Redis clear error: {e}")
            return False

    async def mget(self, keys: List[str]) -> Dict[str, Any]:
        """Batch get using Redis MGET"""
        if not keys:
            return {}

        self.stats.total_gets += len(keys)
        try:
            pool = await self._get_pool()
            values = await pool.mget(keys)

            result = {}
            for key, value in zip(keys, values):
                if value:
                    try:
                        result[key] = pickle.loads(value)  # nosec B301
                        self.stats.hits += 1
                    except pickle.PickleError:
                        self.stats.misses += 1
                else:
                    self.stats.misses += 1

            return result
        except Exception as e:
            logger.error(f"Redis mget error: {e}")
            return {}

    async def mset(
        self, items: Dict[str, Any], ttl: Optional[int] = None
    ) -> bool:
        """Batch set using Redis MSET with optional TTL"""
        if not items:
            return True

        self.stats.total_sets += len(items)
        try:
            pool = await self._get_pool()
            serialized = {k: pickle.dumps(v) for k, v in items.items()}

            if ttl:
                # Use pipeline for atomic MSET + EXPIRE
                pipe = pool.pipeline()
                pipe.mset(serialized)
                for key in items.keys():
                    pipe.expire(key, ttl)
                await pipe.execute()
            else:
                await pool.mset(serialized)

            return True
        except Exception as e:
            logger.error(f"Redis mset error: {e}")
            return False

    async def close(self):
        if self._pool:
            await self._pool.close()
            self._pool = None


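# Illustrative usage sketch, not part of the original file: assumes a Redis
# server is reachable on localhost:6379 with no password.
async def _demo_redis_cache() -> None:
    if not REDIS_AVAILABLE:
        return
    cache = RedisCache(host="localhost", port=6379)
    await cache.mset({"k1": "v1", "k2": "v2"}, ttl=60)
    print(await cache.mget(["k1", "k2", "missing"]))  # {'k1': 'v1', 'k2': 'v2'}
    await cache.close()

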
class MultiTierCache:
    """
    Multi-tier caching (L1: Memory, L2: SQLite, L3: Redis)

    Strategy:
    - L1: Hot data, very fast, limited size
    - L2: Warm data, persistent, larger size
    - L3: Shared data, distributed (Redis)
    """

    def __init__(
        self,
        memory_size: int = 100,
        memory_max_mb: float = 50.0,
        sqlite_path: Optional[Path] = None,
        redis_config: Optional[dict] = None,
    ):
        self.l1 = MemoryCache(
            max_size=memory_size, max_memory_mb=memory_max_mb
        )
        self.l2 = SQLiteCache(sqlite_path) if SQLITE_AVAILABLE else None
        self.l3 = None

        if redis_config and REDIS_AVAILABLE:
            try:
                self.l3 = RedisCache(**redis_config)
            except Exception as e:
                logger.warning(f"Redis cache unavailable: {e}")

        self._hit_distribution = {"L1": 0, "L2": 0, "L3": 0}

    async def get(self, key: str) -> Optional[Any]:
        """Get from cache (L1 -> L2 -> L3)"""
        # Try L1
        value = await self.l1.get(key)
        if value is not None:
            self._hit_distribution["L1"] += 1
            return value

        # Try L2
        if self.l2:
            value = await self.l2.get(key)
            if value is not None:
                self._hit_distribution["L2"] += 1
                # Promote to L1
                await self.l1.set(key, value)
                return value

        # Try L3
        if self.l3:
            value = await self.l3.get(key)
            if value is not None:
                self._hit_distribution["L3"] += 1
                # Promote to L1/L2
                await self.l1.set(key, value)
                if self.l2:
                    await self.l2.set(key, value)
                return value

        return None

    async def set(
        self,
        key: str,
        value: Any,
        ttl: Optional[int] = None,
        tiers: str = "all",  # "all", "memory", "persistent"
    ) -> bool:
        """Set in cache tiers"""
        success = True

        if tiers in ("all", "memory"):
            success = await self.l1.set(key, value, ttl) and success

        if tiers in ("all", "persistent") and self.l2:
            success = await self.l2.set(key, value, ttl) and success

        if tiers == "all" and self.l3:
            success = await self.l3.set(key, value, ttl) and success

        return success

    async def mget(self, keys: List[str]) -> Dict[str, Any]:
        """Optimized multi-tier batch get"""
        result = {}
        missing = keys[:]

        # Try L1 first
        if missing:
            l1_results = await self.l1.mget(missing)
            for key, value in l1_results.items():
                result[key] = value
                self._hit_distribution["L1"] += 1
                missing.remove(key)

        # Try L2 for missing
        if missing and self.l2:
            l2_results = await self.l2.mget(missing)
            for key, value in l2_results.items():
                result[key] = value
                self._hit_distribution["L2"] += 1
                missing.remove(key)
                # Promote to L1
                await self.l1.set(key, value)

        # Try L3 for remaining
        if missing and self.l3:
            l3_results = await self.l3.mget(missing)
            for key, value in l3_results.items():
                result[key] = value
                self._hit_distribution["L3"] += 1
                # Promote to L1/L2
                await self.l1.set(key, value)
                if self.l2:
                    await self.l2.set(key, value)

        return result

    async def delete(self, key: str) -> bool:
        """Delete from all tiers"""
        await self.l1.delete(key)
        if self.l2:
            await self.l2.delete(key)
        if self.l3:
            await self.l3.delete(key)
        return True

    async def close(self):
        if self.l2:
            await self.l2.close()
        if self.l3:
            await self.l3.close()

    def get_stats(self) -> Dict[str, Any]:
        """Get combined statistics from all tiers"""
        total_hits = sum(self._hit_distribution.values())
        return {
            "hit_distribution": {
                tier: {
                    "count": count,
                    "percentage": (
                        (count / total_hits * 100) if total_hits > 0 else 0
                    ),
                }
                for tier, count in self._hit_distribution.items()
            },
            "L1_memory": self.l1.get_stats().to_dict(),
            "L2_sqlite": self.l2.get_stats().to_dict() if self.l2 else None,
            "L3_redis": self.l3.get_stats().to_dict() if self.l3 else None,
        }


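# Illustrative usage sketch, not part of the original file: demonstrates the
# L2 -> L1 promotion path described in the class docstring, assuming aiosqlite
# is installed so the L2 tier exists.
async def _demo_multi_tier() -> None:
    cache = MultiTierCache(memory_size=100)
    await cache.set("cve:CVE-2021-44228", {"cvss": 10.0}, ttl=3600)
    await cache.l1.clear()  # drop only the in-memory copy
    value = await cache.get("cve:CVE-2021-44228")  # L1 miss, served from L2
    assert value == {"cvss": 10.0}  # and promoted back into L1
    print(cache.get_stats()["hit_distribution"])

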
# =============================================================================
# Cache Decorators
# =============================================================================


def generate_cache_key(*args, **kwargs) -> str:
    """Generate cache key from function arguments"""
    key_data = json.dumps(
        {"args": args, "kwargs": kwargs}, sort_keys=True, default=str
    )
    # MD5 is used purely for key derivation, not for security
    return hashlib.md5(key_data.encode()).hexdigest()  # nosec B324


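# Illustrative check, not part of the original file: keys are deterministic
# because json.dumps(sort_keys=True) normalises keyword-argument order.
assert generate_cache_key(1, b=2, a=3) == generate_cache_key(1, a=3, b=2)

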
def cached(
    backend: Union[CacheBackend, str] = "memory",
    ttl: int = 3600,
    key_func: Optional[Callable] = None,
    condition: Optional[Callable[[Any], bool]] = None,
):
    """
    Decorator for caching function results

    Args:
        backend: Cache backend or "memory"/"sqlite"/"redis"
        ttl: Time to live in seconds
        key_func: Custom key generation function
        condition: Only cache if condition(result) is True
    """

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def async_wrapper(*args, **kwargs):
            cache = _get_cache_backend(backend)

            if key_func:
                key = key_func(*args, **kwargs)
            else:
                key = f"{func.__module__}.{func.__name__}:{generate_cache_key(*args, **kwargs)}"

            # Try cache
            cached_value = await cache.get(key)
            if cached_value is not None:
                logger.debug(f"Cache hit: {key}")
                return cached_value

            # Execute and cache; func may be sync or async, so only await
            # the result when it is actually a coroutine
            result = func(*args, **kwargs)
            if asyncio.iscoroutine(result):
                result = await result

            if condition is None or condition(result):
                await cache.set(key, result, ttl)

            return result

        @wraps(func)
        def sync_wrapper(*args, **kwargs):
            # For sync functions, run in an async context
            return asyncio.run(async_wrapper(*args, **kwargs))

        return (
            async_wrapper
            if asyncio.iscoroutinefunction(func)
            else sync_wrapper
        )

    return decorator


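# Illustrative usage sketch, not part of the original file: fetch_host_info is
# a hypothetical coroutine; repeat calls within 5 minutes hit the memory cache.
@cached(backend="memory", ttl=300, condition=lambda r: r is not None)
async def fetch_host_info(host: str) -> Optional[dict]:
    # Expensive lookup stub; None results are never cached due to `condition`
    return {"host": host, "alive": True}

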
# Global cache instances
_memory_cache = None
_sqlite_cache = None
_redis_cache = None


def _get_cache_backend(backend: Union[CacheBackend, str]) -> CacheBackend:
    """Get or create cache backend"""
    global _memory_cache, _sqlite_cache, _redis_cache

    if isinstance(backend, CacheBackend):
        return backend

    if backend == "memory":
        if _memory_cache is None:
            _memory_cache = MemoryCache()
        return _memory_cache

    elif backend == "sqlite":
        if _sqlite_cache is None:
            _sqlite_cache = SQLiteCache()
        return _sqlite_cache

    elif backend == "redis":
        if _redis_cache is None:
            _redis_cache = RedisCache()
        return _redis_cache

    raise ValueError(f"Unknown cache backend: {backend}")


# Convenience functions for CVE caching
async def get_cached_cve(cve_id: str) -> Optional[dict]:
    """Get CVE from cache"""
    cache = _get_cache_backend("sqlite")
    return await cache.get(f"cve:{cve_id.upper()}")


async def cache_cve(cve_id: str, data: dict, ttl: int = 86400 * 7):
    """Cache CVE data (default TTL: 7 days)"""
    cache = _get_cache_backend("sqlite")
    await cache.set(f"cve:{cve_id.upper()}", data, ttl)


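# Illustrative usage sketch, not part of the original file: CVE IDs are
# normalised to upper case by both helpers, so lookups are case-insensitive.
async def _demo_cve_helpers() -> None:
    await cache_cve("cve-2021-44228", {"cvss": 10.0})
    assert await get_cached_cve("CVE-2021-44228") == {"cvss": 10.0}

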
# Compatibility alias
Cache = MultiTierCache

# Export public APIs
__all__ = [
    # Backends
    "CacheBackend",
    "MemoryCache",
    "SQLiteCache",
    "RedisCache",
    "MultiTierCache",
    "Cache",  # Compatibility alias
    # Statistics
    "CacheStats",
    # Decorators
    "cached",
    "generate_cache_key",
    # Utilities
    "get_cached_cve",
    "cache_cve",
    # Backend factory
    "_get_cache_backend",
]