System Design

Extensible LRU Cache Design Pattern: Object-Oriented Architecture with Multi-Strategy Support | OAVOService Interview Assistance

2026-01-10

Problem Background

In high-performance systems, the choice and implementation of a caching strategy have a direct impact on performance. A traditional single-purpose LRU implementation usually lacks extensibility: when requirements change (for example, LFU or FIFO support is needed), the entire cache module often has to be rewritten.

While coaching system design interviews for Google, Amazon, Meta, and other large companies, the OAVOService interview assistance team has found that extensible cache design is a classic question for evaluating a candidate's object-oriented design skills and architectural thinking.

Problem Description

Problem 2 — Design an Extensible LRU Cache

Design an extensible LRU cache class structure and implement it. Focus on extensibility, because additional eviction policies, such as LFU (Least Frequently Used), will need to be added later.

Core requirements:

  1. get(key) and put(key, value) in O(1) average time
  2. A fixed capacity; when the cache is full, an entry is evicted according to the active policy
  3. The eviction policy must be pluggable, so new strategies (e.g., LFU, FIFO) can be added without modifying the core cache class

Architecture Design

1. Strategy Pattern + Factory Pattern

from abc import ABC, abstractmethod
from typing import Any, Optional
import threading

class EvictionStrategy(ABC):
    """Abstract base class for eviction strategies.

    The concrete strategies in this post also expose get_node(key);
    ExtensibleCache detects it via hasattr() to read stored values.
    """

    @abstractmethod
    def on_access(self, key: Any) -> None:
        """Callback invoked when a key is accessed."""
        pass

    @abstractmethod
    def on_insert(self, key: Any, value: Any) -> None:
        """Callback invoked when a new key/value pair is inserted."""
        pass

    @abstractmethod
    def evict(self) -> Any:
        """Return the key that should be evicted."""
        pass

    @abstractmethod
    def remove(self, key: Any) -> None:
        """Remove the given key."""
        pass

    @abstractmethod
    def size(self) -> int:
        """Return the current number of tracked keys."""
        pass

class CacheNode:
    """Doubly linked list node."""
    def __init__(self, key=None, value=None):
        self.key = key
        self.value = value
        self.prev = None
        self.next = None
        self.frequency = 1   # reserved for the LFU strategy
        self.timestamp = 0   # reserved for other strategies

2. LRU Strategy Implementation

class LRUStrategy(EvictionStrategy):
    """LRU (Least Recently Used) eviction strategy."""

    def __init__(self):
        # Sentinel nodes for the doubly linked list
        self.head = CacheNode()
        self.tail = CacheNode()
        self.head.next = self.tail
        self.tail.prev = self.head
        self.key_to_node = {}

    def _add_to_head(self, node: CacheNode) -> None:
        """Insert a node right after the head sentinel."""
        node.prev = self.head
        node.next = self.head.next
        self.head.next.prev = node
        self.head.next = node

    def _remove_node(self, node: CacheNode) -> None:
        """Unlink a node from the list."""
        node.prev.next = node.next
        node.next.prev = node.prev

    def _move_to_head(self, node: CacheNode) -> None:
        """Move a node to the front (most recently used position)."""
        self._remove_node(node)
        self._add_to_head(node)

    def on_access(self, key: Any) -> None:
        """LRU access: move the node to the front."""
        if key in self.key_to_node:
            node = self.key_to_node[key]
            self._move_to_head(node)

    def on_insert(self, key: Any, value: Any) -> None:
        """LRU insert: add the node at the front."""
        node = CacheNode(key, value)
        self.key_to_node[key] = node
        self._add_to_head(node)

    def evict(self) -> Any:
        """LRU eviction: remove the tail node (least recently used)."""
        last_node = self.tail.prev
        if last_node != self.head:
            self._remove_node(last_node)
            del self.key_to_node[last_node.key]
            return last_node.key
        return None

    def remove(self, key: Any) -> None:
        """Remove the given key."""
        if key in self.key_to_node:
            node = self.key_to_node[key]
            self._remove_node(node)
            del self.key_to_node[key]

    def size(self) -> int:
        return len(self.key_to_node)

    def get_node(self, key: Any) -> Optional[CacheNode]:
        return self.key_to_node.get(key)

3. Core Cache Class

class ExtensibleCache:
    """Core cache class with a pluggable eviction strategy."""

    def __init__(self, capacity: int, strategy: EvictionStrategy):
        self.capacity = capacity
        self.strategy = strategy
        self._lock = threading.RLock()  # thread safety

    def get(self, key: Any) -> Any:
        """Get a cached value in O(1)."""
        with self._lock:
            node = self.strategy.get_node(key) if hasattr(self.strategy, 'get_node') else None
            if node:
                self.strategy.on_access(key)
                return node.value
            return None

    def put(self, key: Any, value: Any) -> None:
        """Set a cached value in O(1)."""
        with self._lock:
            # Update in place if the key already exists
            existing_node = self.strategy.get_node(key) if hasattr(self.strategy, 'get_node') else None
            if existing_node:
                existing_node.value = value
                self.strategy.on_access(key)
                return

            # Enforce the capacity limit before inserting
            if self.strategy.size() >= self.capacity:
                evicted_key = self.strategy.evict()
                if evicted_key is None:
                    raise RuntimeError("Failed to evict item from full cache")

            # Insert the new entry
            self.strategy.on_insert(key, value)

    def remove(self, key: Any) -> bool:
        """Remove the given key."""
        with self._lock:
            if hasattr(self.strategy, 'get_node') and self.strategy.get_node(key):
                self.strategy.remove(key)
                return True
            return False

    def size(self) -> int:
        """Current number of cached entries."""
        return self.strategy.size()

    def clear(self) -> None:
        """Clear the cache by re-creating the strategy (assumes a no-argument constructor)."""
        with self._lock:
            self.strategy = type(self.strategy)()
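
Because every public method acquires the RLock, a single ExtensibleCache instance can be shared across threads. The sketch below is a minimal, hypothetical smoke test (the names concurrency_demo and _worker are illustrative and not part of the design above):

def _worker(cache: ExtensibleCache, worker_id: int) -> None:
    # Each worker writes and reads its own keys; the RLock serializes the calls.
    for i in range(100):
        cache.put((worker_id, i), i)
        cache.get((worker_id, i))

def concurrency_demo() -> None:
    cache = ExtensibleCache(capacity=64, strategy=LRUStrategy())
    threads = [threading.Thread(target=_worker, args=(cache, w)) for w in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(cache.size())  # never exceeds the configured capacity of 64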

Extended Strategy Implementations

1. LFU Strategy Extension

from collections import defaultdict

class LFUStrategy(EvictionStrategy):
    """LFU (Least Frequently Used) eviction strategy."""

    def __init__(self):
        self.key_to_node = {}
        self.freq_to_keys = defaultdict(set)
        self.min_frequency = 0
        self.key_to_freq = {}

    def _update_frequency(self, key: Any) -> None:
        """Move a key from its old frequency bucket to the next one."""
        freq = self.key_to_freq.get(key, 0)

        # Remove from the old frequency bucket
        if freq > 0:
            self.freq_to_keys[freq].discard(key)
            if freq == self.min_frequency and not self.freq_to_keys[freq]:
                self.min_frequency += 1

        # Add to the new frequency bucket
        new_freq = freq + 1
        self.key_to_freq[key] = new_freq
        self.freq_to_keys[new_freq].add(key)

    def on_access(self, key: Any) -> None:
        if key in self.key_to_node:
            self._update_frequency(key)

    def on_insert(self, key: Any, value: Any) -> None:
        node = CacheNode(key, value)
        self.key_to_node[key] = node
        self.key_to_freq[key] = 1
        self.freq_to_keys[1].add(key)
        self.min_frequency = 1

    def evict(self) -> Any:
        if not self.key_to_node:
            return None

        # remove() does not adjust min_frequency, so resync if its bucket is empty
        if not self.freq_to_keys[self.min_frequency]:
            self.min_frequency = min(f for f, keys in self.freq_to_keys.items() if keys)

        # Evict an arbitrary key from the lowest-frequency bucket
        evict_key = self.freq_to_keys[self.min_frequency].pop()
        del self.key_to_node[evict_key]
        del self.key_to_freq[evict_key]
        return evict_key

    def remove(self, key: Any) -> None:
        if key in self.key_to_node:
            freq = self.key_to_freq[key]
            self.freq_to_keys[freq].discard(key)
            del self.key_to_node[key]
            del self.key_to_freq[key]

    def size(self) -> int:
        return len(self.key_to_node)

    def get_node(self, key: Any) -> Optional[CacheNode]:
        return self.key_to_node.get(key)
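
The set.pop() in evict() breaks ties between equally infrequent keys arbitrarily. A common interview follow-up is the classic LFU tie-break: among the least frequently used keys, evict the least recently used one. One possible sketch of that variant (LFUWithRecencyStrategy is a hypothetical name, not referenced elsewhere in this post):

from collections import OrderedDict, defaultdict

class LFUWithRecencyStrategy(EvictionStrategy):
    """Hypothetical LFU variant: within the lowest-frequency bucket,
    evict the least recently used key."""

    def __init__(self):
        self.key_to_node = {}
        self.key_to_freq = {}
        # frequency -> keys in access order (oldest first)
        self.freq_to_keys = defaultdict(OrderedDict)
        self.min_frequency = 0

    def _bump(self, key: Any) -> None:
        freq = self.key_to_freq[key]
        del self.freq_to_keys[freq][key]
        if freq == self.min_frequency and not self.freq_to_keys[freq]:
            self.min_frequency += 1
        self.key_to_freq[key] = freq + 1
        self.freq_to_keys[freq + 1][key] = None  # appended, i.e. most recent

    def on_access(self, key: Any) -> None:
        if key in self.key_to_node:
            self._bump(key)

    def on_insert(self, key: Any, value: Any) -> None:
        self.key_to_node[key] = CacheNode(key, value)
        self.key_to_freq[key] = 1
        self.freq_to_keys[1][key] = None
        self.min_frequency = 1

    def evict(self) -> Any:
        if not self.key_to_node:
            return None
        if not self.freq_to_keys[self.min_frequency]:
            self.min_frequency = min(f for f, ks in self.freq_to_keys.items() if ks)
        # Oldest key in the lowest-frequency bucket
        evict_key, _ = self.freq_to_keys[self.min_frequency].popitem(last=False)
        del self.key_to_node[evict_key]
        del self.key_to_freq[evict_key]
        return evict_key

    def remove(self, key: Any) -> None:
        if key in self.key_to_node:
            freq = self.key_to_freq.pop(key)
            del self.freq_to_keys[freq][key]
            del self.key_to_node[key]

    def size(self) -> int:
        return len(self.key_to_node)

    def get_node(self, key: Any) -> Optional[CacheNode]:
        return self.key_to_node.get(key)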

2. Cache Factory

class CacheFactory:
    """Factory for creating caches with a named eviction policy."""

    @staticmethod
    def create_cache(cache_type: str, capacity: int) -> ExtensibleCache:
        """Create a cache of the given type."""
        strategies = {
            'lru': LRUStrategy,
            'lfu': LFUStrategy,
            # 'fifo': FIFOStrategy,      # register once implemented
            # 'random': RandomStrategy,  # register once implemented
        }

        if cache_type.lower() not in strategies:
            raise ValueError(f"Unsupported cache type: {cache_type}")

        strategy = strategies[cache_type.lower()]()
        return ExtensibleCache(capacity, strategy)

# Usage example
def demo_usage():
    # Create an LRU cache
    lru_cache = CacheFactory.create_cache('lru', 3)
    lru_cache.put('a', 1)
    lru_cache.put('b', 2)
    lru_cache.put('c', 3)
    print(lru_cache.get('a'))  # 1, 'a' becomes most recently used
    lru_cache.put('d', 4)      # evicts 'b'
    print(lru_cache.get('b'))  # None

    # Create an LFU cache
    lfu_cache = CacheFactory.create_cache('lfu', 2)
    lfu_cache.put('x', 10)
    lfu_cache.put('y', 20)
    lfu_cache.get('x')      # frequency of 'x' increases
    lfu_cache.put('z', 30)  # evicts 'y' (lowest frequency)
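
To run the demo as a script and turn the expected outputs into assertions, one possible guard (the check function name is illustrative) is:

def check_lru_eviction():
    # 'b' is the least recently used key when 'd' is inserted, so it is evicted.
    cache = CacheFactory.create_cache('lru', 3)
    cache.put('a', 1)
    cache.put('b', 2)
    cache.put('c', 3)
    assert cache.get('a') == 1
    cache.put('d', 4)
    assert cache.get('b') is None
    assert cache.get('c') == 3 and cache.get('d') == 4

if __name__ == '__main__':
    demo_usage()
    check_lru_eviction()
    print("LRU eviction check passed")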

Advanced Features

1. TTL (Expiration) Support

import time

class TTLExtension:
    """TTL (Time To Live) extension."""

    def __init__(self, base_strategy: EvictionStrategy):
        self.base_strategy = base_strategy
        self.ttl_map = {}  # key -> expiry_time

    def set_ttl(self, key: Any, ttl_seconds: int) -> None:
        expiry_time = time.time() + ttl_seconds
        self.ttl_map[key] = expiry_time

    def is_expired(self, key: Any) -> bool:
        if key not in self.ttl_map:
            return False
        return time.time() > self.ttl_map[key]

    def cleanup_expired(self) -> None:
        current_time = time.time()
        expired_keys = [k for k, v in self.ttl_map.items() if current_time > v]
        for key in expired_keys:
            self.base_strategy.remove(key)
            del self.ttl_map[key]
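
TTLExtension is a wrapper around a strategy rather than a built-in feature of ExtensibleCache, so the caller is responsible for invoking cleanup_expired(). A minimal, hypothetical usage sketch:

def ttl_demo():
    strategy = LRUStrategy()
    cache = ExtensibleCache(capacity=10, strategy=strategy)
    ttl = TTLExtension(strategy)

    cache.put('session', 'abc123')
    ttl.set_ttl('session', ttl_seconds=1)

    time.sleep(1.1)
    ttl.cleanup_expired()        # removes the key from the underlying strategy
    print(cache.get('session'))  # None after expiry and cleanup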

2. Statistics and Monitoring

class CacheStatistics:
    """Cache statistics counters."""
    
    def __init__(self):
        self.hits = 0
        self.misses = 0
        self.evictions = 0
        self.puts = 0
    
    def record_hit(self):
        self.hits += 1
    
    def record_miss(self):
        self.misses += 1
    
    def record_eviction(self):
        self.evictions += 1
    
    def record_put(self):
        self.puts += 1
    
    @property
    def hit_rate(self) -> float:
        total = self.hits + self.misses
        return self.hits / total if total > 0 else 0.0
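
CacheStatistics is not wired into ExtensibleCache above. One lightweight way to combine them, sketched here as a hypothetical InstrumentedCache subclass, is to count hits, misses, and puts around get/put (note that a stored value of None would be counted as a miss in this sketch):

class InstrumentedCache(ExtensibleCache):
    """Hypothetical subclass that records hit/miss/put counters."""

    def __init__(self, capacity: int, strategy: EvictionStrategy):
        super().__init__(capacity, strategy)
        self.stats = CacheStatistics()

    def get(self, key: Any) -> Any:
        value = super().get(key)
        if value is None:
            self.stats.record_miss()
        else:
            self.stats.record_hit()
        return value

    def put(self, key: Any, value: Any) -> None:
        super().put(key, value)
        self.stats.record_put()

def stats_demo():
    cache = InstrumentedCache(2, LRUStrategy())
    cache.put('a', 1)
    cache.get('a')  # hit
    cache.get('b')  # miss
    print(cache.stats.hit_rate)  # 0.5, i.e. hits / (hits + misses)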

OAVOService Interview Tips

Design Principles to Discuss

  1. Single Responsibility - a strategy class is responsible only for eviction logic
  2. Open/Closed - open for extension, closed for modification (the FIFO sketch after this list shows a new policy added without touching the core cache)
  3. Dependency Inversion - depend on the abstraction (EvictionStrategy), not on concrete implementations
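
As a concrete illustration of the open/closed point, a new policy only needs to implement EvictionStrategy; ExtensibleCache stays untouched. The FIFOStrategy referenced (commented out) in CacheFactory is not implemented in this post, so treat the following as one possible version:

from collections import OrderedDict

class FIFOStrategy(EvictionStrategy):
    """FIFO eviction: evict keys in insertion order, ignoring later accesses."""

    def __init__(self):
        self.entries = OrderedDict()  # key -> CacheNode, in insertion order

    def on_access(self, key: Any) -> None:
        pass  # FIFO does not reorder on access

    def on_insert(self, key: Any, value: Any) -> None:
        self.entries[key] = CacheNode(key, value)

    def evict(self) -> Any:
        if not self.entries:
            return None
        key, _ = self.entries.popitem(last=False)  # oldest insertion first
        return key

    def remove(self, key: Any) -> None:
        self.entries.pop(key, None)

    def size(self) -> int:
        return len(self.entries)

    def get_node(self, key: Any) -> Optional[CacheNode]:
        return self.entries.get(key)

Once a class like this exists, registering it under the 'fifo' key in CacheFactory is the only other change needed.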

Common Interview Extensions

Interviewers often follow up with the topics covered above: thread safety (the RLock in ExtensibleCache), TTL/expiration, hit-rate statistics, and additional policies such as FIFO or random eviction; scaling the design out to a distributed cache is another frequent follow-up.

Complexity Analysis

With the structures above, LRUStrategy supports get, put, and evict in O(1) time (hash map plus doubly linked list), and LFUStrategy supports get and put in O(1) amortized time using frequency buckets; eviction in LFU is also O(1) apart from the occasional resync of min_frequency after explicit removals. Both strategies use O(capacity) extra space.


OAVOService Professional Interview Assistance - with years of focus on system design interviews, we provide candidates with real-time interview support for top companies such as Google, Amazon, Meta, and Citadel. Our team of experts is well versed in design patterns and architectural principles, helping you demonstrate professional-level competence in system design interviews.

Learn more about our customized system design interview program - from cache architecture to distributed systems, it is designed to improve both your design skills and your interview performance.

Tags: LRU Cache, System Design, Design Patterns, Object-Oriented Design, Architecture, OAVOService