cache-lru

 
CachingPolicy.java

/**
 * CachingPolicy enumerates the supported write policies: write-through,
 * write-around and write-behind.
 */
public enum CachingPolicy {
  THROUGH("through"), AROUND("around"), BEHIND("behind");

  private final String policy;

  CachingPolicy(String policy) {
    this.policy = policy;
  }

  public String getPolicy() {
    return policy;
  }
}
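
The enum's string values make it easy to load a policy from configuration and route writes accordingly. Below is a minimal sketch of an application-level dispatcher; the AppManager class name is hypothetical and not part of the original listings, while CacheStore and UserAccount are used as they appear (or are referenced) later in this post.

public class AppManager {

  private static CachingPolicy cachingPolicy;

  private AppManager() {
  }

  public static void initCachingPolicy(CachingPolicy policy) {
    cachingPolicy = policy;
  }

  /**
   * Route a write to CacheStore according to the active policy.
   */
  public static void save(UserAccount userAccount) {
    switch (cachingPolicy) {
      case THROUGH:
        CacheStore.writeThrough(userAccount);
        break;
      case AROUND:
        CacheStore.writeAround(userAccount);
        break;
      case BEHIND:
        CacheStore.writeBehind(userAccount);
        break;
      default:
        throw new IllegalStateException("Unsupported caching policy: " + cachingPolicy);
    }
  }
}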

 

LruCache.java

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * LRU (least recently used) cache backed by a HashMap for O(1) lookups and a
 * doubly linked list that tracks access order: the head holds the most recently
 * used entry, the end holds the least recently used one.
 */
public class LruCache {

  /**
   * Node of the doubly linked list.
   */
  class Node {
    String userId;
    UserAccount userAccount;
    Node previous;
    Node next;

    public Node(String userId, UserAccount userAccount) {
      this.userId = userId;
      this.userAccount = userAccount;
    }
  }

  int capacity;
  Map<String, Node> cache = new HashMap<>();
  Node head; // most recently used
  Node end;  // least recently used

  public LruCache(int capacity) {
    this.capacity = capacity;
  }

  /**
   * Get user account and mark it as most recently used.
   */
  public UserAccount get(String userId) {
    if (cache.containsKey(userId)) {
      Node node = cache.get(userId);
      remove(node);
      setHead(node);
      return node.userAccount;
    }
    return null;
  }

  /**
   * Remove node from the linked list.
   */
  public void remove(Node node) {
    if (node.previous != null) {
      node.previous.next = node.next;
    } else {
      head = node.next;
    }
    if (node.next != null) {
      node.next.previous = node.previous;
    } else {
      end = node.previous;
    }
  }

  /**
   * Move node to the front of the list.
   */
  public void setHead(Node node) {
    node.next = head;
    node.previous = null;
    if (head != null) {
      head.previous = node;
    }
    head = node;
    if (end == null) {
      end = head;
    }
  }

  /**
   * Set user account; evicts the least recently used entry if the cache is full.
   */
  public void set(String userId, UserAccount userAccount) {
    if (cache.containsKey(userId)) {
      Node old = cache.get(userId);
      old.userAccount = userAccount;
      remove(old);
      setHead(old);
    } else {
      Node newNode = new Node(userId, userAccount);
      if (cache.size() >= capacity) {
        System.out.println("# Cache is FULL! Removing " + end.userId + " from cache...");
        cache.remove(end.userId); // remove LRU data from cache.
        remove(end);
        setHead(newNode);
      } else {
        setHead(newNode);
      }
      cache.put(userId, newNode);
    }
  }

  public boolean contains(String userId) {
    return cache.containsKey(userId);
  }

  /**
   * Invalidate cache for user.
   */
  public void invalidate(String userId) {
    Node toBeRemoved = cache.remove(userId);
    if (toBeRemoved != null) {
      System.out.println("# " + userId + " has been updated! Removing older version from cache...");
      remove(toBeRemoved);
    }
  }

  public boolean isFull() {
    return cache.size() >= capacity;
  }

  public UserAccount getLruData() {
    return end.userAccount;
  }

  /**
   * Clear cache
   */
  public void clear() {
    head = null;
    end = null;
    cache.clear();
  }

  /**
   * Returns cache data in list form.
   */
  public List<UserAccount> getCacheDataInListForm() {
    ArrayList<UserAccount> listOfCacheData = new ArrayList<>();
    Node temp = head;
    while (temp != null) {
      listOfCacheData.add(temp.userAccount);
      temp = temp.next;
    }
    return listOfCacheData;
  }

  /**
   * Set cache capacity.
   */
  public void setCapacity(int newCapacity) {
    if (capacity > newCapacity) {
      // Behavior can be modified to accommodate a decrease in cache size.
      // For now, we simply clear the cache before applying the new capacity.
      clear();
    }
    this.capacity = newCapacity;
  }
}
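
To see the eviction order in isolation, here is a small standalone usage sketch of LruCache. The LruCacheDemo class and the two-argument UserAccount constructor are assumptions for illustration; the UserAccount class itself is not shown in this post.

public class LruCacheDemo {

  public static void main(String[] args) {
    LruCache cache = new LruCache(2); // room for two entries

    // Assumed constructor: UserAccount(userId, userName).
    cache.set("001", new UserAccount("001", "John"));
    cache.set("002", new UserAccount("002", "Jane"));

    cache.get("001");                                 // "001" becomes most recently used
    cache.set("003", new UserAccount("003", "Adam")); // evicts "002", the least recently used entry

    System.out.println(cache.contains("001")); // true
    System.out.println(cache.contains("002")); // false
    System.out.println(cache.contains("003")); // true
  }
}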

 

CacheStore.java

import java.util.List;

/**
 * CacheStore implements the caching strategies on top of LruCache:
 * read-through, write-through, write-around and write-behind.
 */
public class CacheStore {

  static LruCache cache;

  private CacheStore() {
  }

  /**
   * Init cache capacity
   */
  public static void initCapacity(int capacity) {
    if (null == cache) {
      cache = new LruCache(capacity);
    } else {
      cache.setCapacity(capacity);
    }
  }

  /**
   * Get user account using read-through cache
   */
  public static UserAccount readThrough(String userId) {
    if (cache.contains(userId)) {
      System.out.println("# Cache Hit!");
      return cache.get(userId);
    }
    System.out.println("# Cache Miss!");
    UserAccount userAccount = DbManager.readFromDb(userId);
    cache.set(userId, userAccount);
    return userAccount;
  }

  /**
   * Write user account using write-through policy.
   */
  public static void writeThrough(UserAccount userAccount) {
    if (cache.contains(userAccount.getUserId())) {
      DbManager.updateDb(userAccount);
    } else {
      DbManager.writeToDb(userAccount);
    }
    cache.set(userAccount.getUserId(), userAccount);
  }

  /**
   * Write user account using write-around policy.
   */
  public static void writeAround(UserAccount userAccount) {
    if (cache.contains(userAccount.getUserId())) {
      DbManager.updateDb(userAccount);
      cache.invalidate(userAccount.getUserId()); // Cache data has been updated -- remove older
                                                 // version from cache.
    } else {
      DbManager.writeToDb(userAccount);
    }
  }

  /**
   * Get user account using read-through cache with write-back policy
   */
  public static UserAccount readThroughWithWriteBackPolicy(String userId) {
    if (cache.contains(userId)) {
      System.out.println("# Cache Hit!");
      return cache.get(userId);
    }
    System.out.println("# Cache Miss!");
    UserAccount userAccount = DbManager.readFromDb(userId);
    if (cache.isFull()) {
      System.out.println("# Cache is FULL! Writing LRU data to DB...");
      UserAccount toBeWrittenToDb = cache.getLruData();
      DbManager.upsertDb(toBeWrittenToDb);
    }
    cache.set(userId, userAccount);
    return userAccount;
  }

  /**
   * Write user account using write-behind policy.
   */
  public static void writeBehind(UserAccount userAccount) {
    if (cache.isFull() && !cache.contains(userAccount.getUserId())) {
      System.out.println("# Cache is FULL! Writing LRU data to DB...");
      UserAccount toBeWrittenToDb = cache.getLruData();
      DbManager.upsertDb(toBeWrittenToDb);
    }
    cache.set(userAccount.getUserId(), userAccount);
  }

  /**
   * Clears cache
   */
  public static void clearCache() {
    if (null != cache) {
      cache.clear();
    }
  }

  /**
   * Writes remaining content in the cache into the DB.
   */
  public static void flushCache() {
    System.out.println("# flushCache...");
    if (null == cache) {
      return;
    }
    List<UserAccount> listOfUserAccounts = cache.getCacheDataInListForm();
    for (UserAccount userAccount : listOfUserAccounts) {
      DbManager.upsertDb(userAccount);
    }
  }

  /**
   * Returns the cache content as a printable string.
   */
  public static String print() {
    List<UserAccount> listOfUserAccounts = cache.getCacheDataInListForm();
    StringBuilder sb = new StringBuilder();
    sb.append("\n--CACHE CONTENT--\n");
    for (UserAccount userAccount : listOfUserAccounts) {
      sb.append(userAccount).append("\n");
    }
    sb.append("----\n");
    return sb.toString();
  }
}
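
Finally, a minimal end-to-end sketch of how the policies above might be exercised. The CachingApp driver class is hypothetical, and the two-argument UserAccount constructor as well as the behavior of DbManager are assumptions, since neither class is listed in this post.

public class CachingApp {

  public static void main(String[] args) {
    CacheStore.initCapacity(3);

    // Write-behind: entries go to the cache first; the DB is only written on eviction or flush.
    CacheStore.writeBehind(new UserAccount("001", "John"));
    CacheStore.writeBehind(new UserAccount("002", "Jane"));

    // Read-through: a miss falls back to DbManager.readFromDb and populates the cache.
    CacheStore.readThroughWithWriteBackPolicy("003");
    System.out.println(CacheStore.print());

    // Persist whatever is still only in the cache before shutting down.
    CacheStore.flushCache();
  }
}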

 
