Implementing an LRU cache; coincidentally, I have actually used this algorithm at work. The approach below pairs a list that keeps keys in recency order (most recently used at the front) with an unordered_map from key to (value, list iterator), so both get and put run in O(1) average time.

#include <list>
#include <unordered_map>
using namespace std;

class LRUCache {
public:
    LRUCache(int capacity) : m_capacity(capacity) {}
    
    int get(int key) {
        auto it = cache.find(key);
        if(it == cache.end())
            return -1;
        visited(it);               // mark the key as most recently used
        return it->second.first;
    }
    
    void put(int key, int value) {
        auto it = cache.find(key);
        if(it != cache.end()) 
            visited(it);           // existing key: move it to the front of the recency list
        else{
            if(cache.size() == m_capacity)
            {
                // cache is full: evict the least recently used key (back of the list)
                cache.erase(elements.back());
                elements.pop_back();
            }
            elements.push_front(key);  // new key becomes the most recently used
        }
        cache[key] = {value,elements.begin()};
    }
private:
    int m_capacity;
    list<int> elements;
    using pair_type = pair<int,list<int>::iterator>;
    unordered_map<int,pair_type> cache;
    using map_iter = unordered_map<int,pair_type>::iterator;
    
    void visited(map_iter iter){
        // remove the key from its current position in the recency list
        elements.erase(iter->second.second);
        // push the key to the beginning of the list (most recently used)
        elements.push_front(iter->first);
        // update the map's stored list iterator to the new position
        iter->second.second = elements.begin();
    }
};

/**
 * Your LRUCache object will be instantiated and called as such:
 * LRUCache* obj = new LRUCache(capacity);
 * int param_1 = obj->get(key);
 * obj->put(key,value);
 */
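
For a quick sanity check, here is a minimal driver sketch (the main function and the sample key/value pairs are my own illustration, not part of the submission) that exercises the eviction behavior with a capacity of 2:

#include <iostream>

int main() {
    LRUCache cache(2);                       // capacity of 2
    cache.put(1, 1);                         // recency list: [1]
    cache.put(2, 2);                         // recency list: [2, 1]
    std::cout << cache.get(1) << std::endl;  // prints 1; key 1 becomes most recent
    cache.put(3, 3);                         // full: evicts key 2, the least recently used
    std::cout << cache.get(2) << std::endl;  // prints -1 (key 2 was evicted)
    std::cout << cache.get(3) << std::endl;  // prints 3
    return 0;
}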

Submission result:

Runtime: 80 ms, faster than 99.57% of C++ online submissions for LRU Cache.
Memory Usage: 42 MB, less than 57.44% of C++ online submissions for LRU Cache.