Make more use of the standard library

You are leveraging std::unordered_map to provide \$O(1)\$ lookups, but you have manually implemented a linked list to keep track of which nodes were most recently used. You can use std::list to do this for you. The list stores both the keys and the values, while the std::unordered_map maps keys to entries in the list. You can rely on the guarantee that iterators to list elements are not invalidated when other elements are added, removed, or moved around. The splice() member function can be used to move an element to the front of the list.

#include <cstddef>
#include <list>
#include <mutex>
#include <unordered_map>
#include <utility>

class LRUCache {
    std::list<std::pair<int, int>> nodes;
    using node_iterator = decltype(nodes)::iterator;
    std::unordered_map<int, node_iterator> index;
    std::mutex mtx;
    std::size_t n;

    void update(node_iterator node_it) {
        // Move the node to the front of the list; it is now the most recently used.
        nodes.splice(nodes.begin(), nodes, node_it);
    }

public:
    LRUCache(std::size_t n): n(n) {}

    void put(int key, int value) {
        std::lock_guard lock(mtx);

        if (auto it = index.find(key); it != index.end()) {
            auto node_it = it->second;
            node_it->second = value;
            update(node_it);
        } else {
            if (nodes.size() == n) {
                // Evict the least recently used entry, which is at the back.
                index.erase(nodes.back().first);
                nodes.pop_back();
            }

            nodes.emplace_front(key, value);
            index[key] = nodes.begin();
        }
    }

    int get(int key) {
        std::lock_guard lock(mtx);

        if (auto it = index.find(key); it != index.end()) {
            update(it->second);
            return it->second->second;
        } else {
            return -1;
        }
    }
};

There are other possible variations; see for example this StackOverflow question.

Ambiguous return value

There is a problem with get(): if it returns -1, it is not possible to tell whether this was because the key was not found, or if the key was found but the value was set to -1. Consider adding another way to report whether or not the key was found. A good way in C++17 or later is to use std::optional for the return value.
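As a sketch (assuming the same nodes, index, mtx and update() members as in the class above, plus #include <optional>), get() could then look like this:

std::optional<int> get(int key) {
    std::lock_guard lock(mtx);

    if (auto it = index.find(key); it != index.end()) {
        update(it->second);
        return it->second->second; // key found: return the cached value
    }

    return std::nullopt; // key not found
}

The caller can then write if (auto value = cache.get(key)) { ... } and use *value, so a cache miss can never be confused with a stored -1.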

Make it a template

Your cache is hardcoded to use int for both keys and values. What if you want to have std::string keys and float values? You can easily make your cache work with any type of key and value by making it a template:

template<typename Key, typename Value>
class LRUCache {
    std::list<std::pair<Key, Value>> nodes;
    ...

public:
    void put(const Key& key, const Value& value) {
        ...
    }

    std::optional<Value> get(const Key& key) {
        ...
    }
};
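
Hypothetical usage of such a template (assuming the member function bodies are filled in as before and get() returns std::optional<Value>):

LRUCache<std::string, float> cache(100);
cache.put("pi", 3.14f);
if (auto value = cache.get("pi")) {
    // *value == 3.14f
}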