Browse Source

Basic, mildly tested LRU cache.

master
antpy 4 years ago
parent
commit
506741fbdf
1 changed files with 60 additions and 0 deletions
  1. 60
    0
      src/LRUCache.h

+ 60
- 0
src/LRUCache.h View File

@@ -0,0 +1,60 @@
#pragma once
#ifndef CACHE_H
#define CACHE_H

#include <unordered_map>
#include <list>
#include <utility>
#include <iostream>

/// A fixed-capacity least-recently-used cache.
///
/// The most recently inserted/accessed entry sits at the front of
/// `key_values`; eviction removes from the back. `positions` maps each key
/// to its node in the list for O(1) lookup. Not thread-safe.
template<class K, class V>
class LRUCache {
    using Key_list = std::list<std::pair<K, V>>;
    Key_list key_values;  // front = most recently used, back = least
    std::unordered_map<K, typename Key_list::iterator> positions;
    size_t max_values;

    /// Evict least-recently-used entries until there is room for one more.
    /// The empty() guard prevents UB (and an infinite loop) when
    /// max_values == 0, since positions.size() >= 0 is always true.
    void remove_oldest() {
        while (!key_values.empty() && positions.size() >= max_values) {
            positions.erase(key_values.back().first);
            key_values.pop_back();
        }
    }

public:
    /// @param max_values maximum number of entries held at once.
    explicit LRUCache(const size_t max_values) : max_values(max_values) {
    }

    /// Insert `value` under `key`, evicting the LRU entry if at capacity.
    /// If `key` is already present, its value is replaced in place and the
    /// entry becomes most-recent — no duplicate node, no spurious eviction.
    template<class VV>
    void insert(const K& key, VV&& value) {
        auto p = positions.find(key);
        if (p != positions.end()) {
            // Key exists: overwrite and promote to front. A blind
            // emplace_front here would leave a stale node in the list and
            // desynchronize positions.size() from key_values.size().
            p->second->second = std::forward<VV>(value);
            key_values.splice(key_values.begin(), key_values, p->second);
            return;
        }
        remove_oldest();
        // Forward so rvalues are moved rather than copied.
        key_values.emplace_front(key, std::forward<VV>(value));
        positions[key] = key_values.begin();
    }

    /// Look up `key`. Returns {pointer-to-value, true} and marks the entry
    /// most-recent, or {nullptr, false} if absent. The pointer is valid
    /// until the entry is evicted or replaced.
    std::pair<V*, bool> find(const K& key) {
        auto p = positions.find(key);
        if (p == positions.end())
            return {nullptr, false};
        auto iter = p->second;
        // Promote to front. splice does not invalidate iterators, so both
        // `iter` and the mapping stored in `positions` remain correct —
        // no re-assignment of positions[key] is needed.
        if (iter != key_values.begin()) {
            key_values.splice(key_values.begin(), key_values, iter);
        }
        return {&(iter->second), true};
    }

    /// Dump entries front (most recent) to back (least recent) to stdout.
    void debug_cache() {
        for (const auto& p : key_values) {
            // '\n' instead of std::endl: no flush per line.
            std::cout << p.first << ":" << p.second << '\n';
        }
    }
};

#endif

Loading…
Cancel
Save