Introduction
An MRU cache keeps a bounded number of the most recently used items, evicting the least recently used item when it runs out of room. This is an implementation of a very simple one using only the STL.
To implement a cache, derive a subclass from this template class. As an implementor, you override two methods (one required, one optional):
HandleNonExistingKeyFetch
- to handle cache misses. In this method, you access the real source of data behind the cache and return the value.
HandleItemRelease
- (optional) called when an item is removed from the cache.
The cache class is a template of two types, a key and value (like hash map). The value type is the type of the resource, and the key type is the type of the resource address (this is how you fetch a resource).
Source Code
#include <iostream>
#include <list>
#include <map>
// A simple MRU (most-recently-used) cache built on std::list + std::map.
//
// Derive from this class and implement:
//   HandleNonExistingKeyFetch - called on a cache miss; fetch the value
//                               from the real data source and return it.
//   HandleItemRelease         - (optional) called when an item is evicted
//                               or the cache is cleared.
//
// key_type is the "address" of a resource; value_type is the resource itself.
template <typename key_type, typename value_type>
class MruCache
{
public:
    const int maxLength;  // maximum number of cached entries

    MruCache(int iMaxLength) : maxLength(iMaxLength) { }

    // Returns the value for 'key'. On a hit the entry is moved to the front
    // (most recently used); on a miss HandleNonExistingKeyFetch supplies it.
    inline value_type FetchItem(key_type key) { return FetchItemImpl(key); }

    // NOTE: virtual dispatch is disabled during base-class destruction, so a
    // derived class that relies on its HandleItemRelease override firing for
    // remaining items should call Clear() from its own destructor.
    virtual ~MruCache() { Clear(); }

    // Releases every cached item; HandleItemRelease is called for each.
    virtual void Clear() { ClearAll(); }

protected:
    // Called when an item leaves the cache. Default: do nothing.
    virtual void HandleItemRelease(key_type key, value_type value) { }

    // Called on a cache miss; must produce the value for 'key'.
    virtual value_type HandleNonExistingKeyFetch(key_type key) = 0;

private:
    struct Entry
    {
        key_type key;
        value_type value;
    };

    typedef std::list<Entry> EntryList;
    typedef typename EntryList::iterator EntryIterator;
    typedef std::map<key_type, EntryIterator> IteratorMap;

    EntryList listOfEntries;        // front = most recent, back = least recent
    IteratorMap mapOfListIterator;  // key -> position in listOfEntries

    value_type FetchItemImpl(key_type key)
    {
        typename IteratorMap::iterator found = mapOfListIterator.find(key);
        if (found == mapOfListIterator.end())
        {
            // Miss: make room if the cache is full, evicting the LRU entry.
            if (static_cast<int>(listOfEntries.size()) >= maxLength)
            {
                Entry& lru = listOfEntries.back();
                HandleItemRelease(lru.key, lru.value);
                mapOfListIterator.erase(lru.key);
                listOfEntries.pop_back();
            }
            Entry entry;
            entry.value = HandleNonExistingKeyFetch(key);
            entry.key = key;
            listOfEntries.push_front(entry);
            mapOfListIterator[key] = listOfEntries.begin();
            return listOfEntries.front().value;
        }
        // Hit: relink the node to the front without copying the entry.
        listOfEntries.splice(listOfEntries.begin(), listOfEntries, found->second);
        found->second = listOfEntries.begin();
        return found->second->value;
    }

    void ClearAll()
    {
        for (EntryIterator it = listOfEntries.begin(); it != listOfEntries.end(); ++it)
            HandleItemRelease(it->key, it->value);
        listOfEntries.clear();
        mapOfListIterator.clear();
    }
};
The cache itself is a linked list. The newest elements are at the beginning of the list. The least recently fetched elements are at the end. A std::map from key to list position is used to locate an element in logarithmic time, instead of scanning the list.
Usage Example
// Fake backing store for the usage example: the "disk" behind the cache.
// (const char*: string literals may not bind to plain char* since C++11.)
const char* HARD_DISK[] =
{
    "zero", "one", "two", "three", "four",
    "five", "six", "seven", "eight", "nine", "ten"
};

// Minimal cache over HARD_DISK: keys are array indices, values the strings.
class TestCache : public MruCache<int, const char*>
{
public:
    TestCache(int iMaxLength) : MruCache<int, const char*>(iMaxLength) { }

protected:
    // Logs each eviction so the example output shows cache behavior.
    virtual void HandleItemRelease(int key, const char* value)
    {
        std::cout << "[DEBUG] key \"" << key
            << "\" removed" << std::endl;
    }

    // Cache miss: "read from disk" by indexing the backing array.
    virtual const char* HandleNonExistingKeyFetch(int key)
    {
        std::cout << "[DEBUG] key \""
            << key << "\" fetched" << std::endl;
        return HARD_DISK[key];
    }
};
// Demonstration: with capacity 4, fetching keys 1..5 fills the cache and
// then forces one eviction (key 1, the least recently used) when 5 arrives.
int main()
{
    TestCache cache(4);
    std::cout << cache.FetchItem(1) << std::endl;
    std::cout << cache.FetchItem(2) << std::endl;
    std::cout << cache.FetchItem(3) << std::endl;
    std::cout << cache.FetchItem(4) << std::endl;
    std::cout << cache.FetchItem(5) << std::endl;
    return 0;
}