Commit d5528613 authored by David Baker

Revert all the bits changing keys of everything that used LRUCaches to tuples

parent 10f76dc5
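For context, a minimal sketch (not part of the commit) of the call pattern this revert restores: callers hand LruCache bare keys again instead of wrapping every key in a 1-tuple. The module path is assumed; everything below is illustrative only.

from synapse.util.caches.lrucache import LruCache  # module path assumed, not shown in the diff

cache = LruCache(5000)          # keylen now defaults to 1, so single-key callers omit it

# Restored style: bare keys
cache["pattern"] = "value"
assert cache.get("pattern") == "value"

# Style being reverted: every key wrapped in a 1-tuple
# cache[("pattern",)] = "value"
# cache.get(("pattern",))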
@@ -309,14 +309,14 @@ def _flatten_dict(d, prefix=[], result={}):
     return result


-regex_cache = LruCache(5000, 1)
+regex_cache = LruCache(5000)


 def _compile_regex(regex_str):
-    r = regex_cache.get((regex_str,), None)
+    r = regex_cache.get(regex_str, None)
     if r:
         return r

     r = re.compile(regex_str, flags=re.IGNORECASE)
-    regex_cache[(regex_str,)] = r
+    regex_cache[regex_str] = r
     return r
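As a usage note for the hunk above (a hedged illustration using only the code shown): the compiled pattern is now cached under the bare pattern string, so repeated calls hand back the same compiled object.

p1 = _compile_regex("foo.*bar")
p2 = _compile_regex("foo.*bar")
assert p1 is p2                      # second call is served from regex_cache
assert p1.match("FOO then BAR")      # patterns are compiled with re.IGNORECASE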
@@ -32,7 +32,7 @@ class DictionaryCache(object):
     """

     def __init__(self, name, max_entries=1000):
-        self.cache = LruCache(max_size=max_entries, keylen=1)
+        self.cache = LruCache(max_size=max_entries)

         self.name = name
         self.sequence = 0
@@ -56,7 +56,7 @@ class DictionaryCache(object):
                 )

     def get(self, key, dict_keys=None):
-        entry = self.cache.get((key,), self.sentinel)
+        entry = self.cache.get(key, self.sentinel)
         if entry is not self.sentinel:
             cache_counter.inc_hits(self.name)
......@@ -78,7 +78,7 @@ class DictionaryCache(object):
# Increment the sequence number so that any SELECT statements that
# raced with the INSERT don't update the cache (SYN-369)
self.sequence += 1
self.cache.pop((key,), None)
self.cache.pop(key, None)
def invalidate_all(self):
self.check_thread()
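The SYN-369 comment above describes a read/invalidate race: a cache fill that raced with an invalidation must not be written back once the sequence number has been bumped. A minimal sketch of that guard, not the project's code; fetch_from_db is illustrative and the insert call mirrors the _insert helper in the next hunk.

def cached_fetch(cache, key, fetch_from_db):
    sequence = cache.sequence        # snapshot the sequence before issuing the SELECT
    value = fetch_from_db(key)       # may race with an invalidation
    if cache.sequence == sequence:   # only populate if nothing was invalidated meanwhile
        cache._insert(key, value)    # hypothetical use of the helper shown below
    return value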
@@ -96,8 +96,8 @@ class DictionaryCache(object):
                 self._update_or_insert(key, value)

     def _update_or_insert(self, key, value):
-        entry = self.cache.setdefault((key,), DictionaryEntry(False, {}))
+        entry = self.cache.setdefault(key, DictionaryEntry(False, {}))
         entry.value.update(value)

     def _insert(self, key, value):
-        self.cache[(key,)] = DictionaryEntry(True, value)
+        self.cache[key] = DictionaryEntry(True, value)
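Taken together, the DictionaryCache hunks restore bare keys across the class: get, pop, setdefault and item assignment all receive the key directly. A hedged sketch of the resulting behaviour, using only names visible in the hunks above (the cache name and value dict are illustrative):

dcache = DictionaryCache("example_cache")
dcache._insert("group_1", {"a": 1, "b": 2})    # full entry stored under the bare key
entry = dcache.cache.get("group_1", None)      # underlying LruCache lookup, no 1-tuple
assert entry.value == {"a": 1, "b": 2}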
@@ -29,7 +29,7 @@ def enumerate_leaves(node, depth):
 class LruCache(object):
     """Least-recently-used cache."""
-    def __init__(self, max_size, keylen, cache_type=dict):
+    def __init__(self, max_size, keylen=1, cache_type=dict):
         cache = cache_type()
         self.size = 0
         list_root = []
......
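In the constructor hunk above, keylen becomes optional rather than disappearing, so the single-key callers elsewhere in this commit can drop the argument while multi-part keys stay available. A hedged illustration of both call styles; the lrucache import path is assumed, the treecache import is the one shown in the test hunk below.

from synapse.util.caches.lrucache import LruCache    # path assumed
from synapse.util.caches.treecache import TreeCache  # import shown in the test hunk below

single = LruCache(1000)                           # keylen defaults to 1
single["key"] = "value"

multi = LruCache(1000, 2, cache_type=TreeCache)   # explicit keylen for tuple keys
multi[("user", "device")] = "value"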
@@ -56,42 +56,42 @@ class CacheTestCase(unittest.TestCase):
     def test_eviction(self):
         cache = Cache("test", max_entries=2)

-        cache.prefill((1,), "one")
-        cache.prefill((2,), "two")
-        cache.prefill((3,), "three")  # 1 will be evicted
+        cache.prefill(1, "one")
+        cache.prefill(2, "two")
+        cache.prefill(3, "three")  # 1 will be evicted

         failed = False
         try:
-            cache.get((1,))
+            cache.get(1)
         except KeyError:
             failed = True

         self.assertTrue(failed)

-        cache.get((2,))
-        cache.get((3,))
+        cache.get(2)
+        cache.get(3)

     def test_eviction_lru(self):
         cache = Cache("test", max_entries=2, lru=True)

-        cache.prefill((1,), "one")
-        cache.prefill((2,), "two")
+        cache.prefill(1, "one")
+        cache.prefill(2, "two")

         # Now access 1 again, thus causing 2 to be least-recently used
-        cache.get((1,))
+        cache.get(1)

-        cache.prefill((3,), "three")
+        cache.prefill(3, "three")

         failed = False
         try:
-            cache.get((2,))
+            cache.get(2)
         except KeyError:
             failed = True

         self.assertTrue(failed)

-        cache.get((1,))
-        cache.get((3,))
+        cache.get(1)
+        cache.get(3)


 class CacheDecoratorTestCase(unittest.TestCase):
......
@@ -22,37 +22,37 @@ from synapse.util.caches.treecache import TreeCache
 class LruCacheTestCase(unittest.TestCase):

     def test_get_set(self):
-        cache = LruCache(1, 1)
-        cache[("key",)] = "value"
-        self.assertEquals(cache.get(("key",)), "value")
-        self.assertEquals(cache[("key",)], "value")
+        cache = LruCache(1)
+        cache["key"] = "value"
+        self.assertEquals(cache.get("key"), "value")
+        self.assertEquals(cache["key"], "value")

     def test_eviction(self):
-        cache = LruCache(2, 1)
-        cache[(1,)] = 1
-        cache[(2,)] = 2
+        cache = LruCache(2)
+        cache[1] = 1
+        cache[2] = 2

-        self.assertEquals(cache.get((1,)), 1)
-        self.assertEquals(cache.get((2,)), 2)
+        self.assertEquals(cache.get(1), 1)
+        self.assertEquals(cache.get(2), 2)

-        cache[(3,)] = 3
+        cache[3] = 3

-        self.assertEquals(cache.get((1,)), None)
-        self.assertEquals(cache.get((2,)), 2)
-        self.assertEquals(cache.get((3,)), 3)
+        self.assertEquals(cache.get(1), None)
+        self.assertEquals(cache.get(2), 2)
+        self.assertEquals(cache.get(3), 3)

     def test_setdefault(self):
-        cache = LruCache(1, 1)
-        self.assertEquals(cache.setdefault(("key",), 1), 1)
-        self.assertEquals(cache.get(("key",)), 1)
-        self.assertEquals(cache.setdefault(("key",), 2), 1)
-        self.assertEquals(cache.get(("key",)), 1)
+        cache = LruCache(1)
+        self.assertEquals(cache.setdefault("key", 1), 1)
+        self.assertEquals(cache.get("key"), 1)
+        self.assertEquals(cache.setdefault("key", 2), 1)
+        self.assertEquals(cache.get("key"), 1)

     def test_pop(self):
-        cache = LruCache(1, 1)
-        cache[("key",)] = 1
-        self.assertEquals(cache.pop(("key",)), 1)
-        self.assertEquals(cache.pop(("key",)), None)
+        cache = LruCache(1)
+        cache["key"] = 1
+        self.assertEquals(cache.pop("key"), 1)
+        self.assertEquals(cache.pop("key"), None)

     def test_del_multi(self):
         cache = LruCache(4, 2, cache_type=TreeCache)
......
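The only LruCache test shown unchanged is test_del_multi, which keeps an explicit keylen of 2 and a TreeCache backing store, so tuple keys remain the expected shape there. A hedged sketch of that style (keys and values are illustrative, not from the commit):

cache = LruCache(4, 2, cache_type=TreeCache)
cache[("animal", "cat")] = "mew"              # 2-part tuple keys match keylen=2
assert cache.get(("animal", "cat")) == "mew"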