Mirror of https://github.com/ltcptgeneral/cs239-caching.git, synced 2025-09-22 22:57:49 +00:00
[ADD] - Added social media user profile get and upsert microservice with NoSQL database integration
app/cache/__init__.py (vendored, new file, 0 lines)
app/cache/cache.py (vendored, new file, 104 lines)
@@ -0,0 +1,104 @@
from abc import ABC, abstractmethod
from collections import OrderedDict


# implements a simple string k-v store, objects should be serialized before putting into the cache
class Cache(ABC):

    @abstractmethod
    def __init__(self, limit: int):
        """Constructor taking in the cache size limit as a number of entries"""
        pass

    @abstractmethod
    def get(self, key: str) -> str:
        """Get the value corresponding to key, or return None if there was a cache miss"""
        pass

    @abstractmethod
    def put(self, key: str, val: str) -> bool:
        """Set the value corresponding to key and return True if an eviction was made"""
        pass

    @abstractmethod
    def invalidate(self, key: str) -> bool:
        """Mark a cache item as invalid and return True if the element was found and invalidated"""
        pass


# the baseline cache using Direct Mapping, LRU eviction, and no prefetching
class BaselineCache(Cache):

    limit = None
    cache = None

    def __init__(self, limit: int):
        super().__init__(limit)
        self.limit = limit
        self.cache = OrderedDict()

    def __eq__(self, other):
        return self.cache == other

    def __len__(self):
        return len(self.cache)

    def get(self, key: str) -> str:
        if key in self.cache:
            self.cache.move_to_end(key)  # refresh recency on a hit
            return self.cache[key]
        else:
            return None

    def put(self, key: str, val: str) -> bool:
        # LRU evict
        evict = False
        if len(self.cache) >= self.limit:
            self.cache.popitem(last=False)
            evict = True

        self.cache[key] = val
        # no move_to_end needed here since assigning a new key appends it at the end by default

        return evict

    def invalidate(self, key: str) -> bool:
        # basic delete invalidation, no (p)refetching
        if key in self.cache:
            del self.cache[key]
            return True
        else:
            return False


if __name__ == "__main__":  # basic testing, should never be called when importing
    cache = BaselineCache(10)

    for i in range(10):
        assert cache.put(str(i), str(i + 1)) == False

    assert len(cache) == 10
    assert cache == OrderedDict({'0': '1', '1': '2', '2': '3', '3': '4', '4': '5', '5': '6', '6': '7', '7': '8', '8': '9', '9': '10'})

    assert cache.get("5") == "6"
    assert cache.get("8") == "9"
    assert cache.get("0") == "1"

    assert len(cache) == 10
    assert cache == OrderedDict({'1': '2', '2': '3', '3': '4', '4': '5', '6': '7', '7': '8', '9': '10', '5': '6', '8': '9', '0': '1'})

    assert cache.get("a") == None
    assert cache.get("b") == None
    assert cache.get("c") == None

    assert cache.put("a", "b") == True
    assert cache.put("b", "c") == True
    assert cache.put("c", "d") == True

    assert len(cache) == 10
    assert cache == OrderedDict({'4': '5', '6': '7', '7': '8', '9': '10', '5': '6', '8': '9', '0': '1', 'a': 'b', 'b': 'c', 'c': 'd'})

    assert cache.get("c") == "d"
    assert cache.get("b") == "c"
    assert cache.get("a") == "b"

    assert len(cache) == 10
    assert cache == OrderedDict({'4': '5', '6': '7', '7': '8', '9': '10', '5': '6', '8': '9', '0': '1', 'c': 'd', 'b': 'c', 'a': 'b'})
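The commit message describes a user-profile get/upsert microservice backed by a NoSQL store; the sketch below shows one way a caller could wrap that lookup in BaselineCache's get/put/invalidate API (cache-aside reads, invalidate-on-write). It is only an illustration: the import path, fetch_profile_from_db, and the upsert helper are assumptions and not part of this commit.

    # Hypothetical cache-aside usage of BaselineCache; the DB helpers are stand-ins.
    import json

    from app.cache.cache import BaselineCache  # import path assumes the repo layout above

    profile_cache = BaselineCache(1000)  # hold up to 1000 serialized profiles


    def fetch_profile_from_db(user_id: str) -> dict:
        # placeholder for the real NoSQL read
        return {"user_id": user_id, "name": "example"}


    def get_profile(user_id: str) -> dict:
        cached = profile_cache.get(user_id)
        if cached is not None:
            return json.loads(cached)  # hit: deserialize the cached string
        profile = fetch_profile_from_db(user_id)
        profile_cache.put(user_id, json.dumps(profile))  # serialize before caching, per the Cache docstring
        return profile


    def upsert_profile(user_id: str, profile: dict) -> None:
        # write the profile to the database here (omitted), then drop the stale cache entry
        profile_cache.invalidate(user_id)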
app/cache/prefetch_cache.py (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
from .cache import BaselineCache


class PrefetchCache(BaselineCache):

    key_relations = None

    def __init__(self, limit: int):
        super().__init__(limit)
        self.key_relations = dict()

    def put(self, key: str, val: str) -> bool:
        # LRU evict
        evict = False
        if len(self.cache) >= self.limit:
            self.cache.popitem(last=False)
            evict = True

        self.cache[key] = val
        self.prefetch(key, val)

        return evict

    def prefetch(self, key: str, val: str) -> bool:
        # if the cache is full and the inserted key has a known related entry,
        # pull that related (key, value) pair into the cache as well
        if len(self.cache) >= self.limit and key in self.key_relations:
            self.cache[self.key_relations[key][0]] = self.key_relations[key][1]
            return True
        return False

    def set_relations(self):
        # placeholder: callers populate key_relations with key -> (related_key, related_value) entries
        return
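Since set_relations is left as a stub, the sketch below only illustrates the mechanics under an assumption: key_relations is filled in by hand with key -> (related_key, related_value) pairs, and the constructor takes the cache size the same way BaselineCache does. The import path and the user:* keys are made up for the example.

    # Hypothetical wiring of key_relations; values are (related_key, related_value) pairs,
    # matching what prefetch() indexes.
    from app.cache.prefetch_cache import PrefetchCache  # import path assumes the repo layout above

    pc = PrefetchCache(2)
    pc.key_relations["user:1"] = ("user:1:friends", '["2", "3"]')

    pc.put("user:1", '{"name": "a"}')   # cache not full yet, so prefetch() does nothing
    pc.put("user:2", '{"name": "b"}')
    pc.put("user:1", '{"name": "a1"}')  # cache full: LRU entry is evicted, then the related entry is prefetched
    assert "user:1:friends" in pc.cache

Note that prefetch() inserts the related entry without evicting anything, so the cache can temporarily grow past its limit.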
app/cache/tiered_cache.py (vendored, new file, 94 lines)
@@ -0,0 +1,94 @@
from .cache import BaselineCache
from collections import OrderedDict
import os


class TieredCache(BaselineCache):

    l2_limit = None
    l2_map = None

    def __init__(self, limit, l2_limit=100):
        super().__init__(limit)
        self.l2_limit = l2_limit
        self.l2_map = OrderedDict()

    def get(self, key):
        # first look in the l1 cache
        s = super().get(key)
        if s is not None:
            return s
        else:  # on a miss, check the l2 cache mapping
            if key in self.l2_map:  # if it is in the l2 cache (disk), open the file and return the value
                f = open(self.l2_map[key], "r")
                v = f.read()
                f.close()
                return v
            else:  # otherwise it is a cache miss, return None
                return None

    def put(self, key, val):
        evict = False
        if len(self.cache) >= self.limit:
            if len(self.l2_map) >= self.l2_limit:
                self.l2_map.popitem(last=False)
                evict = True

            # demote the least recently used l1 entry to a file in the l2 tier
            k, v = self.cache.popitem(last=False)
            path = f"tiered_cache/{k}"
            self.l2_map[k] = path
            f = open(path, "w+")
            f.write(v)
            f.close()

        self.cache[key] = val

        return evict

    def invalidate(self, key: str) -> bool:
        # basic delete invalidation, no (p)refetching
        if key in self.cache:
            del self.cache[key]
            return True
        elif key in self.l2_map:
            os.remove(self.l2_map[key])  # this is so sketchy
            del self.l2_map[key]
            return True
        else:
            return False


if __name__ == "__main__":  # basic testing, should never be called when importing
    cache = TieredCache(10)

    for i in range(10):
        assert cache.put(str(i), str(i + 1)) == False

    assert len(cache) == 10
    assert cache == OrderedDict({'0': '1', '1': '2', '2': '3', '3': '4', '4': '5', '5': '6', '6': '7', '7': '8', '8': '9', '9': '10'})

    assert cache.get("5") == "6"
    assert cache.get("8") == "9"
    assert cache.get("0") == "1"

    assert len(cache) == 10
    assert cache == OrderedDict({'1': '2', '2': '3', '3': '4', '4': '5', '6': '7', '7': '8', '9': '10', '5': '6', '8': '9', '0': '1'})

    assert cache.get("a") == None
    assert cache.get("b") == None
    assert cache.get("c") == None

    assert cache.put("a", "b") == False
    assert cache.put("b", "c") == False
    assert cache.put("c", "d") == False

    assert len(cache) == 10
    assert cache == OrderedDict({'4': '5', '6': '7', '7': '8', '9': '10', '5': '6', '8': '9', '0': '1', 'a': 'b', 'b': 'c', 'c': 'd'})

    assert cache.get("c") == "d"
    assert cache.get("b") == "c"
    assert cache.get("a") == "b"

    assert cache.get("1") == "2"
    assert cache.get("2") == "3"
    assert cache.get("3") == "4"

    assert len(cache) == 10
    assert cache == OrderedDict({'4': '5', '6': '7', '7': '8', '9': '10', '5': '6', '8': '9', '0': '1', 'c': 'd', 'b': 'c', 'a': 'b'})
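put() demotes the LRU entry to a file at tiered_cache/<key>, so that directory has to exist in the working directory before the first demotion; neither the class nor its test creates it. A minimal setup sketch follows, assuming the package import path; the makedirs call is not part of this commit.

    import os

    from app.cache.tiered_cache import TieredCache  # import path assumes the repo layout above

    os.makedirs("tiered_cache", exist_ok=True)  # the l2 tier writes demoted entries here

    tc = TieredCache(2, l2_limit=100)
    tc.put("k1", "v1")
    tc.put("k2", "v2")
    tc.put("k3", "v3")           # l1 full: "k1" is written to tiered_cache/k1
    assert tc.get("k1") == "v1"  # served from the l2 file rather than from l1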