Mirror of https://github.com/ltcptgeneral/cs239-caching.git
commit 31ffbc18ec
Author: HiccupHan
Date: 2025-03-03 22:19:51 -08:00

7 changed files with 47 additions and 18 deletions

@@ -8,7 +8,7 @@ class Node:
         self.next = None
         self.prev = None
 
-class SeiveCache(Cache):
+class SieveCache(Cache):
     def __init__(self, limit: int):
         super().__init__(limit)
         self.limit = limit # Fix: Store limit properly

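For context on the renamed class: SIEVE keeps cached entries in FIFO order with a per-entry "visited" bit; a hit only sets the bit, and an eviction "hand" sweeps from the oldest entry toward the newest, clearing bits and removing the first unvisited entry it reaches. A minimal self-contained sketch of that idea (an illustration only, not the repository's linked-list implementation above):

    class SieveSketch:
        """Illustrative SIEVE cache: FIFO order plus a per-entry visited bit."""

        def __init__(self, limit: int):
            self.limit = limit
            self.data = {}       # key -> value
            self.visited = {}    # key -> bool
            self.order = []      # keys in insertion order, oldest first
            self.hand = 0        # eviction hand, an index into self.order

        def get(self, key):
            if key in self.data:
                self.visited[key] = True  # a hit only sets the bit; no reordering
                return self.data[key]
            return None

        def put(self, key, value):
            if key in self.data:
                self.data[key] = value
                self.visited[key] = True
                return
            if len(self.order) >= self.limit:
                self._evict()
            self.data[key] = value
            self.visited[key] = False     # new entries start unvisited
            self.order.append(key)

        def _evict(self):
            # Sweep from the hand, giving visited entries a second chance.
            while True:
                if self.hand >= len(self.order):
                    self.hand = 0         # wrap back to the oldest entry
                key = self.order[self.hand]
                if self.visited[key]:
                    self.visited[key] = False
                    self.hand += 1
                else:
                    self.order.pop(self.hand)  # hand now points at the next-newer entry
                    del self.data[key]
                    del self.visited[key]
                    return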
@@ -1,4 +1,4 @@
-cache_strategy: "Seive"
+cache_strategy: "Sieve"
 cache_limit: 50
 l2_cache_limit: 100 # unused
 db_file: "llmData_sns.json" # Change this to the name of any json file within the "database/datastore" folder

@@ -1,12 +1,16 @@
 from tinydb import TinyDB, Query
 from config import DB_FILE
+import shutil
+import random
 
 DB_LOCATION = "database/datastore/" + DB_FILE
 
 # Initialize TinyDB as a NoSQL key-value store
-db = TinyDB(DB_LOCATION)
+# We don't want to change our main DB file, so we will make a temp DB file and use that as our DB file
+shutil.copyfile( DB_LOCATION, "temp_DB.json" )
+db = TinyDB("temp_DB.json")
 
 User = Query()
 
 def get_user_ids():
@@ -34,9 +38,16 @@ def get_user_profile(user_id):
     result = db.search(User.user_id == user_id)
     return result[0] if result else None
 
-def update_user_profile(user_id, name, followers, bio, posts, friends):
+def update_user_profile( data ):
     """Update user profile in TinyDB"""
-    db.upsert({"user_id": user_id, "name": name, "followers": followers, "bio": bio, "posts": posts, "friends": friends}, User.user_id == user_id)
+    user_id = str( data["user_id"] )
+    # Basically make sure friends stay the same (for prefetching). Not great implementation, but it works
+    curr_user = db.search(User.user_id == user_id)
+    if( curr_user and data["friends"] == None ):
+        data["friends"] = curr_user[0]["friends"]
+    db.upsert( data, User.user_id == user_id )
 
 def init_db():
     """Ensure TinyDB is initialized before FastAPI starts and prepopulate some data"""

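The rewritten update_user_profile takes a single dict, and a friends value of None means "keep the friends list already stored", so prefetch relationships survive partial updates. A hypothetical call (user 42 and the field values are made up; the import path matches what main.py uses below):

    from database import update_user_profile

    update_user_profile({
        "user_id": "42",
        "name": "UpdatedUser",
        "followers": 500,
        "bio": "Updated",
        "posts": "UpdatedPost",
        "friends": None,  # None = preserve the stored friends list
    })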
@@ -3,11 +3,12 @@ from database import get_user_ids, get_user_profile, update_user_profile, get_us
 from cache.cache import BaselineCache
 from cache.prefetch_cache import PrefetchCache
 from cache.tiered_cache import TieredCache
-from cache.eviction_seive import SeiveCache
+from cache.eviction_sieve import SieveCache
 from cache.nocache import NoCache
 from cache.idealcache import IdealCache
 from cache.read_after_write_cache import ReadAfterWriteCache
 from config import CACHE_STRATEGY, CACHE_LIMIT, L2_CACHE_LIMIT
+from models.models import User
 import time
 
 app = FastAPI()
@@ -22,9 +23,9 @@ elif CACHE_STRATEGY == "Prefetch":
 elif CACHE_STRATEGY == "Tiered":
     print("Using tiered cache strategy")
     cache = TieredCache(limit=CACHE_LIMIT, l2_limit=L2_CACHE_LIMIT)
-elif CACHE_STRATEGY == "Seive":
-    print("Using seive cache strategy")
-    cache = SeiveCache(limit=CACHE_LIMIT)
+elif CACHE_STRATEGY == "Sieve":
+    print("Using sieve cache strategy")
+    cache = SieveCache(limit=CACHE_LIMIT)
 elif CACHE_STRATEGY == "None":
     print("Using no cache strategy")
     cache = NoCache(limit=CACHE_LIMIT)
@@ -62,8 +63,11 @@ def fetch_user_profile(user_id: str):
     return {"user_id": user_id, "profile": profile, "source": "database", "time_ms": (time.time() - start) * 1000}
 
 @app.post("/update_user/")
-def modify_user_profile(user_id: str, name: str, followers: int, bio: str, posts: str, friends: list[str]):
+async def modify_user_profile(user_data : User):
     """Update user profile and refresh cache"""
-    update_user_profile(user_id, name, followers, bio, posts, friends)
+    user_id=user_data.user_id
+    user_dict = user_data.dict()
+    update_user_profile(user_dict)
     cache.invalidate(user_id) # Invalidate old cache
     return {"message": "User profile updated successfully"}

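Because the endpoint now takes a pydantic User body rather than query parameters, clients must POST JSON. A sketch using the requests library (the localhost address and field values are assumptions for illustration; note that only friends is back-filled from the database when omitted, so other fields left out would be upserted as None):

    import requests

    resp = requests.post(
        "http://localhost:8000/update_user/",  # assumed dev-server address
        json={
            "user_id": "42",
            "name": "UpdatedUser",
            "followers": 500,
            "bio": "Updated",
            "posts": "UpdatedPost",
            # "friends" omitted -> None -> stored friends list is preserved
        },
    )
    print(resp.json())  # {"message": "User profile updated successfully"}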
app/models/models.py (new file)

@@ -0,0 +1,9 @@
+from pydantic import BaseModel
+
+class User(BaseModel):
+    user_id: str
+    name: str | None = None
+    followers: int | None = None
+    bio: str | None = None
+    posts: str | None = None
+    friends: list | None = None

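Worth noting about this model: pydantic coerces compatible input types during validation, which is why the workload generator below can send followers as the string "500" and still satisfy the int annotation. A quick check (assuming pydantic v1, as the user_data.dict() call above suggests):

    from pydantic import BaseModel

    class User(BaseModel):
        user_id: str
        followers: int | None = None

    u = User(user_id="42", followers="500")
    print(u.dict())  # {'user_id': '42', 'followers': 500} -- "500" coerced to int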
@@ -15,9 +15,9 @@ random.seed(0)
 # Workload Configurations
 workloads = {
-    "random_read": {"read": 1.0, "write": 0.0},
     "read_heavy": {"read": 0.8, "write": 0.2},
     "write_heavy": {"read": 0.2, "write": 0.8},
+    "random_read": {"read": 1.0, "write": 0.0},
     "frequent_users": {"read": 1.0, "write": 0.0, "frequent_percent": 0.7},
     "frequent_after_write": {"read": 0.7, "write": 0.3},
    "friend_based": {"read": 1.0, "write": 0.0, "friend_access": 0.7},
@@ -30,12 +30,13 @@ def generate_request(workload, last_updated=None):
     """Generate read or write requests based on workload type"""
     if random.random() < workload["read"]:
         user_id = select_user(workload, last_updated)
-        return baseurl + f"/user/{user_id}", "GET"
+        return baseurl + f"/user/{user_id}", None, "GET"
 
     # Write operation (updates user profile)
     user_id = select_user(workload, last_updated)
-    url = baseurl + f"/update_user/?user_id={user_id}&name=UpdatedUser&followers=500&bio=Updated&posts=UpdatedPost"
-    return url, "POST"
+    write_obj = { "user_id":user_id,"name": "UpdatedUser", "followers":"500","bio":"Updated","posts":"UpdatedPost"}
+    url = baseurl + f"/update_user/"
+    return url, write_obj, "POST"
 
 def select_user(workload, last_updated):
     """Selects a user based on workload type"""
@@ -55,8 +56,12 @@ def run_workload(name, workload):
     last_updated = None
 
     for _ in tqdm(range(10000), desc=f"Running {name}"):
-        url, method = generate_request(workload, last_updated)
+        url, data, method = generate_request(workload, last_updated)
 
-        response = requests.request(method, url)
+        if( method == "GET" ):
+            response = requests.request(method, url)
+        else:
+            response = requests.post(url, json = data)
         try:
             content = json.loads(response.content)