HiccupHan
2025-03-03 22:19:51 -08:00
7 changed files with 47 additions and 18 deletions

View File

@@ -8,7 +8,7 @@ class Node:
         self.next = None
         self.prev = None
 
-class SeiveCache(Cache):
+class SieveCache(Cache):
     def __init__(self, limit: int):
         super().__init__(limit)
         self.limit = limit # Fix: Store limit properly
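
Only the class rename is visible in this hunk; the eviction logic itself is not shown. For orientation, a generic sketch of a SIEVE eviction pass follows. It is not the repo's code: the `visited`, `hand`, `tail`, `key`, `cache` attributes and the `_unlink` helper are hypothetical, and it assumes `prev` points toward the head of the queue.

def _evict(self):
    # Start at the hand (or the tail on the first eviction) and walk toward the head.
    node = self.hand if self.hand is not None else self.tail
    while node is not None and node.visited:
        node.visited = False          # second chance: clear the bit and keep walking
        node = node.prev              # assumption: prev moves toward the head
        if node is None:              # wrapped past the head
            node = self.tail          # resume from the tail
    self.hand = node.prev             # next eviction resumes here
    del self.cache[node.key]          # hypothetical key -> Node map
    self._unlink(node)                # hypothetical doubly-linked-list removal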

View File

@@ -1,4 +1,4 @@
-cache_strategy: "Seive"
+cache_strategy: "Sieve"
 cache_limit: 50
 l2_cache_limit: 100 # unused
 db_file: "llmData_sns.json" # Change this to the name of any json file within the "database/datastore" folder

View File

@@ -1,12 +1,16 @@
 from tinydb import TinyDB, Query
 from config import DB_FILE
+import shutil
+import random
 
 DB_LOCATION = "database/datastore/" + DB_FILE
 
 # Initialize TinyDB as a NoSQL key-value store
-db = TinyDB(DB_LOCATION)
+# We don't want to change our main DB file, so we will make a temp DB file and use that as our DB file
+shutil.copyfile( DB_LOCATION, "temp_DB.json" )
+db = TinyDB("temp_DB.json")
 
 User = Query()
 
 def get_user_ids():
def get_user_ids():
@@ -34,9 +38,16 @@ def get_user_profile(user_id):
     result = db.search(User.user_id == user_id)
     return result[0] if result else None
 
-def update_user_profile(user_id, name, followers, bio, posts, friends):
+def update_user_profile( data ):
     """Update user profile in TinyDB"""
-    db.upsert({"user_id": user_id, "name": name, "followers": followers, "bio": bio, "posts": posts, "friends": friends}, User.user_id == user_id)
+    user_id = str( data["user_id"] )
+    # Basically make sure friends stay the same (for prefetching). Not great implementation, but it works
+    curr_user = db.search(User.user_id == user_id)
+    if( curr_user and data["friends"] == None ):
+        data["friends"] = curr_user[0]["friends"]
+    db.upsert( data, User.user_id == user_id )
 
 def init_db():
     """Ensure TinyDB is initialized before FastAPI starts and prepopulate some data"""

View File

@@ -3,11 +3,12 @@ from database import get_user_ids, get_user_profile, update_user_profile, get_us
 from cache.cache import BaselineCache
 from cache.prefetch_cache import PrefetchCache
 from cache.tiered_cache import TieredCache
-from cache.eviction_seive import SeiveCache
+from cache.eviction_sieve import SieveCache
 from cache.nocache import NoCache
 from cache.idealcache import IdealCache
 from cache.read_after_write_cache import ReadAfterWriteCache
 from config import CACHE_STRATEGY, CACHE_LIMIT, L2_CACHE_LIMIT
+from models.models import User
 import time
 
 app = FastAPI()
app = FastAPI()
@@ -22,9 +23,9 @@ elif CACHE_STRATEGY == "Prefetch":
 elif CACHE_STRATEGY == "Tiered":
     print("Using tiered cache strategy")
     cache = TieredCache(limit=CACHE_LIMIT, l2_limit=L2_CACHE_LIMIT)
-elif CACHE_STRATEGY == "Seive":
-    print("Using seive cache strategy")
-    cache = SeiveCache(limit=CACHE_LIMIT)
+elif CACHE_STRATEGY == "Sieve":
+    print("Using sieve cache strategy")
+    cache = SieveCache(limit=CACHE_LIMIT)
 elif CACHE_STRATEGY == "None":
     print("Using no cache strategy")
     cache = NoCache(limit=CACHE_LIMIT)
@@ -62,8 +63,11 @@ def fetch_user_profile(user_id: str):
     return {"user_id": user_id, "profile": profile, "source": "database", "time_ms": (time.time() - start) * 1000}
 
 @app.post("/update_user/")
-def modify_user_profile(user_id: str, name: str, followers: int, bio: str, posts: str, friends: list[str]):
+async def modify_user_profile(user_data : User):
    """Update user profile and refresh cache"""
-    update_user_profile(user_id, name, followers, bio, posts, friends)
+    user_id=user_data.user_id
+    user_dict = user_data.dict()
+    update_user_profile(user_dict)
     cache.invalidate(user_id) # Invalidate old cache
     return {"message": "User profile updated successfully"}

app/models/models.py Normal file
View File

@@ -0,0 +1,9 @@
+from pydantic import BaseModel
+
+class User(BaseModel):
+    user_id: str
+    name: str | None = None
+    followers: int | None = None
+    bio: str | None = None
+    posts: str | None = None
+    friends: list | None = None
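
Every field except user_id defaults to None (the `str | None` syntax requires Python 3.10+), which is what lets update_user_profile keep the stored friends list on partial updates. A small illustration with made-up values:

u = User(user_id="42", bio="new bio")   # only user_id is required
print(u.dict())                         # .dict() is the Pydantic v1-style call used in main.py
# {'user_id': '42', 'name': None, 'followers': None, 'bio': 'new bio',
#  'posts': None, 'friends': None}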