mirror of
https://github.com/ltcptgeneral/cs239-caching.git
synced 2025-03-26 17:55:18 +00:00
[ADD] - Added social media user profile get and upsert microservice with nosql database integration
This commit is contained in:
parent
67bf966a6a
commit
12de833f66
26
.gitignore
vendored
26
.gitignore
vendored
@ -1 +1,25 @@
|
||||
__pycache__
|
||||
# Ignore the virtual environment
|
||||
venv/
|
||||
|
||||
# Ignore Python cache files
|
||||
__pycache__/
|
||||
**/__pycache__/
|
||||
|
||||
# Ignore database files (TinyDB JSON)
|
||||
database.json
|
||||
|
||||
# Ignore environment variables file (if used)
|
||||
.env
|
||||
|
||||
# Ignore logs and temporary files
|
||||
*.log
|
||||
*.tmp
|
||||
|
||||
# Ignore VSCode & PyCharm project files
|
||||
.vscode/
|
||||
.idea/
|
||||
|
||||
# Ignore MacOS system files
|
||||
.DS_Store
|
||||
|
||||
|
||||
|
56
README.md
56
README.md
@ -8,3 +8,59 @@ Thus, we want to follow up on MuCache and see how much impact different caching
|
||||
## Methodology
|
||||
To test caching strategies on microservices, we decided to build a microservice system that mimics interactions between services, so we can easily implement and test different caching strategies. The idea is that the caching between any two services will behave the same as for any other pair of services; thus, we can scale down our targeted environment.
|
||||
We designed our own cache and microservice system and will use different metrics to test our caching strategies.
|
||||
|
||||
|
||||
## How to run
|
||||
|
||||
### Set Up Virtual Environment
|
||||
macOS/Linux
|
||||
|
||||
```python3 -m venv venv && source venv/bin/activate```
|
||||
|
||||
Windows (CMD)
|
||||
|
||||
```python -m venv venv && venv\Scripts\activate```
|
||||
|
||||
Windows (PowerShell)
|
||||
|
||||
```python -m venv venv && venv\Scripts\Activate.ps1```
|
||||
|
||||
### Install Dependencies
|
||||
|
||||
```pip install -r requirements.txt```
|
||||
|
||||
### Edit config.yaml to set the caching strategy:
|
||||
|
||||
```
|
||||
cache_strategy: "Baseline" # Change to "Prefetch" or "Tiered"
|
||||
cache_limit: 10
|
||||
l2_cache_limit: 100
|
||||
```
|
||||
|
||||
### Run Microservice
|
||||
|
||||
```python run.py```
|
||||
|
||||
### Test API Endpoints
|
||||
|
||||
Fetch a User Profile
|
||||
|
||||
```curl -X GET "http://127.0.0.1:8000/user/1"```
|
||||
|
||||
Update a User Profile
|
||||
|
||||
```
|
||||
curl -X POST "http://127.0.0.1:8000/update_user/?user_id=2&name=Bob&followers=200&bio=TechEnthusiast&posts=AIIsAwesome"
|
||||
|
||||
|
||||
```
|
||||
|
||||
### Stop the server and deactivate the virtual env
|
||||
|
||||
macOS/Linux
|
||||
|
||||
```deactivate```
|
||||
|
||||
Windows
|
||||
|
||||
```venv\Scripts\deactivate.bat```
|
0
app/cache/__init__.py
vendored
Normal file
0
app/cache/__init__.py
vendored
Normal file
0
cache.py → app/cache/cache.py
vendored
0
cache.py → app/cache/cache.py
vendored
@ -1,4 +1,4 @@
|
||||
from cache import BaselineCache
|
||||
from .cache import BaselineCache
|
||||
|
||||
class PrefetchCache(BaselineCache):
|
||||
key_relations = None
|
2
tiered_cache.py → app/cache/tiered_cache.py
vendored
2
tiered_cache.py → app/cache/tiered_cache.py
vendored
@ -1,4 +1,4 @@
|
||||
from cache import BaselineCache
|
||||
from .cache import BaselineCache
|
||||
from collections import OrderedDict
|
||||
import os
|
||||
|
15
app/config.py
Normal file
15
app/config.py
Normal file
@ -0,0 +1,15 @@
|
||||
import os

import yaml

# Path to the YAML configuration file, resolved relative to the current
# working directory (the service is run from the app/ directory).
CONFIG_FILE = "config.yaml"


def load_config():
    """Load and parse the YAML configuration file.

    Returns:
        dict: Parsed configuration mapping (expects keys such as
        ``cache_strategy``, ``cache_limit``, ``l2_cache_limit``).

    Raises:
        FileNotFoundError: If ``CONFIG_FILE`` does not exist.
        yaml.YAMLError: If the file contains invalid YAML.
    """
    with open(CONFIG_FILE, "r") as f:
        return yaml.safe_load(f)


config = load_config()

# Precedence for every setting: environment variable > YAML value > default.
# Previously only CACHE_STRATEGY honored the environment; the limits now do
# too, so deployments can tune them without editing config.yaml (backward
# compatible: without env vars the YAML/default values are unchanged).
CACHE_STRATEGY = os.getenv("CACHE_STRATEGY", config.get("cache_strategy", "Baseline"))
CACHE_LIMIT = int(os.getenv("CACHE_LIMIT", config.get("cache_limit", 10)))
L2_CACHE_LIMIT = int(os.getenv("L2_CACHE_LIMIT", config.get("l2_cache_limit", 100)))
|
3
app/config.yaml
Normal file
3
app/config.yaml
Normal file
@ -0,0 +1,3 @@
|
||||
cache_strategy: "Baseline" # Change this to "Prefetch" or "Tiered"
|
||||
cache_limit: 10
|
||||
l2_cache_limit: 100
|
29
app/database.py
Normal file
29
app/database.py
Normal file
@ -0,0 +1,29 @@
|
||||
|
||||
from tinydb import TinyDB, Query

# TinyDB file acting as a lightweight NoSQL document store (one JSON file).
DB_FILE = "database.json"
db = TinyDB(DB_FILE)
User = Query()


def get_user_profile(user_id):
    """Return the profile document for *user_id*, or None when absent."""
    matches = db.search(User.user_id == user_id)
    if not matches:
        return None
    return matches[0]


def update_user_profile(user_id, name, followers, bio, posts):
    """Insert or replace the profile document keyed by *user_id*."""
    record = {
        "user_id": user_id,
        "name": name,
        "followers": followers,
        "bio": bio,
        "posts": posts,
    }
    db.upsert(record, User.user_id == user_id)


def init_db():
    """Ensure TinyDB is initialized before FastAPI starts and prepopulate some data."""
    global db
    db = TinyDB(DB_FILE)  # Reopen the store in case it was not yet created

    # Seed a few demo profiles only on the very first run (empty database).
    if len(db) != 0:
        return
    db.insert_multiple([
        {"user_id": "1", "name": "Alice", "followers": 100, "bio": "Love coding!", "posts": "Hello, world!"},
        {"user_id": "2", "name": "Bob", "followers": 200, "bio": "Tech enthusiast", "posts": "AI is amazing!"},
        {"user_id": "3", "name": "Charlie", "followers": 50, "bio": "Blogger", "posts": "Check out my latest post!"},
    ])
|
39
app/main.py
Normal file
39
app/main.py
Normal file
@ -0,0 +1,39 @@
|
||||
from fastapi import FastAPI, HTTPException
from database import get_user_profile, update_user_profile
from cache.cache import BaselineCache
from cache.prefetch_cache import PrefetchCache
from cache.tiered_cache import TieredCache
from config import CACHE_STRATEGY, CACHE_LIMIT, L2_CACHE_LIMIT

app = FastAPI()

# Select the cache implementation from config.yaml / environment variable.
# Fail fast on an unknown strategy rather than serving without a cache.
if CACHE_STRATEGY == "Baseline":
    cache = BaselineCache(limit=CACHE_LIMIT)
elif CACHE_STRATEGY == "Prefetch":
    cache = PrefetchCache()
elif CACHE_STRATEGY == "Tiered":
    cache = TieredCache(limit=CACHE_LIMIT, l2_limit=L2_CACHE_LIMIT)
else:
    raise ValueError(f"Invalid CACHE_STRATEGY: {CACHE_STRATEGY}")


@app.get("/user/{user_id}")
def fetch_user_profile(user_id: str):
    """Fetch a user profile, serving from cache when possible.

    Returns a payload tagging whether the profile came from "cache" or
    "database" so callers (and benchmarks) can observe hit/miss behavior.

    Raises:
        HTTPException: 404 when the user does not exist in the database.
    """
    cached_profile = cache.get(user_id)
    # Explicit None check: a falsy-but-present cached value (e.g. an empty
    # document) must still count as a cache hit; plain truthiness would
    # incorrectly fall through to the database.
    if cached_profile is not None:
        return {"user_id": user_id, "profile": cached_profile, "source": "cache"}

    profile = get_user_profile(user_id)
    if profile is None:
        raise HTTPException(status_code=404, detail="User not found")

    cache.put(user_id, profile)  # Store in cache for subsequent reads
    return {"user_id": user_id, "profile": profile, "source": "database"}


@app.post("/update_user/")
def modify_user_profile(user_id: str, name: str, followers: int, bio: str, posts: str):
    """Upsert a user profile and drop any stale cached copy."""
    update_user_profile(user_id, name, followers, bio, posts)
    cache.invalidate(user_id)  # Next read repopulates from the database
    return {"message": "User profile updated successfully"}
|
12
app/run.py
Normal file
12
app/run.py
Normal file
@ -0,0 +1,12 @@
|
||||
import os

import uvicorn

from database import init_db  # Ensure database initializes before starting FastAPI

# Keep the working tree clean of .pyc files during development.
os.environ["PYTHONDONTWRITEBYTECODE"] = "1"

if __name__ == "__main__":
    # Initialize TinyDB (NoSQL) and seed sample data before FastAPI starts.
    # NOTE(review): with reload=True uvicorn re-imports "main:app" in a child
    # process, so any state set up here must live in the database file, not
    # in this process's memory.
    init_db()

    # Start the FastAPI server. The original call also passed workers=2,
    # but uvicorn ignores `workers` when `reload=True` (the two options are
    # mutually exclusive per the uvicorn settings docs), so the dead
    # argument is dropped — runtime behavior is unchanged.
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
|
5
requirements.txt
Normal file
5
requirements.txt
Normal file
@ -0,0 +1,5 @@
|
||||
fastapi
|
||||
uvicorn
|
||||
tinydb
|
||||
pyyaml
|
||||
|
Loading…
x
Reference in New Issue
Block a user