Skip to content

Commit 05ddc33

Browse files
⚡ perf: Add Redis caching to /v1/xon-pulse and /v1/rss
1 parent 5184897 commit 05ddc33

2 files changed

Lines changed: 89 additions & 2 deletions

File tree

api/v1/feeds.py

Lines changed: 69 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,51 @@
11
"""Feed endpoints for RSS and XON Pulse."""
22

3+
import json
34
import logging
4-
from typing import List
5+
from datetime import timedelta
6+
from typing import Dict, List, Optional
7+
58
from fastapi import APIRouter, Request, Response
9+
from feedgen.feed import FeedGenerator
610
from google.cloud import datastore
711
from pydantic import BaseModel
8-
from feedgen.feed import FeedGenerator
12+
from redis import Redis
13+
14+
from config.settings import REDIS_DB, REDIS_HOST, REDIS_PORT
915
from models.base import BaseResponse
1016
from services.send_email import send_exception_email
1117
from utils.custom_limiter import custom_rate_limiter
1218
from utils.safe_encoding import escape_rss_content, escape_url_fragment
1319

1420
router = APIRouter()
1521

22+
# Module-level Redis client shared by the feed-caching helpers below.
# decode_responses=True makes get() return str (not bytes), which is why
# the cache helpers deal purely in strings.
redis_client = Redis(
    host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True
)

# Default cache TTL for feed payloads: 12 hours.
FEEDS_CACHE_TTL_HOURS = 12
30+
31+
def get_cached_feed(cache_key: str) -> Optional[str]:
    """Retrieve a cached feed payload from Redis.

    Caching is best-effort: any Redis failure is treated as a cache
    miss so the endpoint can fall back to the primary data source.

    Args:
        cache_key: Redis key under which the feed payload was stored.

    Returns:
        The cached string payload (client uses ``decode_responses=True``),
        or ``None`` on a cache miss or any Redis error.
    """
    try:
        return redis_client.get(cache_key)
    except Exception:
        # Deliberately swallow Redis outages (cache must never break the
        # endpoint), but log at debug level instead of passing silently
        # so cache failures remain diagnosable.
        logging.debug("Redis GET failed for key %s", cache_key, exc_info=True)
    return None
38+
39+
40+
def cache_feed(
    cache_key: str, content: str, expiry_hours: int = FEEDS_CACHE_TTL_HOURS
) -> None:
    """Store feed content in Redis with an expiry.

    Caching is best-effort: any Redis failure is ignored so a cache
    outage can never break the endpoint that produced the content.

    Args:
        cache_key: Redis key to store the payload under.
        content: Serialized feed payload (JSON or RSS XML as a string).
        expiry_hours: TTL in hours; defaults to ``FEEDS_CACHE_TTL_HOURS``.
    """
    try:
        redis_client.setex(cache_key, timedelta(hours=expiry_hours), content)
    except Exception:
        # Deliberately swallow Redis outages, but log at debug level
        # instead of passing silently so cache failures are diagnosable.
        logging.debug("Redis SETEX failed for key %s", cache_key, exc_info=True)
48+
1649

1750
class PulseNewsItem(BaseModel):
1851
"""Model for individual news item."""
@@ -34,6 +67,14 @@ class PulseNewsResponse(BaseResponse):
3467
async def get_pulse_data(request: Request):
3568
"""Generate news feed for presenting all data breaches news."""
3669
try:
70+
# Check cache first
71+
cache_key = "feeds:xon-pulse"
72+
cached_result = get_cached_feed(cache_key)
73+
if cached_result:
74+
cached_data = json.loads(cached_result)
75+
return PulseNewsResponse(**cached_data)
76+
77+
# Cache miss - fetch from Datastore
3778
client = datastore.Client()
3879
query = client.query(kind="xon-pulse")
3980
results = list(query.fetch())
@@ -48,6 +89,14 @@ async def get_pulse_data(request: Request):
4889
)
4990
data.append(item)
5091

92+
# Cache the response
93+
response_data = {
94+
"status": "success",
95+
"data": [item.model_dump() for item in data],
96+
"status_code": 200,
97+
}
98+
cache_feed(cache_key, json.dumps(response_data))
99+
51100
return PulseNewsResponse(status="success", data=data, status_code=200)
52101
except Exception as exc:
53102
logging.error("Failed to fetch news feed: %s", str(exc))
@@ -68,6 +117,13 @@ async def get_pulse_data(request: Request):
68117
async def rss_feed(request: Request):
69118
"""Generate RSS feed for presenting all data breaches in XoN."""
70119
try:
120+
# Check cache first
121+
cache_key = "feeds:rss"
122+
cached_rss = get_cached_feed(cache_key)
123+
if cached_rss:
124+
return Response(content=cached_rss, media_type="application/rss+xml")
125+
126+
# Cache miss - generate RSS feed
71127
feed_generator = FeedGenerator()
72128
feed_generator.title("XposedOrNot Data Breaches")
73129
feed_generator.description("Live updates of uploaded data breaches")
@@ -102,6 +158,17 @@ async def rss_feed(request: Request):
102158
feed_entry.guid(guid=entity_key, permalink=True)
103159

104160
rss_content = feed_generator.rss_str()
161+
162+
# Cache the RSS content (decode bytes to string for Redis)
163+
cache_feed(
164+
cache_key,
165+
(
166+
rss_content.decode("utf-8")
167+
if isinstance(rss_content, bytes)
168+
else rss_content
169+
),
170+
)
171+
105172
return Response(content=rss_content, media_type="application/rss+xml")
106173

107174
except Exception as exc:

api/v1/metrics.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -95,6 +95,13 @@ async def get_detailed_metrics_endpoint(request: Request) -> DetailedMetricsResp
9595
if not validate_url(request):
9696
raise HTTPException(status_code=400, detail="Invalid request URL")
9797

98+
# Check cache first
99+
cache_key = "metrics:detailed"
100+
cached_result = get_cached_metrics(cache_key)
101+
if cached_result:
102+
return DetailedMetricsResponse(**cached_result)
103+
104+
# Cache miss - fetch and process
98105
metrics = await get_detailed_metrics()
99106

100107
# Process top breaches
@@ -130,6 +137,19 @@ async def get_detailed_metrics_endpoint(request: Request) -> DetailedMetricsResp
130137
}
131138
)
132139

140+
# Build response and cache it
141+
response_data = {
142+
"Breaches_Count": metrics["breaches_count"],
143+
"Breaches_Records": metrics["breaches_total_records"],
144+
"Pastes_Count": str(metrics["pastes_count"]),
145+
"Pastes_Records": metrics["pastes_total_records"],
146+
"Yearly_Breaches_Count": metrics["yearly_count"],
147+
"Industry_Breaches_Count": metrics["industry_breaches_count"],
148+
"Top_Breaches": top_breaches,
149+
"Recent_Breaches": recent_breaches,
150+
}
151+
cache_metrics(cache_key, response_data)
152+
133153
return DetailedMetricsResponse(
134154
Breaches_Count=metrics["breaches_count"],
135155
Breaches_Records=metrics["breaches_total_records"],

0 commit comments

Comments
 (0)