Skip to content

Commit 46e220f

Browse files
authored
Merge pull request #65 from ClipABit/staging-issues
Staging issues
2 parents 01e3933 + 442aa5c commit 46e220f

File tree

13 files changed

+70
-359
lines changed

13 files changed

+70
-359
lines changed

backend/api/fastapi_router.py

Lines changed: 32 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ def __init__(
3838
self.router = APIRouter()
3939

4040
# Initialize UploadHandler with process_video spawn function
41-
from services.upload import UploadHandler
41+
from services.upload_handler import UploadHandler
4242
self.upload_handler = UploadHandler(
4343
job_store=server_instance.job_store,
4444
process_video_spawn_fn=self._get_process_video_spawn_fn()
@@ -88,6 +88,7 @@ def _register_routes(self):
8888
self.router.add_api_route("/search", self.search, methods=["GET"])
8989
self.router.add_api_route("/videos", self.list_videos, methods=["GET"])
9090
self.router.add_api_route("/videos/{hashed_identifier}", self.delete_video, methods=["DELETE"])
91+
self.router.add_api_route("/cache/clear", self.clear_cache, methods=["POST"])
9192

9293
async def health(self):
9394
"""
@@ -276,3 +277,33 @@ async def delete_video(self, hashed_identifier: str, filename: str, namespace: s
276277
"message": "Video deletion started, processing in background"
277278
}
278279

280+
async def clear_cache(self, namespace: str = "__default__"):
    """
    Clear the URL cache held by the R2 connector for one namespace.

    Only permitted in internal environments; external deployments get a
    403 before any cache state is touched.

    Args:
        namespace (str, optional): Namespace whose cache entries are
            flushed. Defaults to "__default__".

    Returns:
        dict: status, the namespace, the number of entries cleared, and a
            human-readable message.

    Raises:
        HTTPException: 403 when clearing is disallowed in this
            environment; 500 when the underlying connector call fails.
    """
    logger.info(f"[Clear Cache] Request to clear cache for namespace: {namespace}")

    # Guard clause: cache flushing is an internal-only operation.
    if not self.is_internal_env:
        raise HTTPException(status_code=403, detail="Cache clearing is not allowed in the current environment.")

    try:
        entries_removed = self.server_instance.r2_connector.clear_cache(namespace)
    except Exception as exc:
        # Surface connector failures as a 500 with the error text.
        logger.error(f"[Clear Cache] Error clearing cache: {exc}")
        raise HTTPException(status_code=500, detail=str(exc))

    logger.info(f"[Clear Cache] Cleared {entries_removed} cache entries for namespace: {namespace}")
    return {
        "status": "success",
        "namespace": namespace,
        "cleared_entries": entries_removed,
        "message": f"Successfully cleared {entries_removed} cache entries",
    }
309+

backend/apps/dev_combined.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,8 @@
2020

2121
from shared.config import get_environment, get_secrets, is_internal_env
2222
from shared.images import get_dev_image
23-
from services.search import SearchService
24-
from services.processing import ProcessingService
23+
from services.search_service import SearchService
24+
from services.processing_service import ProcessingService
2525
from services.http_server import ServerService
2626

2727
logger = logging.getLogger(__name__)

backend/apps/processing_app.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212

1313
from shared.config import get_environment, get_secrets
1414
from shared.images import get_processing_image
15-
from services.processing import ProcessingService
15+
from services.processing_service import ProcessingService
1616

1717
logger = logging.getLogger(__name__)
1818

backend/apps/search_app.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212

1313
from shared.config import get_environment, get_secrets
1414
from shared.images import get_search_image
15-
from services.search import SearchService
15+
from services.search_service import SearchService
1616

1717
logger = logging.getLogger(__name__)
1818

backend/apps/server.py

Lines changed: 0 additions & 40 deletions
This file was deleted.

backend/cli.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,9 @@
99

1010
# Individual apps for staging/prod deployment
1111
APPS = {
12-
"server": ("apps/server.py", "\033[36m"), # Cyan
13-
"search": ("apps/search_app.py", "\033[33m"), # Yellow
14-
"processing": ("apps/processing_app.py", "\033[35m"), # Magenta
12+
"server": ("services/http_server.py", "\033[36m"), # Cyan
13+
"search": ("services/search_service.py", "\033[33m"), # Yellow
14+
"processing": ("services/processing_service.py", "\033[35m"), # Magenta
1515
}
1616
RESET = "\033[0m"
1717

backend/database/r2_connector.py

Lines changed: 16 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -364,15 +364,12 @@ def fetch_video_page(
364364
params = {
365365
"Bucket": self.bucket_name,
366366
"Prefix": prefix,
367-
"MaxKeys": min(page_size + 1, 1000),
367+
"MaxKeys": page_size, # Request exactly page_size items
368368
}
369369

370370
if continuation_token:
371-
cursor_key = self._decode_cursor_token(continuation_token)
372-
if cursor_key:
373-
params["StartAfter"] = cursor_key
374-
else:
375-
params["ContinuationToken"] = continuation_token
371+
# Use S3's native ContinuationToken
372+
params["ContinuationToken"] = continuation_token
376373

377374
response = self.s3_client.list_objects_v2(**params)
378375

@@ -385,15 +382,19 @@ def fetch_video_page(
385382
continue
386383
filtered.append(obj)
387384

388-
has_more_flag = response.get("IsTruncated", False)
389-
items = filtered[:page_size]
390-
has_more = has_more_flag or len(filtered) > page_size
385+
# Use S3's IsTruncated flag to determine if there are more pages
386+
has_more = response.get("IsTruncated", False)
387+
items = filtered # Use all filtered items since we requested exactly page_size
391388

392389
videos: List[dict] = []
393390
for obj in items:
394391
object_key = obj.get("Key")
395392
try:
396-
filename = object_key.split('/', 1)[1]
393+
parts = object_key.split('/', 1)
394+
if len(parts) != 2:
395+
logger.warning(f"Skipping object with unexpected key format: {object_key}")
396+
continue
397+
filename = parts[1]
397398
identifier = self._encode_path(self.bucket_name, namespace, filename)
398399
url = self.s3_client.generate_presigned_url(
399400
'get_object',
@@ -406,12 +407,12 @@ def fetch_video_page(
406407
"hashed_identifier": identifier,
407408
"presigned_url": url,
408409
})
409-
except ClientError as e:
410-
logger.error(f"Error processing video {object_key}: {e}")
410+
else:
411+
logger.warning(f"Generated empty presigned URL for {object_key}")
412+
except Exception as e:
413+
logger.error(f"Error processing video {object_key}: {e}", exc_info=True)
411414

412-
next_token = response.get("NextContinuationToken") if has_more_flag else None
413-
if not next_token and has_more and videos:
414-
next_token = self._encode_cursor_token(items[-1].get("Key"))
415+
next_token = response.get("NextContinuationToken") if has_more else None
415416

416417
logger.info(
417418
"Fetched %s video objects for namespace %s (has_more=%s)",

backend/services/__init__.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +0,0 @@
1-
from .upload import UploadHandler
2-
3-
__all__ = ["UploadHandler"]

0 commit comments

Comments (0)