Skip to content

Commit 42f52c5

Browse files
committed
fix: debug log processing speed
1 parent 2f43b1a commit 42f52c5

File tree

2 files changed: +11 additions, −2 deletions

lavender_data/server/app.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ async def lifespan(app: FastAPI):
111111

112112
def log_filter(request: Request, response):
113113
if (
114-
re.match(r"/iterations/.*/next/.*", str(request.url))
114+
re.match(r"/iterations/.*/next/.*", request.url.path)
115115
and response.status_code == 202
116116
):
117117
return False
@@ -127,7 +127,7 @@ async def add_process_time_header(request: Request, call_next):
127127

128128
if log_filter(request, response):
129129
logger.info(
130-
f"{request.client.host}:{request.client.port} - {request.method} {request.url} {response.status_code} {process_time:.2f}s"
130+
f"{request.client.host}:{request.client.port} - {request.method} {request.url.path}?{request.url.query} {response.status_code} {process_time:.2f}s"
131131
)
132132

133133
return response

lavender_data/server/iteration/process.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import ujson as json
22
from typing import Optional
33
import traceback
4+
import time
45

56
from fastapi import HTTPException
67
from pydantic import BaseModel
@@ -173,10 +174,18 @@ def process_next_samples_and_store(
173174
*,
174175
shared_memory: SharedMemory,
175176
):
177+
logger = get_logger(__name__)
176178
try:
179+
_start = time.perf_counter()
177180
batch = process_next_samples(params, max_retry_count)
181+
_process_time = time.perf_counter()
178182
content = serialize_sample(batch)
183+
_serialize_time = time.perf_counter()
179184
shared_memory.set(cache_key, content, ex=cache_ttl)
185+
_store_time = time.perf_counter()
186+
logger.debug(
187+
f"Done processing {cache_key} in {_store_time-_start:.2f}s, process: {_process_time-_start:.2f}s, serialize: {_serialize_time - _process_time:.2f}s, store: {_store_time - _serialize_time:.2f}s, size: {len(content)} bytes"
188+
)
180189
except ProcessNextSamplesException as e:
181190
shared_memory.set(cache_key, f"processing_error:{e.json()}", ex=cache_ttl)
182191
except Exception as e:

Comments (0)