Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 17 additions & 2 deletions rlm/core/lm_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def handle(self):
request_data = socket_recv(self.connection)
if not isinstance(request_data, dict):
response = LMResponse.error_response("Request must be a JSON object")
socket_send(self.connection, response.to_dict())
self._safe_send(response)
return

request = LMRequest.from_dict(request_data)
Expand All @@ -37,11 +37,26 @@ def handle(self):
else:
response = LMResponse.error_response("Missing 'prompt' or 'prompts' in request.")

socket_send(self.connection, response.to_dict())
self._safe_send(response)

except (BrokenPipeError, ConnectionError, ConnectionResetError, OSError):
# Client disconnected - this is expected during parallel execution
# when workers complete and close their sockets. Silently ignore.
pass

except Exception as e:
# Try to send error response, but don't fail if socket is broken
response = LMResponse.error_response(str(e))
self._safe_send(response)

def _safe_send(self, response: LMResponse) -> bool:
    """Attempt to deliver *response* over this handler's socket.

    Returns True when the payload was written successfully, and False when
    the peer has already disconnected. A dropped client is an expected,
    normal condition here (workers close their sockets when done), so it
    is swallowed silently rather than propagated.
    """
    payload = response.to_dict()
    try:
        socket_send(self.connection, payload)
    except (BrokenPipeError, ConnectionError, ConnectionResetError, OSError):
        # Peer closed the connection; nothing useful to report.
        return False
    return True

def _handle_single(self, request: LMRequest, handler: "LMHandler") -> LMResponse:
"""Handle a single prompt request."""
Expand Down