From 5aa83b22c3167e91533ad4a8818d8b7e40c331ab Mon Sep 17 00:00:00 2001
From: lingwei-gu
Date: Sun, 12 Oct 2025 13:34:58 -0400
Subject: [PATCH 1/3] remove error logging its just default behaviour

---
 src/nuggetizer/core/llm.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/nuggetizer/core/llm.py b/src/nuggetizer/core/llm.py
index 2cb0bab..314601c 100644
--- a/src/nuggetizer/core/llm.py
+++ b/src/nuggetizer/core/llm.py
@@ -173,7 +173,6 @@ def run(
                 else:
                     encoding = tiktoken.get_encoding(self.model)
             except Exception as e:
-                print(f"Error: {str(e)}")
                 encoding = tiktoken.get_encoding("cl100k_base")
             return response, len(encoding.encode(response))
         except Exception as e:

From 43c9cb298d56275e81666646da553d612427dace Mon Sep 17 00:00:00 2001
From: lingwei-gu
Date: Sun, 12 Oct 2025 13:41:29 -0400
Subject: [PATCH 2/3] pass ruff

---
 src/nuggetizer/core/llm.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/nuggetizer/core/llm.py b/src/nuggetizer/core/llm.py
index 314601c..1df0208 100644
--- a/src/nuggetizer/core/llm.py
+++ b/src/nuggetizer/core/llm.py
@@ -172,8 +172,9 @@ def run(
                     encoding = tiktoken.get_encoding("o200k_base")
                 else:
                     encoding = tiktoken.get_encoding(self.model)
-            except Exception as e:
+            except Exception:
                 encoding = tiktoken.get_encoding("cl100k_base")
+
             return response, len(encoding.encode(response))
         except Exception as e:
             print(f"LLM Inference Error: {str(e)}")

From aa2b8bc101800b7045675bcedb68b9317d5c7d9d Mon Sep 17 00:00:00 2001
From: lingwei-gu
Date: Sun, 12 Oct 2025 13:44:38 -0400
Subject: [PATCH 3/3] pass ruff

---
 src/nuggetizer/core/llm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/nuggetizer/core/llm.py b/src/nuggetizer/core/llm.py
index 1df0208..8a826e5 100644
--- a/src/nuggetizer/core/llm.py
+++ b/src/nuggetizer/core/llm.py
@@ -174,7 +174,7 @@ def run(
                     encoding = tiktoken.get_encoding(self.model)
             except Exception:
                 encoding = tiktoken.get_encoding("cl100k_base")
-            
+
             return response, len(encoding.encode(response))
         except Exception as e:
             print(f"LLM Inference Error: {str(e)}")
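
Note on the resulting code: after all three patches, the token-counting fallback in
src/nuggetizer/core/llm.py reads roughly as in the minimal sketch below. Only the lines
visible in the hunks above come from the source; the standalone helper name
count_response_tokens, its parameters, and the startswith() branch condition are
assumptions added so the fragment is self-contained and runnable.

    import tiktoken

    def count_response_tokens(response: str, model: str) -> int:
        """Count tokens in `response` using the same fallback chain as llm.py."""
        try:
            # Assumed condition: the hunks only show that some branch selects
            # o200k_base; a model-name prefix check stands in for it here.
            if model.startswith("gpt-4o"):
                encoding = tiktoken.get_encoding("o200k_base")
            else:
                # Mirrors tiktoken.get_encoding(self.model); this raises when
                # the model name is not a known tiktoken encoding name.
                encoding = tiktoken.get_encoding(model)
        except Exception:
            # Patch 1/3 drops the print() that used to sit here: the raised
            # exception already describes the problem, so the fallback to the
            # cl100k_base encoding is silent by design.
            encoding = tiktoken.get_encoding("cl100k_base")

        return len(encoding.encode(response))

    # Example usage: count_response_tokens("hello world", "gpt-4o-mini")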