Skip to content

Commit bcf905d — "Remove all _write_debug_log occurrences and fix related tests" (1 parent: ea6c458)

File tree

8 files changed: +8 −58 lines

src/mcp_code_indexer/vector_mode/daemon.py

Lines changed: 1 addition & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
import time
1515
import time
1616

17-
from mcp_code_indexer.vector_mode.monitoring.utils import _write_debug_log
17+
1818

1919
from ..database.database import DatabaseManager
2020
from ..database.models import Project, SyncStatus
@@ -275,7 +275,6 @@ async def _queue_full_project_indexing(
275275
try:
276276
# Get or create IndexMeta for the project
277277
index_meta = await self.db_manager.get_or_create_index_meta(project_name)
278-
_write_debug_log(f"Retrieved IndexMeta for {project_name}: {index_meta}")
279278

280279
# If status is 'failed' or 'paused', change it to 'pending'
281280
if index_meta.sync_status in [SyncStatus.FAILED, SyncStatus.PAUSED]:
@@ -287,7 +286,6 @@ async def _queue_full_project_indexing(
287286

288287
# Only queue initial embedding if status is 'pending'
289288
if index_meta.sync_status == SyncStatus.PENDING:
290-
_write_debug_log(f"Retrieved IndexMeta for {project_name}: 1")
291289
task: InitialProjectEmbeddingTask = {
292290
"type": VectorDaemonTaskType.INITIAL_PROJECT_EMBEDDING,
293291
"project_name": project_name,
@@ -442,18 +440,15 @@ async def _process_initial_project_embedding_task(
442440
logger.info(
443441
f"Worker {worker_id}: Starting initial project embedding for {project_name}"
444442
)
445-
_write_debug_log(f"Retrieved IndexMeta for {project_name}: 2")
446443
try:
447444
# Update IndexMeta status to in_progress
448445
index_meta = await self.db_manager.get_or_create_index_meta(project_name)
449446
index_meta.sync_status = SyncStatus.IN_PROGRESS
450447
await self.db_manager.update_index_meta(index_meta)
451-
_write_debug_log(f"Retrieved IndexMeta for {project_name}: 3")
452448
# Perform the actual embedding
453449
stats = await self._perform_initial_project_embedding(
454450
project_name, folder_path
455451
)
456-
_write_debug_log(f"Retrieved IndexMeta for {project_name}: 4")
457452

458453
# Update IndexMeta status to completed on success
459454
index_meta = await self.db_manager.get_or_create_index_meta(project_name)
@@ -466,10 +461,6 @@ async def _process_initial_project_embedding_task(
466461
index_meta.sync_status = SyncStatus.COMPLETED
467462
index_meta.error_message = None
468463

469-
_write_debug_log(
470-
f"Retrieved IndexMeta for {project_name}: {index_meta.sync_status.value}"
471-
)
472-
473464
index_meta.last_sync = datetime.utcnow()
474465
index_meta.total_files = stats.get("scanned", 0)
475466
index_meta.indexed_files = stats.get("processed", 0)
@@ -496,9 +487,6 @@ async def _process_initial_project_embedding_task(
496487
logger.error(
497488
f"Failed to update IndexMeta after embedding error: {meta_error}"
498489
)
499-
_write_debug_log(
500-
f"Retrieved IndexMeta for {project_name}: error: {meta_error}"
501-
)
502490

503491
self.stats["errors_count"] += 1
504492

@@ -803,9 +791,6 @@ async def process_batch_with_semaphore(
803791
for result in batch_results:
804792
if isinstance(result, Exception):
805793
logger.error(f"Batch processing failed with exception: {result}")
806-
_write_debug_log(
807-
f"Batch processing failed with exception: {result}"
808-
)
809794
# Estimate failed files (assuming average batch size)
810795
estimated_failed = min(
811796
batch_size, len(project_files) - processed_count
@@ -849,7 +834,6 @@ async def process_batch_with_semaphore(
849834
)
850835

851836
except Exception as e:
852-
_write_debug_log(f"Error during initial embedding: {e}")
853837
logger.error(
854838
f"Error during initial project embedding for {project_name}: {e}"
855839
)

src/mcp_code_indexer/vector_mode/monitoring/file_watcher.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
import asyncio
99
import logging
10-
from .utils import _write_debug_log
10+
1111
from pathlib import Path
1212
from typing import Callable, Optional, List, Dict, Any
1313
import time

src/mcp_code_indexer/vector_mode/monitoring/utils.py

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -5,12 +5,4 @@
55
logger = logging.getLogger(__name__)
66

77

8-
def _write_debug_log(message: str) -> None:
9-
"""Write debug message to temporary file."""
10-
try:
11-
with open("/tmp/filewatcher_debug.log", "a") as f:
12-
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
13-
f.write(f"[{timestamp}] {message}\n")
14-
except Exception:
15-
logger.error("Failed to write debug log. ")
16-
pass
8+

src/mcp_code_indexer/vector_mode/providers/turbopuffer_client.py

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from typing import List, Dict, Any, Optional
1515
import turbopuffer
1616

17-
from mcp_code_indexer.vector_mode.monitoring.utils import _write_debug_log
17+
1818
from turbopuffer.types import Row
1919

2020
from ..config import VectorConfig
@@ -126,7 +126,6 @@ def upsert_vectors(
126126

127127
except Exception as e:
128128
logger.error(f"Failed to upsert vectors: {e}")
129-
_write_debug_log(f"Failed to upsert vectors: {e}")
130129
raise RuntimeError(f"Vector upsert failed: {e}")
131130

132131
def upsert_vectors_batch(
@@ -211,7 +210,6 @@ def upsert_vectors_batch(
211210

212211
except Exception as e:
213212
logger.error(f"Failed to batch upsert vectors: {e}")
214-
_write_debug_log(f"Failed to batch upsert vectors: {e}")
215213
raise RuntimeError(f"Batch vector upsert failed: {e}")
216214

217215
def search_vectors(
@@ -243,7 +241,6 @@ def search_vectors(
243241

244242
except Exception as e:
245243
logger.error(f"Vector search failed: {e}")
246-
_write_debug_log(f"Vector search failed: {e}, filters value: {filters}")
247244
raise RuntimeError(f"Vector search failed: {e}")
248245

249246
def delete_vectors(
@@ -393,8 +390,6 @@ def search_with_metadata_filter(
393390
# Multiple conditions - use And format
394391
filters = ("And", filter_conditions)
395392

396-
_write_debug_log(f"Search filters: {filters}")
397-
398393
return self.search_vectors(
399394
query_vector=query_vector,
400395
top_k=top_k,

src/mcp_code_indexer/vector_mode/providers/voyage_client.py

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
from typing import List, Dict, Any, Tuple
1010
import voyageai
1111

12-
from mcp_code_indexer.vector_mode.monitoring.utils import _write_debug_log
12+
1313

1414
from ..config import VectorConfig, DEFAULT_EMBEDDING_MODEL
1515
from ..const import MODEL_DIMENSIONS
@@ -93,11 +93,6 @@ def generate_embeddings(
9393

9494
logger.info(f"Generating embeddings for {len(texts)} texts using {self.model}")
9595

96-
_write_debug_log(
97-
f"generating embeddings, estimated cost: {self.count_tokens(texts)}. len(texts)={len(texts)}"
98-
)
99-
_write_debug_log(f"sample text: {texts[0][:100]}...")
100-
10196
try:
10297
result = self.client.embed(
10398
texts=texts, model=self.model, input_type=input_type, truncation=True
@@ -111,7 +106,6 @@ def generate_embeddings(
111106
return result.embeddings
112107

113108
except Exception as e:
114-
_write_debug_log(f"Failed to generate embeddings: {e}")
115109
logger.error(f"Failed to generate embeddings: {e}")
116110
raise RuntimeError(f"Embedding generation failed: {e}")
117111

src/mcp_code_indexer/vector_mode/services/embedding_service.py

Lines changed: 1 addition & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
from pathlib import Path
1111
from typing import List, Dict, Tuple
1212

13-
from mcp_code_indexer.vector_mode.monitoring.utils import _write_debug_log
13+
1414

1515
from ..chunking.ast_chunker import CodeChunk
1616
from ..providers.voyage_client import VoyageClient
@@ -305,9 +305,6 @@ async def _build_token_aware_batches(
305305
logger.debug(
306306
f"File {file_path}: {len(file_texts)} texts, {file_tokens} tokens"
307307
)
308-
_write_debug_log(
309-
f"File {file_path}: {len(file_texts)} texts, {file_tokens} tokens"
310-
)
311308

312309
# If adding this file would exceed token limit OR text count limit, finalize current batch
313310
if (
@@ -332,11 +329,6 @@ async def _build_token_aware_batches(
332329
f"{current_batch_tokens} tokens (limit exceeded: "
333330
f"tokens={token_exceeded}, count={count_exceeded})"
334331
)
335-
_write_debug_log(
336-
f"BATCH ANALYSIS: current_batch_texts={len(current_batch_texts)}, "
337-
f"file_texts={len(file_texts)}, total={len(current_batch_texts) + len(file_texts)}, "
338-
f"limit={self.config.voyage_batch_size_limit}"
339-
)
340332

341333
batches.append((current_batch_texts, current_batch_boundaries))
342334

@@ -354,11 +346,6 @@ async def _build_token_aware_batches(
354346
current_batch_tokens += file_tokens
355347
batch_idx = end_idx
356348

357-
_write_debug_log(
358-
f"AFTER ADDING: batch now has {len(current_batch_texts)} texts, "
359-
f"{current_batch_tokens} tokens"
360-
)
361-
362349
# Add final batch if it has content
363350
if current_batch_texts:
364351
logger.info(

src/mcp_code_indexer/vector_mode/services/vector_storage_service.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,6 @@
1010
from pathlib import Path
1111
from typing import List, Dict, Any, Optional
1212

13-
from mcp_code_indexer.vector_mode.monitoring.utils import _write_debug_log
14-
1513
from turbopuffer.types import Row
1614

1715
from ..chunking.ast_chunker import CodeChunk

tests/unit/vector_mode/test_vector_storage_service.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -562,7 +562,7 @@ def __init__(self, file_path: str, file_mtime: str, **kwargs):
562562
query_vector=[0.0] * embedding_dim, # dummy vector with embedding_dimension
563563
top_k=1200,
564564
namespace="mcp_code_test_project",
565-
filters=(("project_id", "Eq", project_name),),
565+
filters=("project_id", "Eq", project_name),
566566
)
567567

568568
async def test_get_file_metadata_specific_files(
@@ -761,5 +761,5 @@ def __init__(self, file_path: str, file_mtime: str):
761761
query_vector=[0.0] * embedding_dim,
762762
top_k=1200,
763763
namespace="mcp_code_test_project",
764-
filters=(("project_id", "Eq", project_name),),
764+
filters=("project_id", "Eq", project_name),
765765
)

0 commit comments