@@ -93,7 +93,7 @@ def pinecone_index() -> str:
 
 
 def validate_pinecone_index(
-    index_name: str, expected_num_of_vectors: int, retries=10, interval=1
+    index_name: str, expected_num_of_vectors: int, retries=30, interval=1
 ) -> None:
     # Because there's a delay for the index to catch up to the recent writes, add in a retry
     pinecone = Pinecone(api_key=get_api_key())
@@ -103,9 +103,10 @@ def validate_pinecone_index(
         index_stats = index.describe_index_stats()
         vector_count = index_stats["total_vector_count"]
         if vector_count == expected_num_of_vectors:
+            logger.info(f"expected {expected_num_of_vectors} == vector count {vector_count}")
             break
         logger.info(
-            f"retry attempt {i}: expected {expected_num_of_vectors} != vector count {vector_count}"
+            f"retry attempt {i}: expected {expected_num_of_vectors} != vector count {vector_count}"
         )
         time.sleep(interval)
     assert vector_count == expected_num_of_vectors
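The retry loop exists because Pinecone's index stats are eventually consistent: `describe_index_stats()` can lag behind recent writes, so the helper polls `total_vector_count` until it matches the expected value (now for up to 30 attempts). A minimal standalone sketch of the same polling pattern, assuming the API key comes from a `PINECONE_API_KEY` environment variable rather than the test suite's `get_api_key()` helper:

```python
import logging
import os
import time

from pinecone import Pinecone

logger = logging.getLogger(__name__)


def wait_for_vector_count(
    index_name: str, expected_num_of_vectors: int, retries: int = 30, interval: float = 1.0
) -> None:
    """Poll index stats until the vector count catches up to recent writes."""
    # Assumption: the API key is provided via an environment variable.
    pinecone = Pinecone(api_key=os.environ["PINECONE_API_KEY"])
    index = pinecone.Index(index_name)
    vector_count = -1
    for i in range(retries):
        vector_count = index.describe_index_stats()["total_vector_count"]
        if vector_count == expected_num_of_vectors:
            logger.info(f"expected {expected_num_of_vectors} == vector count {vector_count}")
            break
        logger.info(
            f"retry attempt {i}: expected {expected_num_of_vectors} != vector count {vector_count}"
        )
        time.sleep(interval)
    assert vector_count == expected_num_of_vectors
```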
@@ -144,6 +145,7 @@ async def test_pinecone_destination(pinecone_index: str, upload_file: Path, temp
     with new_upload_file.open() as f:
         staged_content = json.load(f)
     expected_num_of_vectors = len(staged_content)
+    logger.info("validating first upload")
     validate_pinecone_index(
         index_name=pinecone_index, expected_num_of_vectors=expected_num_of_vectors
     )
@@ -153,6 +155,7 @@ async def test_pinecone_destination(pinecone_index: str, upload_file: Path, temp
         await uploader.run_async(path=new_upload_file, file_data=file_data)
     else:
         uploader.run(path=new_upload_file, file_data=file_data)
+    logger.info("validating second upload")
     validate_pinecone_index(
         index_name=pinecone_index, expected_num_of_vectors=expected_num_of_vectors
     )
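Both validations expect the same `expected_num_of_vectors`, which presumably relies on the second upload writing the same record IDs: Pinecone upserts overwrite vectors with matching IDs rather than appending new ones, so re-running the upload should leave `total_vector_count` unchanged. A small illustrative sketch of that behaviour, where the index name, dimension, and API key are placeholders and the index is assumed to already exist:

```python
from pinecone import Pinecone

# Assumptions: "example-index" already exists with dimension 3, API key is a placeholder.
pc = Pinecone(api_key="YOUR_API_KEY")
index = pc.Index("example-index")

records = [
    {"id": "doc-1", "values": [0.1, 0.2, 0.3]},
    {"id": "doc-2", "values": [0.4, 0.5, 0.6]},
]

# The first upsert creates the vectors; the second upsert rewrites the same IDs,
# so the count should settle at len(records) once the stats catch up.
index.upsert(vectors=records)
index.upsert(vectors=records)

print(index.describe_index_stats()["total_vector_count"])  # expected: 2, eventually
```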