@@ -7,8 +7,9 @@
 import pytest
 
 from deepset_mcp.api.client import AsyncDeepsetClient
-from deepset_mcp.api.pipeline.models import DeepsetPipeline, PipelineLogList
+from deepset_mcp.api.pipeline.models import DeepsetPipeline
 from deepset_mcp.api.pipeline.resource import PipelineResource
+from deepset_mcp.api.shared_models import PaginatedResponse
 
 pytestmark = pytest.mark.integration
 
@@ -145,7 +146,7 @@ async def test_get_logs_for_deployed_pipeline(
     logs = await pipeline_resource.get_logs(pipeline_name=pipeline_name)
 
     # Verify the response structure
-    assert isinstance(logs, PipelineLogList)
+    assert isinstance(logs, PaginatedResponse)
     assert isinstance(logs.data, list)
     assert isinstance(logs.has_more, bool)
     assert isinstance(logs.total, int)
@@ -178,7 +179,7 @@ async def test_get_logs_for_non_deployed_pipeline(
     logs = await pipeline_resource.get_logs(pipeline_name=pipeline_name)
 
     # Should return a valid response structure even if empty
-    assert isinstance(logs, PipelineLogList)
+    assert isinstance(logs, PaginatedResponse)
    assert isinstance(logs.data, list)
     assert isinstance(logs.has_more, bool)
     assert isinstance(logs.total, int)
@@ -209,3 +210,73 @@ async def test_deployment_timeout_handling(
             timeout_seconds=1,  # Very short timeout
             poll_interval=1,
         )
+
+
+@pytest.mark.extra_slow
+@pytest.mark.asyncio
+async def test_get_logs_pagination(
+    pipeline_resource: PipelineResource,
+    simple_yaml_config: str,
+) -> None:
+    """
+    Test pagination functionality for pipeline logs.
+
+    This test:
+    1. Creates and deploys a pipeline
+    2. Waits for deployment and potentially some logs
+    3. Requests logs with a small limit to force pagination
+    4. Verifies that cursor-based pagination and async iteration work correctly
+    """
+    pipeline_name = "test-logs-pagination-pipeline"
+
+    # Step 1: Create and deploy a pipeline
+    await pipeline_resource.create(pipeline_name=pipeline_name, yaml_config=simple_yaml_config)
+    deploy_result = await pipeline_resource.deploy(pipeline_name=pipeline_name)
+    assert deploy_result.valid is True, f"Pipeline deployment failed: {deploy_result.errors}"
+
+    # Step 2: Wait for the pipeline to be deployed
+    deployed_pipeline = await wait_for_pipeline_deployment(
+        pipeline_resource=pipeline_resource,
+        pipeline_name=pipeline_name,
+        timeout_seconds=300,  # 5 minutes timeout
+        poll_interval=15,  # Check every 15 seconds
+    )
+
+    assert deployed_pipeline.status == "DEPLOYED"
+
+    # Step 3: Get first page of logs with a small limit to test pagination
+    first_page = await pipeline_resource.get_logs(pipeline_name=pipeline_name, limit=5)
+
+    # Verify the response structure
+    assert isinstance(first_page, PaginatedResponse)
+    assert isinstance(first_page.data, list)
+    assert isinstance(first_page.has_more, bool)
+    assert isinstance(first_page.total, int | type(None))
+
+    # Step 4: If there are more logs available, test cursor-based pagination
+    if first_page.has_more and first_page.next_cursor:
+        second_page = await pipeline_resource.get_logs(
+            pipeline_name=pipeline_name, limit=5, after=first_page.next_cursor
+        )
+
+        # Verify second page structure
+        assert isinstance(second_page, PaginatedResponse)
+        assert isinstance(second_page.data, list)
+
+        # Ensure we got different logs (no duplicates between pages)
+        first_page_log_ids = {log.log_id for log in first_page.data}
+        second_page_log_ids = {log.log_id for log in second_page.data}
+
+        # There should be no overlap between pages
+        assert first_page_log_ids.isdisjoint(second_page_log_ids), "Found duplicate logs across pages"
+
+    # Step 5: Test async iteration over all logs
+    all_logs_via_iteration = []
+    async for log in first_page:
+        all_logs_via_iteration.append(log)
+        # Limit to avoid infinite loops in case of issues
+        if len(all_logs_via_iteration) > 100:
+            break
+
+    # Should have at least the logs from the first page
+    assert len(all_logs_via_iteration) >= len(first_page.data)
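
For context, here is a minimal usage sketch of the cursor-based pagination the new test exercises. The `collect_all_logs` helper is hypothetical (not part of this change); it only mirrors the `get_logs(pipeline_name=..., limit=..., after=...)` calls and the `data` / `has_more` / `next_cursor` fields of `PaginatedResponse` shown in the diff above.

```python
from deepset_mcp.api.pipeline.resource import PipelineResource


async def collect_all_logs(resource: PipelineResource, pipeline_name: str) -> list:
    """Hypothetical helper: walk every page of logs by following the cursor."""
    logs: list = []
    # Fetch the first page with a small limit so pagination actually kicks in
    page = await resource.get_logs(pipeline_name=pipeline_name, limit=5)
    logs.extend(page.data)
    # Keep requesting the next page until the API reports no more results
    while page.has_more and page.next_cursor:
        page = await resource.get_logs(
            pipeline_name=pipeline_name, limit=5, after=page.next_cursor
        )
        logs.extend(page.data)
    return logs
```

The async iteration in Step 5 (`async for log in first_page`) should produce the same logs as this manual loop, assuming `PaginatedResponse` fetches subsequent pages transparently during iteration.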