Skip to content

Commit b7ccd64

Browse files
committed
feat(specs): logs endpoints
1 parent 9c7b529 commit b7ccd64

File tree

6 files changed

+220
-0
lines changed

6 files changed

+220
-0
lines changed

specs/crawler/common/parameters.yml

Lines changed: 86 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,14 @@ CrawlerIdParameter:
66
schema:
77
$ref: '#/CrawlerID'
88

9+
CrawlerLogIdParameter:
10+
name: logId
11+
in: path
12+
description: Crawler log ID.
13+
required: true
14+
schema:
15+
$ref: '#/CrawlerLogID'
16+
917
TaskIdParameter:
1018
name: taskID
1119
in: path
@@ -36,6 +44,48 @@ Page:
3644
schema:
3745
$ref: '#/page'
3846

47+
From:
48+
name: from
49+
in: query
50+
description: Date 'from' filter.
51+
schema:
52+
$ref: '#/from'
53+
54+
Until:
55+
name: until
56+
in: query
57+
description: Date 'until' filter.
58+
schema:
59+
$ref: '#/until'
60+
61+
Status:
62+
name: status
63+
in: query
64+
description: "Status to filter by: 'DONE', 'SKIPPED', or 'FAILED'."
65+
schema:
66+
$ref: '#/urlsCrawledGroupStatus'
67+
68+
Limit:
69+
name: limit
70+
in: query
71+
description: Limit of the query results.
72+
schema:
73+
$ref: '#/limit'
74+
75+
Offset:
76+
name: offset
77+
in: query
78+
description: Offset of the query results.
79+
schema:
80+
$ref: '#/offset'
81+
82+
Order:
83+
name: order
84+
in: query
85+
description: Order of the query 'ASC' or 'DESC'.
86+
schema:
87+
$ref: '#/order'
88+
3989
Name:
4090
name: name
4191
in: query
@@ -60,6 +110,11 @@ CrawlerID:
60110
description: Universally unique identifier (UUID) of the crawler.
61111
example: e0f6db8a-24f5-4092-83a4-1b2c6cb6d809
62112

113+
CrawlerLogID:
114+
type: string
115+
description: Universally unique identifier (UUID) of the crawler log.
116+
example: a2ebb507-ef64-4b6b-9d84-ef66baaa7a80
117+
63118
TaskID:
64119
type: string
65120
description: Universally unique identifier (UUID) of the task.
@@ -137,6 +192,37 @@ total:
137192
description: Total number of retrievable items.
138193
example: 100
139194

195+
from:
196+
type: string
197+
description: Unix timestamp (as a string) for the 'from' date filter.
198+
example: "1762264044"
199+
200+
until:
201+
type: string
202+
description: Unix timestamp (as a string) for the 'until' date filter.
203+
example: "1762264044"
204+
205+
limit:
206+
type: integer
207+
description: Limit of the query results.
208+
minimum: 1
209+
default: 10
210+
maximum: 1000
211+
example: 10
212+
213+
offset:
214+
type: integer
215+
description: Offset of the query results.
216+
example: 11
217+
218+
order:
219+
type: string
220+
description: |
221+
Order of the query.
222+
enum:
223+
- ASC
224+
- DESC
225+
140226
Pagination:
141227
type: object
142228
description: Pagination information.
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
description: Get Crawler File Response.
2+
content:
3+
application/json:
4+
schema:
5+
title: getCrawlerFileResponse
6+
type: object
7+
properties:
8+
file:
9+
title: crawlerLogFile
10+
type: string
11+
required:
12+
- file
Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
description: List Response of Crawler Logs.
2+
content:
3+
application/json:
4+
schema:
5+
title: listCrawlerLogsResponse
6+
type: object
7+
properties:
8+
logs:
9+
type: array
10+
items:
11+
title: crawlerLogItem
12+
type: object
13+
properties:
14+
id:
15+
type: string
16+
description: ID of the crawler log.
17+
configId:
18+
type: string
19+
description: Crawler Config identifier.
20+
reindexId:
21+
type: string
22+
description: Identifier of Reindex.
23+
fileSizeBytes:
24+
type: integer
25+
description: Size of the compressed crawler log.
26+
uncompressedSizeBytes:
27+
type: integer
28+
description: Size of the uncompressed crawler log.
29+
crawlStartedAt:
30+
type: string
31+
description: Crawl started at date.
32+
crawlCompletedAt:
33+
type: string
34+
description: Crawl completed at date.
35+
fileCreatedAt:
36+
type: string
37+
description: File created date.
38+
expiresAt:
39+
type: string
40+
description: File expiration date.
41+
status:
42+
type: string
43+
description: File status.
44+
accessCount:
45+
type: integer
46+
description: File access count.
47+
lastAccessedAt:
48+
type: string
49+
description: File last accessed date.
50+
nullable: true
51+
urlsDone:
52+
type: integer
53+
description: Crawler urls done.
54+
urlsSkipped:
55+
type: integer
56+
description: Crawler urls skipped.
57+
urlsFailed:
58+
type: integer
59+
description: Crawler urls failed.
60+
meta:
61+
title: crawlerLogsMeta
62+
type: object
63+
properties:
64+
total:
65+
type: integer
66+
description: Total of records found.
67+
required:
68+
- logs
69+
- meta
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
get:
2+
operationId: listCrawlRuns
3+
summary: List crawler runs
4+
description: |
5+
The Crawler Logs feature allows you to monitor and debug your crawler’s activity by recording
6+
detailed logs for each crawl run. Logs are useful for troubleshooting crawl issues,
7+
verifying site coverage, and monitoring crawler performance over time.
8+
tags:
9+
- logs
10+
x-acl: []
11+
parameters:
12+
- $ref: '../common/parameters.yml#/CrawlerIdParameter'
13+
- $ref: '../common/parameters.yml#/From'
14+
- $ref: '../common/parameters.yml#/Until'
15+
- $ref: '../common/parameters.yml#/Status'
16+
- $ref: '../common/parameters.yml#/Limit'
17+
- $ref: '../common/parameters.yml#/Offset'
18+
- $ref: '../common/parameters.yml#/Order'
19+
responses:
20+
'200':
21+
$ref: '../common/schemas/crawlerLogsResponse.yml'
22+
'400':
23+
$ref: '../../common/responses/InvalidRequest.yml'
24+
'401':
25+
$ref: '../common/schemas/responses.yml#/MissingAuthorization'
26+
'403':
27+
$ref: '../common/schemas/responses.yml#/NoRightsOnCrawler'
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
get:
2+
operationId: getCrawlRunFile
3+
summary: Crawler run file
4+
description: |
5+
The Crawler Logs file allows you to monitor and debug your crawler’s activity by recording
6+
detailed logs for each crawl run. Logs are useful for troubleshooting crawl issues,
7+
verifying site coverage, and monitoring crawler performance over time.
8+
tags:
9+
- logs
10+
x-acl: []
11+
parameters:
12+
- $ref: '../common/parameters.yml#/CrawlerIdParameter'
13+
- $ref: '../common/parameters.yml#/CrawlerLogIdParameter'
14+
responses:
15+
'200':
16+
$ref: '../common/schemas/crawlerLogsResponse.yml'
17+
'400':
18+
$ref: '../../common/responses/InvalidRequest.yml'
19+
'401':
20+
$ref: '../common/schemas/responses.yml#/MissingAuthorization'
21+
'403':
22+
$ref: '../common/schemas/responses.yml#/NoRightsOnCrawler'

specs/crawler/spec.yml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -103,6 +103,10 @@ paths:
103103
$ref: 'paths/crawlerCrawl.yml'
104104
/1/crawlers/{id}/stats/urls:
105105
$ref: 'paths/crawlerStats.yml'
106+
/1/crawlers/{id}/crawl_runs:
107+
$ref: 'paths/crawlerLogs.yml'
108+
/1/crawlers/{id}/{logId}/download:
109+
$ref: 'paths/crawlerLogsFile.yml'
106110
/1/crawlers/{id}/config:
107111
$ref: 'paths/crawlerConfig.yml'
108112
/1/crawlers/{id}/config/versions:

0 commit comments

Comments
 (0)