Skip to content

Commit 00aed9b

Browse files
committed
Bugfix: Fix crawlPage API internal error
1 parent 624efe7 commit 00aed9b

File tree

6 files changed

+11
-7
lines changed

6 files changed

+11
-7
lines changed

.eslintrc.js

+2-1
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ module.exports = {
     '@typescript-eslint/no-explicit-any': 'off',
     '@typescript-eslint/no-empty-interface': 'off',
     '@typescript-eslint/no-var-requires': 'off',
-    '@typescript-eslint/no-non-null-assertion': 'off'
+    '@typescript-eslint/no-non-null-assertion': 'off',
+    'no-empty': 'off'
   }
 }

README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -136,7 +136,7 @@ running result:
 #### An example of a crawler application

-Create a new **application instance** via [xCrawl()](#xCrawl):
+Create a new **application instance** via [xCrawl()](#x-crawl-2):

 ```js
 import xCrawl from 'x-crawl'

package.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
 {
   "private": true,
   "name": "x-crawl",
-  "version": "3.2.10",
+  "version": "3.2.11",
   "author": "coderHXL",
   "description": "x-crawl is a flexible nodejs crawler library.",
   "license": "MIT",

publish/README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -136,7 +136,7 @@ running result:
 #### An example of a crawler application

-Create a new **application instance** via [xCrawl()](#xCrawl):
+Create a new **application instance** via [xCrawl()](#x-crawl-2):

 ```js
 import xCrawl from 'x-crawl'

publish/package.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
 {
   "name": "x-crawl",
-  "version": "3.2.10",
+  "version": "3.2.11",
   "author": "coderHXL",
   "description": "x-crawl is a flexible nodejs crawler library.",
   "license": "MIT",

src/api.ts

+5-2
Original file line numberDiff line numberDiff line change
@@ -149,10 +149,13 @@ export function createCrawlPage(baseConfig: LoaderXCrawlBaseConfig) {
         timeout: requestConfig.timeout
       })
     } catch (error: any) {
-      console.log(`error: ${logError(error.message)}`)
+      console.log(logError(`Error: ${error.message}`))
     }

-    const content = await page!.content()
+    let content = ''
+    try {
+      content = await page!.content()
+    } catch {}

     const res: CrawlPage = {
       httpResponse,
0 commit comments

Comments (0)