
Commit 1fe00c6

Commit message: 其他 (Misc)

1 parent: cce1c87

File tree: 5 files changed, +68 -27 lines changed

.eslintrc.js (+2 -1)

@@ -13,6 +13,7 @@ module.exports = {
   plugins: ['@typescript-eslint'],
   rules: {
     '@typescript-eslint/no-explicit-any': 'off',
-    '@typescript-eslint/no-empty-interface': 'off'
+    '@typescript-eslint/no-empty-interface': 'off',
+    '@typescript-eslint/no-var-requires': 'off'
   }
 }
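
For context, a minimal sketch of what the newly disabled rule allows: with '@typescript-eslint/no-var-requires' set to 'off', assigning the result of a require() call inside a .ts file no longer triggers a lint error. The module used below is purely illustrative and is not part of this commit.

```ts
// With '@typescript-eslint/no-var-requires': 'off', this CommonJS-style
// require in a TypeScript file is no longer flagged by ESLint.
// 'node:path' is just an example built-in module.
const path = require('node:path')

console.log(path.posix.join('publish', 'package.json')) // 'publish/package.json'
```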

README.md (+30 -10)

@@ -2,7 +2,7 @@
 
 English | <a href="#cn" style="text-decoration: none">简体中文</a>
 
-Crawl is a Nodejs multifunctional crawler library. Provide configuration to batch fetch HTML, JSON, images, etc.
+XCrawl is a Nodejs multifunctional crawler library. Provide configuration to batch fetch HTML, JSON, images, etc.
 
 ## Install
 
@@ -45,7 +45,7 @@ Create a crawler instance via new XCrawl.
 class XCrawl {
   private readonly baseConfig
   constructor(baseConfig?: IXCrawlBaseConifg)
-  fetch<T = any>(config: IFetchConfig): Promise<T>
+  fetch<T = any>(config: IFetchConfig): Promise<IFetch<T>>
   fetchFile(config: IFetchFileConfig): Promise<IFetchFile>
   fetchHTML(url: string): Promise<JSDOM>
 }
@@ -108,9 +108,9 @@ function fetchFile(config: IFetchFileConfig): Promise<IFetchFile>
 
 ```js
 const requestConifg = [
-  { url: '/xxxx', method: 'GET' },
-  { url: '/xxxx', method: 'GET' },
-  { url: '/xxxx', method: 'GET' }
+  { url: '/xxxx' },
+  { url: '/xxxx' },
+  { url: '/xxxx' }
 ]
 
 myXCrawl.fetchFile({
@@ -188,6 +188,16 @@ interface IFetchBaseConifg {
 }
 ```
 
+- IFetch
+
+```ts
+type IFetch<T> = {
+  statusCode: number | undefined
+  headers: IncomingHttpHeaders // node:http
+  data: T
+}[]
+```
+
 - IFetchFile
 
 ```ts
@@ -238,7 +248,7 @@ If you have any **questions** or **needs** , please submit **Issues in** https:/
 
 <a href="#en" style="text-decoration: none">English</a> | 简体中文
 
-Crawl 是 Nodejs 多功能爬虫库。提供配置即可批量抓取 HTML 、JSON、图片等等。
+XCrawl 是 Nodejs 多功能爬虫库。提供配置即可批量抓取 HTML 、JSON、图片等等。
 
 ## 安装
 
@@ -281,7 +291,7 @@ docsXCrawl.fetchHTML('/zh/get-started').then((jsdom) => {
 class XCrawl {
   private readonly baseConfig
   constructor(baseConfig?: IXCrawlBaseConifg)
-  fetch<T = any>(config: IFetchConfig): Promise<T>
+  fetch<T = any>(config: IFetchConfig): Promise<IFetch<T>>
   fetchFile(config: IFetchFileConfig): Promise<IFetchFile>
   fetchHTML(url: string): Promise<JSDOM>
 }
@@ -344,9 +354,9 @@ function fetchFile(config: IFetchFileConfig): Promise<IFetchFile>
 
 ```js
 const requestConifg = [
-  { url: '/xxxx', method: 'GET' },
-  { url: '/xxxx', method: 'GET' },
-  { url: '/xxxx', method: 'GET' }
+  { url: '/xxxx' },
+  { url: '/xxxx' },
+  { url: '/xxxx' }
 ]
 
 myXCrawl.fetchFile({
@@ -424,6 +434,16 @@ interface IFetchBaseConifg {
 }
 ```
 
+- IFetch
+
+```ts
+type IFetch<T> = {
+  statusCode: number | undefined
+  headers: IncomingHttpHeaders // node:http
+  data: T
+}[]
+```
+
 - IFetchFile
 
 ```ts
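
A hedged sketch of how the changed fetch() signature might be consumed. The IFetch<T> fields (statusCode, headers, data) come from the type added in this diff; the default import form, the baseUrl option, the requestConifg field on IFetchConfig, and the response type are assumptions made for illustration only.

```ts
import XCrawl from 'x-crawl' // import form assumed, not shown in this diff

// Illustrative instance; the baseUrl value is made up.
const myXCrawl = new XCrawl({ baseUrl: 'https://xxx.com' })

// fetch() now resolves to IFetch<T> -- an array of { statusCode, headers, data }
// entries -- instead of the bare data T. Per the updated requestConifg example,
// method: 'GET' can be omitted from each request item.
myXCrawl
  .fetch<{ name: string }>({ requestConifg: [{ url: '/xxxx' }, { url: '/xxxx' }] })
  .then((results) => {
    for (const item of results) {
      console.log(item.statusCode, item.headers['content-type'], item.data.name)
    }
  })
```

Returning an array keeps each request's status code and headers attached to its data, which helps when only some of the batched requests succeed.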

package.json (+4 -2)

@@ -1,15 +1,17 @@
 {
   "private": true,
   "name": "x-crawl",
-  "version": "0.0.2",
+  "version": "0.0.3",
   "author": "CoderHxl",
   "description": "XCrawl is a Nodejs multifunctional crawler library.",
   "license": "MIT",
   "main": "src/index.ts",
   "scripts": {
     "build": "rollup --config rollup.config.mjs",
+    "build-strict": "pnpm test-dev && pnpm build && pnpm test-pro",
     "start": "rollup --config script/start.mjs",
-    "test": "jest test/test.ts",
+    "test-dev": "jest test/modal/test.ts dev",
+    "test-pro": "jest test/modal/test.ts pro",
     "prettier": "prettier --write ."
   },
   "dependencies": {

publish/README.md (+30 -10)

@@ -2,7 +2,7 @@
 
 English | <a href="#cn" style="text-decoration: none">简体中文</a>
 
-Crawl is a Nodejs multifunctional crawler library. Provide configuration to batch fetch HTML, JSON, images, etc.
+XCrawl is a Nodejs multifunctional crawler library. Provide configuration to batch fetch HTML, JSON, images, etc.
 
 ## Install
 
@@ -45,7 +45,7 @@ Create a crawler instance via new XCrawl.
 class XCrawl {
   private readonly baseConfig
   constructor(baseConfig?: IXCrawlBaseConifg)
-  fetch<T = any>(config: IFetchConfig): Promise<T>
+  fetch<T = any>(config: IFetchConfig): Promise<IFetch<T>>
   fetchFile(config: IFetchFileConfig): Promise<IFetchFile>
   fetchHTML(url: string): Promise<JSDOM>
 }
@@ -108,9 +108,9 @@ function fetchFile(config: IFetchFileConfig): Promise<IFetchFile>
 
 ```js
 const requestConifg = [
-  { url: '/xxxx', method: 'GET' },
-  { url: '/xxxx', method: 'GET' },
-  { url: '/xxxx', method: 'GET' }
+  { url: '/xxxx' },
+  { url: '/xxxx' },
+  { url: '/xxxx' }
 ]
 
 myXCrawl.fetchFile({
@@ -188,6 +188,16 @@ interface IFetchBaseConifg {
 }
 ```
 
+- IFetch
+
+```ts
+type IFetch<T> = {
+  statusCode: number | undefined
+  headers: IncomingHttpHeaders // node:http
+  data: T
+}[]
+```
+
 - IFetchFile
 
 ```ts
@@ -238,7 +248,7 @@ If you have any **questions** or **needs** , please submit **Issues in** https:/
 
 <a href="#en" style="text-decoration: none">English</a> | 简体中文
 
-Crawl 是 Nodejs 多功能爬虫库。提供配置即可批量抓取 HTML 、JSON、图片等等。
+XCrawl 是 Nodejs 多功能爬虫库。提供配置即可批量抓取 HTML 、JSON、图片等等。
 
 ## 安装
 
@@ -281,7 +291,7 @@ docsXCrawl.fetchHTML('/zh/get-started').then((jsdom) => {
 class XCrawl {
   private readonly baseConfig
   constructor(baseConfig?: IXCrawlBaseConifg)
-  fetch<T = any>(config: IFetchConfig): Promise<T>
+  fetch<T = any>(config: IFetchConfig): Promise<IFetch<T>>
   fetchFile(config: IFetchFileConfig): Promise<IFetchFile>
   fetchHTML(url: string): Promise<JSDOM>
 }
@@ -344,9 +354,9 @@ function fetchFile(config: IFetchFileConfig): Promise<IFetchFile>
 
 ```js
 const requestConifg = [
-  { url: '/xxxx', method: 'GET' },
-  { url: '/xxxx', method: 'GET' },
-  { url: '/xxxx', method: 'GET' }
+  { url: '/xxxx' },
+  { url: '/xxxx' },
+  { url: '/xxxx' }
 ]
 
 myXCrawl.fetchFile({
@@ -424,6 +434,16 @@ interface IFetchBaseConifg {
 }
 ```
 
+- IFetch
+
+```ts
+type IFetch<T> = {
+  statusCode: number | undefined
+  headers: IncomingHttpHeaders // node:http
+  data: T
+}[]
+```
+
 - IFetchFile
 
 ```ts

publish/package.json (+2 -4)

@@ -1,6 +1,6 @@
 {
   "name": "x-crawl",
-  "version": "0.0.2",
+  "version": "0.0.3",
   "author": "CoderHxl",
   "description": "XCrawl is a Nodejs multifunctional crawler library.",
   "license": "MIT",
@@ -26,9 +26,7 @@
     }
   },
   "dependencies": {
-    "jsdom": "^21.1.0"
-  },
-  "devDependencies": {
+    "jsdom": "^21.1.0",
     "@types/jsdom": "^20.0.1"
   }
 }
