Commit a7d0a7c

Update: Docs
1 parent f13efe4 commit a7d0a7c

File tree: 4 files changed (+15 −36)

- README.md
- docs/cn.md
- package.json
- publish/package.json

README.md (+7 −17)

````diff
@@ -95,7 +95,6 @@ Regular crawling: Get the recommended pictures of the youtube homepage every other day
 
 ```js
 // 1.Import module ES/CJS
-import path from 'node:path'
 import xCrawl from 'x-crawl'
 
 // 2.Create a crawler instance
@@ -125,13 +124,7 @@ myXCrawl.startPolling({ d: 1 }, () => {
     })
 
     // Call the crawlFile API to crawl pictures
-    myXCrawl.crawlFile({
-      requestConfig,
-      fileConfig: { storeDir: path.resolve(__dirname, './upload') }
-    })
-
-    // Close the browser
-    browser.close()
+    myXCrawl.crawlFile({ requestConfig, fileConfig: { storeDir: './upload' } })
   })
 })
 ```
@@ -260,7 +253,6 @@ myXCrawl.crawlData({ requestConfig }).then(res => {
 Crawl file data via [crawlFile()](#crawlFile)
 
 ```js
-import path from 'node:path'
 import xCrawl from 'x-crawl'
 
 const myXCrawl = xCrawl({
@@ -274,7 +266,7 @@ myXCrawl
   .crawlFile({
     requestConfig,
     fileConfig: {
-      storeDir: path.resolve(__dirname, './upload') // storage folder
+      storeDir: './upload' // storage folder
     }
   })
   .then((fileInfos) => {
@@ -299,9 +291,7 @@ myXCrawl.startPolling({ h: 2, m: 30 }, (count, stopPolling) => {
   // crawlPage/crawlData/crawlFile
   myXCrawl.crawlPage('https://xxx.com').then(res => {
     const { jsdom, browser, page } = res
-
-    // Close the browser
-    browser.close()
+
   })
 })
 ```
@@ -414,7 +404,7 @@ const requestConfig = [ 'https://xxx.com/xxxx', 'https://xxx.com/xxxx', 'https://xxx.com/xxxx' ]
 myXCrawl
   .crawlFile({
     requestConfig,
-    fileConfig: { storeDir: path.resolve(__dirname, './upload') }
+    fileConfig: { storeDir: './upload' }
   })
   .then((fileInfos) => {
     console.log('Promise: ', fileInfos)
@@ -424,7 +414,7 @@ myXCrawl
 myXCrawl.crawlFile(
   {
     requestConfig,
-    fileConfig: { storeDir: path.resolve(__dirname, './upload') }
+    fileConfig: { storeDir: './upload' }
   },
   (fileInfo) => {
     console.log('Callback: ', fileInfo)
@@ -436,7 +426,7 @@ myXCrawl
   .crawlFile(
     {
       requestConfig,
-      fileConfig: { storeDir: path.resolve(__dirname, './upload') }
+      fileConfig: { storeDir: './upload' }
     },
     (fileInfo) => {
       console.log('Callback: ', fileInfo)
@@ -589,7 +579,7 @@ myXCrawl
   .crawlFile({
     requestConfig,
     fileConfig: {
-      storeDir: path.resolve(__dirname, './upload') // storage folder
+      storeDir: './upload' // storage folder
     }
  })
   .then((fileInfos) => {
````

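Taken together, the README hunks make two changes to every `crawlFile` example: `storeDir` is now passed as a plain relative path (so the `node:path` import and the `path.resolve(__dirname, ...)` boilerplate go away), and the quick-start no longer closes the browser by hand. A minimal sketch of the resulting quick-start, assembled from the hunks above — the crawler options and the image selector are illustrative placeholders, not part of this commit:

```js
import xCrawl from 'x-crawl'

// Placeholder options; the real example's options fall outside these hunks
const myXCrawl = xCrawl({ intervalTime: { max: 3000, min: 2000 } })

// Crawl once a day, as in the README's { d: 1 } polling example
myXCrawl.startPolling({ d: 1 }, () => {
  myXCrawl.crawlPage('https://www.youtube.com/').then((res) => {
    const { jsdom } = res

    // Collect image URLs from the page (selector is a placeholder)
    const requestConfig = []
    const imgEls = jsdom.window.document.querySelectorAll('img')
    imgEls.forEach((item) => requestConfig.push(`https:${item.src}`))

    // Per the v3.2.2 docs: a relative storeDir replaces
    // path.resolve(__dirname, './upload'), and there is no
    // manual browser.close() afterwards
    myXCrawl.crawlFile({ requestConfig, fileConfig: { storeDir: './upload' } })
  })
})
```
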
docs/cn.md (+6 −17)

````diff
@@ -101,7 +101,6 @@ npm install x-crawl
 
 ```js
 // 1.导入模块 ES/CJS
-import path from 'node:path'
 import xCrawl from 'x-crawl'
 
 // 2.创建一个爬虫实例
@@ -125,13 +124,7 @@ myXCrawl.startPolling({ d: 1 }, () => {
     imgEls.forEach((item) => requestConfig.push(`https:${item.src}`))
 
     // 调用 crawlFile API 爬取图片
-    myXCrawl.crawlFile({
-      requestConfig,
-      fileConfig: { storeDir: path.resolve(__dirname, './upload') }
-    })
-
-    // 关闭浏览器
-    browser.close()
+    myXCrawl.crawlFile({ requestConfig, fileConfig: { storeDir: './upload' } })
   })
 })
 ```
@@ -259,7 +252,6 @@ myXCrawl.crawlData({ requestConfig }).then(res => {
 通过 [crawlFile()](#crawlFile) 爬取文件数据
 
 ```js
-import path from 'node:path'
 import xCrawl from 'x-crawl'
 
 const myXCrawl = xCrawl({
@@ -273,7 +265,7 @@ myXCrawl
   .crawlFile({
     requestConfig,
     fileConfig: {
-      storeDir: path.resolve(__dirname, './upload') // 存放文件夹
+      storeDir: './upload' // 存放文件夹
     }
   })
   .then((fileInfos) => {
@@ -298,8 +290,6 @@ myXCrawl.startPolling({ h: 2, m: 30 }, (count, stopPolling) => {
   // crawlPage/crawlData/crawlFile
   myXCrawl.crawlPage('https://xxx.com').then(res => {
     const { jsdom, browser, page } = res
-
-    browser.close()
   })
 })
 ```
@@ -413,7 +403,7 @@ const requestConfig = [ 'https://xxx.com/xxxx', 'https://xxx.com/xxxx', 'https://xxx.com/xxxx' ]
 myXCrawl
   .crawlFile({
     requestConfig,
-    fileConfig: { storeDir: path.resolve(__dirname, './upload') }
+    fileConfig: { storeDir: './upload' }
   })
   .then((fileInfos) => {
     console.log('Promise: ', fileInfos)
@@ -423,7 +413,7 @@ myXCrawl
 myXCrawl.crawlFile(
   {
     requestConfig,
-    fileConfig: { storeDir: path.resolve(__dirname, './upload') }
+    fileConfig: { storeDir: './upload' }
   },
   (fileInfo) => {
     console.log('Callback: ', fileInfo)
@@ -435,7 +425,7 @@ myXCrawl
   .crawlFile(
     {
       requestConfig,
-      fileConfig: { storeDir: path.resolve(__dirname, './upload') }
+      fileConfig: { storeDir: './upload' }
     },
     (fileInfo) => {
       console.log('Callback: ', fileInfo)
@@ -573,7 +563,6 @@ function crawlFile: (
 #### 示例
 
 ```js
-import path from 'node:path'
 import xCrawl from 'x-crawl'
 
 const myXCrawl = xCrawl({
@@ -588,7 +577,7 @@ myXCrawl
   .crawlFile({
     requestConfig,
     fileConfig: {
-      storeDir: path.resolve(__dirname, './upload') // 存放文件夹
+      storeDir: './upload' // 存放文件夹
     }
   })
   .then((fileInfos) => {
````

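The same edit runs through both language versions: the `startPolling` + `crawlPage` example keeps destructuring `jsdom`, `browser`, and `page` from the result, but the trailing `browser.close()` is gone. The commit doesn't state the reason; a plausible reading is that the library now manages the browser lifecycle itself. A sketch of the trimmed example, using only identifiers that appear in the hunks above:

```js
import xCrawl from 'x-crawl'

const myXCrawl = xCrawl() // default options (placeholder)

// Poll every 2 h 30 min, per the docs example
myXCrawl.startPolling({ h: 2, m: 30 }, (count, stopPolling) => {
  // crawlPage/crawlData/crawlFile
  myXCrawl.crawlPage('https://xxx.com').then((res) => {
    const { jsdom, browser, page } = res
    // ...use jsdom/page here; after this commit the examples
    // no longer call browser.close()
  })
})
```
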
package.json (+1 −1)

```diff
@@ -1,7 +1,7 @@
 {
   "private": true,
   "name": "x-crawl",
-  "version": "3.2.1",
+  "version": "3.2.2",
   "author": "coderHXL",
   "description": "x-crawl is a flexible nodejs crawler library. ",
   "license": "MIT",
```

publish/package.json (+1 −1)

```diff
@@ -1,6 +1,6 @@
 {
   "name": "x-crawl",
-  "version": "3.2.1",
+  "version": "3.2.2",
   "author": "coderHXL",
   "description": "x-crawl is a flexible nodejs crawler library.",
   "license": "MIT",
```
