-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathfetch.js
More file actions
127 lines (110 loc) · 3.39 KB
/
fetch.js
File metadata and controls
127 lines (110 loc) · 3.39 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
import axios from 'axios'
import fs from 'node:fs'
import { fileURLToPath } from 'node:url'
import { dirname, join } from 'node:path'
// Resolve this module's directory (ESM has no built-in __dirname).
const __filename = fileURLToPath(import.meta.url)
const __dirname = dirname(__filename)

// Optional token for authenticated requests (raises the GitHub API rate limit).
// Only the token is taken from the environment: the original code also read
// REPO_OWNER/REPO_NAME from process.env but immediately overwrote them with
// the literals below, so those env reads were dead and have been removed.
const { GITHUB_TOKEN } = process.env
const REPO_OWNER = 'KarinJS'
const REPO_NAME = 'Karin'

const headers = {
  Authorization: `Bearer ${GITHUB_TOKEN}`,
  'Content-Type': 'application/json',
  // NOTE(review): 'spiderman-preview' is an old preview media type; current
  // GitHub docs recommend 'application/vnd.github+json'. Left as-is to keep
  // request behavior unchanged — confirm before switching.
  Accept: 'application/vnd.github.spiderman-preview+json'
}

// Base REST URL for the target repository and the local output directory.
const baseUrl = `https://api.github.com/repos/${REPO_OWNER}/${REPO_NAME}`
const dataDir = join(__dirname, 'data')
/**
 * Reset the local output directory: delete any previous run's data, then
 * recreate it empty so every run starts from a clean slate.
 */
function clearDataDirectory () {
  const hadPreviousRun = fs.existsSync(dataDir)
  if (hadPreviousRun) {
    fs.rmSync(dataDir, { recursive: true })
    console.log(`Cleared data directory at ${dataDir}`)
  }
  fs.mkdirSync(dataDir, { recursive: true })
  console.log(`Created data directory at ${dataDir}`)
}

clearDataDirectory()
/**
 * Repository endpoints to snapshot. Each entry is a path (plus query string
 * for the paginated list endpoints) appended to the repo's base REST URL.
 */
const apiEndpoints = [
  '',
  '/contributors',
  '/pulls?state=open&per_page=100',
  '/pulls?state=closed&per_page=100',
  '/issues?state=open&per_page=100',
  '/issues?state=closed&per_page=100',
  '/stargazers',
  '/forks',
  '/subscribers',
  '/commits',
  '/tags',
  '/license',
  '/discussions',
  '/releases',
  '/branches',
].map((path) => `${baseUrl}${path}`)
/**
 * Parse an RFC 5988 `Link` response header into a { rel: url } map.
 *
 * Example input:  '<https://api.github.com/...?page=2>; rel="next", <...>; rel="last"'
 * Example output: { next: 'https://api.github.com/...?page=2', last: '...' }
 *
 * @param {string | undefined} linkHeader - raw `Link` header value (may be absent)
 * @returns {Object<string, string>} rel name -> URL; empty when header is missing
 */
function parseLinkHeader (linkHeader) {
  const links = {}
  if (!linkHeader) return links
  // Match each '<url>; rel="name"' pair with a regex. The previous
  // positional split('; ')/split('=')/slice(1, -1) parsing threw a
  // TypeError on any entry without a '; ' separator; malformed entries
  // are now simply skipped.
  for (const entry of linkHeader.split(',')) {
    const match = entry.match(/<([^>]*)>\s*;\s*rel="([^"]*)"/)
    if (match) links[match[2]] = match[1]
  }
  return links
}
/**
 * Fetch every page of a paginated GitHub list endpoint, following the
 * `Link` response header until there is no rel="next" entry left.
 * On a request failure the error is logged and whatever pages were
 * already collected are returned (best-effort).
 *
 * @param {string} endpoint - full URL, may already carry query params
 * @returns {Promise<Array>} concatenated items from all fetched pages
 */
async function fetchPaginatedData (endpoint) {
  const url = new URL(endpoint)
  let collected = []
  for (let page = 1; ; page++) {
    url.searchParams.set('page', page)
    try {
      const response = await axios.get(url.toString(), { headers })
      collected = collected.concat(response.data)
      // parseLinkHeader returns {} for a missing header, which ends the loop.
      const links = parseLinkHeader(response.headers.link)
      if (!links.next) break
    } catch (error) {
      console.error(`Error fetching page ${page} of ${endpoint}:`, error.message)
      break
    }
  }
  return collected
}
/**
 * Download a snapshot of each endpoint in `apiEndpoints` and write it into
 * the data directory as pretty-printed JSON. `pulls` and `issues` are walked
 * page by page; every other endpoint is fetched with a single request.
 * A failure on one endpoint is logged and does not stop the others.
 *
 * NOTE(review): GitHub's /issues listing presumably also contains pull
 * requests — confirm whether that is intended for these snapshots.
 */
async function fetchAndSaveData () {
  for (const endpoint of apiEndpoints) {
    try {
      const url = new URL(endpoint)
      // Last path segment names the resource (e.g. 'pulls', 'contributors').
      const resource = url.pathname.split('/').at(-1)
      const paginated = resource === 'pulls' || resource === 'issues'
      const data = paginated
        ? await fetchPaginatedData(endpoint)
        : (await axios.get(endpoint, { headers })).data
      // pulls/issues get a state-specific file (pr_open.json, issue_closed.json, ...);
      // other resources are saved under their own segment name.
      const fileName = paginated
        ? `${resource === 'pulls' ? 'pr' : 'issue'}_${url.searchParams.get('state')}.json`
        : `${resource}.json`
      const filePath = join(dataDir, fileName)
      fs.writeFileSync(filePath, JSON.stringify(data, null, 2))
      console.log(`Data saved to ${filePath}`)
    } catch (error) {
      console.error(`Error fetching data from ${endpoint}:`, error.message)
    }
  }
}

fetchAndSaveData()