Skip to content

Commit 2df37ca

Browse files
Fix: Allow crawl node updates to set IP and port fields to null (#312)
## High Level Overview of Change <!-- Please include a summary/list of the changes. If too broad, please consider splitting into multiple PRs. If a relevant Asana task, please link it here. --> If a user tries to update an existing crawl's DB entry with a non-null IP to have a null IP, the IP address does not update. This is because we use merge in our saveNode, which has the behavior of excluding undefined values during conflict resolution. As a result, we need to manually override with the null fields we would like to retain (essentially sanitize the new entry with nulls instead of undefined). ### Context of Change <!-- Please include the context of a change. If a bug fix, when was the bug introduced? What was the behavior? If a new feature, why was this architecture chosen? What were the alternatives? If a refactor, how is this better than the previous implementation? If there is a design document for this feature, please link it here. --> ### Type of Change <!-- Please check relevant options, delete irrelevant ones. --> - [x] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) - [ ] Refactor (non-breaking change that only restructures code) - [ ] Tests (You added tests for code that already exists, or your new feature included in this PR) - [ ] Documentation Updates - [ ] Release ## Before / After <!-- If just refactoring / back-end changes, this can be just an in-English description of the change at a technical level. If a UI change, screenshots should be included. --> ## Test Plan <!-- Please describe the tests that you ran to verify your changes and provide instructions so that others can reproduce. --> <!-- ## Future Tasks For future tasks related to PR. -->
1 parent 76a5c40 commit 2df37ca

File tree

5 files changed

+176
-28
lines changed

5 files changed

+176
-28
lines changed

.vscode/settings.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@
2525
"enable": true
2626
},
2727
"editor.codeActionsOnSave": {
28-
"source.fixAll.eslint": true
28+
"source.fixAll.eslint": "explicit"
2929
},
3030
"files.insertFinalNewline": true,
3131
"files.trimFinalNewlines": true,

src/crawler/crawl.ts

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -202,7 +202,8 @@ class Crawler {
202202

203203
const { this_node, active_nodes } = nodes
204204

205-
const promises: Array<Promise<void>> = [saveNode(this_node)]
205+
// If we are saving the node outside of peer iterations, we do not want to force missing parameters to null
206+
const promises: Array<Promise<void>> = [saveNode(this_node, false)]
206207

207208
for (const node of active_nodes) {
208209
const normalizedPublicKey = Crawler.normalizePublicKey(node.public_key)
@@ -233,7 +234,9 @@ class Crawler {
233234
}
234235

235236
this.publicKeysSeen.add(normalizedPublicKey)
236-
promises.push(saveNode(dbNode))
237+
238+
// If a peer has the node IP as null, we want to overwrite in the database
239+
promises.push(saveNode(dbNode, true))
237240

238241
if (node.ip === undefined || node.port === undefined) {
239242
continue

src/shared/database/index.ts

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -49,17 +49,25 @@ export async function getNetworks(): Promise<Network[]> {
4949
* Saves a Node to database.
5050
*
5151
* @param node - Node to write to database.
52+
* @param shouldOverwriteNull - If IP/port are not provided, will force update to null in DB.
5253
* @returns Void.
5354
*/
54-
export async function saveNode(node: Node): Promise<void> {
55+
export async function saveNode(
56+
node: Node,
57+
shouldOverwriteNull: boolean,
58+
): Promise<void> {
5559
if (node.complete_ledgers && node.complete_ledgers.length > 255) {
5660
const ledgersSplit = node.complete_ledgers.split(',')
5761
node.complete_ledgers = ledgersSplit[ledgersSplit.length - 1]
5862
}
63+
64+
const sanitizedMergeNode = shouldOverwriteNull
65+
? { ip: null, port: null, ...node }
66+
: node
5967
query('crawls')
6068
.insert(node)
6169
.onConflict('public_key')
62-
.merge()
70+
.merge(sanitizedMergeNode)
6371
.catch((err: Error) => log.error(err.message))
6472
}
6573

test/crawler/crawler.test.ts

Lines changed: 68 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -5,30 +5,9 @@ import { destroy, query, setupTables } from '../../src/shared/database'
55
import { Node } from '../../src/shared/types'
66

77
import network2 from './fixtures/cyclic-network.json'
8+
import nullNodeNetwork from './fixtures/null-ip-three-node-crawl.json'
89
import network1 from './fixtures/three-node-crawl.json'
910

10-
function mock(): void {
11-
// Sets up mocking at endpoints specified in network1
12-
Object.keys(network1.peers).forEach((peer: string) => {
13-
nock(`https://${peer}:51235`)
14-
.get('/crawl')
15-
.reply(
16-
200,
17-
(network1.peers as Record<string, Record<string, unknown>>)[peer],
18-
)
19-
})
20-
21-
// Sets up mocking at endpoints specified in network2
22-
Object.keys(network2.peers).forEach((peer: string) => {
23-
nock(`https://${peer}:51235`)
24-
.get('/crawl')
25-
.reply(
26-
200,
27-
(network2.peers as Record<string, Record<string, unknown>>)[peer],
28-
)
29-
})
30-
}
31-
3211
async function crawl(ip: string): Promise<void> {
3312
await new Crawler().crawl({
3413
id: 'main',
@@ -40,7 +19,6 @@ async function crawl(ip: string): Promise<void> {
4019
describe('Runs test crawl', () => {
4120
beforeAll(async () => {
4221
await setupTables()
43-
mock()
4422
})
4523

4624
afterAll(async () => {
@@ -52,6 +30,15 @@ describe('Runs test crawl', () => {
5230
})
5331

5432
test('successfully crawls 3 node network', async () => {
33+
// Sets up mocking at endpoints specified in network1
34+
Object.keys(network1.peers).forEach((peer: string) => {
35+
nock(`https://${peer}:51235`)
36+
.get('/crawl')
37+
.reply(
38+
200,
39+
(network1.peers as Record<string, Record<string, unknown>>)[peer],
40+
)
41+
})
5542
await crawl('1.1.1.1')
5643

5744
const results: Node[] = await query('crawls').select([
@@ -65,7 +52,65 @@ describe('Runs test crawl', () => {
6552
expect(results).toContainEqual(network1.result[2])
6653
})
6754

55+
test('successfully updates an existing ip and port to null', async () => {
56+
// Manually set endpoints to standard 3 node network
57+
Object.keys(network1.peers).forEach((peer: string) => {
58+
nock(`https://${peer}:51235`)
59+
.get('/crawl')
60+
.reply(
61+
200,
62+
(network1.peers as Record<string, Record<string, unknown>>)[peer],
63+
)
64+
})
65+
66+
// Manually set same node endpoints to new network with a null ip/port
67+
Object.keys(nullNodeNetwork.peers).forEach((peer: string) => {
68+
nock(`https://${peer}:51235`)
69+
.get('/crawl')
70+
.reply(
71+
200,
72+
(nullNodeNetwork.peers as Record<string, Record<string, unknown>>)[
73+
peer
74+
],
75+
)
76+
})
77+
await crawl('1.1.1.1')
78+
79+
const initResults: Node[] = await query('crawls').select([
80+
'ip',
81+
'port',
82+
'public_key',
83+
])
84+
85+
// Ensure DB has registered standard nodes with IP addresses
86+
expect(initResults).toContainEqual(network1.result[0])
87+
expect(initResults).toContainEqual(network1.result[1])
88+
expect(initResults).toContainEqual(network1.result[2])
89+
90+
await crawl('1.1.1.1')
91+
92+
const modifiedResults: Node[] = await query('crawls').select([
93+
'ip',
94+
'port',
95+
'public_key',
96+
])
97+
98+
// Ensure DB has registered new nodes with a null ip/port
99+
expect(modifiedResults).toContainEqual(nullNodeNetwork.result[0])
100+
expect(modifiedResults).toContainEqual(nullNodeNetwork.result[1])
101+
expect(modifiedResults).toContainEqual(nullNodeNetwork.result[2])
102+
})
103+
68104
test('successfully crawls cyclic node network', async () => {
105+
// Sets up mocking at endpoints specified in network2
106+
Object.keys(network2.peers).forEach((peer: string) => {
107+
nock(`https://${peer}:51235`)
108+
.get('/crawl')
109+
.reply(
110+
200,
111+
(network2.peers as Record<string, Record<string, unknown>>)[peer],
112+
)
113+
})
69114
await crawl('2.2.2.2')
70115

71116
const results: Node[] = await query('crawls').select([
Lines changed: 92 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,92 @@
1+
{
2+
"peers": {
3+
"1.1.1.1": {
4+
"overlay": {
5+
"active": [
6+
{
7+
"public_key": "n9Kt2gdtxsYV6h4SAfHkRFUMU8gB6rDQ3MktiB3MMEDS9s9obu9b",
8+
"complete_ledgers": "9-29"
9+
},
10+
{
11+
"ip": "1.1.1.23",
12+
"port": 51235,
13+
"public_key": "n9LgkNWaPfRnxLKAgnSCMHRCyHBZsSdGBdRmyaYEArL3SBoq1EqK",
14+
"complete_ledgers": "10-20"
15+
}
16+
]
17+
},
18+
"server": {
19+
"complete_ledgers": "10-20",
20+
"pubkey_node" : "n9Mh83gUuY4hBXVD9geWHsyVwz5h32rjauLWQCZJVTEbCb5TYs21",
21+
"server_state": "connected",
22+
"io_latency_ms": "1",
23+
"load_factor_server": "1",
24+
"uptime": "1",
25+
"version": "1.6.0"
26+
},
27+
"unl": {
28+
"validator_sites": [
29+
{
30+
"last_refresh_status": "same_sequence",
31+
"last_refresh_time": "2022-May-03 21:59:20.652097009 UTC",
32+
"next_refresh_time": "2022-May-03 22:04:20.636643972 UTC",
33+
"refresh_interval_min": 5,
34+
"uri":"https://vl.fake.example.com"
35+
}
36+
]
37+
}
38+
},
39+
"1.1.1.13": {
40+
"overlay": {
41+
"active": [
42+
{
43+
"ip": "1.1.1.23",
44+
"port": 51235,
45+
"public_key": "n9LgkNWaPfRnxLKAgnSCMHRCyHBZsSdGBdRmyaYEArL3SBoq1EqK",
46+
"complete_ledgers": "10-20"
47+
}
48+
]
49+
},
50+
"server": {
51+
"complete_ledgers": "10-20",
52+
"pubkey_node" : "n9Kt2gdtxsYV6h4SAfHkRFUMU8gB6rDQ3MktiB3MMEDS9s9obu9b",
53+
"server_state": "connected",
54+
"io_latency_ms": "1",
55+
"load_factor_server": "1",
56+
"uptime": "1",
57+
"version": "1.6.0"
58+
}
59+
},
60+
"1.1.1.23": {
61+
"overlay": {
62+
"active": []
63+
},
64+
"server": {
65+
"complete_ledgers": "10-20",
66+
"pubkey_node" : "n9LgkNWaPfRnxLKAgnSCMHRCyHBZsSdGBdRmyaYEArL3SBoq1EqK",
67+
"server_state": "connected",
68+
"io_latency_ms": "1",
69+
"load_factor_server": "1",
70+
"uptime": "1",
71+
"version": "1.6.0"
72+
}
73+
}
74+
},
75+
"result": [
76+
{
77+
"public_key" : "n9Mh83gUuY4hBXVD9geWHsyVwz5h32rjauLWQCZJVTEbCb5TYs21",
78+
"ip": null,
79+
"port": null
80+
},
81+
{
82+
"public_key": "n9Kt2gdtxsYV6h4SAfHkRFUMU8gB6rDQ3MktiB3MMEDS9s9obu9b",
83+
"ip": null,
84+
"port": null
85+
},
86+
{
87+
"public_key": "n9LgkNWaPfRnxLKAgnSCMHRCyHBZsSdGBdRmyaYEArL3SBoq1EqK",
88+
"ip": "1.1.1.23",
89+
"port": 51235
90+
}
91+
]
92+
}

0 commit comments

Comments
 (0)