Skip to content

Commit a27d1d2

Browse files
fix: update HAProxy UP check to use HAProxy Data Plane API (#173)
Signed-off-by: Jeromy Cannon <[email protected]>
1 parent feba5f9 commit a27d1d2

File tree

8 files changed

+119
-61
lines changed

8 files changed

+119
-61
lines changed

src/commands/node.mjs

Lines changed: 72 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,21 @@ export class NodeCommand extends BaseCommand {
4444
this.keyManager = opts.keyManager
4545
this.accountManager = opts.accountManager
4646
this.keytoolDepManager = opts.keytoolDepManager
47+
this._portForwards = []
48+
}
49+
50+
/**
51+
* stops and closes the port forwards
52+
* @returns {Promise<void>}
53+
*/
54+
async close () {
55+
if (this._portForwards) {
56+
for (const srv of this._portForwards) {
57+
await this.k8.stopPortForward(srv)
58+
}
59+
}
60+
61+
this._portForwards = []
4762
}
4863

4964
async checkNetworkNodePod (namespace, nodeId) {
@@ -637,10 +652,11 @@ export class NodeCommand extends BaseCommand {
637652
title: 'Check node proxies are ACTIVE',
638653
task: async (ctx, parentTask) => {
639654
const subTasks = []
655+
let localPort = constants.LOCAL_NODE_PROXY_START_PORT
640656
for (const nodeId of ctx.config.nodeIds) {
641657
subTasks.push({
642658
title: `Check proxy for node: ${chalk.yellow(nodeId)}`,
643-
task: async () => await self.checkNetworkNodeProxyUp(ctx.config.namespace, nodeId)
659+
task: async () => await self.checkNetworkNodeProxyUp(nodeId, localPort++)
644660
})
645661
}
646662

@@ -664,45 +680,47 @@ export class NodeCommand extends BaseCommand {
664680
throw new FullstackTestingError(`Error starting node: ${e.message}`, e)
665681
} finally {
666682
await self.accountManager.close()
683+
await self.close()
667684
}
668685

669686
return true
670687
}
671688

672-
async checkNetworkNodeProxyUp (namespace, nodeId, maxAttempts = 10, delay = 5000) {
689+
/**
690+
* Check if the network node proxy is up, requires close() to be called after
691+
* @param nodeId the node id
692+
* @param localPort the local port to forward to
693+
* @param maxAttempts the maximum number of attempts
694+
* @param delay the delay between attempts
695+
* @returns {Promise<boolean>} true if the proxy is up
696+
*/
697+
async checkNetworkNodeProxyUp (nodeId, localPort, maxAttempts = 10, delay = 5000) {
673698
const podArray = await this.k8.getPodsByLabel([`app=haproxy-${nodeId}`, 'fullstack.hedera.com/type=haproxy'])
674699

675700
let attempts = 0
676701
if (podArray.length > 0) {
677702
const podName = podArray[0].metadata.name
703+
this._portForwards.push(await this.k8.portForward(podName, localPort, 5555))
704+
try {
705+
await this.k8.testConnection('localhost', localPort)
706+
} catch (e) {
707+
throw new FullstackTestingError(`failed to create port forward for '${nodeId}' proxy on port ${localPort}`, e)
708+
}
678709

679710
while (attempts < maxAttempts) {
680-
const logResponse = await this.k8.kubeClient.readNamespacedPodLog(
681-
podName,
682-
namespace,
683-
'haproxy',
684-
undefined,
685-
undefined,
686-
1024,
687-
undefined,
688-
undefined,
689-
undefined,
690-
4
691-
)
692-
693-
if (logResponse.response.statusCode !== 200) {
694-
throw new FullstackTestingError(`Expected pod ${podName} log query to execute successful, but instead got a status of ${logResponse.response.statusCode}`)
695-
}
711+
try {
712+
const status = await this.getNodeProxyStatus(`http://localhost:${localPort}/v2/services/haproxy/stats/native?type=backend`)
713+
if (status === 'UP') {
714+
this.logger.debug(`Proxy ${podName} is UP. [attempt: ${attempts}/${maxAttempts}]`)
715+
return true
716+
}
696717

697-
this.logger.debug(`Received HAProxy log from ${podName}`, { output: logResponse.body })
698-
if (logResponse.body.includes('Server be_servers/server1 is UP')) {
699-
this.logger.debug(`Proxy ${podName} is UP [attempt: ${attempts}/${maxAttempts}]`)
700-
return true
718+
attempts++
719+
this.logger.debug(`Proxy ${podName} is not UP. Checking again in ${delay}ms ... [attempt: ${attempts}/${maxAttempts}]`)
720+
await sleep(delay)
721+
} catch (e) {
722+
throw new FullstackTestingError(`failed to create port forward for '${nodeId}' proxy on port ${localPort}`, e)
701723
}
702-
703-
attempts++
704-
this.logger.debug(`Proxy ${podName} is not UP. Checking again in ${delay}ms ... [attempt: ${attempts}/${maxAttempts}]`)
705-
await sleep(delay)
706724
}
707725
}
708726

@@ -967,4 +985,32 @@ export class NodeCommand extends BaseCommand {
967985
}
968986
}
969987
}
988+
989+
async getNodeProxyStatus (url) {
990+
try {
991+
this.logger.debug(`Fetching proxy status from: ${url}`)
992+
const res = await fetch(url, {
993+
method: 'GET',
994+
signal: AbortSignal.timeout(5000),
995+
headers: {
996+
Authorization: `Basic ${Buffer.from(
997+
`${constants.NODE_PROXY_USER_ID}:${constants.NODE_PROXY_PASSWORD}`).toString(
998+
'base64')}`
999+
}
1000+
})
1001+
const response = await res.json()
1002+
1003+
if (res.status === 200) {
1004+
const status = response[0]?.stats?.filter(
1005+
(stat) => stat.name === 'http_backend')[0]?.stats?.status
1006+
this.logger.debug(`Proxy status: ${status}`)
1007+
return status
1008+
} else {
1009+
this.logger.debug(`Proxy request status code: ${res.status}`)
1010+
return null
1011+
}
1012+
} catch (e) {
1013+
this.logger.error(`Error in fetching proxy status: ${e.message}`, e)
1014+
}
1015+
}
9701016
}

src/core/account_manager.mjs

Lines changed: 1 addition & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@ import {
3131
TransferTransaction
3232
} from '@hashgraph/sdk'
3333
import { FullstackTestingError, MissingArgumentError } from './errors.mjs'
34-
import net from 'net'
3534
import { Templates } from './templates.mjs'
3635

3736
const REASON_FAILED_TO_GET_KEYS = 'failed to get keys for accountId'
@@ -58,8 +57,6 @@ const REJECTED = 'rejected'
5857
*
5958
*/
6059
export class AccountManager {
61-
static _openPortForwardConnections = 0
62-
6360
/**
6461
* creates a new AccountManager instance
6562
* @param logger the logger to use
@@ -202,12 +199,10 @@ export class AccountManager {
202199

203200
if (usePortForward) {
204201
this._portForwards.push(await this.k8.portForward(serviceObject.podName, localPort, port))
205-
AccountManager._openPortForwardConnections++
206202
}
207203

208204
nodes[`${host}:${targetPort}`] = AccountId.fromString(serviceObject.accountId)
209-
await this.testConnection(serviceObject.podName, host, targetPort)
210-
205+
await this.k8.testConnection(host, targetPort)
211206
localPort++
212207
}
213208

@@ -464,31 +459,6 @@ export class AccountManager {
464459
return receipt.status === Status.Success
465460
}
466461

467-
/**
468-
* to test the connection to the node within the network
469-
* @param podName the podName is only used for logging messages and errors
470-
* @param host the host of the target connection
471-
* @param port the port of the target connection
472-
* @returns {Promise<boolean>}
473-
*/
474-
async testConnection (podName, host, port) {
475-
const self = this
476-
477-
return new Promise((resolve, reject) => {
478-
const s = new net.Socket()
479-
s.on('error', (e) => {
480-
s.destroy()
481-
reject(new FullstackTestingError(`failed to connect to '${host}:${port}': ${e.message}`, e))
482-
})
483-
484-
s.connect(port, host, () => {
485-
self.logger.debug(`Connection test successful: ${host}:${port}`)
486-
s.destroy()
487-
resolve(true)
488-
})
489-
})
490-
}
491-
492462
/**
493463
* creates a new Hedera account
494464
* @param namespace the namespace to store the Kubernetes key secret into

src/core/constants.mjs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,10 @@ export const GENESIS_KEY = process.env.GENESIS_KEY || '302e020100300506032b65700
8383
export const SYSTEM_ACCOUNTS = [[3, 100], [200, 349], [400, 750], [900, 1000]] // do account 0.0.2 last and outside the loop
8484
export const TREASURY_ACCOUNT = 2
8585
export const LOCAL_NODE_START_PORT = process.env.LOCAL_NODE_START_PORT || 30212
86+
export const LOCAL_NODE_PROXY_START_PORT = process.env.LOCAL_NODE_PROXY_START_PORT || 30313
8687
export const ACCOUNT_CREATE_BATCH_SIZE = process.env.ACCOUNT_CREATE_BATCH_SIZE || 50
88+
export const NODE_PROXY_USER_ID = process.env.NODE_PROXY_USER_ID || 'admin'
89+
export const NODE_PROXY_PASSWORD = process.env.NODE_PROXY_PASSWORD || 'adminpwd'
8790

8891
export const POD_STATUS_RUNNING = 'Running'
8992
export const POD_STATUS_READY = 'Ready'

src/core/k8.mjs

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -671,9 +671,34 @@ export class K8 {
671671

672672
// add info for logging
673673
server.info = `${podName}:${podPort} -> ${constants.LOCAL_HOST}:${localPort}`
674+
this.logger.debug(`Starting port-forwarder [${server.info}]`)
674675
return server.listen(localPort, constants.LOCAL_HOST)
675676
}
676677

678+
/**
679+
* to test the connection to a pod within the network
680+
* @param host the host of the target connection
681+
* @param port the port of the target connection
682+
* @returns {Promise<boolean>}
683+
*/
684+
async testConnection (host, port) {
685+
const self = this
686+
687+
return new Promise((resolve, reject) => {
688+
const s = new net.Socket()
689+
s.on('error', (e) => {
690+
s.destroy()
691+
reject(new FullstackTestingError(`failed to connect to '${host}:${port}': ${e.message}`, e))
692+
})
693+
694+
s.connect(port, host, () => {
695+
self.logger.debug(`Connection test successful: ${host}:${port}`)
696+
s.destroy()
697+
resolve(true)
698+
})
699+
})
700+
}
701+
677702
/**
678703
* Stop the port forwarder server
679704
*

test/e2e/commands/01_node.test.mjs

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -97,5 +97,18 @@ describe.each([
9797
expect(e).toBeNull()
9898
}
9999
}, 20000)
100+
101+
it('Node Proxy should be UP', async () => {
102+
expect.assertions(1)
103+
104+
try {
105+
await expect(nodeCmd.checkNetworkNodeProxyUp('node0', 30313)).resolves.toBeTruthy()
106+
} catch (e) {
107+
nodeCmd.logger.showUserError(e)
108+
expect(e).toBeNull()
109+
} finally {
110+
await nodeCmd.close()
111+
}
112+
}, 20000)
100113
})
101114
})

test/e2e/commands/02_account.test.mjs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,9 +61,10 @@ describe('AccountCommand', () => {
6161
})
6262

6363
describe('node proxies should be UP', () => {
64+
let localPort = 30399
6465
for (const nodeId of argv[flags.nodeIDs.name].split(',')) {
6566
it(`proxy should be UP: ${nodeId} `, async () => {
66-
await nodeCmd.checkNetworkNodeProxyUp(namespace, nodeId)
67+
await nodeCmd.checkNetworkNodeProxyUp(nodeId, localPort++)
6768
}, 30000)
6869
}
6970
})

test/e2e/core/account_manager.test.mjs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,13 +40,13 @@ describe('AccountManager', () => {
4040

4141
// ports should be opened
4242
accountManager._portForwards.push(await k8.portForward(podName, localPort, podPort))
43-
const status = await accountManager.testConnection(podName, localHost, localPort)
43+
const status = await k8.testConnection(localHost, localPort)
4444
expect(status).toBeTruthy()
4545

4646
// ports should be closed
4747
await accountManager.close()
4848
try {
49-
await accountManager.testConnection(podName, localHost, localPort)
49+
await k8.testConnection(localHost, localPort)
5050
} catch (e) {
5151
expect(e.message.includes(`failed to connect to '${localHost}:${localPort}'`)).toBeTruthy()
5252
}

version.mjs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,4 +21,4 @@
2121

2222
export const JAVA_VERSION = '21.0.1+12'
2323
export const HELM_VERSION = 'v3.14.2'
24-
export const FST_CHART_VERSION = 'v0.23.0'
24+
export const FST_CHART_VERSION = 'v0.24.0'

0 commit comments

Comments (0)