Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: update backup uploader credential using s3 compatible keys #1741

Merged
merged 15 commits
Apr 2, 2025
Merged
45 changes: 25 additions & 20 deletions .github/workflows/script/gcs_test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -31,11 +31,6 @@ else
storageType=${STORAGE_TYPE}
fi

if [ -z "${GCP_SERVICE_ACCOUNT_TOKEN}" ]; then
echo "GCP_SERVICE_ACCOUNT_TOKEN is not set. Exiting..."
exit 1
fi

if [ -z "${PREFIX}" ]; then
echo "PREFIX is not set"
else
Expand All @@ -47,6 +42,12 @@ else
"--aws-write-secrets" "${GCS_SECRET_KEY}"
"--aws-bucket" "${streamBucket}"
"--aws-bucket-prefix" "${PREFIX}"

"--backupWriteSecrets" "${GCS_SECRET_KEY}"
"--backupWriteAccessKey" "${GCS_ACCESS_KEY}"
"--backupEndpoint" "storage.googleapis.com"
"--backupRegion" "us-central1"
"--backup-bucket" "${streamBackupBucket}"
)
elif [ "${storageType}" == "gcs_only" ]; then
STORAGE_OPTIONS=(
Expand All @@ -55,6 +56,12 @@ else
"--gcs-write-secrets" "${GCS_SECRET_KEY}"
"--gcs-bucket" "${streamBucket}"
"--gcs-bucket-prefix" "${PREFIX}"

"--backupWriteSecrets" "${GCS_SECRET_KEY}"
"--backupWriteAccessKey" "${GCS_ACCESS_KEY}"
"--backupEndpoint" "storage.googleapis.com"
"--backupRegion" "us-central1"
"--backup-bucket" "${streamBackupBucket}"
)
fi

Expand All @@ -72,8 +79,6 @@ fi
echo "STORAGE_OPTIONS: " "${STORAGE_OPTIONS[@]}"
echo "MIRROR_STORAGE_OPTIONS: " "${MIRROR_STORAGE_OPTIONS[@]}"

echo "${GCP_SERVICE_ACCOUNT_TOKEN}" > gcp_service_account.json

echo "Using bucket name: ${streamBucket}"
echo "Test storage type: ${storageType}"

Expand All @@ -97,9 +102,7 @@ npm run solo-test -- node keys --gossip-keys --tls-keys -i node1 --deployment "$

npm run solo-test -- network deploy --deployment "${SOLO_DEPLOYMENT}" -i node1 \
--storage-type "${storageType}" \
"${STORAGE_OPTIONS[@]}" \
--backup-bucket "${streamBackupBucket}" \
--google-credential gcp_service_account.json
"${STORAGE_OPTIONS[@]}"

npm run solo-test -- node setup -i node1 --deployment "${SOLO_DEPLOYMENT}"
npm run solo-test -- node start -i node1 --deployment "${SOLO_DEPLOYMENT}"
Expand All @@ -120,16 +123,18 @@ node examples/create-topic.js

npm run solo-test -- node stop -i node1 --deployment "${SOLO_DEPLOYMENT}"

echo "Waiting for backup uploader to run"
# manually call script "backup.sh" from container backup-uploader since it only runs every 5 minutes
kubectl exec network-node1-0 -c backup-uploader -n solo-e2e -- /backup.sh

echo "Retrieve logs and check if it include the error message"
# example : {"level":"error","msg":"Updated modification time ......}
kubectl logs network-node1-0 -c backup-uploader -n solo-e2e > backup-uploader.log
if grep -q \""error\"" backup-uploader.log; then
echo "Backup uploader logs contain error message"
exit 1
if [ "${storageType}" == "aws_only" ] || [ "${storageType}" == "gcs_only" ]; then
echo "Waiting for backup uploader to run"
# manually call script "backup.sh" from container backup-uploader since it only runs every 5 minutes
kubectl exec network-node1-0 -c backup-uploader -n solo-e2e -- /app/backup.sh

echo "Retrieve logs and check if it include the error message"
# example : {"level":"error","msg":"Updated modification time ......}
kubectl logs network-node1-0 -c backup-uploader -n solo-e2e > backup-uploader.log
if grep -q \""error\"" backup-uploader.log; then
echo "Backup uploader logs contain error message"
exit 1
fi
fi

npm run solo-test -- network destroy --deployment "${SOLO_DEPLOYMENT}" --force -q
6 changes: 3 additions & 3 deletions Taskfile.helper.yml
Original file line number Diff line number Diff line change
Expand Up @@ -244,7 +244,7 @@ tasks:
if [[ "${SOLO_CHART_VERSION}" != "" ]]; then
export SOLO_CHART_FLAG="--solo-chart-version ${SOLO_CHART_VERSION}"
fi
SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- network deploy --deployment "${SOLO_DEPLOYMENT}" --node-aliases {{.node_identifiers}} ${CONSENSUS_NODE_FLAG} ${SOLO_CHART_FLAG} ${VALUES_FLAG} ${SETTINGS_FLAG} ${LOG4J2_FLAG} ${APPLICATION_PROPERTIES_FLAG} ${GENESIS_THROTTLES_FLAG} ${DEBUG_NODE_FLAG} ${SOLO_CHARTS_DIR_FLAG} ${LOAD_BALANCER_FLAG} ${NETWORK_DEPLOY_EXTRA_FLAGS} -q --dev
SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- network deploy --deployment "${SOLO_DEPLOYMENT}" --node-aliases {{.node_identifiers}} ${CONSENSUS_NODE_FLAG} ${SOLO_CHART_FLAG} ${VALUES_FLAG} ${SETTINGS_FLAG} ${LOG4J2_FLAG} ${APPLICATION_PROPERTIES_FLAG} ${GENESIS_THROTTLES_FLAG} ${DEBUG_NODE_FLAG} ${LOAD_BALANCER_FLAG} ${NETWORK_DEPLOY_EXTRA_FLAGS} -q --dev
- task: "solo:node:setup"

solo:node:setup:
Expand Down Expand Up @@ -405,7 +405,7 @@ tasks:
cmds:
- |
export MINIO_FLAG=$(cat {{ .minio_flag_file }})
SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- cluster-ref setup --cluster-setup-namespace "${SOLO_CLUSTER_SETUP_NAMESPACE}" ${MINIO_FLAG} ${SOLO_CHARTS_DIR_FLAG} ${CLUSTER_TLS_FLAGS} -q --dev
SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- cluster-ref setup --cluster-setup-namespace "${SOLO_CLUSTER_SETUP_NAMESPACE}" ${MINIO_FLAG} ${CLUSTER_TLS_FLAGS} -q --dev

solo:node:addresses:
internal: true
Expand Down Expand Up @@ -507,7 +507,7 @@ tasks:
npm run solo -- explorer deploy --deployment "${EXPLORER_DEPLOYMENT}" --cluster-ref ${EXPLORER_CLUSTER_CONTEXT} --mirrorNamespace ${SOLO_NAMESPACE} ${SOLO_CHARTS_DIR_FLAG} ${EXPLORER_DEPLOY_EXTRA_FLAGS} ${ENABLE_EXPLORER_TLS_FLAG} ${TLS_CLUSTER_ISSUER_TYPE_FLAG} ${ENABLE_EXPLORER_INGRESS} -q --dev
export EXPLORER_DEPLOYED_NAME_SPACE=${EXPLORER_NAME_SPACE}
else
SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- explorer deploy --deployment "${SOLO_DEPLOYMENT}" --cluster-ref kind-${SOLO_CLUSTER_NAME} --mirrorNamespace ${SOLO_NAMESPACE} ${SOLO_CHARTS_DIR_FLAG} ${EXPLORER_DEPLOY_EXTRA_FLAGS} ${ENABLE_EXPLORER_TLS_FLAG} ${TLS_CLUSTER_ISSUER_TYPE_FLAG} ${ENABLE_EXPLORER_INGRESS} -q --dev
SOLO_HOME_DIR=${SOLO_HOME_DIR} npm run solo -- explorer deploy --deployment "${SOLO_DEPLOYMENT}" --cluster-ref kind-${SOLO_CLUSTER_NAME} --mirrorNamespace ${SOLO_NAMESPACE} ${EXPLORER_DEPLOY_EXTRA_FLAGS} ${ENABLE_EXPLORER_TLS_FLAG} ${TLS_CLUSTER_ISSUER_TYPE_FLAG} ${ENABLE_EXPLORER_INGRESS} -q --dev
export EXPLORER_DEPLOYED_NAME_SPACE=${SOLO_NAMESPACE}
fi
if [[ "{{ .use_port_forwards }}" == "true" ]];then
Expand Down
49 changes: 44 additions & 5 deletions src/commands/flags.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2170,12 +2170,48 @@ export class Flags {
prompt: undefined,
};

public static readonly googleCredential: CommandFlag = {
constName: 'googleCredential',
name: 'google-credential',
public static readonly backupWriteAccessKey: CommandFlag = {
constName: 'backupWriteAccessKey',
name: 'backup-write-access-key',
definition: {
defaultValue: '',
describe: 'path of google credential file in json format',
describe: 'backup storage access key for write access',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

public static readonly backupWriteSecrets: CommandFlag = {
constName: 'backupWriteSecrets',
name: 'backup-write-secrets',
definition: {
defaultValue: '',
describe: 'backup storage secret key for write access',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

public static readonly backupEndpoint: CommandFlag = {
constName: 'backupEndpoint',
name: 'backup-endpoint',
definition: {
defaultValue: '',
describe: 'backup storage endpoint URL',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

public static readonly backupRegion: CommandFlag = {
constName: 'backupRegion',
name: 'backup-region',
definition: {
defaultValue: 'us-central1',
describe: 'backup storage region',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
Expand Down Expand Up @@ -2441,7 +2477,10 @@ export class Flags {
Flags.storageBucket,
Flags.storageBucketPrefix,
Flags.backupBucket,
Flags.googleCredential,
Flags.backupWriteAccessKey,
Flags.backupWriteSecrets,
Flags.backupEndpoint,
Flags.backupRegion,
Flags.tlsClusterIssuerType,
Flags.tlsPrivateKey,
Flags.tlsPublicKey,
Expand Down
57 changes: 33 additions & 24 deletions src/commands/network.ts
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,10 @@
awsBucket: string;
awsBucketPrefix: string;
backupBucket: string;
googleCredential: string;
backupWriteSecrets: string;
backupWriteAccessKey: string;
backupEndpoint: string;
backupRegion: string;
consensusNodes: ConsensusNode[];
contexts: string[];
clusterRefs: ClusterReferences;
Expand Down Expand Up @@ -192,7 +195,10 @@
flags.awsBucket,
flags.awsBucketPrefix,
flags.backupBucket,
flags.googleCredential,
flags.backupWriteAccessKey,
flags.backupWriteSecrets,
flags.backupEndpoint,
flags.backupRegion,
flags.domainNames,
],
};
Expand Down Expand Up @@ -306,27 +312,30 @@
}

async prepareBackupUploaderSecrets(config: NetworkDeployConfigClass) {
if (config.googleCredential) {
const backupData = {};
const namespace = config.namespace;
const googleCredential = fs.readFileSync(config.googleCredential, 'utf8');
backupData['saJson'] = Base64.encode(googleCredential);

// create secret in each cluster
for (const context of config.contexts) {
this.logger.debug(`creating secret for backup uploader using context: ${context}`);

const k8client = this.k8Factory.getK8(context);
const isBackupSecretCreated = await k8client
.secrets()
.createOrReplace(namespace, constants.BACKUP_SECRET_NAME, SecretType.OPAQUE, backupData, undefined);

if (!isBackupSecretCreated) {
throw new SoloError(`failed to create secret for backup uploader using context: ${context}`);
}
const {backupWriteAccessKey, backupWriteSecrets, backupEndpoint, backupRegion} = config;
const backupData = {};
const namespace = config.namespace;
backupData['AWS_ACCESS_KEY_ID'] = Base64.encode(backupWriteAccessKey);
backupData['AWS_SECRET_ACCESS_KEY'] = Base64.encode(backupWriteSecrets);
backupData['RCLONE_CONFIG_BACKUPS_ENDPOINT'] = Base64.encode(backupEndpoint);
backupData['RCLONE_CONFIG_BACKUPS_REGION'] = Base64.encode(backupRegion);
backupData['RCLONE_CONFIG_BACKUPS_TYPE'] = Base64.encode('s3');
backupData['RCLONE_CONFIG_BACKUPS_PROVIDER'] = Base64.encode('GCS');

// create secret in each cluster
for (const context of config.contexts) {
this.logger.debug(`creating secret for backup uploader using context: ${context}`);

Check warning on line 327 in src/commands/network.ts

View check run for this annotation

Codecov / codecov/patch

src/commands/network.ts#L315-L327

Added lines #L315 - L327 were not covered by tests

this.logger.debug(`created secret for backup uploader using context: ${context}`);
const k8client = this.k8Factory.getK8(context);
const isBackupSecretCreated = await k8client
.secrets()
.createOrReplace(namespace, constants.BACKUP_SECRET_NAME, SecretType.OPAQUE, backupData, undefined);

if (!isBackupSecretCreated) {
throw new SoloError(`failed to create secret for backup uploader using context: ${context}`);

Check warning on line 335 in src/commands/network.ts

View check run for this annotation

Codecov / codecov/patch

src/commands/network.ts#L329-L335

Added lines #L329 - L335 were not covered by tests
}

this.logger.debug(`created secret for backup uploader using context: ${context}`);

Check warning on line 338 in src/commands/network.ts

View check run for this annotation

Codecov / codecov/patch

src/commands/network.ts#L337-L338

Added lines #L337 - L338 were not covered by tests
}
}

Expand All @@ -339,7 +348,9 @@
await this.prepareStreamUploaderSecrets(config);
}

await this.prepareBackupUploaderSecrets(config);
if (config.backupBucket) {
await this.prepareBackupUploaderSecrets(config);
}

Check warning on line 353 in src/commands/network.ts

View check run for this annotation

Codecov / codecov/patch

src/commands/network.ts#L352-L353

Added lines #L352 - L353 were not covered by tests
} catch (error: Error | any) {
throw new SoloError('Failed to create Kubernetes storage secret', error);
}
Expand Down Expand Up @@ -373,7 +384,6 @@
awsBucket: string;
awsBucketPrefix: string;
backupBucket: string;
googleCredential: string;
loadBalancerEnabled: boolean;
clusterRefs: ClusterReferences;
consensusNodes: ConsensusNode[];
Expand Down Expand Up @@ -436,7 +446,6 @@
awsBucket: string;
awsBucketPrefix: string;
backupBucket: string;
googleCredential: string;
loadBalancerEnabled: boolean;
domainNamesMapping?: Record<NodeAlias, string>;
}): Record<ClusterReference, string> {
Expand Down
6 changes: 0 additions & 6 deletions test/unit/core/helpers.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,6 @@ describe('Helpers', () => {
expect(clonedArray).not.to.equal(input); // ensure cloning creates a new array
});

it('Should parse argv to args with datamask correctly', () => {
const argv = {[flags.googleCredential.name]: 'VALUE'};
const result = flags.stringifyArgv(argv);
expect(result).to.equal(`--${flags.googleCredential.name} ${flags.googleCredential.definition.dataMask}`);
});

it('Should parse argv to args with boolean flag correctly', () => {
const argv = {[flags.quiet.name]: true};
const result = flags.stringifyArgv(argv);
Expand Down
2 changes: 1 addition & 1 deletion version.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import fs from 'node:fs';
*/

export const HELM_VERSION = 'v3.14.2';
export const SOLO_CHART_VERSION = '0.48.0';
export const SOLO_CHART_VERSION = '0.49.0';
export const HEDERA_PLATFORM_VERSION = 'v0.59.5';
export const MIRROR_NODE_VERSION = 'v0.126.0';
export const HEDERA_EXPLORER_VERSION = '24.12.1';
Expand Down
Loading