From 6a4898fd8f6384f7cbd375545d2f3075fb515329 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Wed, 2 Apr 2025 16:33:23 +0100 Subject: [PATCH 01/27] first implementation exercise Signed-off-by: Jeromy Cannon --- .../impl/config-map-storage-backend.ts | 85 +++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100644 src/data/backend/impl/config-map-storage-backend.ts diff --git a/src/data/backend/impl/config-map-storage-backend.ts b/src/data/backend/impl/config-map-storage-backend.ts new file mode 100644 index 000000000..9ab131a09 --- /dev/null +++ b/src/data/backend/impl/config-map-storage-backend.ts @@ -0,0 +1,85 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {type StorageBackend} from '../api/storage-backend.js'; +import {StorageOperation} from '../api/storage-operation.js'; +import {InjectTokens} from '../../../core/dependency-injection/inject-tokens.js'; +import {type K8Factory} from '../../../integration/kube/k8-factory.js'; +import {container} from 'tsyringe-neo'; +import {MissingArgumentError} from '../../../core/errors/missing-argument-error.js'; +import {type NamespaceName} from '../../../integration/kube/resources/namespace/namespace-name.js'; +import {type Context} from '../../../core/config/remote/types.js'; +import {type K8} from '../../../integration/kube/k8.js'; +import {type ConfigMap} from '../../../integration/kube/resources/config-map/config-map.js'; +import {StorageBackendError} from '../api/storage-backend-error.js'; + +export class ConfigMapStorageBackend implements StorageBackend { + private readonly k8: K8; + + // TODO only pass in ConfigMap, no K8 references. K8 will be handled from the business layer + // the key is the key within the data object within the configMap + public constructor( + private readonly namespaceName: NamespaceName, + private readonly kubeContext: Context, + private readonly labels: Record = {}, + ) { + if (!this.kubeContext) { + throw new MissingArgumentError('ConfigMapStorageBackend is missing the kubeContext argument'); + } + + this.k8 = container.resolve(InjectTokens.K8Factory).getK8(this.kubeContext); + } + + public async delete(key: string): Promise {} + + public isSupported(op: StorageOperation): boolean { + switch (op) { + case StorageOperation.List: + case StorageOperation.ReadBytes: + case StorageOperation.WriteBytes: + case StorageOperation.Delete: { + return true; + } + default: { + return false; + } + } + } + + public async list(): Promise { + return []; + } + + public async readBytes(key: string): Promise { + try { + const configMap: ConfigMap = await this.k8.configMaps().read(this.namespaceName, key); + + if (configMap) { + const data: Record = configMap.data; + + if (data && Object.keys(data).length > 0) { + const value: string = Object.values(data)[0]; + return Buffer.from(value, 'utf8'); + } else { + throw new StorageBackendError( + `config map is empty: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, + ); + } + } else { + throw new StorageBackendError( + `failed to read config map: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, + ); + } + } catch (error) { + throw error instanceof StorageBackendError + ? 
error + : new StorageBackendError( + `error reading config map: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, + error, + ); + } + } + + public async writeBytes(key: string, data: Buffer): Promise { + return; + } +} From ba26fcc9ef50cbe51f728884e2450e7a45735c02 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Wed, 2 Apr 2025 16:44:31 +0100 Subject: [PATCH 02/27] refactor: update LayeredModelConfigSource to implement ModelConfigSource interface Signed-off-by: Jeromy Cannon --- src/data/configuration/impl/layered-model-config-source.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/data/configuration/impl/layered-model-config-source.ts b/src/data/configuration/impl/layered-model-config-source.ts index 2f91f4857..025d214a8 100644 --- a/src/data/configuration/impl/layered-model-config-source.ts +++ b/src/data/configuration/impl/layered-model-config-source.ts @@ -8,10 +8,11 @@ import {ConfigurationError} from '../api/configuration-error.js'; import {IllegalArgumentError} from '../../../business/errors/illegal-argument-error.js'; import {Forest} from '../../key/lexer/forest.js'; import {type ObjectStorageBackend} from '../../backend/api/object-storage-backend.js'; +import {type ModelConfigSource} from '../spi/model-config-source.js'; export abstract class LayeredModelConfigSource extends LayeredConfigSource - implements LayeredModelConfigSource + implements ModelConfigSource { private _modelData: T; From 7c465f0340715c76a1751086166b868a0b806c7e Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 12:31:17 +0100 Subject: [PATCH 03/27] fix: update yaml parsing and stringifying methods to use the yaml package Signed-off-by: Jeromy Cannon --- src/data/backend/impl/yaml-file-storage-backend.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/data/backend/impl/yaml-file-storage-backend.ts b/src/data/backend/impl/yaml-file-storage-backend.ts index a30c2e483..332988beb 100644 --- a/src/data/backend/impl/yaml-file-storage-backend.ts +++ b/src/data/backend/impl/yaml-file-storage-backend.ts @@ -3,7 +3,7 @@ import {type ObjectStorageBackend} from '../api/object-storage-backend.js'; import {FileStorageBackend} from './file-storage-backend.js'; import {StorageBackendError} from '../api/storage-backend-error.js'; -import {parse, stringify} from 'yaml'; +import yaml from 'yaml'; import {IllegalArgumentError} from '../../../core/errors/illegal-argument-error.js'; import {PathEx} from '../../../business/utils/path-ex.js'; @@ -25,7 +25,7 @@ export class YamlFileStorageBackend extends FileStorageBackend implements Object } try { - return parse(data.toString('utf-8')); + return yaml.parse(data.toString('utf8')); } catch (error) { throw new StorageBackendError(`error parsing yaml file: ${filePath}`, error); } @@ -38,8 +38,8 @@ export class YamlFileStorageBackend extends FileStorageBackend implements Object const filePath: string = PathEx.join(this.basePath, key); try { - const yamlData: string = stringify(data, {sortMapEntries: true}); - await this.writeBytes(key, Buffer.from(yamlData, 'utf-8')); + const yamlData: string = yaml.stringify(data, {sortMapEntries: true}); + await this.writeBytes(key, Buffer.from(yamlData, 'utf8')); } catch (error) { throw new StorageBackendError(`error writing yaml file: ${filePath}`, error); } From a584c79ef9164bee029a9747fab1326fb9e2c192 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 12:31:27 +0100 Subject: [PATCH 04/27] feat: implement YamlConfigMapStorageBackend 
for YAML object storage support Signed-off-by: Jeromy Cannon --- .../impl/config-map-storage-backend.ts | 79 +++++++++---------- .../impl/yaml-config-map-storage-backend.ts | 39 +++++++++ 2 files changed, 77 insertions(+), 41 deletions(-) create mode 100644 src/data/backend/impl/yaml-config-map-storage-backend.ts diff --git a/src/data/backend/impl/config-map-storage-backend.ts b/src/data/backend/impl/config-map-storage-backend.ts index 9ab131a09..7d2f176f2 100644 --- a/src/data/backend/impl/config-map-storage-backend.ts +++ b/src/data/backend/impl/config-map-storage-backend.ts @@ -2,34 +2,36 @@ import {type StorageBackend} from '../api/storage-backend.js'; import {StorageOperation} from '../api/storage-operation.js'; -import {InjectTokens} from '../../../core/dependency-injection/inject-tokens.js'; -import {type K8Factory} from '../../../integration/kube/k8-factory.js'; -import {container} from 'tsyringe-neo'; import {MissingArgumentError} from '../../../core/errors/missing-argument-error.js'; -import {type NamespaceName} from '../../../integration/kube/resources/namespace/namespace-name.js'; -import {type Context} from '../../../core/config/remote/types.js'; -import {type K8} from '../../../integration/kube/k8.js'; import {type ConfigMap} from '../../../integration/kube/resources/config-map/config-map.js'; import {StorageBackendError} from '../api/storage-backend-error.js'; +/** + * ConfigMapStorageBackend is a storage backend that uses a {@link ConfigMap} to store data. + * The key will be the name of the property within the data object within the ConfigMap. + */ export class ConfigMapStorageBackend implements StorageBackend { - private readonly k8: K8; - - // TODO only pass in ConfigMap, no K8 references. K8 will be handled from the business layer - // the key is the key within the data object within the configMap - public constructor( - private readonly namespaceName: NamespaceName, - private readonly kubeContext: Context, - private readonly labels: Record = {}, - ) { - if (!this.kubeContext) { - throw new MissingArgumentError('ConfigMapStorageBackend is missing the kubeContext argument'); + public constructor(private readonly configMap: ConfigMap) { + if (!this.configMap) { + throw new MissingArgumentError('ConfigMapStorageBackend is missing the configMap argument'); } - - this.k8 = container.resolve(InjectTokens.K8Factory).getK8(this.kubeContext); } - public async delete(key: string): Promise {} + public async delete(key: string): Promise { + try { + const data: Record = this.configMap.data; + + if (data && Object.keys(data).length > 0 && data.hasOwnProperty(key)) { + delete data[key]; + } else { + throw new StorageBackendError(`key: ${key} not found in config map`); + } + } catch (error) { + throw error instanceof StorageBackendError + ? error + : new StorageBackendError(`error deleting config map data key: ${key}`, error); + } + } public isSupported(op: StorageOperation): boolean { switch (op) { @@ -46,40 +48,35 @@ export class ConfigMapStorageBackend implements StorageBackend { } public async list(): Promise { - return []; + const data: Record = this.configMap.data; + + return data ? 
Object.keys(data) : []; } public async readBytes(key: string): Promise { try { - const configMap: ConfigMap = await this.k8.configMaps().read(this.namespaceName, key); + const data: Record = this.configMap.data; - if (configMap) { - const data: Record = configMap.data; - - if (data && Object.keys(data).length > 0) { - const value: string = Object.values(data)[0]; - return Buffer.from(value, 'utf8'); - } else { - throw new StorageBackendError( - `config map is empty: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, - ); - } + if (data && Object.keys(data).length > 0) { + const value: string = Object.values(data)[0]; + return Buffer.from(value, 'utf8'); } else { - throw new StorageBackendError( - `failed to read config map: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, - ); + throw new StorageBackendError(`config map is empty: ${key}`); } } catch (error) { throw error instanceof StorageBackendError ? error - : new StorageBackendError( - `error reading config map: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, - error, - ); + : new StorageBackendError(`error reading config map: ${key}`, error); } } public async writeBytes(key: string, data: Buffer): Promise { - return; + try { + this.configMap.data[key] = data.toString('utf8'); + } catch (error) { + throw error instanceof StorageBackendError + ? error + : new StorageBackendError(`error writing config map: ${key}`, error); + } } } diff --git a/src/data/backend/impl/yaml-config-map-storage-backend.ts b/src/data/backend/impl/yaml-config-map-storage-backend.ts new file mode 100644 index 000000000..b019dd657 --- /dev/null +++ b/src/data/backend/impl/yaml-config-map-storage-backend.ts @@ -0,0 +1,39 @@ +// SPDX-License-Identifier: Apache-2.0 + +import yaml from 'yaml'; +import {ConfigMapStorageBackend} from './config-map-storage-backend.js'; +import {type ObjectStorageBackend} from '../api/object-storage-backend.js'; +import {StorageBackendError} from '../api/storage-backend-error.js'; +import {IllegalArgumentError} from '../../../core/errors/illegal-argument-error.js'; + +export class YamlConfigMapStorageBackend extends ConfigMapStorageBackend implements ObjectStorageBackend { + public async readObject(key: string): Promise { + const data: Buffer = await this.readBytes(key); + if (!data) { + throw new StorageBackendError(`failed to read key: ${key} from config map`); + } + + if (data.length === 0) { + throw new StorageBackendError(`data is empty for key: ${key}`); + } + + try { + return yaml.parse(data.toString('utf8')); + } catch (error) { + throw new StorageBackendError(`error parsing yaml from key: ${key}`, error); + } + } + + public async writeObject(key: string, data: object): Promise { + if (!data) { + throw new IllegalArgumentError('data must not be null or undefined'); + } + + try { + const yamlData: string = yaml.stringify(data, {sortMapEntries: true}); + await this.writeBytes(key, Buffer.from(yamlData, 'utf8')); + } catch (error) { + throw new StorageBackendError(`error writing yaml for key: ${key} to config map`, error); + } + } +} From 2366ed0549f852b87d17663afb4d948c41001d76 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 12:32:03 +0100 Subject: [PATCH 05/27] docs: add JSDoc comment for YamlConfigMapStorageBackend class Signed-off-by: Jeromy Cannon --- src/data/backend/impl/yaml-config-map-storage-backend.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/data/backend/impl/yaml-config-map-storage-backend.ts 
b/src/data/backend/impl/yaml-config-map-storage-backend.ts index b019dd657..80946dabe 100644 --- a/src/data/backend/impl/yaml-config-map-storage-backend.ts +++ b/src/data/backend/impl/yaml-config-map-storage-backend.ts @@ -6,6 +6,10 @@ import {type ObjectStorageBackend} from '../api/object-storage-backend.js'; import {StorageBackendError} from '../api/storage-backend-error.js'; import {IllegalArgumentError} from '../../../core/errors/illegal-argument-error.js'; +/** + * YamlConfigMapStorageBackend is a storage backend that uses a {@link ConfigMap} to store data. + * The key will be the name of the property within the data object within the ConfigMap. + */ export class YamlConfigMapStorageBackend extends ConfigMapStorageBackend implements ObjectStorageBackend { public async readObject(key: string): Promise { const data: Buffer = await this.readBytes(key); From b07c7ea09916e3242aebb5c60e88446abaa3faf6 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 12:38:20 +0100 Subject: [PATCH 06/27] refactor: rename CTObjectMapper to ClassToObjectMapper and update references Signed-off-by: Jeromy Cannon --- src/core/dependency-injection/container-init.ts | 4 ++-- .../impl/{ct-object-mapper.ts => class-to-object-mapper.ts} | 4 ++-- test/unit/data/configuration/impl/local-config-source.test.ts | 4 ++-- test/unit/data/schema/model/local/local-config.test.ts | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) rename src/data/mapper/impl/{ct-object-mapper.ts => class-to-object-mapper.ts} (97%) diff --git a/src/core/dependency-injection/container-init.ts b/src/core/dependency-injection/container-init.ts index d9e3e52c9..41265c617 100644 --- a/src/core/dependency-injection/container-init.ts +++ b/src/core/dependency-injection/container-init.ts @@ -31,7 +31,7 @@ import {NodeCommandTasks} from '../../commands/node/tasks.js'; import {ClusterCommandConfigs} from '../../commands/cluster/configs.js'; import {NodeCommandConfigs} from '../../commands/node/configs.js'; import {ErrorHandler} from '../error-handler.js'; -import {CTObjectMapper} from '../../data/mapper/impl/ct-object-mapper.js'; +import {ClassToObjectMapper} from '../../data/mapper/impl/class-to-object-mapper.js'; import {HelmExecutionBuilder} from '../../integration/helm/execution/helm-execution-builder.js'; import {DefaultHelmClient} from '../../integration/helm/impl/default-helm-client.js'; import {HelpRenderer} from '../help-renderer.js'; @@ -92,7 +92,7 @@ export class Container { } // Data Layer ObjectMapper - container.register(InjectTokens.ObjectMapper, {useClass: CTObjectMapper}, {lifecycle: Lifecycle.Singleton}); + container.register(InjectTokens.ObjectMapper, {useClass: ClassToObjectMapper}, {lifecycle: Lifecycle.Singleton}); container.register(InjectTokens.KeyFormatter, {useValue: ConfigKeyFormatter.instance()}); // Data Layer Config diff --git a/src/data/mapper/impl/ct-object-mapper.ts b/src/data/mapper/impl/class-to-object-mapper.ts similarity index 97% rename from src/data/mapper/impl/ct-object-mapper.ts rename to src/data/mapper/impl/class-to-object-mapper.ts index 627819f5f..822771a21 100644 --- a/src/data/mapper/impl/ct-object-mapper.ts +++ b/src/data/mapper/impl/class-to-object-mapper.ts @@ -14,11 +14,11 @@ import {type Primitive} from '../../../business/utils/primitive.js'; import {type PrimitiveArray} from '../../../business/utils/primitive-array.js'; @injectable() -export class CTObjectMapper implements ObjectMapper { +export class ClassToObjectMapper implements ObjectMapper { private readonly flatMapper: 
FlatKeyMapper; public constructor(@inject(InjectTokens.KeyFormatter) private readonly formatter: KeyFormatter) { - this.flatMapper = new FlatKeyMapper(patchInject(formatter, InjectTokens.KeyFormatter, CTObjectMapper.name)); + this.flatMapper = new FlatKeyMapper(patchInject(formatter, InjectTokens.KeyFormatter, ClassToObjectMapper.name)); } public fromArray(cls: ClassConstructor, array: object[]): R[] { diff --git a/test/unit/data/configuration/impl/local-config-source.test.ts b/test/unit/data/configuration/impl/local-config-source.test.ts index 9984386ab..6e4c0f3a3 100644 --- a/test/unit/data/configuration/impl/local-config-source.test.ts +++ b/test/unit/data/configuration/impl/local-config-source.test.ts @@ -4,7 +4,7 @@ import {expect} from 'chai'; import {LocalConfigSource} from '../../../../../src/data/configuration/impl/local-config-source.js'; import {LocalConfigSchema} from '../../../../../src/data/schema/migration/impl/local/local-config-schema.js'; import {type ObjectMapper} from '../../../../../src/data/mapper/api/object-mapper.js'; -import {CTObjectMapper} from '../../../../../src/data/mapper/impl/ct-object-mapper.js'; +import {ClassToObjectMapper} from '../../../../../src/data/mapper/impl/class-to-object-mapper.js'; import {ConfigKeyFormatter} from '../../../../../src/data/key/config-key-formatter.js'; import {SimpleObjectStorageBackend} from '../../../fixtures/simple-object-storage-backend.fixture.js'; @@ -16,7 +16,7 @@ describe('LocalConfigSource', () => { }); it('asBoolean with null value returns null', async () => { - const objectMapper: ObjectMapper = new CTObjectMapper(ConfigKeyFormatter.instance()); + const objectMapper: ObjectMapper = new ClassToObjectMapper(ConfigKeyFormatter.instance()); const map: Map = new Map(); map.set('local-config', { schemaVersion: 1, diff --git a/test/unit/data/schema/model/local/local-config.test.ts b/test/unit/data/schema/model/local/local-config.test.ts index f30305fc4..3a3a2e190 100644 --- a/test/unit/data/schema/model/local/local-config.test.ts +++ b/test/unit/data/schema/model/local/local-config.test.ts @@ -10,7 +10,7 @@ import os from 'node:os'; import {LocalConfig} from '../../../../../../src/data/schema/model/local/local-config.js'; import {Deployment} from '../../../../../../src/data/schema/model/local/deployment.js'; import {LocalConfigSchema} from '../../../../../../src/data/schema/migration/impl/local/local-config-schema.js'; -import {CTObjectMapper} from '../../../../../../src/data/mapper/impl/ct-object-mapper.js'; +import {ClassToObjectMapper} from '../../../../../../src/data/mapper/impl/class-to-object-mapper.js'; import {ApplicationVersions} from '../../../../../../src/data/schema/model/common/application-versions.js'; import { getSoloVersion, @@ -23,7 +23,7 @@ import { import {ConfigKeyFormatter} from '../../../../../../src/data/key/config-key-formatter.js'; describe('LocalConfig', () => { - const schema: LocalConfigSchema = new LocalConfigSchema(new CTObjectMapper(ConfigKeyFormatter.instance())); + const schema: LocalConfigSchema = new LocalConfigSchema(new ClassToObjectMapper(ConfigKeyFormatter.instance())); const localConfigPath = `test/data/v${getSoloVersion()}-local-config.yaml`; describe('Class Transformer', () => { From 0602da88ccb04b00fd4374b26728a3f66399f153 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 13:13:42 +0100 Subject: [PATCH 07/27] refactor: update name getter to return constructor name dynamically Signed-off-by: Jeromy Cannon --- src/data/configuration/impl/local-config-source.ts | 2 +- 
1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/data/configuration/impl/local-config-source.ts b/src/data/configuration/impl/local-config-source.ts index 845f9c096..7b1839db2 100644 --- a/src/data/configuration/impl/local-config-source.ts +++ b/src/data/configuration/impl/local-config-source.ts @@ -13,7 +13,7 @@ export class LocalConfigSource extends MutableModelConfigSource imp } public get name(): string { - return 'LocalConfigSource'; + return this.constructor.name; } public get ordinal(): number { From 16bca603748aad5dbb121739b9272df8293ead5d Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 13:14:06 +0100 Subject: [PATCH 08/27] feat: add RemoteConfigSource, RemoteConfigSchema, and migration for remote configuration management Signed-off-by: Jeromy Cannon --- .../impl/remote-config-source.ts | 26 +++++++++++++ .../impl/remote/remote-config-schema.ts | 35 ++++++++++++++++++ .../impl/remote/remote-config-v1-migration.ts | 37 +++++++++++++++++++ src/data/schema/model/remote/remote-config.ts | 3 ++ 4 files changed, 101 insertions(+) create mode 100644 src/data/configuration/impl/remote-config-source.ts create mode 100644 src/data/schema/migration/impl/remote/remote-config-schema.ts create mode 100644 src/data/schema/migration/impl/remote/remote-config-v1-migration.ts diff --git a/src/data/configuration/impl/remote-config-source.ts b/src/data/configuration/impl/remote-config-source.ts new file mode 100644 index 000000000..519ba0c5f --- /dev/null +++ b/src/data/configuration/impl/remote-config-source.ts @@ -0,0 +1,26 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {MutableModelConfigSource} from './mutable-model-config-source.js'; +import {type RemoteConfig} from '../../schema/model/remote/remote-config.js'; +import {type Refreshable} from '../spi/refreshable.js'; +import {type ObjectMapper} from '../../mapper/api/object-mapper.js'; +import {type ObjectStorageBackend} from '../../backend/api/object-storage-backend.js'; +import {type RemoteConfigSchema} from '../../schema/migration/impl/remote/remote-config-schema.js'; + +export class RemoteConfigSource extends MutableModelConfigSource implements Refreshable { + public constructor(schema: RemoteConfigSchema, mapper: ObjectMapper, backend: ObjectStorageBackend) { + super('remote-config-data', schema, backend, mapper); + } + + public get name(): string { + return this.constructor.name; + } + + public get ordinal(): number { + return 300; + } + + public async refresh(): Promise { + await this.load(); + } +} diff --git a/src/data/schema/migration/impl/remote/remote-config-schema.ts b/src/data/schema/migration/impl/remote/remote-config-schema.ts new file mode 100644 index 000000000..5549b2f9c --- /dev/null +++ b/src/data/schema/migration/impl/remote/remote-config-schema.ts @@ -0,0 +1,35 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {SchemaBase} from '../../api/schema-base.js'; +import {type Schema} from '../../api/schema.js'; +import {RemoteConfig} from '../../../model/remote/remote-config.js'; +import {type ClassConstructor} from '../../../../../business/utils/class-constructor.type.js'; +import {type SchemaMigration} from '../../api/schema-migration.js'; +import {type Version} from '../../../../../business/utils/version.js'; +import {InjectTokens} from '../../../../../core/dependency-injection/inject-tokens.js'; +import {type ObjectMapper} from '../../../../mapper/api/object-mapper.js'; +import {RemoteConfigV1Migration} from './remote-config-v1-migration.js'; +import {inject, injectable} from 
'tsyringe-neo'; + +@injectable() +export class RemoteConfigSchema extends SchemaBase implements Schema { + public constructor(@inject(InjectTokens.ObjectMapper) mapper: ObjectMapper) { + super(mapper); + } + + public get name(): string { + return RemoteConfig.name; + } + + public get version(): Version { + return RemoteConfig.SCHEMA_VERSION; + } + + public get classCtor(): ClassConstructor { + return RemoteConfig; + } + + public get migrations(): SchemaMigration[] { + return [new RemoteConfigV1Migration()]; + } +} diff --git a/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts new file mode 100644 index 000000000..608e750b6 --- /dev/null +++ b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {type SchemaMigration} from '../../api/schema-migration.js'; +import {VersionRange} from '../../../../../business/utils/version-range.js'; +import {Version} from '../../../../../business/utils/version.js'; + +import {IllegalArgumentError} from '../../../../../business/errors/illegal-argument-error.js'; +import {InvalidSchemaVersionError} from '../../api/invalid-schema-version-error.js'; + +export class RemoteConfigV1Migration implements SchemaMigration { + public get range(): VersionRange { + return VersionRange.fromIntegerVersion(0); + } + + public get version(): Version { + return new Version(1); + } + + public migrate(source: object): Promise { + if (!source) { + // We should never pass null or undefined to this method, if this happens we should throw an error + throw new IllegalArgumentError('source must not be null or undefined'); + } + + const clone: any = structuredClone(source); + + // TODO implement the migration + + if (clone.schemaVersion && clone.schemaVersion !== 0) { + // this case should never happen considering the field was not present in version 0 and should default to zero + // during this migration + throw new InvalidSchemaVersionError(clone.schemaVersion, 0); + } + + return clone; + } +} diff --git a/src/data/schema/model/remote/remote-config.ts b/src/data/schema/model/remote/remote-config.ts index 7a0fc3f67..27f8a1e62 100644 --- a/src/data/schema/model/remote/remote-config.ts +++ b/src/data/schema/model/remote/remote-config.ts @@ -6,9 +6,12 @@ import {ApplicationVersions} from '../common/application-versions.js'; import {Cluster} from '../common/cluster.js'; import {DeploymentState} from './deployment-state.js'; import {DeploymentHistory} from './deployment-history.js'; +import {Version} from '../../../../business/utils/version.js'; @Exclude() export class RemoteConfig { + public static readonly SCHEMA_VERSION: Version = new Version(1); + @Expose() public schemaVersion: number; From 90e482dd0f55331da6bb6c6e9d113cb2b04344dd Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 14:29:55 +0100 Subject: [PATCH 09/27] feat: implement migration logic for RemoteConfigV1, updating metadata and structure Signed-off-by: Jeromy Cannon --- .../impl/remote/remote-config-v1-migration.ts | 111 +++++++++++++++++- 1 file changed, 109 insertions(+), 2 deletions(-) diff --git a/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts index 608e750b6..5d469ee19 100644 --- a/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts +++ b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts @@ -6,6 +6,7 @@ import 
{Version} from '../../../../../business/utils/version.js'; import {IllegalArgumentError} from '../../../../../business/errors/illegal-argument-error.js'; import {InvalidSchemaVersionError} from '../../api/invalid-schema-version-error.js'; +import {getSoloVersion} from '../../../../../../version.js'; export class RemoteConfigV1Migration implements SchemaMigration { public get range(): VersionRange { @@ -22,16 +23,122 @@ export class RemoteConfigV1Migration implements SchemaMigration { throw new IllegalArgumentError('source must not be null or undefined'); } + // eslint-disable-next-line @typescript-eslint/no-explicit-any const clone: any = structuredClone(source); - // TODO implement the migration - if (clone.schemaVersion && clone.schemaVersion !== 0) { // this case should never happen considering the field was not present in version 0 and should default to zero // during this migration throw new InvalidSchemaVersionError(clone.schemaVersion, 0); } + // set the lastUpdated to now and system:migrate + clone.metadata = { + lastUpdatedAt: new Date(), + lastUpdatedBy: { + name: 'system', + hostname: 'migration', + }, + }; + + // pull the versions from the old config, if it isn't set, then it will be set to 0.0.0 until an upgrade for the component is performed + clone.versions = { + cli: clone.metadata.soloVersion ?? getSoloVersion(), + chart: clone.metadata.soloChartVersion ?? '0.0.0', + consensusNode: clone.metadata.hederaPlatformVersion ?? '0.0.0', + mirrorNodeChart: clone.metadata.hederaMirrorNodeChartVersion ?? '0.0.0', + explorerChart: clone.metadata.hederaExplorerChartVersion ?? '0.0.0', + jsonRpcRelayChart: clone.metadata.hederaJsonRpcRelayChartVersion ?? '0.0.0', + blockNodeChart: '', + }; + + // delete the old version structure + delete clone.metadata.soloVersion; + delete clone.metadata.soloChartVersion; + delete clone.metadata.hederaPlatformVersion; + delete clone.metadata.hederaMirrorNodeChartVersion; + delete clone.metadata.hederaExplorerChartVersion; + delete clone.metadata.hederaJsonRpcRelayChartVersion; + + // migrate the clusters + const clusters: object[] = []; + for (const cluster in Object.keys(clone.clusters)) { + const clusterObject: { + name: string; + namespace: string; + deployment: string; + dnsBaseDomain: string; + dnsConsensusNodePattern: string; + } = clone.clusters[cluster]; + + clusters.push({ + name: clusterObject.name, + namespace: clusterObject.namespace, + deployment: clusterObject.deployment, + dnsBaseDomain: clusterObject.dnsBaseDomain, + dnsConsensusNodePattern: clusterObject.dnsConsensusNodePattern, + }); + } + + // overlay the old cluster references with the new cluster references structure + clone.clusters = clusters; + + // now stored at the cluster level only + delete clone.metadata.namespace; + delete clone.metadata.deploymentName; + + // migrate the components + clone.state = { + ledgerPhase: 'initialized', + consensusNodes: [], + blockNodes: [], + mirrorNodes: [], + relayNodes: [], + haProxies: [], + envoyProxies: [], + explorers: [], + }; + + // migrate the consensus nodes + for (const consensusNode of Object.keys(clone.components.consensusNodes)) { + const component: { + name: string; + nodeId: number; + namespace: string; + cluster: string; + } = clone.components.consensusNodes[consensusNode]; + + clone.state.consensusNodes.push({ + id: component.nodeId, + name: component.name, + namespace: component.namespace, + cluster: component.cluster, + phase: 'started', + }); + } + + // delete the old components structure + delete clone.components; + + // 
migrate the history + clone.history = {}; + clone.history.commands = []; + for (const historyItem of Object.keys(clone.commandHistory)) { + clone.history.commands.push(historyItem); + } + + // delete the old command history + delete clone.commandHistory; + + // migrate the last executed command + clone.history.lastExecutedCommand = clone.lastExecutedCommand; + + // delete the old last executed command + delete clone.lastExecutedCommand; + + // Set the schema version to the new version + clone.schemaVersion = this.version.value; + return clone; } } From a847c999be62af693916e4413c0e050d64a1ecc9 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 16:09:36 +0100 Subject: [PATCH 10/27] feat: add TODO for completing remote config components after deployment Signed-off-by: Jeromy Cannon --- .../schema/migration/impl/remote/remote-config-v1-migration.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts index 5d469ee19..e484fd15f 100644 --- a/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts +++ b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts @@ -80,6 +80,8 @@ export class RemoteConfigV1Migration implements SchemaMigration { }); } + // TODO finish out the other components once we get the updated remote config from a deployed cluster + // overlay the old cluster references with the new cluster references structure clone.clusters = clusters; From 3063d6d50b745127c19bae6bb22169cd638e1bad Mon Sep 17 00:00:00 2001 From: Jeffrey Tang Date: Sun, 20 Apr 2025 16:28:44 -0500 Subject: [PATCH 11/27] save Signed-off-by: Jeffrey Tang --- .../yaml-config-map-storage-backend.test.ts | 70 +++++++++++++++++++ .../impl/remote-config-source.test.ts | 45 ++++++++++++ .../impl/remote/remote-config-schema.test.ts | 44 ++++++++++++ 3 files changed, 159 insertions(+) create mode 100644 test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts create mode 100644 test/unit/data/configuration/impl/remote-config-source.test.ts create mode 100644 test/unit/data/schema/migration/impl/remote/remote-config-schema.test.ts diff --git a/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts b/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts new file mode 100644 index 000000000..36271cbf4 --- /dev/null +++ b/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts @@ -0,0 +1,70 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {YamlConfigMapStorageBackend} from '../../../../../src/data/backend/impl/yaml-config-map-storage-backend.js'; +import {expect} from 'chai'; +import sinon from 'sinon'; +import {K8ClientConfigMap} from '../../../../../src/integration/kube/k8-client/resources/config-map/k8-client-config-map.js'; +import {NamespaceName} from '../../../../../src/integration/kube/resources/namespace/namespace-name.js'; + +describe('YamlConfigMapStorageBackend', () => { + let backend: YamlConfigMapStorageBackend; + + beforeEach(() => { + const namespace = NamespaceName.of('test-ns'); + const configMap: K8ClientConfigMap = new K8ClientConfigMap(namespace, 'test-cm', {}, {}); + backend = new YamlConfigMapStorageBackend(configMap); + }); + + describe('readObject', () => { + it('should parse YAML from readBytes', async () => { + const yamlStr: string = 'foo: bar\nnum: 42\n'; + const stub = sinon.stub(backend, 'readBytes').resolves(Buffer.from(yamlStr, 'utf8')); + const result = await 
backend.readObject('some-key'); + expect(result).to.deep.equal({foo: 'bar', num: 42}); + stub.restore(); + }); + + it('should throw if readBytes returns empty buffer', async () => { + sinon.stub(backend, 'readBytes').resolves(Buffer.from('', 'utf8')); + await expect(backend.readObject('empty-key')).to.be.rejectedWith('data is empty for key: empty-key'); + }); + + it('should throw if readBytes returns undefined', async () => { + sinon.stub(backend, 'readBytes').resolves(undefined as any); + await expect(backend.readObject('missing-key')).to.be.rejectedWith( + 'failed to read key: missing-key from config map', + ); + }); + + it('should throw on invalid YAML', async () => { + sinon.stub(backend, 'readBytes').resolves(Buffer.from('not: [valid, yaml', 'utf8')); + await expect(backend.readObject('bad-yaml')).to.be.rejectedWith('error parsing yaml from key: bad-yaml'); + }); + }); + + describe('writeObject', () => { + it('should write YAML string to writeBytes', async () => { + const stub = sinon.stub(backend, 'writeBytes').resolves(); + const data = {foo: 'bar', num: 42}; + await backend.writeObject('some-key', data); + expect(stub.calledOnce).to.be.true; + const written = stub.firstCall.args[1].toString('utf8'); + expect(written).to.include('foo: bar'); + expect(written).to.include('num: 42'); + stub.restore(); + }); + + it('should throw if data is null or undefined', async () => { + await expect(backend.writeObject('some-key', undefined as any)).to.be.rejectedWith( + 'data must not be null or undefined', + ); + }); + + it('should throw if writeBytes throws', async () => { + sinon.stub(backend, 'writeBytes').rejects(new Error('fail')); + await expect(backend.writeObject('some-key', {foo: 'bar'})).to.be.rejectedWith( + 'error writing yaml for key: some-key to config map', + ); + }); + }); +}); diff --git a/test/unit/data/configuration/impl/remote-config-source.test.ts b/test/unit/data/configuration/impl/remote-config-source.test.ts new file mode 100644 index 000000000..c7983b18f --- /dev/null +++ b/test/unit/data/configuration/impl/remote-config-source.test.ts @@ -0,0 +1,45 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {expect} from 'chai'; +import sinon from 'sinon'; +import {RemoteConfigSource} from '../../../../../src/data/configuration/impl/remote-config-source.js'; + +describe('RemoteConfigSource', () => { + let schema: any; + let mapper: any; + let backend: any; + + beforeEach(() => { + schema = {}; + mapper = {}; + backend = { + list: async () => [], + readBytes: async (_key: string) => Buffer.from([]), + writeBytes: async (_key: string, _data: Buffer) => {}, + readObject: async (_key: string) => ({}), + writeObject: async (_key: string, _data: object) => {}, + }; + }); + + it('should instantiate without error', () => { + expect(() => new RemoteConfigSource(schema, mapper, backend)).not.to.throw(); + }); + + it('should have name "RemoteConfigSource"', () => { + const source: RemoteConfigSource = new RemoteConfigSource(schema, mapper, backend); + expect(source.name).to.equal('RemoteConfigSource'); + }); + + it('should have ordinal 300', () => { + const source: RemoteConfigSource = new RemoteConfigSource(schema, mapper, backend); + expect(source.ordinal).to.equal(300); + }); + + it('should call load() when refresh() is called', async () => { + const source: RemoteConfigSource = new RemoteConfigSource(schema, mapper, backend); + const loadStub = sinon.stub(source, 'load').resolves(); + await source.refresh(); + expect(loadStub.calledOnce).to.be.true; + loadStub.restore(); + }); +}); 
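For reference, a minimal usage sketch (not part of any patch in this series) of how the classes introduced above compose: a ConfigMap-backed YAML storage backend feeding a RemoteConfigSource. It assumes the ConfigMap instance has already been fetched by the business layer (for example via k8.configMaps().read(namespace, name), as the pre-refactor PATCH 01 code did); the import paths are abbreviated and the `configMap` variable is an illustrative assumption, while the constructors, the 'remote-config-data' key, and refresh() delegating to load() come from the patches themselves.

import {ClassToObjectMapper} from 'src/data/mapper/impl/class-to-object-mapper.js';
import {ConfigKeyFormatter} from 'src/data/key/config-key-formatter.js';
import {RemoteConfigSchema} from 'src/data/schema/migration/impl/remote/remote-config-schema.js';
import {YamlConfigMapStorageBackend} from 'src/data/backend/impl/yaml-config-map-storage-backend.js';
import {RemoteConfigSource} from 'src/data/configuration/impl/remote-config-source.js';
import {type ConfigMap} from 'src/integration/kube/resources/config-map/config-map.js';

// Assumption: the ConfigMap was already read from the cluster by the business layer,
// e.g. k8.configMaps().read(namespace, name); only the wiring below is shown here.
declare const configMap: ConfigMap;

// Mapper/schema wiring mirrors the LocalConfigSource tests.
const mapper = new ClassToObjectMapper(ConfigKeyFormatter.instance());
const schema = new RemoteConfigSchema(mapper);

// Each backend key maps to a property of the ConfigMap's data object.
const backend = new YamlConfigMapStorageBackend(configMap);

// RemoteConfigSource reads the 'remote-config-data' key through the backend.
const source = new RemoteConfigSource(schema, mapper, backend);
await source.refresh(); // refresh() simply delegates to load()
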
diff --git a/test/unit/data/schema/migration/impl/remote/remote-config-schema.test.ts b/test/unit/data/schema/migration/impl/remote/remote-config-schema.test.ts new file mode 100644 index 000000000..451f75f4c --- /dev/null +++ b/test/unit/data/schema/migration/impl/remote/remote-config-schema.test.ts @@ -0,0 +1,44 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {RemoteConfigSchema} from '../../../../../../../src/data/schema/migration/impl/remote/remote-config-schema.js'; +import {RemoteConfigV1Migration} from '../../../../../../../src/data/schema/migration/impl/remote/remote-config-v1-migration.js'; +import {type ObjectMapper} from '../../../../../../../src/data/mapper/api/object-mapper.js'; +import {expect} from 'chai'; +import {RemoteConfig} from '../../../../../../../src/data/schema/model/remote/remote-config.js'; +import {type SchemaMigration} from '../../../../../../../src/data/schema/migration/api/schema-migration.js'; + +describe('RemoteConfigSchema', () => { + let objectMapper: ObjectMapper; + + beforeEach(() => { + // Mock ObjectMapper (can be a simple object as long as it's not used in logic) + objectMapper = {} as ObjectMapper; + }); + + it('should instantiate without error', () => { + expect(() => new RemoteConfigSchema(objectMapper)).not.to.throw(); + }); + + it('should return the correct name', () => { + const schema: RemoteConfigSchema = new RemoteConfigSchema(objectMapper); + expect(schema.name).to.be.equal('RemoteConfig'); + }); + + it('should return the correct version', () => { + const schema: RemoteConfigSchema = new RemoteConfigSchema(objectMapper); + expect(schema.version).equal(RemoteConfig.SCHEMA_VERSION); + }); + + it('should return the correct classCtor', () => { + const schema: RemoteConfigSchema = new RemoteConfigSchema(objectMapper); + expect(schema.classCtor).equal(RemoteConfig); + }); + + it('should return a migrations array containing RemoteConfigV1Migration', () => { + const schema: RemoteConfigSchema = new RemoteConfigSchema(objectMapper); + const migrations: SchemaMigration[] = schema.migrations; + expect(Array.isArray(migrations)).equal(true); + expect(migrations.length).equal(1); + expect(migrations[0]).instanceOf(RemoteConfigV1Migration); + }); +}); From 34040a60e674ce8583e2852b073e99c80b2036f9 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Wed, 2 Apr 2025 16:33:23 +0100 Subject: [PATCH 12/27] first implementation exercise Signed-off-by: Jeromy Cannon --- .../impl/config-map-storage-backend.ts | 85 +++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100644 src/data/backend/impl/config-map-storage-backend.ts diff --git a/src/data/backend/impl/config-map-storage-backend.ts b/src/data/backend/impl/config-map-storage-backend.ts new file mode 100644 index 000000000..9ab131a09 --- /dev/null +++ b/src/data/backend/impl/config-map-storage-backend.ts @@ -0,0 +1,85 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {type StorageBackend} from '../api/storage-backend.js'; +import {StorageOperation} from '../api/storage-operation.js'; +import {InjectTokens} from '../../../core/dependency-injection/inject-tokens.js'; +import {type K8Factory} from '../../../integration/kube/k8-factory.js'; +import {container} from 'tsyringe-neo'; +import {MissingArgumentError} from '../../../core/errors/missing-argument-error.js'; +import {type NamespaceName} from '../../../integration/kube/resources/namespace/namespace-name.js'; +import {type Context} from '../../../core/config/remote/types.js'; +import {type K8} from '../../../integration/kube/k8.js'; 
+import {type ConfigMap} from '../../../integration/kube/resources/config-map/config-map.js'; +import {StorageBackendError} from '../api/storage-backend-error.js'; + +export class ConfigMapStorageBackend implements StorageBackend { + private readonly k8: K8; + + // TODO only pass in ConfigMap, no K8 references. K8 will be handled from the business layer + // the key is the key within the data object within the configMap + public constructor( + private readonly namespaceName: NamespaceName, + private readonly kubeContext: Context, + private readonly labels: Record = {}, + ) { + if (!this.kubeContext) { + throw new MissingArgumentError('ConfigMapStorageBackend is missing the kubeContext argument'); + } + + this.k8 = container.resolve(InjectTokens.K8Factory).getK8(this.kubeContext); + } + + public async delete(key: string): Promise {} + + public isSupported(op: StorageOperation): boolean { + switch (op) { + case StorageOperation.List: + case StorageOperation.ReadBytes: + case StorageOperation.WriteBytes: + case StorageOperation.Delete: { + return true; + } + default: { + return false; + } + } + } + + public async list(): Promise { + return []; + } + + public async readBytes(key: string): Promise { + try { + const configMap: ConfigMap = await this.k8.configMaps().read(this.namespaceName, key); + + if (configMap) { + const data: Record = configMap.data; + + if (data && Object.keys(data).length > 0) { + const value: string = Object.values(data)[0]; + return Buffer.from(value, 'utf8'); + } else { + throw new StorageBackendError( + `config map is empty: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, + ); + } + } else { + throw new StorageBackendError( + `failed to read config map: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, + ); + } + } catch (error) { + throw error instanceof StorageBackendError + ? 
error + : new StorageBackendError( + `error reading config map: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, + error, + ); + } + } + + public async writeBytes(key: string, data: Buffer): Promise { + return; + } +} From 4c8698144fb94a3ed287ed765c7cf2cf6bd0b492 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Wed, 2 Apr 2025 16:44:31 +0100 Subject: [PATCH 13/27] refactor: update LayeredModelConfigSource to implement ModelConfigSource interface Signed-off-by: Jeromy Cannon --- src/data/configuration/impl/layered-model-config-source.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/data/configuration/impl/layered-model-config-source.ts b/src/data/configuration/impl/layered-model-config-source.ts index 2f91f4857..025d214a8 100644 --- a/src/data/configuration/impl/layered-model-config-source.ts +++ b/src/data/configuration/impl/layered-model-config-source.ts @@ -8,10 +8,11 @@ import {ConfigurationError} from '../api/configuration-error.js'; import {IllegalArgumentError} from '../../../business/errors/illegal-argument-error.js'; import {Forest} from '../../key/lexer/forest.js'; import {type ObjectStorageBackend} from '../../backend/api/object-storage-backend.js'; +import {type ModelConfigSource} from '../spi/model-config-source.js'; export abstract class LayeredModelConfigSource extends LayeredConfigSource - implements LayeredModelConfigSource + implements ModelConfigSource { private _modelData: T; From 4e5e3a14d9fc233953a518aa998834e4463c7a0f Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 12:31:17 +0100 Subject: [PATCH 14/27] fix: update yaml parsing and stringifying methods to use the yaml package Signed-off-by: Jeromy Cannon --- src/data/backend/impl/yaml-file-storage-backend.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/data/backend/impl/yaml-file-storage-backend.ts b/src/data/backend/impl/yaml-file-storage-backend.ts index a30c2e483..332988beb 100644 --- a/src/data/backend/impl/yaml-file-storage-backend.ts +++ b/src/data/backend/impl/yaml-file-storage-backend.ts @@ -3,7 +3,7 @@ import {type ObjectStorageBackend} from '../api/object-storage-backend.js'; import {FileStorageBackend} from './file-storage-backend.js'; import {StorageBackendError} from '../api/storage-backend-error.js'; -import {parse, stringify} from 'yaml'; +import yaml from 'yaml'; import {IllegalArgumentError} from '../../../core/errors/illegal-argument-error.js'; import {PathEx} from '../../../business/utils/path-ex.js'; @@ -25,7 +25,7 @@ export class YamlFileStorageBackend extends FileStorageBackend implements Object } try { - return parse(data.toString('utf-8')); + return yaml.parse(data.toString('utf8')); } catch (error) { throw new StorageBackendError(`error parsing yaml file: ${filePath}`, error); } @@ -38,8 +38,8 @@ export class YamlFileStorageBackend extends FileStorageBackend implements Object const filePath: string = PathEx.join(this.basePath, key); try { - const yamlData: string = stringify(data, {sortMapEntries: true}); - await this.writeBytes(key, Buffer.from(yamlData, 'utf-8')); + const yamlData: string = yaml.stringify(data, {sortMapEntries: true}); + await this.writeBytes(key, Buffer.from(yamlData, 'utf8')); } catch (error) { throw new StorageBackendError(`error writing yaml file: ${filePath}`, error); } From 3d7e1f54ce742ee2edb2dd5a2b9587f8140f4db7 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 12:31:27 +0100 Subject: [PATCH 15/27] feat: implement YamlConfigMapStorageBackend 
for YAML object storage support Signed-off-by: Jeromy Cannon --- .../impl/config-map-storage-backend.ts | 79 +++++++++---------- .../impl/yaml-config-map-storage-backend.ts | 39 +++++++++ 2 files changed, 77 insertions(+), 41 deletions(-) create mode 100644 src/data/backend/impl/yaml-config-map-storage-backend.ts diff --git a/src/data/backend/impl/config-map-storage-backend.ts b/src/data/backend/impl/config-map-storage-backend.ts index 9ab131a09..7d2f176f2 100644 --- a/src/data/backend/impl/config-map-storage-backend.ts +++ b/src/data/backend/impl/config-map-storage-backend.ts @@ -2,34 +2,36 @@ import {type StorageBackend} from '../api/storage-backend.js'; import {StorageOperation} from '../api/storage-operation.js'; -import {InjectTokens} from '../../../core/dependency-injection/inject-tokens.js'; -import {type K8Factory} from '../../../integration/kube/k8-factory.js'; -import {container} from 'tsyringe-neo'; import {MissingArgumentError} from '../../../core/errors/missing-argument-error.js'; -import {type NamespaceName} from '../../../integration/kube/resources/namespace/namespace-name.js'; -import {type Context} from '../../../core/config/remote/types.js'; -import {type K8} from '../../../integration/kube/k8.js'; import {type ConfigMap} from '../../../integration/kube/resources/config-map/config-map.js'; import {StorageBackendError} from '../api/storage-backend-error.js'; +/** + * ConfigMapStorageBackend is a storage backend that uses a {@link ConfigMap} to store data. + * The key will be the name of the property within the data object within the ConfigMap. + */ export class ConfigMapStorageBackend implements StorageBackend { - private readonly k8: K8; - - // TODO only pass in ConfigMap, no K8 references. K8 will be handled from the business layer - // the key is the key within the data object within the configMap - public constructor( - private readonly namespaceName: NamespaceName, - private readonly kubeContext: Context, - private readonly labels: Record = {}, - ) { - if (!this.kubeContext) { - throw new MissingArgumentError('ConfigMapStorageBackend is missing the kubeContext argument'); + public constructor(private readonly configMap: ConfigMap) { + if (!this.configMap) { + throw new MissingArgumentError('ConfigMapStorageBackend is missing the configMap argument'); } - - this.k8 = container.resolve(InjectTokens.K8Factory).getK8(this.kubeContext); } - public async delete(key: string): Promise {} + public async delete(key: string): Promise { + try { + const data: Record = this.configMap.data; + + if (data && Object.keys(data).length > 0 && data.hasOwnProperty(key)) { + delete data[key]; + } else { + throw new StorageBackendError(`key: ${key} not found in config map`); + } + } catch (error) { + throw error instanceof StorageBackendError + ? error + : new StorageBackendError(`error deleting config map data key: ${key}`, error); + } + } public isSupported(op: StorageOperation): boolean { switch (op) { @@ -46,40 +48,35 @@ export class ConfigMapStorageBackend implements StorageBackend { } public async list(): Promise { - return []; + const data: Record = this.configMap.data; + + return data ? 
Object.keys(data) : []; } public async readBytes(key: string): Promise { try { - const configMap: ConfigMap = await this.k8.configMaps().read(this.namespaceName, key); + const data: Record = this.configMap.data; - if (configMap) { - const data: Record = configMap.data; - - if (data && Object.keys(data).length > 0) { - const value: string = Object.values(data)[0]; - return Buffer.from(value, 'utf8'); - } else { - throw new StorageBackendError( - `config map is empty: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, - ); - } + if (data && Object.keys(data).length > 0) { + const value: string = Object.values(data)[0]; + return Buffer.from(value, 'utf8'); } else { - throw new StorageBackendError( - `failed to read config map: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, - ); + throw new StorageBackendError(`config map is empty: ${key}`); } } catch (error) { throw error instanceof StorageBackendError ? error - : new StorageBackendError( - `error reading config map: ${key}, from namespace: ${this.namespaceName}, context: ${this.kubeContext}`, - error, - ); + : new StorageBackendError(`error reading config map: ${key}`, error); } } public async writeBytes(key: string, data: Buffer): Promise { - return; + try { + this.configMap.data[key] = data.toString('utf8'); + } catch (error) { + throw error instanceof StorageBackendError + ? error + : new StorageBackendError(`error writing config map: ${key}`, error); + } } } diff --git a/src/data/backend/impl/yaml-config-map-storage-backend.ts b/src/data/backend/impl/yaml-config-map-storage-backend.ts new file mode 100644 index 000000000..b019dd657 --- /dev/null +++ b/src/data/backend/impl/yaml-config-map-storage-backend.ts @@ -0,0 +1,39 @@ +// SPDX-License-Identifier: Apache-2.0 + +import yaml from 'yaml'; +import {ConfigMapStorageBackend} from './config-map-storage-backend.js'; +import {type ObjectStorageBackend} from '../api/object-storage-backend.js'; +import {StorageBackendError} from '../api/storage-backend-error.js'; +import {IllegalArgumentError} from '../../../core/errors/illegal-argument-error.js'; + +export class YamlConfigMapStorageBackend extends ConfigMapStorageBackend implements ObjectStorageBackend { + public async readObject(key: string): Promise { + const data: Buffer = await this.readBytes(key); + if (!data) { + throw new StorageBackendError(`failed to read key: ${key} from config map`); + } + + if (data.length === 0) { + throw new StorageBackendError(`data is empty for key: ${key}`); + } + + try { + return yaml.parse(data.toString('utf8')); + } catch (error) { + throw new StorageBackendError(`error parsing yaml from key: ${key}`, error); + } + } + + public async writeObject(key: string, data: object): Promise { + if (!data) { + throw new IllegalArgumentError('data must not be null or undefined'); + } + + try { + const yamlData: string = yaml.stringify(data, {sortMapEntries: true}); + await this.writeBytes(key, Buffer.from(yamlData, 'utf8')); + } catch (error) { + throw new StorageBackendError(`error writing yaml for key: ${key} to config map`, error); + } + } +} From 22b12c279c62b41e8b4aa35b8112a329940c13ea Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 12:32:03 +0100 Subject: [PATCH 16/27] docs: add JSDoc comment for YamlConfigMapStorageBackend class Signed-off-by: Jeromy Cannon --- src/data/backend/impl/yaml-config-map-storage-backend.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/data/backend/impl/yaml-config-map-storage-backend.ts 
b/src/data/backend/impl/yaml-config-map-storage-backend.ts index b019dd657..80946dabe 100644 --- a/src/data/backend/impl/yaml-config-map-storage-backend.ts +++ b/src/data/backend/impl/yaml-config-map-storage-backend.ts @@ -6,6 +6,10 @@ import {type ObjectStorageBackend} from '../api/object-storage-backend.js'; import {StorageBackendError} from '../api/storage-backend-error.js'; import {IllegalArgumentError} from '../../../core/errors/illegal-argument-error.js'; +/** + * YamlConfigMapStorageBackend is a storage backend that uses a {@link ConfigMap} to store data. + * The key will be the name of the property within the data object within the ConfigMap. + */ export class YamlConfigMapStorageBackend extends ConfigMapStorageBackend implements ObjectStorageBackend { public async readObject(key: string): Promise { const data: Buffer = await this.readBytes(key); From df13d38a2e756d6f1154a0bb7b4585913603e950 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 12:38:20 +0100 Subject: [PATCH 17/27] refactor: rename CTObjectMapper to ClassToObjectMapper and update references Signed-off-by: Jeromy Cannon --- src/core/dependency-injection/container-init.ts | 4 ++-- .../impl/{ct-object-mapper.ts => class-to-object-mapper.ts} | 4 ++-- test/unit/data/configuration/impl/local-config-source.test.ts | 4 ++-- test/unit/data/schema/model/local/local-config.test.ts | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) rename src/data/mapper/impl/{ct-object-mapper.ts => class-to-object-mapper.ts} (97%) diff --git a/src/core/dependency-injection/container-init.ts b/src/core/dependency-injection/container-init.ts index d9e3e52c9..41265c617 100644 --- a/src/core/dependency-injection/container-init.ts +++ b/src/core/dependency-injection/container-init.ts @@ -31,7 +31,7 @@ import {NodeCommandTasks} from '../../commands/node/tasks.js'; import {ClusterCommandConfigs} from '../../commands/cluster/configs.js'; import {NodeCommandConfigs} from '../../commands/node/configs.js'; import {ErrorHandler} from '../error-handler.js'; -import {CTObjectMapper} from '../../data/mapper/impl/ct-object-mapper.js'; +import {ClassToObjectMapper} from '../../data/mapper/impl/class-to-object-mapper.js'; import {HelmExecutionBuilder} from '../../integration/helm/execution/helm-execution-builder.js'; import {DefaultHelmClient} from '../../integration/helm/impl/default-helm-client.js'; import {HelpRenderer} from '../help-renderer.js'; @@ -92,7 +92,7 @@ export class Container { } // Data Layer ObjectMapper - container.register(InjectTokens.ObjectMapper, {useClass: CTObjectMapper}, {lifecycle: Lifecycle.Singleton}); + container.register(InjectTokens.ObjectMapper, {useClass: ClassToObjectMapper}, {lifecycle: Lifecycle.Singleton}); container.register(InjectTokens.KeyFormatter, {useValue: ConfigKeyFormatter.instance()}); // Data Layer Config diff --git a/src/data/mapper/impl/ct-object-mapper.ts b/src/data/mapper/impl/class-to-object-mapper.ts similarity index 97% rename from src/data/mapper/impl/ct-object-mapper.ts rename to src/data/mapper/impl/class-to-object-mapper.ts index 627819f5f..822771a21 100644 --- a/src/data/mapper/impl/ct-object-mapper.ts +++ b/src/data/mapper/impl/class-to-object-mapper.ts @@ -14,11 +14,11 @@ import {type Primitive} from '../../../business/utils/primitive.js'; import {type PrimitiveArray} from '../../../business/utils/primitive-array.js'; @injectable() -export class CTObjectMapper implements ObjectMapper { +export class ClassToObjectMapper implements ObjectMapper { private readonly flatMapper: 
FlatKeyMapper; public constructor(@inject(InjectTokens.KeyFormatter) private readonly formatter: KeyFormatter) { - this.flatMapper = new FlatKeyMapper(patchInject(formatter, InjectTokens.KeyFormatter, CTObjectMapper.name)); + this.flatMapper = new FlatKeyMapper(patchInject(formatter, InjectTokens.KeyFormatter, ClassToObjectMapper.name)); } public fromArray(cls: ClassConstructor, array: object[]): R[] { diff --git a/test/unit/data/configuration/impl/local-config-source.test.ts b/test/unit/data/configuration/impl/local-config-source.test.ts index 9984386ab..6e4c0f3a3 100644 --- a/test/unit/data/configuration/impl/local-config-source.test.ts +++ b/test/unit/data/configuration/impl/local-config-source.test.ts @@ -4,7 +4,7 @@ import {expect} from 'chai'; import {LocalConfigSource} from '../../../../../src/data/configuration/impl/local-config-source.js'; import {LocalConfigSchema} from '../../../../../src/data/schema/migration/impl/local/local-config-schema.js'; import {type ObjectMapper} from '../../../../../src/data/mapper/api/object-mapper.js'; -import {CTObjectMapper} from '../../../../../src/data/mapper/impl/ct-object-mapper.js'; +import {ClassToObjectMapper} from '../../../../../src/data/mapper/impl/class-to-object-mapper.js'; import {ConfigKeyFormatter} from '../../../../../src/data/key/config-key-formatter.js'; import {SimpleObjectStorageBackend} from '../../../fixtures/simple-object-storage-backend.fixture.js'; @@ -16,7 +16,7 @@ describe('LocalConfigSource', () => { }); it('asBoolean with null value returns null', async () => { - const objectMapper: ObjectMapper = new CTObjectMapper(ConfigKeyFormatter.instance()); + const objectMapper: ObjectMapper = new ClassToObjectMapper(ConfigKeyFormatter.instance()); const map: Map = new Map(); map.set('local-config', { schemaVersion: 1, diff --git a/test/unit/data/schema/model/local/local-config.test.ts b/test/unit/data/schema/model/local/local-config.test.ts index f30305fc4..3a3a2e190 100644 --- a/test/unit/data/schema/model/local/local-config.test.ts +++ b/test/unit/data/schema/model/local/local-config.test.ts @@ -10,7 +10,7 @@ import os from 'node:os'; import {LocalConfig} from '../../../../../../src/data/schema/model/local/local-config.js'; import {Deployment} from '../../../../../../src/data/schema/model/local/deployment.js'; import {LocalConfigSchema} from '../../../../../../src/data/schema/migration/impl/local/local-config-schema.js'; -import {CTObjectMapper} from '../../../../../../src/data/mapper/impl/ct-object-mapper.js'; +import {ClassToObjectMapper} from '../../../../../../src/data/mapper/impl/class-to-object-mapper.js'; import {ApplicationVersions} from '../../../../../../src/data/schema/model/common/application-versions.js'; import { getSoloVersion, @@ -23,7 +23,7 @@ import { import {ConfigKeyFormatter} from '../../../../../../src/data/key/config-key-formatter.js'; describe('LocalConfig', () => { - const schema: LocalConfigSchema = new LocalConfigSchema(new CTObjectMapper(ConfigKeyFormatter.instance())); + const schema: LocalConfigSchema = new LocalConfigSchema(new ClassToObjectMapper(ConfigKeyFormatter.instance())); const localConfigPath = `test/data/v${getSoloVersion()}-local-config.yaml`; describe('Class Transformer', () => { From 40bb8ce9fe292e58f7628714b1f3eee57c7d5c61 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 13:13:42 +0100 Subject: [PATCH 18/27] refactor: update name getter to return constructor name dynamically Signed-off-by: Jeromy Cannon --- src/data/configuration/impl/local-config-source.ts | 2 +- 
1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/data/configuration/impl/local-config-source.ts b/src/data/configuration/impl/local-config-source.ts index 845f9c096..7b1839db2 100644 --- a/src/data/configuration/impl/local-config-source.ts +++ b/src/data/configuration/impl/local-config-source.ts @@ -13,7 +13,7 @@ export class LocalConfigSource extends MutableModelConfigSource imp } public get name(): string { - return 'LocalConfigSource'; + return this.constructor.name; } public get ordinal(): number { From 50d38e500971f3ccd1ce1713bd17f6e435ac1a7e Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 13:14:06 +0100 Subject: [PATCH 19/27] feat: add RemoteConfigSource, RemoteConfigSchema, and migration for remote configuration management Signed-off-by: Jeromy Cannon --- .../impl/remote-config-source.ts | 26 +++++++++++++ .../impl/remote/remote-config-schema.ts | 35 ++++++++++++++++++ .../impl/remote/remote-config-v1-migration.ts | 37 +++++++++++++++++++ src/data/schema/model/remote/remote-config.ts | 3 ++ 4 files changed, 101 insertions(+) create mode 100644 src/data/configuration/impl/remote-config-source.ts create mode 100644 src/data/schema/migration/impl/remote/remote-config-schema.ts create mode 100644 src/data/schema/migration/impl/remote/remote-config-v1-migration.ts diff --git a/src/data/configuration/impl/remote-config-source.ts b/src/data/configuration/impl/remote-config-source.ts new file mode 100644 index 000000000..519ba0c5f --- /dev/null +++ b/src/data/configuration/impl/remote-config-source.ts @@ -0,0 +1,26 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {MutableModelConfigSource} from './mutable-model-config-source.js'; +import {type RemoteConfig} from '../../schema/model/remote/remote-config.js'; +import {type Refreshable} from '../spi/refreshable.js'; +import {type ObjectMapper} from '../../mapper/api/object-mapper.js'; +import {type ObjectStorageBackend} from '../../backend/api/object-storage-backend.js'; +import {type RemoteConfigSchema} from '../../schema/migration/impl/remote/remote-config-schema.js'; + +export class RemoteConfigSource extends MutableModelConfigSource implements Refreshable { + public constructor(schema: RemoteConfigSchema, mapper: ObjectMapper, backend: ObjectStorageBackend) { + super('remote-config-data', schema, backend, mapper); + } + + public get name(): string { + return this.constructor.name; + } + + public get ordinal(): number { + return 300; + } + + public async refresh(): Promise { + await this.load(); + } +} diff --git a/src/data/schema/migration/impl/remote/remote-config-schema.ts b/src/data/schema/migration/impl/remote/remote-config-schema.ts new file mode 100644 index 000000000..5549b2f9c --- /dev/null +++ b/src/data/schema/migration/impl/remote/remote-config-schema.ts @@ -0,0 +1,35 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {SchemaBase} from '../../api/schema-base.js'; +import {type Schema} from '../../api/schema.js'; +import {RemoteConfig} from '../../../model/remote/remote-config.js'; +import {type ClassConstructor} from '../../../../../business/utils/class-constructor.type.js'; +import {type SchemaMigration} from '../../api/schema-migration.js'; +import {type Version} from '../../../../../business/utils/version.js'; +import {InjectTokens} from '../../../../../core/dependency-injection/inject-tokens.js'; +import {type ObjectMapper} from '../../../../mapper/api/object-mapper.js'; +import {RemoteConfigV1Migration} from './remote-config-v1-migration.js'; +import {inject, injectable} from 
'tsyringe-neo';
+
+@injectable()
+export class RemoteConfigSchema extends SchemaBase implements Schema {
+  public constructor(@inject(InjectTokens.ObjectMapper) mapper: ObjectMapper) {
+    super(mapper);
+  }
+
+  public get name(): string {
+    return RemoteConfig.name;
+  }
+
+  public get version(): Version {
+    return RemoteConfig.SCHEMA_VERSION;
+  }
+
+  public get classCtor(): ClassConstructor {
+    return RemoteConfig;
+  }
+
+  public get migrations(): SchemaMigration[] {
+    return [new RemoteConfigV1Migration()];
+  }
+}
diff --git a/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts
new file mode 100644
index 000000000..608e750b6
--- /dev/null
+++ b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts
@@ -0,0 +1,37 @@
+// SPDX-License-Identifier: Apache-2.0
+
+import {type SchemaMigration} from '../../api/schema-migration.js';
+import {VersionRange} from '../../../../../business/utils/version-range.js';
+import {Version} from '../../../../../business/utils/version.js';
+
+import {IllegalArgumentError} from '../../../../../business/errors/illegal-argument-error.js';
+import {InvalidSchemaVersionError} from '../../api/invalid-schema-version-error.js';
+
+export class RemoteConfigV1Migration implements SchemaMigration {
+  public get range(): VersionRange {
+    return VersionRange.fromIntegerVersion(0);
+  }
+
+  public get version(): Version {
+    return new Version(1);
+  }
+
+  public migrate(source: object): Promise {
+    if (!source) {
+      // We should never pass null or undefined to this method, if this happens we should throw an error
+      throw new IllegalArgumentError('source must not be null or undefined');
+    }
+
+    const clone: any = structuredClone(source);
+
+    // TODO implement the migration
+
+    if (clone.schemaVersion && clone.schemaVersion !== 0) {
+      // this case should never happen considering the field was not present in version 0 and should default to zero
+      // during this migration
+      throw new InvalidSchemaVersionError(clone.schemaVersion, 0);
+    }
+
+    return clone;
+  }
+}
diff --git a/src/data/schema/model/remote/remote-config.ts b/src/data/schema/model/remote/remote-config.ts
index 7a0fc3f67..27f8a1e62 100644
--- a/src/data/schema/model/remote/remote-config.ts
+++ b/src/data/schema/model/remote/remote-config.ts
@@ -6,9 +6,12 @@ import {ApplicationVersions} from '../common/application-versions.js';
 import {Cluster} from '../common/cluster.js';
 import {DeploymentState} from './deployment-state.js';
 import {DeploymentHistory} from './deployment-history.js';
+import {Version} from '../../../../business/utils/version.js';
 
 @Exclude()
 export class RemoteConfig {
+  public static readonly SCHEMA_VERSION: Version = new Version(1);
+
   @Expose()
   public schemaVersion: number;
 
From 1c4e54b13e97eaf0faf1bb26aeac14cb3a0ea02f Mon Sep 17 00:00:00 2001
From: Jeromy Cannon
Date: Thu, 17 Apr 2025 14:29:55 +0100
Subject: [PATCH 20/27] feat: implement migration logic for RemoteConfigV1, updating metadata and structure

Signed-off-by: Jeromy Cannon
---
 .../impl/remote/remote-config-v1-migration.ts | 111 +++++++++++++++++-
 1 file changed, 109 insertions(+), 2 deletions(-)

diff --git a/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts
index 608e750b6..5d469ee19 100644
--- a/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts
+++ b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts
@@ -6,6 +6,7 @@ import {Version} from '../../../../../business/utils/version.js';
 
 import {IllegalArgumentError} from '../../../../../business/errors/illegal-argument-error.js';
 import {InvalidSchemaVersionError} from '../../api/invalid-schema-version-error.js';
+import {getSoloVersion} from '../../../../../../version.js';
 
 export class RemoteConfigV1Migration implements SchemaMigration {
   public get range(): VersionRange {
@@ -22,16 +23,122 @@ export class RemoteConfigV1Migration implements SchemaMigration {
       throw new IllegalArgumentError('source must not be null or undefined');
     }
 
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
     const clone: any = structuredClone(source);
 
-    // TODO implement the migration
-
     if (clone.schemaVersion && clone.schemaVersion !== 0) {
       // this case should never happen considering the field was not present in version 0 and should default to zero
       // during this migration
       throw new InvalidSchemaVersionError(clone.schemaVersion, 0);
     }
 
+    // set the lastUpdated to now and system:migrate
+    clone.metadata = {
+      lastUpdatedAt: new Date(),
+      lastUpdatedBy: {
+        name: 'system',
+        hostname: 'migration',
+      },
+    };
+
+    // pull the versions from the old config, if it isn't set, then it will be set to 0.0.0 until an upgrade for the component is performed
+    clone.versions = {
+      cli: clone.metadata.soloVersion ?? getSoloVersion(),
+      chart: clone.metadata.soloChartVersion ?? '0.0.0',
+      consensusNode: clone.metadata.hederaPlatformVersion ?? '0.0.0',
+      mirrorNodeChart: clone.metadata.hederaMirrorNodeChartVersion ?? '0.0.0',
+      explorerChart: clone.metadata.hederaExplorerChartVersion ?? '0.0.0',
+      jsonRpcRelayChart: clone.metadata.hederaJsonRpcRelayChartVersion ?? '0.0.0',
+      blockNodeChart: '',
+    };
+
+    // delete the old version structure
+    delete clone.metadata.soloVersion;
+    delete clone.metadata.soloChartVersion;
+    delete clone.metadata.hederaPlatformVersion;
+    delete clone.metadata.hederaMirrorNodeChartVersion;
+    delete clone.metadata.hederaExplorerChartVersion;
+    delete clone.metadata.hederaJsonRpcRelayChartVersion;
+
+    // migrate the clusters
+    const clusters: object[] = [];
+    for (const cluster in Object.keys(clone.clusters)) {
+      const clusterObject: {
+        name: string;
+        namespace: string;
+        deployment: string;
+        dnsBaseDomain: string;
+        dnsConsensusNodePattern: string;
+      } = clone.clusters[cluster];
+
+      clusters.push({
+        name: clusterObject.name,
+        namespace: clusterObject.namespace,
+        deployment: clusterObject.deployment,
+        dnsBaseDomain: clusterObject.dnsBaseDomain,
+        dnsConsensusNodePattern: clusterObject.dnsConsensusNodePattern,
+      });
+    }
+
+    // overlay the old cluster references with the new cluster references structure
+    clone.clusters = clusters;
+
+    // now stored at the cluster level only
+    delete clone.metadata.namespace;
+    delete clone.metadata.deploymentName;
+
+    // migrate the components
+    clone.state = {
+      ledgerPhase: 'initialized',
+      consensusNodes: [],
+      blockNodes: [],
+      mirrorNodes: [],
+      relayNodes: [],
+      haProxies: [],
+      envoyProxies: [],
+      explorers: [],
+    };
+
+    // migrate the consensus nodes
+    for (const consensusNode of Object.keys(clone.components.consensusNodes)) {
+      const component: {
+        name: string;
+        nodeId: number;
+        namespace: string;
+        cluster: string;
+      } = clone.components.consensusNodes[consensusNode];
+
+      clone.state.consensusNodes.push({
+        id: component.nodeId,
+        name: component.name,
+        namespace: component.namespace,
+        cluster: component.cluster,
+        phase: 'started',
+      });
+    }
+
+    // delete the old components structure
+    delete clone.components;
+
+    //
migrate the history + clone.history = {}; + clone.history.commands = []; + for (const historyItem of Object.keys(clone.commandHistory)) { + clone.history.commands.push(historyItem); + } + + // delete the old command history + delete clone.commandHistory; + + // migrate the last executed command + clone.history.lastExecutedCommand = clone.lastExecutedCommand; + + // delete the old last executed command + delete clone.lastExecutedCommand; + + // Set the schema version to the new version + clone.schemaVersion = this.version.value; + return clone; } } From c48989ebba1905b47e875274105d762a886758e1 Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Thu, 17 Apr 2025 16:09:36 +0100 Subject: [PATCH 21/27] feat: add TODO for completing remote config components after deployment Signed-off-by: Jeromy Cannon --- .../schema/migration/impl/remote/remote-config-v1-migration.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts index 5d469ee19..e484fd15f 100644 --- a/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts +++ b/src/data/schema/migration/impl/remote/remote-config-v1-migration.ts @@ -80,6 +80,8 @@ export class RemoteConfigV1Migration implements SchemaMigration { }); } + // TODO finish out the other components once we get the updated remote config from a deployed cluster + // overlay the old cluster references with the new cluster references structure clone.clusters = clusters; From db6e1229ebbb30f1d7d036e32f782ead8e66fbae Mon Sep 17 00:00:00 2001 From: Jeromy Cannon Date: Mon, 21 Apr 2025 20:32:38 +0100 Subject: [PATCH 22/27] feat: update remote configuration with new component structure and versioning Signed-off-by: Jeromy Cannon --- test/data/v0-35-1-remote-config.yaml | 201 ++++++++++++++++++++------- 1 file changed, 149 insertions(+), 52 deletions(-) diff --git a/test/data/v0-35-1-remote-config.yaml b/test/data/v0-35-1-remote-config.yaml index 50cad9d45..0bbcd17d7 100644 --- a/test/data/v0-35-1-remote-config.yaml +++ b/test/data/v0-35-1-remote-config.yaml @@ -3,57 +3,154 @@ metadata: deploymentName: alpha-prod lastUpdatedAt: 2025-02-17T02:25:26.149000Z lastUpdateBy: nathan@swirldslabs.com - soloChartVersion: "" - hederaPlatformVersion: "" - hederaMirrorNodeChartVersion: "" - hederaExplorerChartVersion: "" - hederaJsonRpcRelayChartVersion: "" + soloChartVersion: 0.44.0 + hederaPlatformVersion: v0.58.10 + hederaMirrorNodeChartVersion: v0.122 + hederaExplorerChartVersion: 24.12.0 + hederaJsonRpcRelayChartVersion: v0.63.2 soloVersion: 0.34.0 -version: 1.0.0 -clusters: - gke-alpha-prod-us-central1: - name: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - deployment: alpha-prod - dnsBaseDomain: cluster.local - dnsConsensusNodePattern: network-${nodeAlias}-svc.${namespace}.svc -components: - relays: {} - haProxies: {} - mirrorNodes: {} - envoyProxies: {} - consensusNodes: - node1: - name: node1 - cluster: gke-alpha-prod-us-central1 + version: 1.0.0 + clusters: + gke-alpha-prod-us-central1: + name: gke-alpha-prod-us-central1 namespace: solo-alpha-prod - state: requested - nodeId: 0 - node2: - name: node2 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - state: requested - nodeId: 1 - node3: - name: node3 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - state: requested - nodeId: 2 - node4: - name: node4 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - state: requested - nodeId: 3 
- mirrorNodeExplorers: {} -commandHistory: - - deployment create -lastExecutedCommand: deployment create -flags: - nodeAliasesUnparsed: node1,node2,node3,node4 - releaseTag: v0.58.10 - relayReleaseTag: v0.63.2 - hederaExplorerVersion: 24.12.0 - mirrorNodeVersion: v0.122 + deployment: alpha-prod + dnsBaseDomain: cluster.local + dnsConsensusNodePattern: network-${nodeAlias}-svc.${namespace}.svc + components: + relays: + relay: + consensusNodeAliases: + - node1 + name: relay + cluster: gke_hashsphere-production_us-central1_alpha-prod-us-central1 + namespace: solo-alpha-prod + haProxies: + haproxy-node1: + name: haproxy-node1 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + haproxy-node2: + name: haproxy-node2 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + haproxy-node3: + name: haproxy-node3 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + haproxy-node4: + name: haproxy-node4 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + mirrorNodes: + mirrorNode: + name: mirrorNode + cluster: gke_hashsphere-production_us-central1_alpha-prod-us-central1 + namespace: solo-alpha-prod + envoyProxies: + envoy-proxy-node1: + name: envoy-proxy-node1 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + envoy-proxy-node2: + name: envoy-proxy-node2 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + envoy-proxy-node3: + name: envoy-proxy-node3 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + envoy-proxy-node4: + name: envoy-proxy-node4 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + consensusNodes: + node1: + name: node1 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + state: started + nodeId: 0 + node2: + name: node2 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + state: started + nodeId: 1 + node3: + name: node3 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + state: started + nodeId: 2 + node4: + name: node4 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + state: started + nodeId: 3 + mirrorNodeExplorers: + mirrorNodeExplorer: + name: mirrorNodeExplorer + cluster: gke_hashsphere-production_us-central1_alpha-prod-us-central1 + namespace: solo-alpha-prod + commandHistory: + - deployment create + - "Executed by nathan@swirldslabs.com: network deploy --deployment alpha-prod + --values-file + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/init-c\ + ontainers-values.yaml --settings-txt + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/settin\ + gs.txt --log4j2-xml + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/log4j2\ + .xml --application-properties + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/applic\ + ation.properties --genesis-throttles-file + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/thrott\ + les.json --pvcs --storage-type gcs_only --gcs-endpoint *** --gcs-bucket *** + --gcs-access-key *** --gcs-secrets *** --backup-bucket *** + --google-credential *** --load-balancer 
--quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: node setup --deployment alpha-prod + --node-aliases node1,node2,node3,node4 --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: node start --deployment alpha-prod + --node-aliases node1,node2,node3,node4 --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: account init --dev --deployment + alpha-prod" + - "Executed by nathan@swirldslabs.com: mirror-node deploy --deployment + alpha-prod --operator-id 0.0.2 --operator-key *** --storage-type gcs_only + --storage-endpoint *** --storage-bucket *** --storage-access-key *** + --storage-secrets *** --values-file + /var/folders/pr/l0t3xfzx1cscm1kmy63mq24m0000gp/T/tmp.gSbIiYI8ip.yaml + --pinger --use-external-database --external-database-host 10.0.6.2 + --external-database-owner-username mirror_provisioner + --external-database-owner-password *** --external-database-read-username + mirror_node --external-database-read-password *** --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: explorer deploy --namespace + solo-alpha-prod --deployment alpha-prod --values-file + /var/folders/pr/l0t3xfzx1cscm1kmy63mq24m0000gp/T/tmp.gSbIiYI8ip.yaml + --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: explorer deploy --namespace + solo-alpha-prod --deployment alpha-prod --values-file + /var/folders/pr/l0t3xfzx1cscm1kmy63mq24m0000gp/T/tmp.gSbIiYI8ip.yaml + --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: relay deploy --deployment alpha-prod + --node-aliases node1 --values-file + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/relay-\ + values.yaml --operator-id 0.0.2 --operator-key *** --quiet-mode --dev" + lastExecutedCommand: "Executed by nathan@swirldslabs.com: relay deploy + --deployment alpha-prod --node-aliases node1 --values-file + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/pro\ + duction/google/hashsphere-production/sphere-alpha-prod/assets/solo/relay-valu\ + es.yaml --operator-id 0.0.2 --operator-key *** --quiet-mode --dev" + flags: + nodeAliasesUnparsed: node1,node2,node3,node4 + releaseTag: v0.58.10 + relayReleaseTag: v0.63.2 + hederaExplorerVersion: 24.12.0 + mirrorNodeVersion: v0.122 From 907c09a00dbf2a1e4f226312ef31e51571512a7b Mon Sep 17 00:00:00 2001 From: Jeffrey Tang Date: Mon, 21 Apr 2025 23:22:03 -0500 Subject: [PATCH 23/27] fix bug in function readBytes Signed-off-by: Jeffrey Tang --- .../impl/config-map-storage-backend.ts | 2 +- .../impl/config-map-storage-backend.test.ts | 92 +++++++++++++++++++ .../yaml-config-map-storage-backend.test.ts | 4 +- 3 files changed, 95 insertions(+), 3 deletions(-) create mode 100644 test/unit/data/backend/impl/config-map-storage-backend.test.ts diff --git a/src/data/backend/impl/config-map-storage-backend.ts b/src/data/backend/impl/config-map-storage-backend.ts index 7d2f176f2..45a51d71e 100644 --- a/src/data/backend/impl/config-map-storage-backend.ts +++ b/src/data/backend/impl/config-map-storage-backend.ts @@ -58,7 +58,7 @@ export class ConfigMapStorageBackend implements StorageBackend { const data: Record = this.configMap.data; if (data && Object.keys(data).length > 0) { - const value: string = Object.values(data)[0]; + const value: string = data[key]; return Buffer.from(value, 'utf8'); } else { throw new StorageBackendError(`config map is empty: ${key}`); diff --git a/test/unit/data/backend/impl/config-map-storage-backend.test.ts 
b/test/unit/data/backend/impl/config-map-storage-backend.test.ts new file mode 100644 index 000000000..0770a2f09 --- /dev/null +++ b/test/unit/data/backend/impl/config-map-storage-backend.test.ts @@ -0,0 +1,92 @@ +// SPDX-License-Identifier: Apache-2.0 + +import {expect} from 'chai'; +import {ConfigMapStorageBackend} from '../../../../../src/data/backend/impl/config-map-storage-backend.js'; +import {StorageOperation} from '../../../../../src/data/backend/api/storage-operation.js'; +import {StorageBackendError} from '../../../../../src/data/backend/api/storage-backend-error.js'; +import {type ConfigMap} from '../../../../../src/integration/kube/resources/config-map/config-map.js'; +import {K8ClientConfigMap} from '../../../../../src/integration/kube/k8-client/resources/config-map/k8-client-config-map.js'; +import {NamespaceName} from '../../../../../src/integration/kube/resources/namespace/namespace-name.js'; + +describe('ConfigMapStorageBackend', (): void => { + let configMap: ConfigMap; + let backend: ConfigMapStorageBackend; + + beforeEach((): void => { + configMap = new K8ClientConfigMap( + NamespaceName.of('test-ns'), + 'name', + {label1: 'why', label2: 'not'}, + {foo: 'bar', baz: 'qux'}, + ); + backend = new ConfigMapStorageBackend(configMap); + }); + + it('should throw if configMap is missing', (): void => { + expect((): ConfigMapStorageBackend => new ConfigMapStorageBackend(undefined)).to.throw( + 'ConfigMapStorageBackend is missing the configMap argument', + ); + }); + + describe('delete', (): void => { + it('should delete a key', async (): Promise => { + await backend.delete('foo'); + expect(configMap.data).to.not.have.property('foo'); + }); + it('should throw if key not found', async (): Promise => { + await expect(backend.delete('notfound')).to.be.rejectedWith('key: notfound not found in config map'); + }); + }); + + describe('isSupported', () => { + it('should return true for supported operations', (): void => { + expect(backend.isSupported(StorageOperation.List)).to.be.true; + expect(backend.isSupported(StorageOperation.ReadBytes)).to.be.true; + expect(backend.isSupported(StorageOperation.WriteBytes)).to.be.true; + expect(backend.isSupported(StorageOperation.Delete)).to.be.true; + }); + it('should return false for unsupported operations', (): void => { + expect(backend.isSupported(StorageOperation.ReadObject)).to.be.false; + }); + }); + + describe('list', (): void => { + it('should return all keys in the configMap data', async (): Promise => { + const keys: string[] = await backend.list(); + expect(keys).to.have.members(['foo', 'baz']); + }); + it('should return an empty array if data is missing', async (): Promise => { + backend = new ConfigMapStorageBackend({data: undefined, name: '', namespace: undefined}); + const keys: string[] = await backend.list(); + expect(keys).to.deep.equal([]); + }); + }); + + describe('readBytes', (): void => { + it('should return Buffer for existing key', async (): Promise => { + const buf: Buffer = await backend.readBytes('foo'); + expect(buf.toString('utf8')).to.equal('bar'); + }); + it('should throw if key not found', async (): Promise => { + await expect(backend.readBytes('notfound')).to.be.rejectedWith(StorageBackendError); + }); + }); + + describe('writeBytes', (): void => { + it('should write buffer data to the configMap', async (): Promise => { + const buf: Buffer = Buffer.from('new-value', 'utf8'); + await backend.writeBytes('foo', buf); + expect(configMap.data.foo).to.equal('new-value'); + }); + it('should add new key if not present', 
async (): Promise => { + const buf: Buffer = Buffer.from('another', 'utf8'); + await backend.writeBytes('new-key', buf); + expect(configMap.data['new-key']).to.equal('another'); + }); + it('should throw if data is missing', async (): Promise => { + backend = new ConfigMapStorageBackend({data: undefined, name: '', namespace: undefined}); + const buf: Buffer = Buffer.from('something', 'utf8'); + await expect(backend.writeBytes('foo', buf)).to.be.rejectedWith(StorageBackendError); + }); + }); +}); diff --git a/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts b/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts index 36271cbf4..995e63719 100644 --- a/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts +++ b/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts @@ -17,8 +17,8 @@ describe('YamlConfigMapStorageBackend', () => { describe('readObject', () => { it('should parse YAML from readBytes', async () => { - const yamlStr: string = 'foo: bar\nnum: 42\n'; - const stub = sinon.stub(backend, 'readBytes').resolves(Buffer.from(yamlStr, 'utf8')); + const yamlString: string = 'foo: bar\nnum: 42\n'; + const stub = sinon.stub(backend, 'readBytes').resolves(Buffer.from(yamlString, 'utf8')); const result = await backend.readObject('some-key'); expect(result).to.deep.equal({foo: 'bar', num: 42}); stub.restore(); From 17615835e23c1b8968f3d1bc1affc8efaf6ccb7d Mon Sep 17 00:00:00 2001 From: Jeffrey Tang Date: Wed, 23 Apr 2025 10:35:01 -0500 Subject: [PATCH 24/27] save Signed-off-by: Jeffrey Tang --- .../impl/config-map-storage-backend.test.ts | 42 ++++++++++- .../yaml-config-map-storage-backend.test.ts | 38 +++++----- .../impl/remote-config-source.test.ts | 72 +++++++++++++------ 3 files changed, 112 insertions(+), 40 deletions(-) diff --git a/test/unit/data/backend/impl/config-map-storage-backend.test.ts b/test/unit/data/backend/impl/config-map-storage-backend.test.ts index 0770a2f09..75e9af7d1 100644 --- a/test/unit/data/backend/impl/config-map-storage-backend.test.ts +++ b/test/unit/data/backend/impl/config-map-storage-backend.test.ts @@ -36,9 +36,20 @@ describe('ConfigMapStorageBackend', (): void => { it('should throw if key not found', async (): Promise => { await expect(backend.delete('notfound')).to.be.rejectedWith('key: notfound not found in config map'); }); + it('should trigger unexpected errors in delete', async (): Promise => { + // Simulate configMap.data throwing an unexpected error + const badBackend: ConfigMapStorageBackend = new ConfigMapStorageBackend({ + get data(): void { + throw new Error('unexpected'); + }, + name: '', + namespace: undefined, + } as never); + await expect(badBackend.delete('foo')).to.be.rejectedWith('error deleting config map data key: foo'); + }); }); - describe('isSupported', () => { + describe('isSupported', (): void => { it('should return true for supported operations', (): void => { expect(backend.isSupported(StorageOperation.List)).to.be.true; expect(backend.isSupported(StorageOperation.ReadBytes)).to.be.true; @@ -70,6 +81,24 @@ describe('ConfigMapStorageBackend', (): void => { it('should throw if key not found', async (): Promise => { await expect(backend.readBytes('notfound')).to.be.rejectedWith(StorageBackendError); }); + it('should throw if configMap.data is empty or undefined in readBytes', async (): Promise => { + const emptyBackend: ConfigMapStorageBackend = new ConfigMapStorageBackend({ + data: undefined, + name: '', + namespace: undefined, + }); + await 
expect(emptyBackend.readBytes('foo')).to.be.rejectedWith('config map is empty: foo'); + }); + it('should trigger unexpected errors in readBytes', async (): Promise => { + const badBackend: ConfigMapStorageBackend = new ConfigMapStorageBackend({ + get data(): void { + throw new Error('unexpected'); + }, + name: '', + namespace: undefined, + } as never); + await expect(badBackend.readBytes('foo')).to.be.rejectedWith('error reading config map: foo'); + }); }); describe('writeBytes', (): void => { @@ -88,5 +117,16 @@ describe('ConfigMapStorageBackend', (): void => { const buf: Buffer = Buffer.from('something', 'utf8'); await expect(backend.writeBytes('foo', buf)).to.be.rejectedWith(StorageBackendError); }); + it('should trigger unexpected errors in writeBytes', async (): Promise => { + const badBackend: ConfigMapStorageBackend = new ConfigMapStorageBackend({ + get data(): void { + throw new Error('unexpected'); + }, + name: '', + namespace: undefined, + } as never); + const buf: Buffer = Buffer.from('fail', 'utf8'); + await expect(badBackend.writeBytes('foo', buf)).to.be.rejectedWith('error writing config map: foo'); + }); }); }); diff --git a/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts b/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts index 995e63719..cb30aa5a2 100644 --- a/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts +++ b/test/unit/data/backend/impl/yaml-config-map-storage-backend.test.ts @@ -6,61 +6,61 @@ import sinon from 'sinon'; import {K8ClientConfigMap} from '../../../../../src/integration/kube/k8-client/resources/config-map/k8-client-config-map.js'; import {NamespaceName} from '../../../../../src/integration/kube/resources/namespace/namespace-name.js'; -describe('YamlConfigMapStorageBackend', () => { +describe('YamlConfigMapStorageBackend', (): void => { let backend: YamlConfigMapStorageBackend; - beforeEach(() => { - const namespace = NamespaceName.of('test-ns'); + beforeEach((): void => { + const namespace: NamespaceName = NamespaceName.of('test-ns'); const configMap: K8ClientConfigMap = new K8ClientConfigMap(namespace, 'test-cm', {}, {}); backend = new YamlConfigMapStorageBackend(configMap); }); - describe('readObject', () => { - it('should parse YAML from readBytes', async () => { + describe('readObject', (): void => { + it('should parse YAML from readBytes', async (): Promise => { const yamlString: string = 'foo: bar\nnum: 42\n'; - const stub = sinon.stub(backend, 'readBytes').resolves(Buffer.from(yamlString, 'utf8')); - const result = await backend.readObject('some-key'); + const stub: sinon.SinonStub = sinon.stub(backend, 'readBytes').resolves(Buffer.from(yamlString, 'utf8')); + const result: object = await backend.readObject('some-key'); expect(result).to.deep.equal({foo: 'bar', num: 42}); stub.restore(); }); - it('should throw if readBytes returns empty buffer', async () => { + it('should throw if readBytes returns empty buffer', async (): Promise => { sinon.stub(backend, 'readBytes').resolves(Buffer.from('', 'utf8')); await expect(backend.readObject('empty-key')).to.be.rejectedWith('data is empty for key: empty-key'); }); - it('should throw if readBytes returns undefined', async () => { - sinon.stub(backend, 'readBytes').resolves(undefined as any); + it('should throw if readBytes returns undefined', async (): Promise => { + sinon.stub(backend, 'readBytes').resolves(undefined as never); await expect(backend.readObject('missing-key')).to.be.rejectedWith( 'failed to read key: missing-key from config map', ); 
}); - it('should throw on invalid YAML', async () => { + it('should throw on invalid YAML', async (): Promise => { sinon.stub(backend, 'readBytes').resolves(Buffer.from('not: [valid, yaml', 'utf8')); await expect(backend.readObject('bad-yaml')).to.be.rejectedWith('error parsing yaml from key: bad-yaml'); }); }); - describe('writeObject', () => { - it('should write YAML string to writeBytes', async () => { - const stub = sinon.stub(backend, 'writeBytes').resolves(); - const data = {foo: 'bar', num: 42}; + describe('writeObject', (): void => { + it('should write YAML string to writeBytes', async (): Promise => { + const stub: sinon.SinonStub = sinon.stub(backend, 'writeBytes').resolves(); + const data: object = {foo: 'bar', num: 42}; await backend.writeObject('some-key', data); expect(stub.calledOnce).to.be.true; - const written = stub.firstCall.args[1].toString('utf8'); + const written: string = stub.firstCall.args[1].toString('utf8'); expect(written).to.include('foo: bar'); expect(written).to.include('num: 42'); stub.restore(); }); - it('should throw if data is null or undefined', async () => { - await expect(backend.writeObject('some-key', undefined as any)).to.be.rejectedWith( + it('should throw if data is null or undefined', async (): Promise => { + await expect(backend.writeObject('some-key', undefined as never)).to.be.rejectedWith( 'data must not be null or undefined', ); }); - it('should throw if writeBytes throws', async () => { + it('should throw if writeBytes throws', async (): Promise => { sinon.stub(backend, 'writeBytes').rejects(new Error('fail')); await expect(backend.writeObject('some-key', {foo: 'bar'})).to.be.rejectedWith( 'error writing yaml for key: some-key to config map', diff --git a/test/unit/data/configuration/impl/remote-config-source.test.ts b/test/unit/data/configuration/impl/remote-config-source.test.ts index c7983b18f..9edd8ded5 100644 --- a/test/unit/data/configuration/impl/remote-config-source.test.ts +++ b/test/unit/data/configuration/impl/remote-config-source.test.ts @@ -3,43 +3,75 @@ import {expect} from 'chai'; import sinon from 'sinon'; import {RemoteConfigSource} from '../../../../../src/data/configuration/impl/remote-config-source.js'; +import {RemoteConfigSchema} from '../../../../../src/data/schema/migration/impl/remote/remote-config-schema.js'; +import {SimpleObjectStorageBackend} from '../../../fixtures/simple-object-storage-backend.fixture.js'; +import {type ObjectMapper} from '../../../../../src/data/mapper/api/object-mapper.js'; -describe('RemoteConfigSource', () => { - let schema: any; - let mapper: any; - let backend: any; +describe('RemoteConfigSource', (): void => { + let schema: RemoteConfigSchema; + let mapper: ObjectMapper; + let backend: SimpleObjectStorageBackend; + let source: RemoteConfigSource; + const map: Map = new Map(); + map.set('local-config', { + schemaVersion: 1, + deployments: [{name: 'true', namespace: 'false', clusters: ['true', {key: 'value'}, '{"key": "value"}']}], + }); + beforeEach((): void => { + mapper = {} as ObjectMapper; + schema = new RemoteConfigSchema(mapper); + backend = new SimpleObjectStorageBackend(map); + sinon.stub(backend, 'writeObject').resolves(); + source = new RemoteConfigSource(schema, mapper, backend); + mapper.applyPropertyValue = sinon.stub(); + }); - beforeEach(() => { - schema = {}; - mapper = {}; - backend = { - list: async () => [], - readBytes: async (_key: string) => Buffer.from([]), - writeBytes: async (_key: string, _data: Buffer) => {}, - readObject: async (_key: string) => ({}), - 
writeObject: async (_key: string, _data: object) => {}, - }; + it('should call backend.writeObject on persist', async (): Promise => { + await source.persist(); + expect((backend.writeObject as sinon.SinonStub).calledOnce).to.be.true; }); - it('should instantiate without error', () => { - expect(() => new RemoteConfigSource(schema, mapper, backend)).not.to.throw(); + it('should instantiate without error', (): void => { + expect((): RemoteConfigSource => new RemoteConfigSource(schema, mapper, backend)).not.to.throw(); }); - it('should have name "RemoteConfigSource"', () => { + it('should have name "RemoteConfigSource"', (): void => { const source: RemoteConfigSource = new RemoteConfigSource(schema, mapper, backend); expect(source.name).to.equal('RemoteConfigSource'); }); - it('should have ordinal 300', () => { + it('should have ordinal 300', (): void => { const source: RemoteConfigSource = new RemoteConfigSource(schema, mapper, backend); expect(source.ordinal).to.equal(300); }); - it('should call load() when refresh() is called', async () => { + it('should call load() when refresh() is called', async (): Promise => { const source: RemoteConfigSource = new RemoteConfigSource(schema, mapper, backend); - const loadStub = sinon.stub(source, 'load').resolves(); + const loadStub: sinon.SinonStub = sinon.stub(source, 'load').resolves(); await source.refresh(); expect(loadStub.calledOnce).to.be.true; loadStub.restore(); }); + + describe('edge/error cases', (): void => { + it('should throw if putObject called with missing key', (): void => { + expect((): void => source.putObject(undefined as never, {foo: 1})).to.throw('key must not be null or undefined'); + }); + + it('should throw if putObjectArray called with missing key', (): void => { + expect((): void => source.putObjectArray(undefined as never, [{foo: 1}])).to.throw( + 'key must not be null or undefined', + ); + }); + + it('should throw if putScalar called with missing key', (): void => { + expect((): void => source.putScalar(undefined as never, 'val')).to.throw('key must not be null or undefined'); + }); + + it('should throw if putScalarArray called with missing key', (): void => { + expect((): void => source.putScalarArray(undefined as never, ['a'])).to.throw( + 'key must not be null or undefined', + ); + }); + }); }); From ea3afcd6e9f2f7968418c87c1690db88f9ed142d Mon Sep 17 00:00:00 2001 From: Jeffrey Tang Date: Wed, 23 Apr 2025 11:00:34 -0500 Subject: [PATCH 25/27] fix remote config test yaml Signed-off-by: Jeffrey Tang --- test/data/v0-35-1-remote-config.yaml | 286 +++++++++--------- .../impl/remote-config-source.test.ts | 32 +- .../schema/model/remote/remote-config.test.ts | 54 ++-- 3 files changed, 185 insertions(+), 187 deletions(-) diff --git a/test/data/v0-35-1-remote-config.yaml b/test/data/v0-35-1-remote-config.yaml index 0bbcd17d7..5ffabe05b 100644 --- a/test/data/v0-35-1-remote-config.yaml +++ b/test/data/v0-35-1-remote-config.yaml @@ -10,147 +10,147 @@ metadata: hederaJsonRpcRelayChartVersion: v0.63.2 soloVersion: 0.34.0 version: 1.0.0 - clusters: - gke-alpha-prod-us-central1: - name: gke-alpha-prod-us-central1 +clusters: + gke-alpha-prod-us-central1: + name: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + deployment: alpha-prod + dnsBaseDomain: cluster.local + dnsConsensusNodePattern: network-${nodeAlias}-svc.${namespace}.svc +components: + relays: + relay: + consensusNodeAliases: + - node1 + name: relay + cluster: gke_hashsphere-production_us-central1_alpha-prod-us-central1 namespace: solo-alpha-prod - deployment: 
alpha-prod - dnsBaseDomain: cluster.local - dnsConsensusNodePattern: network-${nodeAlias}-svc.${namespace}.svc - components: - relays: - relay: - consensusNodeAliases: - - node1 - name: relay - cluster: gke_hashsphere-production_us-central1_alpha-prod-us-central1 - namespace: solo-alpha-prod - haProxies: - haproxy-node1: - name: haproxy-node1 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - haproxy-node2: - name: haproxy-node2 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - haproxy-node3: - name: haproxy-node3 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - haproxy-node4: - name: haproxy-node4 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - mirrorNodes: - mirrorNode: - name: mirrorNode - cluster: gke_hashsphere-production_us-central1_alpha-prod-us-central1 - namespace: solo-alpha-prod - envoyProxies: - envoy-proxy-node1: - name: envoy-proxy-node1 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - envoy-proxy-node2: - name: envoy-proxy-node2 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - envoy-proxy-node3: - name: envoy-proxy-node3 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - envoy-proxy-node4: - name: envoy-proxy-node4 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - consensusNodes: - node1: - name: node1 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - state: started - nodeId: 0 - node2: - name: node2 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - state: started - nodeId: 1 - node3: - name: node3 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - state: started - nodeId: 2 - node4: - name: node4 - cluster: gke-alpha-prod-us-central1 - namespace: solo-alpha-prod - state: started - nodeId: 3 - mirrorNodeExplorers: - mirrorNodeExplorer: - name: mirrorNodeExplorer - cluster: gke_hashsphere-production_us-central1_alpha-prod-us-central1 - namespace: solo-alpha-prod - commandHistory: - - deployment create - - "Executed by nathan@swirldslabs.com: network deploy --deployment alpha-prod - --values-file - /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ - roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/init-c\ - ontainers-values.yaml --settings-txt - /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ - roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/settin\ - gs.txt --log4j2-xml - /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ - roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/log4j2\ - .xml --application-properties - /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ - roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/applic\ - ation.properties --genesis-throttles-file - /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ - roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/thrott\ - les.json --pvcs --storage-type gcs_only --gcs-endpoint *** --gcs-bucket *** - --gcs-access-key *** --gcs-secrets *** --backup-bucket *** - --google-credential *** --load-balancer --quiet-mode --dev" - - "Executed by nathan@swirldslabs.com: node setup --deployment alpha-prod - --node-aliases node1,node2,node3,node4 --quiet-mode --dev" - - "Executed by nathan@swirldslabs.com: node start --deployment alpha-prod - --node-aliases node1,node2,node3,node4 --quiet-mode 
--dev" - - "Executed by nathan@swirldslabs.com: account init --dev --deployment - alpha-prod" - - "Executed by nathan@swirldslabs.com: mirror-node deploy --deployment - alpha-prod --operator-id 0.0.2 --operator-key *** --storage-type gcs_only - --storage-endpoint *** --storage-bucket *** --storage-access-key *** - --storage-secrets *** --values-file - /var/folders/pr/l0t3xfzx1cscm1kmy63mq24m0000gp/T/tmp.gSbIiYI8ip.yaml - --pinger --use-external-database --external-database-host 10.0.6.2 - --external-database-owner-username mirror_provisioner - --external-database-owner-password *** --external-database-read-username - mirror_node --external-database-read-password *** --quiet-mode --dev" - - "Executed by nathan@swirldslabs.com: explorer deploy --namespace - solo-alpha-prod --deployment alpha-prod --values-file - /var/folders/pr/l0t3xfzx1cscm1kmy63mq24m0000gp/T/tmp.gSbIiYI8ip.yaml - --quiet-mode --dev" - - "Executed by nathan@swirldslabs.com: explorer deploy --namespace - solo-alpha-prod --deployment alpha-prod --values-file - /var/folders/pr/l0t3xfzx1cscm1kmy63mq24m0000gp/T/tmp.gSbIiYI8ip.yaml - --quiet-mode --dev" - - "Executed by nathan@swirldslabs.com: relay deploy --deployment alpha-prod - --node-aliases node1 --values-file - /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ - roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/relay-\ - values.yaml --operator-id 0.0.2 --operator-key *** --quiet-mode --dev" - lastExecutedCommand: "Executed by nathan@swirldslabs.com: relay deploy - --deployment alpha-prod --node-aliases node1 --values-file - /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/pro\ - duction/google/hashsphere-production/sphere-alpha-prod/assets/solo/relay-valu\ - es.yaml --operator-id 0.0.2 --operator-key *** --quiet-mode --dev" - flags: - nodeAliasesUnparsed: node1,node2,node3,node4 - releaseTag: v0.58.10 - relayReleaseTag: v0.63.2 - hederaExplorerVersion: 24.12.0 - mirrorNodeVersion: v0.122 + haProxies: + haproxy-node1: + name: haproxy-node1 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + haproxy-node2: + name: haproxy-node2 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + haproxy-node3: + name: haproxy-node3 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + haproxy-node4: + name: haproxy-node4 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + mirrorNodes: + mirrorNode: + name: mirrorNode + cluster: gke_hashsphere-production_us-central1_alpha-prod-us-central1 + namespace: solo-alpha-prod + envoyProxies: + envoy-proxy-node1: + name: envoy-proxy-node1 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + envoy-proxy-node2: + name: envoy-proxy-node2 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + envoy-proxy-node3: + name: envoy-proxy-node3 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + envoy-proxy-node4: + name: envoy-proxy-node4 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + consensusNodes: + node1: + name: node1 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + state: started + nodeId: 0 + node2: + name: node2 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + state: started + nodeId: 1 + node3: + name: node3 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + state: started + nodeId: 2 + node4: + name: node4 + cluster: gke-alpha-prod-us-central1 + namespace: solo-alpha-prod + state: started + 
nodeId: 3 + mirrorNodeExplorers: + mirrorNodeExplorer: + name: mirrorNodeExplorer + cluster: gke_hashsphere-production_us-central1_alpha-prod-us-central1 + namespace: solo-alpha-prod +commandHistory: + - deployment create + - "Executed by nathan@swirldslabs.com: network deploy --deployment alpha-prod + --values-file + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/init-c\ + ontainers-values.yaml --settings-txt + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/settin\ + gs.txt --log4j2-xml + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/log4j2\ + .xml --application-properties + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/applic\ + ation.properties --genesis-throttles-file + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/thrott\ + les.json --pvcs --storage-type gcs_only --gcs-endpoint *** --gcs-bucket *** + --gcs-access-key *** --gcs-secrets *** --backup-bucket *** + --google-credential *** --load-balancer --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: node setup --deployment alpha-prod + --node-aliases node1,node2,node3,node4 --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: node start --deployment alpha-prod + --node-aliases node1,node2,node3,node4 --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: account init --dev --deployment + alpha-prod" + - "Executed by nathan@swirldslabs.com: mirror-node deploy --deployment + alpha-prod --operator-id 0.0.2 --operator-key *** --storage-type gcs_only + --storage-endpoint *** --storage-bucket *** --storage-access-key *** + --storage-secrets *** --values-file + /var/folders/pr/l0t3xfzx1cscm1kmy63mq24m0000gp/T/tmp.gSbIiYI8ip.yaml + --pinger --use-external-database --external-database-host 10.0.6.2 + --external-database-owner-username mirror_provisioner + --external-database-owner-password *** --external-database-read-username + mirror_node --external-database-read-password *** --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: explorer deploy --namespace + solo-alpha-prod --deployment alpha-prod --values-file + /var/folders/pr/l0t3xfzx1cscm1kmy63mq24m0000gp/T/tmp.gSbIiYI8ip.yaml + --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: explorer deploy --namespace + solo-alpha-prod --deployment alpha-prod --values-file + /var/folders/pr/l0t3xfzx1cscm1kmy63mq24m0000gp/T/tmp.gSbIiYI8ip.yaml + --quiet-mode --dev" + - "Executed by nathan@swirldslabs.com: relay deploy --deployment alpha-prod + --node-aliases node1 --values-file + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/p\ + roduction/google/hashsphere-production/sphere-alpha-prod/assets/solo/relay-\ + values.yaml --operator-id 0.0.2 --operator-key *** --quiet-mode --dev" +lastExecutedCommand: "Executed by nathan@swirldslabs.com: relay deploy + --deployment alpha-prod --node-aliases node1 --values-file + /Users/nathan.klick/Git/swirldslabs/hashsphere-infrastructure/deployments/pro\ + duction/google/hashsphere-production/sphere-alpha-prod/assets/solo/relay-valu\ + es.yaml --operator-id 0.0.2 --operator-key *** --quiet-mode --dev" 
+flags: + nodeAliasesUnparsed: node1,node2,node3,node4 + releaseTag: v0.58.10 + relayReleaseTag: v0.63.2 + hederaExplorerVersion: 24.12.0 + mirrorNodeVersion: v0.122 diff --git a/test/unit/data/configuration/impl/remote-config-source.test.ts b/test/unit/data/configuration/impl/remote-config-source.test.ts index 9edd8ded5..8761507d6 100644 --- a/test/unit/data/configuration/impl/remote-config-source.test.ts +++ b/test/unit/data/configuration/impl/remote-config-source.test.ts @@ -53,25 +53,23 @@ describe('RemoteConfigSource', (): void => { loadStub.restore(); }); - describe('edge/error cases', (): void => { - it('should throw if putObject called with missing key', (): void => { - expect((): void => source.putObject(undefined as never, {foo: 1})).to.throw('key must not be null or undefined'); - }); + it('should throw if putObject called with missing key', (): void => { + expect((): void => source.putObject(undefined as never, {foo: 1})).to.throw('key must not be null or undefined'); + }); - it('should throw if putObjectArray called with missing key', (): void => { - expect((): void => source.putObjectArray(undefined as never, [{foo: 1}])).to.throw( - 'key must not be null or undefined', - ); - }); + it('should throw if putObjectArray called with missing key', (): void => { + expect((): void => source.putObjectArray(undefined as never, [{foo: 1}])).to.throw( + 'key must not be null or undefined', + ); + }); - it('should throw if putScalar called with missing key', (): void => { - expect((): void => source.putScalar(undefined as never, 'val')).to.throw('key must not be null or undefined'); - }); + it('should throw if putScalar called with missing key', (): void => { + expect((): void => source.putScalar(undefined as never, 'val')).to.throw('key must not be null or undefined'); + }); - it('should throw if putScalarArray called with missing key', (): void => { - expect((): void => source.putScalarArray(undefined as never, ['a'])).to.throw( - 'key must not be null or undefined', - ); - }); + it('should throw if putScalarArray called with missing key', (): void => { + expect((): void => source.putScalarArray(undefined as never, ['a'])).to.throw( + 'key must not be null or undefined', + ); }); }); diff --git a/test/unit/data/schema/model/remote/remote-config.test.ts b/test/unit/data/schema/model/remote/remote-config.test.ts index 3e73e2dda..328a0b16a 100644 --- a/test/unit/data/schema/model/remote/remote-config.test.ts +++ b/test/unit/data/schema/model/remote/remote-config.test.ts @@ -13,14 +13,14 @@ type MigrationCandidate = any; function migrateVersionPrefix(version: string): string { const strippedVersionPrefix: string = version.replace(/^v/, ''); - const parts = strippedVersionPrefix.split('.').map(Number); // Split and convert to numbers + const parts: number[] = strippedVersionPrefix.split('.').map(Number); // Split and convert to numbers while (parts.length < 3) { parts.push(0); // Add missing minor/patch as 0 } return parts.join('.'); } -function migrateVersions(plainObject: MigrationCandidate) { +function migrateVersions(plainObject: MigrationCandidate): void { plainObject.versions = {}; plainObject.versions.cli = migrateVersionPrefix(plainObject.metadata?.soloVersion || '0.0.0'); plainObject.versions.chart = migrateVersionPrefix(plainObject.metadata?.soloChartVersion || '0.0.0'); @@ -40,7 +40,7 @@ function migrateVersions(plainObject: MigrationCandidate) { plainObject.versions.blockNodeChart = 'v0.0.0'; } -function migrateClusters(plainObject: MigrationCandidate) { +function 
migrateClusters(plainObject: MigrationCandidate): void { const clusters: object = plainObject.clusters; const clustersArray: object[] = []; for (const key in clusters) { @@ -51,7 +51,7 @@ function migrateClusters(plainObject: MigrationCandidate) { plainObject.clusters = clustersArray; } -function migrateHistory(plainObject: MigrationCandidate) { +function migrateHistory(plainObject: MigrationCandidate): void { plainObject.history = {}; plainObject.history.commands = []; for (const historyItem of plainObject.commandHistory) { @@ -59,7 +59,7 @@ function migrateHistory(plainObject: MigrationCandidate) { } } -function migrateConsensusNodes(plainObject: MigrationCandidate) { +function migrateConsensusNodes(plainObject: MigrationCandidate): void { plainObject.state.consensusNodes = []; for (const plainConsensusNodeKey of Object.keys(plainObject.components?.consensusNodes)) { const oldConsensusNode = plainObject.components.consensusNodes[plainConsensusNodeKey]; @@ -101,27 +101,27 @@ function migrateConsensusNodes(plainObject: MigrationCandidate) { } } -function migrateHaProxies(plainObject: MigrationCandidate) { +function migrateHaProxies(plainObject: MigrationCandidate): void { plainObject.state.haProxies = []; } -function migrateEnvoyProxies(plainObject: MigrationCandidate) { +function migrateEnvoyProxies(plainObject: MigrationCandidate): void { plainObject.state.envoyProxies = []; } -function migrateMirrorNodes(plainObject: MigrationCandidate) { +function migrateMirrorNodes(plainObject: MigrationCandidate): void { plainObject.state.mirrorNodes = []; } -function migrateExplorers(plainObject: MigrationCandidate) { +function migrateExplorers(plainObject: MigrationCandidate): void { plainObject.state.explorers = []; } -function migrateJsonRpcRelays(plainObject: MigrationCandidate) { +function migrateJsonRpcRelays(plainObject: MigrationCandidate): void { plainObject.state.relayNodes = []; } -function migrateState(plainObject: MigrationCandidate) { +function migrateState(plainObject: MigrationCandidate): void { plainObject.state = {}; plainObject.state.ledgerPhase = LedgerPhase.UNINITIALIZED; migrateConsensusNodes(plainObject); @@ -147,14 +147,14 @@ function migrate(plainObject: MigrationCandidate): void { migrateState(plainObject); } -describe('RemoteConfig', () => { - const remoteConfigPath = 'test/data/v0-35-1-remote-config.yaml'; +describe('RemoteConfig', (): void => { + const remoteConfigPath: string = 'test/data/v0-35-1-remote-config.yaml'; - describe('Class Transformer', () => { + describe('Class Transformer', (): void => { let yamlData: string; let plainObject: MigrationCandidate; - beforeEach(() => { + beforeEach((): void => { yamlData = readFileSync(remoteConfigPath, 'utf8'); expect(yamlData).to.not.be.undefined.and.to.not.be.null; @@ -164,12 +164,12 @@ describe('RemoteConfig', () => { migrate(plainObject); }); - it('should transform plain to class', async () => { + it('should transform plain to class', async (): Promise => { const rc: RemoteConfig = plainToInstance(RemoteConfig, plainObject); expect(rc).to.not.be.undefined.and.to.not.be.null; - expect(rc.history.commands.length).to.be.equal(1); + expect(rc.history.commands.length).to.be.equal(9); expect(rc.versions.cli.version).to.equal('0.34.0'); - expect(rc.versions.chart.version).to.equal('0.0.0'); + expect(rc.versions.chart.version).to.equal('0.44.0'); expect(rc.versions.consensusNode.version).to.equal('0.58.10'); expect(rc.versions.mirrorNodeChart.version).to.equal('0.122.0'); 
expect(rc.versions.explorerChart.version).to.equal('24.12.0'); @@ -180,17 +180,17 @@ describe('RemoteConfig', () => { expect(rc.state.consensusNodes[0].name).to.be.equal('node1'); expect(rc.state.consensusNodes[0].namespace).to.be.equal('solo-alpha-prod'); expect(rc.state.consensusNodes[0].cluster).to.be.equal('gke-alpha-prod-us-central1'); - expect(rc.state.consensusNodes[0].phase).to.be.equal(DeploymentPhase.REQUESTED); + expect(rc.state.consensusNodes[0].phase).to.be.equal(DeploymentPhase.STARTED); expect(rc.state.ledgerPhase).to.be.equal(LedgerPhase.UNINITIALIZED); }); - it('should transform class to plain', async () => { + it('should transform class to plain', async (): Promise => { const rc: RemoteConfig = plainToInstance(RemoteConfig, plainObject); const plainRemoteConfigObject = instanceToPlain(rc); expect(plainRemoteConfigObject).to.not.be.undefined.and.to.not.be.null; - expect(plainRemoteConfigObject.history.commands.length).to.be.equal(1); + expect(plainRemoteConfigObject.history.commands.length).to.be.equal(9); expect(plainRemoteConfigObject.versions.cli).to.equal('0.34.0'); - expect(plainRemoteConfigObject.versions.chart).to.equal('0.0.0'); + expect(plainRemoteConfigObject.versions.chart).to.equal('0.44.0'); expect(plainRemoteConfigObject.versions.consensusNode).to.equal('0.58.10'); expect(plainRemoteConfigObject.versions.mirrorNodeChart).to.equal('0.122.0'); expect(plainRemoteConfigObject.versions.explorerChart).to.equal('24.12.0'); @@ -201,18 +201,18 @@ describe('RemoteConfig', () => { expect(plainRemoteConfigObject.state.consensusNodes[0].name).to.be.equal('node1'); expect(plainRemoteConfigObject.state.consensusNodes[0].namespace).to.be.equal('solo-alpha-prod'); expect(plainRemoteConfigObject.state.consensusNodes[0].cluster).to.be.equal('gke-alpha-prod-us-central1'); - expect(plainRemoteConfigObject.state.consensusNodes[0].phase).to.be.equal(DeploymentPhase.REQUESTED); + expect(plainRemoteConfigObject.state.consensusNodes[0].phase).to.be.equal(DeploymentPhase.STARTED); expect(plainRemoteConfigObject.state.ledgerPhase).to.be.equal(LedgerPhase.UNINITIALIZED); }); - it('should be able to go from a class to an object back to a class', async () => { + it('should be able to go from a class to an object back to a class', async (): Promise => { const rc: RemoteConfig = plainToInstance(RemoteConfig, plainObject); const plainRemoteConfigObject = instanceToPlain(rc); const rc2: RemoteConfig = plainToInstance(RemoteConfig, plainRemoteConfigObject); expect(rc2).to.not.be.undefined.and.to.not.be.null; - expect(rc2.history.commands.length).to.be.equal(1); + expect(rc2.history.commands.length).to.be.equal(9); expect(rc2.versions.cli.version).to.equal('0.34.0'); - expect(rc2.versions.chart.version).to.equal('0.0.0'); + expect(rc2.versions.chart.version).to.equal('0.44.0'); expect(rc2.versions.consensusNode.version).to.equal('0.58.10'); expect(rc2.versions.mirrorNodeChart.version).to.equal('0.122.0'); expect(rc2.versions.explorerChart.version).to.equal('24.12.0'); @@ -223,7 +223,7 @@ describe('RemoteConfig', () => { expect(rc2.state.consensusNodes[0].name).to.be.equal('node1'); expect(rc2.state.consensusNodes[0].namespace).to.be.equal('solo-alpha-prod'); expect(rc2.state.consensusNodes[0].cluster).to.be.equal('gke-alpha-prod-us-central1'); - expect(rc2.state.consensusNodes[0].phase).to.be.equal(DeploymentPhase.REQUESTED); + expect(rc2.state.consensusNodes[0].phase).to.be.equal(DeploymentPhase.STARTED); expect(rc2.state.ledgerPhase).to.be.equal(LedgerPhase.UNINITIALIZED); }); }); From 
f10f6cafec5258f599d75cc7fb537e9e8dd7474a Mon Sep 17 00:00:00 2001 From: Jeffrey Tang Date: Wed, 23 Apr 2025 11:13:39 -0500 Subject: [PATCH 26/27] take out common expect as functions Signed-off-by: Jeffrey Tang --- .../impl/remote-config-source.test.ts | 4 +- .../schema/model/remote/remote-config.test.ts | 61 ++++++++----------- 2 files changed, 28 insertions(+), 37 deletions(-) diff --git a/test/unit/data/configuration/impl/remote-config-source.test.ts b/test/unit/data/configuration/impl/remote-config-source.test.ts index 8761507d6..285bcab76 100644 --- a/test/unit/data/configuration/impl/remote-config-source.test.ts +++ b/test/unit/data/configuration/impl/remote-config-source.test.ts @@ -68,8 +68,6 @@ describe('RemoteConfigSource', (): void => { }); it('should throw if putScalarArray called with missing key', (): void => { - expect((): void => source.putScalarArray(undefined as never, ['a'])).to.throw( - 'key must not be null or undefined', - ); + expect((): void => source.putScalarArray(undefined as never, ['a'])).to.throw('key must not be null or undefined'); }); }); diff --git a/test/unit/data/schema/model/remote/remote-config.test.ts b/test/unit/data/schema/model/remote/remote-config.test.ts index 328a0b16a..d4d45bd80 100644 --- a/test/unit/data/schema/model/remote/remote-config.test.ts +++ b/test/unit/data/schema/model/remote/remote-config.test.ts @@ -164,8 +164,7 @@ describe('RemoteConfig', (): void => { migrate(plainObject); }); - it('should transform plain to class', async (): Promise => { - const rc: RemoteConfig = plainToInstance(RemoteConfig, plainObject); + function expectRemoteConfigClass(rc: RemoteConfig) { expect(rc).to.not.be.undefined.and.to.not.be.null; expect(rc.history.commands.length).to.be.equal(9); expect(rc.versions.cli.version).to.equal('0.34.0'); @@ -182,49 +181,43 @@ describe('RemoteConfig', (): void => { expect(rc.state.consensusNodes[0].cluster).to.be.equal('gke-alpha-prod-us-central1'); expect(rc.state.consensusNodes[0].phase).to.be.equal(DeploymentPhase.STARTED); expect(rc.state.ledgerPhase).to.be.equal(LedgerPhase.UNINITIALIZED); + } + + function expectRemoteConfigPlain(object: any) { + expect(object).to.not.be.undefined.and.to.not.be.null; + expect(object.history.commands.length).to.be.equal(9); + expect(object.versions.cli).to.equal('0.34.0'); + expect(object.versions.chart).to.equal('0.44.0'); + expect(object.versions.consensusNode).to.equal('0.58.10'); + expect(object.versions.mirrorNodeChart).to.equal('0.122.0'); + expect(object.versions.explorerChart).to.equal('24.12.0'); + expect(object.versions.jsonRpcRelayChart).to.equal('0.63.2'); + expect(object.clusters.length).to.be.equal(1); + expect(object.state.consensusNodes.length).to.be.equal(4); + expect(object.state.consensusNodes[0].id).to.be.equal(0); + expect(object.state.consensusNodes[0].name).to.be.equal('node1'); + expect(object.state.consensusNodes[0].namespace).to.be.equal('solo-alpha-prod'); + expect(object.state.consensusNodes[0].cluster).to.be.equal('gke-alpha-prod-us-central1'); + expect(object.state.consensusNodes[0].phase).to.be.equal(DeploymentPhase.STARTED); + expect(object.state.ledgerPhase).to.be.equal(LedgerPhase.UNINITIALIZED); + } + + it('should transform plain to class', async (): Promise => { + const rc: RemoteConfig = plainToInstance(RemoteConfig, plainObject); + expectRemoteConfigClass(rc); }); it('should transform class to plain', async (): Promise => { const rc: RemoteConfig = plainToInstance(RemoteConfig, plainObject); const plainRemoteConfigObject = instanceToPlain(rc); - 
expect(plainRemoteConfigObject).to.not.be.undefined.and.to.not.be.null; - expect(plainRemoteConfigObject.history.commands.length).to.be.equal(9); - expect(plainRemoteConfigObject.versions.cli).to.equal('0.34.0'); - expect(plainRemoteConfigObject.versions.chart).to.equal('0.44.0'); - expect(plainRemoteConfigObject.versions.consensusNode).to.equal('0.58.10'); - expect(plainRemoteConfigObject.versions.mirrorNodeChart).to.equal('0.122.0'); - expect(plainRemoteConfigObject.versions.explorerChart).to.equal('24.12.0'); - expect(plainRemoteConfigObject.versions.jsonRpcRelayChart).to.equal('0.63.2'); - expect(plainRemoteConfigObject.clusters.length).to.be.equal(1); - expect(plainRemoteConfigObject.state.consensusNodes.length).to.be.equal(4); - expect(plainRemoteConfigObject.state.consensusNodes[0].id).to.be.equal(0); - expect(plainRemoteConfigObject.state.consensusNodes[0].name).to.be.equal('node1'); - expect(plainRemoteConfigObject.state.consensusNodes[0].namespace).to.be.equal('solo-alpha-prod'); - expect(plainRemoteConfigObject.state.consensusNodes[0].cluster).to.be.equal('gke-alpha-prod-us-central1'); - expect(plainRemoteConfigObject.state.consensusNodes[0].phase).to.be.equal(DeploymentPhase.STARTED); - expect(plainRemoteConfigObject.state.ledgerPhase).to.be.equal(LedgerPhase.UNINITIALIZED); + expectRemoteConfigPlain(plainRemoteConfigObject); }); it('should be able to go from a class to an object back to a class', async (): Promise => { const rc: RemoteConfig = plainToInstance(RemoteConfig, plainObject); const plainRemoteConfigObject = instanceToPlain(rc); const rc2: RemoteConfig = plainToInstance(RemoteConfig, plainRemoteConfigObject); - expect(rc2).to.not.be.undefined.and.to.not.be.null; - expect(rc2.history.commands.length).to.be.equal(9); - expect(rc2.versions.cli.version).to.equal('0.34.0'); - expect(rc2.versions.chart.version).to.equal('0.44.0'); - expect(rc2.versions.consensusNode.version).to.equal('0.58.10'); - expect(rc2.versions.mirrorNodeChart.version).to.equal('0.122.0'); - expect(rc2.versions.explorerChart.version).to.equal('24.12.0'); - expect(rc2.versions.jsonRpcRelayChart.version).to.equal('0.63.2'); - expect(rc2.clusters.length).to.be.equal(1); - expect(rc2.state.consensusNodes.length).to.be.equal(4); - expect(rc2.state.consensusNodes[0].id).to.be.equal(0); - expect(rc2.state.consensusNodes[0].name).to.be.equal('node1'); - expect(rc2.state.consensusNodes[0].namespace).to.be.equal('solo-alpha-prod'); - expect(rc2.state.consensusNodes[0].cluster).to.be.equal('gke-alpha-prod-us-central1'); - expect(rc2.state.consensusNodes[0].phase).to.be.equal(DeploymentPhase.STARTED); - expect(rc2.state.ledgerPhase).to.be.equal(LedgerPhase.UNINITIALIZED); + expectRemoteConfigClass(rc2); }); }); }); From 72b085388b27556b0a2893457fad6426969d231c Mon Sep 17 00:00:00 2001 From: Zhan Milenkov Date: Wed, 30 Apr 2025 15:28:44 +0300 Subject: [PATCH 27/27] task format Signed-off-by: Zhan Milenkov --- docs/site/content/User/StepByStepGuide.md | 117 +++++++++++++--------- 1 file changed, 69 insertions(+), 48 deletions(-) diff --git a/docs/site/content/User/StepByStepGuide.md b/docs/site/content/User/StepByStepGuide.md index fce7855b7..778683cb1 100644 --- a/docs/site/content/User/StepByStepGuide.md +++ b/docs/site/content/User/StepByStepGuide.md @@ -1,32 +1,34 @@ ## Advanced User Guide ## Table of Contents -- [Setup Kubernetes cluster](#setup-kubernetes-cluster) - - [Remote cluster](#remote-cluster) - - [Local cluster](#local-cluster) -- [Step by Step Instructions](#step-by-step-instructions) - - 
[Initialize solo directories](#initialize-solo-directories) - - [Generate pem formatted node keys](#generate-pem-formatted-node-keys) - - [Create a deployment in the specified clusters](#create-a-deployment-in-the-specified-clusters-generate-remoteconfig-and-localconfig-objects) - - [Setup cluster with shared components](#setup-cluster-with-shared-components) - - [Create a solo deployment](#create-a-solo-deployment) - - [Deploy helm chart with Hedera network components](#deploy-helm-chart-with-hedera-network-components) - - [Setup node with Hedera platform software](#setup-node-with-hedera-platform-software) - - [Deploy mirror node](#deploy-mirror-node) - - [Deploy explorer mode](#deploy-explorer-mode) - - [Deploy a JSON RPC relay](#deploy-a-json-rpc-relay) - - [Access Hedera Services](#access-hedera-services) - - [Destroy relay node](#destroy-relay-node) - - [Destroy mirror node](#destroy-mirror-node) - - [Destroy explorer node](#destroy-explorer-node) - - [Destroy network](#destroy-network) + +* [Setup Kubernetes cluster](#setup-kubernetes-cluster) + * [Remote cluster](#remote-cluster) + * [Local cluster](#local-cluster) +* [Step by Step Instructions](#step-by-step-instructions) + * [Initialize solo directories](#initialize-solo-directories) + * [Generate pem formatted node keys](#generate-pem-formatted-node-keys) + * [Create a deployment in the specified clusters](#create-a-deployment-in-the-specified-clusters-generate-remoteconfig-and-localconfig-objects) + * [Setup cluster with shared components](#setup-cluster-with-shared-components) + * [Create a solo deployment](#create-a-solo-deployment) + * [Deploy helm chart with Hedera network components](#deploy-helm-chart-with-hedera-network-components) + * [Setup node with Hedera platform software](#setup-node-with-hedera-platform-software) + * [Deploy mirror node](#deploy-mirror-node) + * [Deploy explorer mode](#deploy-explorer-mode) + * [Deploy a JSON RPC relay](#deploy-a-json-rpc-relay) + * [Access Hedera Services](#access-hedera-services) + * [Destroy relay node](#destroy-relay-node) + * [Destroy mirror node](#destroy-mirror-node) + * [Destroy explorer node](#destroy-explorer-node) + * [Destroy network](#destroy-network) For those who would like to have more control or need some customized setups, here are some step by step instructions of how to setup and deploy a solo network. + ### Setup Kubernetes cluster #### Remote cluster -- You may use remote kubernetes cluster. In this case, ensure kubernetes context is set up correctly. +* You may use remote kubernetes cluster. In this case, ensure kubernetes context is set up correctly. ``` kubectl config use-context @@ -34,7 +36,7 @@ kubectl config use-context #### Local cluster -- You may use [kind](https://kind.sigs.k8s.io/) or [microk8s](https://microk8s.io/) to create a cluster. In this case, +* You may use [kind](https://kind.sigs.k8s.io/) or [microk8s](https://microk8s.io/) to create a cluster. In this case, ensure your Docker engine has enough resources (e.g. Memory >=8Gb, CPU: >=4). Below we show how you can use `kind` to create a cluster First, use the following command to set up the environment variables: @@ -52,6 +54,7 @@ Then run the following command to set the kubectl context to the new cluster: ```bash kind create cluster -n "${SOLO_CLUSTER_NAME}" ``` + Example output ``` @@ -78,7 +81,6 @@ Not sure what to do next? 
😅 Check out https://kind.sigs.k8s.io/docs/user/qui You may now view pods in your cluster using `k9s -A` as below: - ``` Context: kind-solo <0> all Attach Delete | |/ _/ __ \______ @@ -105,7 +107,6 @@ You may now view pods in your cluster using `k9s -A` as below: └─────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘ ``` - ### Step by Step Instructions #### Initialize `solo` directories: @@ -117,7 +118,7 @@ rm -rf ~/.solo solo init ``` -- Example output +* Example output ``` @@ -150,12 +151,14 @@ If a full reset is needed, delete the directory or relevant sub-directories befo ``` #### Create a deployment in the specified clusters, generate RemoteConfig and LocalConfig objects. -- Associates a cluster reference to a k8s context + +* Associates a cluster reference to a k8s context + ``` solo cluster-ref connect --cluster-ref kind-${SOLO_CLUSTER_SETUP_NAMESPACE} --context kind-${SOLO_CLUSTER_NAME} --email "${SOLO_EMAIL}" ``` -- Example output +* Example output ``` @@ -174,12 +177,14 @@ Current Command : cluster-ref connect --cluster-ref kind-solo-e2e --context kin ❯ Associate a context with a cluster reference: ✔ Associate a context with a cluster reference: kind-solo-e2e ``` -- Create a deployment + +* Create a deployment + ``` solo deployment create -n "${SOLO_NAMESPACE}" --deployment "${SOLO_DEPLOYMENT}" ``` -- Example output +* Example output ``` @@ -195,12 +200,14 @@ Kubernetes Namespace : solo ❯ Add deployment to local config ✔ Adding deployment: solo-deployment with namespace: solo to local config ``` -- Add a cluster to deployment + +* Add a cluster to deployment + ``` solo deployment add-cluster --deployment "${SOLO_DEPLOYMENT}" --cluster-ref kind-${SOLO_CLUSTER_SETUP_NAMESPACE} --num-consensus-nodes 3 ``` -- Example output +* Example output ``` @@ -232,7 +239,7 @@ Current Command : deployment add-cluster --deployment solo-deployment --cluster solo node keys --gossip-keys --tls-keys -i node1,node2,node3 --deployment "${SOLO_DEPLOYMENT}" ``` -- Example output +* Example output ``` @@ -267,20 +274,23 @@ Current Command : node keys --gossip-keys --tls-keys --node-aliases node1,node2 ❯ Finalize ✔ Finalize ``` + PEM key files are generated in `~/.solo/cache/keys` directory. + ``` hedera-node1.crt hedera-node3.crt s-private-node1.pem s-public-node1.pem unused-gossip-pem hedera-node1.key hedera-node3.key s-private-node2.pem s-public-node2.pem unused-tls hedera-node2.crt hedera-node4.crt s-private-node3.pem s-public-node3.pem hedera-node2.key hedera-node4.key s-private-node4.pem s-public-node4.pem ``` + #### Setup cluster with shared components ``` solo cluster-ref setup -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" ``` -- Example output +* Example output ``` @@ -303,7 +313,6 @@ Version : 0.50.0 In a separate terminal, you may run `k9s` to view the pod status. - #### Deploy helm chart with Hedera network components It may take a while (5~15 minutes depending on your internet speed) to download various docker images and get the pods started. @@ -314,7 +323,7 @@ If it fails, ensure you have enough resources allocated for Docker engine and re solo network deploy -i node1,node2,node3 --deployment "${SOLO_DEPLOYMENT}" ``` -- Example output +* Example output ``` @@ -394,13 +403,14 @@ Version : 0.50.0 ``` #### Setup node with Hedera platform software. 
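While the setup step below runs, you can optionally confirm that the consensus node pods created by the earlier `network deploy` step are present. This is a minimal check, assuming the `SOLO_NAMESPACE` variable set during the environment setup earlier in this guide:

```
# Optional: list the pods in the deployment namespace while node setup runs
kubectl get pods -n "${SOLO_NAMESPACE}"
```
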
-- It may take a while as it download the hedera platform code from + +* It may take a while as it download the hedera platform code from ``` solo node setup -i node1,node2,node3 --deployment "${SOLO_DEPLOYMENT}" ``` -- Example output +* Example output ``` @@ -462,13 +472,13 @@ Current Command : node setup --node-aliases node1,node2,node3 --deployment solo ✔ Change node state to setup in remote config ``` -- Start the nodes +* Start the nodes ``` solo node start -i node1,node2,node3 --deployment "${SOLO_DEPLOYMENT}" ``` -- Example output +* Example output ``` @@ -538,15 +548,15 @@ Current Command : node start --node-aliases node1,node2,node3 --deployment solo ✔ Add node stakes ``` ---- - +*** #### Deploy mirror node ``` solo mirror-node deploy --deployment "${SOLO_DEPLOYMENT}" --cluster-ref kind-${SOLO_CLUSTER_SETUP_NAMESPACE} ``` -- Example output + +* Example output ``` @@ -596,7 +606,8 @@ Version : v0.126.0 ``` solo explorer deploy --deployment "${SOLO_DEPLOYMENT}" --cluster-ref kind-${SOLO_CLUSTER_SETUP_NAMESPACE} ``` -- Example output + +* Example output ``` @@ -635,7 +646,7 @@ Version : 24.12.1 solo relay deploy -i node1,node2,node3 --deployment "${SOLO_DEPLOYMENT}" ``` -- Example output +* Example output ``` @@ -667,14 +678,16 @@ Version : v0.67.0 ``` #### Access Hedera Services + Next: [Access Hedera Services](https://solo.hiero.org/User/AccessHederaServices/) #### Destroy relay node + ``` solo relay destroy --deployment "${SOLO_DEPLOYMENT}" ``` -- Example output +* Example output ``` @@ -703,10 +716,13 @@ Current Command : relay destroy --node-aliases node1,node2,node3 --deployment s ``` #### Destroy mirror node + ``` solo mirror-node destroy --deployment "${SOLO_DEPLOYMENT}" ``` -- Example output + +* Example output + ``` ******************************* Solo ********************************************* @@ -728,12 +744,15 @@ Current Command : mirror-node destroy --deployment solo-deployment --quiet-mode ❯ Remove mirror node from remote config ✔ Remove mirror node from remote config ``` + #### Destroy explorer node + ``` solo explorer destroy --deployment "${SOLO_DEPLOYMENT}" ``` -- Example output +* Example output + ``` ******************************* Solo ********************************************* @@ -757,10 +776,13 @@ Current Command : explorer destroy --deployment solo-deployment --quiet-mode ``` #### Destroy network + ``` solo network destroy --deployment "${SOLO_DEPLOYMENT}" ``` -- Example output + +* Example output + ``` ******************************* Solo ********************************************* @@ -779,7 +801,6 @@ Current Command : network destroy --deployment solo-deployment --quiet-mode ✔ Deleting the RemoteConfig configmap in namespace solo ``` - You may view the list of pods using `k9s` as below: ```