diff --git a/bumiworker/bumiworker/modules/abandoned_base.py b/bumiworker/bumiworker/modules/abandoned_base.py index 8ef1ceb7e..8650540b4 100644 --- a/bumiworker/bumiworker/modules/abandoned_base.py +++ b/bumiworker/bumiworker/modules/abandoned_base.py @@ -1,7 +1,7 @@ from collections import defaultdict from datetime import datetime, timedelta, timezone from bumiworker.bumiworker.modules.base import ( - ArchiveBase, ArchiveReason, ModuleBase, DAYS_IN_MONTH + LOG, ArchiveBase, ArchiveReason, ModuleBase, DAYS_IN_MONTH ) from tools.optscale_data.clickhouse import ExternalDataConverter from tools.optscale_time import utcnow, startday @@ -81,6 +81,7 @@ def _are_below_thresholds(res_data_request_map, metric_threshold_map): if all(data_request_map.get(key, 0) <= threshold_value for key, threshold_value in metric_threshold_map.items()): resource_ids.append(res_id) + LOG.debug(f'AB - Resources below thresholds: {resource_ids}') return resource_ids @staticmethod @@ -106,6 +107,8 @@ def _get(self): now = utcnow() start_date = now - timedelta(days=self.days_threshold) + LOG.debug(f'AB - Start date for abandoned buckets: {start_date}') + cloud_accounts = self.get_cloud_accounts(self.SUPPORTED_CLOUD_TYPES, self.skip_cloud_accounts) buckets_by_account = self.get_active_resources( @@ -159,6 +162,9 @@ def _get(self): base_result_dict.update( self.metrics_result(data_req_map)) result.append(base_result_dict) + + LOG.debug(f'AB - Abandoned buckets result: {result}') + return result diff --git a/bumiworker/bumiworker/modules/archive/s3_abandoned_buckets.py b/bumiworker/bumiworker/modules/archive/s3_abandoned_buckets.py index 0e8ec1f30..b62d7901d 100644 --- a/bumiworker/bumiworker/modules/archive/s3_abandoned_buckets.py +++ b/bumiworker/bumiworker/modules/archive/s3_abandoned_buckets.py @@ -3,8 +3,7 @@ ) from bumiworker.bumiworker.modules.recommendations.s3_abandoned_buckets import ( S3AbandonedBuckets as S3AbandonedBucketsRecommendation, - AVG_DATA_SIZE_KEY, 
TIER_1_REQUESTS_QUANTITY_KEY, - TIER_2_REQUESTS_QUANTITY_KEY + GET_OBJECT_KEY, PUT_OBJECT_KEY ) @@ -15,12 +14,13 @@ class S3AbandonedBuckets(S3AbandonedBucketsArchiveBase, ] def get_previous_metric_threshold_map(self, previous_options): + # Buckets are considered abandoned if both GetObject and PutObject + # operations are zero (no read or write activity) + # For backward compatibility, if old options exist, we still use + # the new logic since the recommendation criteria has changed return { - TIER_1_REQUESTS_QUANTITY_KEY: previous_options.get( - 'tier_1_request_quantity_threshold'), - TIER_2_REQUESTS_QUANTITY_KEY: previous_options.get( - 'tier_2_request_quantity_threshold'), - AVG_DATA_SIZE_KEY: previous_options.get('data_size_threshold') + GET_OBJECT_KEY: 0, + PUT_OBJECT_KEY: 0 } diff --git a/bumiworker/bumiworker/modules/recommendations/s3_abandoned_buckets.py b/bumiworker/bumiworker/modules/recommendations/s3_abandoned_buckets.py index 919818ccb..cb0544b02 100644 --- a/bumiworker/bumiworker/modules/recommendations/s3_abandoned_buckets.py +++ b/bumiworker/bumiworker/modules/recommendations/s3_abandoned_buckets.py @@ -5,15 +5,9 @@ LOG = logging.getLogger(__name__) -DEFAULT_DAYS_THRESHOLD = 7 -DATA_SIZE_THRESHOLD = 1024 -DATA_SIZE_KEY = 'data_size' -TIER_1_REQUESTS_THRESHOLD = 100 -TIER_2_REQUESTS_THRESHOLD = 2000 -MBS_IN_GB = 1024 -AVG_DATA_SIZE_KEY = 'avg_data_size' -TIER_1_REQUESTS_QUANTITY_KEY = 'tier_1_request_quantity' -TIER_2_REQUESTS_QUANTITY_KEY = 'tier_2_request_quantity' +DEFAULT_DAYS_THRESHOLD = 30 +GET_OBJECT_KEY = 'get_object_count' +PUT_OBJECT_KEY = 'put_object_count' class S3AbandonedBuckets(S3AbandonedBucketsBase): @@ -26,12 +20,6 @@ def __init__(self, organization_id, config_client, created_at): self.option_ordered_map = OrderedDict({ 'days_threshold': { 'default': DEFAULT_DAYS_THRESHOLD}, - 'data_size_threshold': { - 'default': DATA_SIZE_THRESHOLD}, - 'tier_1_request_quantity_threshold': { - 'default': TIER_1_REQUESTS_THRESHOLD}, - 
'tier_2_request_quantity_threshold': { - 'default': TIER_2_REQUESTS_THRESHOLD}, 'excluded_pools': { 'default': {}, 'clean_func': self.clean_excluded_pools, @@ -40,29 +28,28 @@ def __init__(self, organization_id, config_client, created_at): }) def get_metric_threshold_map(self): - options = self.get_options() + # Buckets are considered abandoned if both GetObject and PutObject + # operations are zero (no read or write activity) + LOG.debug(f'AB - GET_OBJECT_KEY: {GET_OBJECT_KEY}, PUT_OBJECT_KEY: {PUT_OBJECT_KEY}') return { - TIER_1_REQUESTS_QUANTITY_KEY: options.get( - 'tier_1_request_quantity_threshold'), - TIER_2_REQUESTS_QUANTITY_KEY: options.get( - 'tier_2_request_quantity_threshold'), - AVG_DATA_SIZE_KEY: options.get('data_size_threshold') + GET_OBJECT_KEY: False, + PUT_OBJECT_KEY: False } def _get_data_size_request_metrics(self, cloud_account_id, cloud_resource_ids, start_date, days_threshold): - product_families = ['Data Transfer', 'API Request'] - tier_1_request_type = 'Requests-Tier1' - tier_2_request_type = 'Requests-Tier2' - data_api_requests = self.mongo_client.restapi.raw_expenses.aggregate([ + # Query for GetObject and PutObject operations in API Request product family + target_operations = ['GetObject', 'PutObject'] + api_request_pipeline = [ { '$match': { '$and': [ {'resource_id': {'$in': cloud_resource_ids}}, {'cloud_account_id': cloud_account_id}, {'start_date': {'$gte': start_date}}, - {'product/productFamily': {'$in': product_families}} + {'product/productFamily': 'API Request'}, + {'lineItem/Operation': {'$in': target_operations}} ] } }, @@ -70,60 +57,46 @@ def _get_data_size_request_metrics(self, cloud_account_id, '$group': { '_id': { '_id': '$resource_id', - 'productFamily': '$product/productFamily', - 'tier_type': '$lineItem/UsageType', 'operation': '$lineItem/Operation' }, - 'usage_amount': {'$push': '$lineItem/UsageAmount'} + 'total_usage': { + '$sum': '$lineItem/UsageAmount' + } } } - ]) - resource_data_request_map = {} - for 
data_api_request in data_api_requests: - cloud_resource_id = data_api_request['_id']['_id'] - if not resource_data_request_map.get(cloud_resource_id): - resource_data_request_map[cloud_resource_id] = {} - resource_data_request_map[cloud_resource_id][ - DATA_SIZE_KEY] = 0.0 - resource_data_request_map[cloud_resource_id][ - TIER_1_REQUESTS_QUANTITY_KEY] = 0 - resource_data_request_map[cloud_resource_id][ - TIER_2_REQUESTS_QUANTITY_KEY] = 0 - total_sum = sum( - [float(x) for x in data_api_request['usage_amount']]) - if data_api_request['_id']['productFamily'] == 'Data Transfer': - resource_data_request_map[cloud_resource_id][ - DATA_SIZE_KEY] += total_sum - else: - res_tier_type = data_api_request['_id']['tier_type'] - res_operation = data_api_request['_id']['operation'] - if tier_1_request_type in res_tier_type: - resource_data_request_map[cloud_resource_id][ - TIER_1_REQUESTS_QUANTITY_KEY] += int(total_sum) - elif (tier_2_request_type in res_tier_type and - res_operation == 'GetObject'): - resource_data_request_map[cloud_resource_id][ - TIER_2_REQUESTS_QUANTITY_KEY] += int(total_sum) + ] + api_requests = self.mongo_client.restapi.raw_expenses.aggregate( + api_request_pipeline) + api_requests_list = list(api_requests) + LOG.debug(f'AB - API Requests aggregation result: {api_requests_list}') resource_meter_value = {} - for res_id, meter_map in resource_data_request_map.items(): - if not resource_meter_value.get(res_id): - resource_meter_value[res_id] = {} - for meter_key, total in meter_map.items(): - if meter_key == DATA_SIZE_KEY: - avg_size = (total / days_threshold) * MBS_IN_GB - resource_meter_value[res_id][AVG_DATA_SIZE_KEY] = avg_size - else: - resource_meter_value[res_id][meter_key] = total + # Initialize all resources with no recorded activity + for res_id in cloud_resource_ids: + resource_meter_value[res_id] = { + GET_OBJECT_KEY: False, + PUT_OBJECT_KEY: False + } + # Aggregate operation usage (already summed by MongoDB) + for api_request in 
api_requests_list: + cloud_resource_id = api_request['_id']['_id'] + operation = api_request['_id']['operation'] + total_sum = int(api_request['total_usage']) + has_usage = bool(total_sum) + if operation == 'GetObject': + resource_meter_value[cloud_resource_id][ + GET_OBJECT_KEY] = has_usage + elif operation == 'PutObject': + resource_meter_value[cloud_resource_id][ + PUT_OBJECT_KEY] = has_usage + + LOG.debug(f'AB - Resource meter values: {resource_meter_value}') return resource_meter_value @staticmethod def metrics_result(data_req_map): return { - 'tier_1_request_quantity': data_req_map.get( - TIER_1_REQUESTS_QUANTITY_KEY), - 'tier_2_request_quantity': data_req_map.get( - TIER_2_REQUESTS_QUANTITY_KEY), - 'avg_data_size': data_req_map.get(AVG_DATA_SIZE_KEY), + 'get_object_count': data_req_map.get(GET_OBJECT_KEY, False), + 'put_object_count': data_req_map.get(PUT_OBJECT_KEY, False), } diff --git a/bumiworker/bumiworker/tests/conftest.py b/bumiworker/bumiworker/tests/conftest.py index 2ed5d4241..49e52c729 100644 --- a/bumiworker/bumiworker/tests/conftest.py +++ b/bumiworker/bumiworker/tests/conftest.py @@ -112,18 +112,42 @@ def __init__(self, *args, **kwargs): # # tools subpackages stubs # +if "tools" not in sys.modules: + tools_mod = types.ModuleType("tools") + sys.modules["tools"] = tools_mod + if "tools.optscale_time" not in sys.modules: time_mod = types.ModuleType("tools.optscale_time") from time import time as _time - def utcnow_timestamp(): return int(_time()) - def utcnow(): return utcnow_timestamp() - def utcfromtimestamp(ts): return ts - def startday(ts): return ts + from datetime import datetime, timezone + + def utcnow_timestamp(): + return int(_time()) + + def utcnow(): + return datetime.now(timezone.utc) + + def utcfromtimestamp(ts): + return datetime.fromtimestamp(ts, tz=timezone.utc) + + def startday(ts): + if isinstance(ts, datetime): + return datetime(ts.year, ts.month, ts.day, tzinfo=ts.tzinfo) + return ts + time_mod.utcnow_timestamp = 
utcnow_timestamp time_mod.utcnow = utcnow time_mod.utcfromtimestamp = utcfromtimestamp time_mod.startday = startday sys.modules["tools.optscale_time"] = time_mod + # Make it accessible as tools.optscale_time attribute + sys.modules["tools"].optscale_time = time_mod + +if "tools.optscale_data" not in sys.modules: + data_mod = types.ModuleType("tools.optscale_data") + sys.modules["tools.optscale_data"] = data_mod + # Make it accessible as tools.optscale_data attribute + sys.modules["tools"].optscale_data = data_mod if "tools.optscale_data.clickhouse" not in sys.modules: ch_mod = types.ModuleType("tools.optscale_data.clickhouse") @@ -131,4 +155,6 @@ class ExternalDataConverter: def __init__(self, *args, **kwargs): pass ch_mod.ExternalDataConverter = ExternalDataConverter - sys.modules["tools.optscale_data.clickhouse"] = ch_mod \ No newline at end of file + sys.modules["tools.optscale_data.clickhouse"] = ch_mod + # Make it accessible as tools.optscale_data.clickhouse attribute + sys.modules["tools.optscale_data"].clickhouse = ch_mod diff --git a/bumiworker/bumiworker/tests/test_s3_abandoned_buckets.py b/bumiworker/bumiworker/tests/test_s3_abandoned_buckets.py new file mode 100644 index 000000000..a8309b544 --- /dev/null +++ b/bumiworker/bumiworker/tests/test_s3_abandoned_buckets.py @@ -0,0 +1,299 @@ +""" +Test scenarios for S3 Abandoned Buckets recommendation. + +This test file covers the new implementation that checks for zero GetObject and PutObject +operations over the past 30 days to identify abandoned buckets. 
+ +Test Structure: +- Mocks MongoDB raw_expenses aggregation results +- Tests individual methods and edge cases +- Covers edge cases and boundary conditions + +Note: This test file assumes the new implementation with: +- GET_OBJECT_KEY and PUT_OBJECT_KEY constants +- Updated _get_data_size_request_metrics method +- Updated get_metric_threshold_map method +- 30-day default threshold +""" + +from datetime import datetime, timezone, timedelta +from typing import Callable, Dict +from unittest.mock import Mock +import copy + +import pytest # type: ignore + +from bumiworker.bumiworker.modules.recommendations.s3_abandoned_buckets import ( + S3AbandonedBuckets, + GET_OBJECT_KEY, + PUT_OBJECT_KEY, +) + + +NOW_FIXED = datetime(2025, 1, 15, 0, 0, 0, tzinfo=timezone.utc) +START_DATE_30_DAYS = NOW_FIXED - timedelta(days=30) + +# Base resource template +RESOURCE_BUCKET = { + "_id": "bucket-resource-1", + "cloud_account_id": "account-1", + "cloud_resource_id": "bucket-abandoned-1", + "name": "bucket-abandoned-1", + "applied_rules": [], + "created_at": 1730430000, + "deleted_at": 0, + "employee_id": "employee-1", + "first_seen": 1730430000, + "last_seen": 1761681929, + "pool_id": "pool-1", + "region": "us-east-1", + "resource_type": "Bucket", + "active": True, + "tags": {}, +} + + +@pytest.fixture +def module_factory(monkeypatch) -> Callable[..., S3AbandonedBuckets]: + def _factory( + *, + organization_id: str = "org-1", + created_at: int = int(NOW_FIXED.timestamp()), + cloud_accounts: Dict = None, + ) -> S3AbandonedBuckets: + mod = S3AbandonedBuckets( + organization_id=organization_id, + config_client=Mock(), + created_at=created_at, + ) + monkeypatch.setattr("tools.optscale_time.utcnow", lambda: NOW_FIXED) + + if cloud_accounts is not None: + mod.get_cloud_accounts = lambda *_args, **_kwargs: cloud_accounts + else: + mod.get_cloud_accounts = lambda *_args, **_kwargs: { + "account-1": {"id": "account-1", "type": "aws_cnr", "name": "Account 1"} + } + + return mod + + return 
_factory + + +@pytest.fixture +def mod_base(module_factory): + """Base module with default options and placeholders.""" + mod = module_factory() + mod.get_options = Mock( + return_value={ + "days_threshold": 30, + "excluded_pools": {}, + "skip_cloud_accounts": [], + } + ) + # Default mongo mock (tests override as needed) + mock_mongo = Mock() + mock_mongo.restapi.raw_expenses.aggregate = Mock(return_value=[]) + mod._mongo_client = mock_mongo + + # Default helpers + mod.get_employees = Mock(return_value={}) + mod.get_pools = Mock(return_value={}) + mod.get_month_saving_by_daily_avg_expenses = Mock( + return_value={"bucket-resource-1": 10.5} + ) + return mod + + +class TestGetDataSizeRequestMetrics: + """Tests for `_get_data_size_request_metrics` method.""" + + def test_no_operations(self, mod_base): + """No GetObject or PutObject operations.""" + mod = mod_base + + # Mock MongoDB client - only what this test needs + mock_mongo = Mock() + mock_mongo.restapi.raw_expenses.aggregate = Mock(return_value=[]) + mod._mongo_client = mock_mongo + + result = mod._get_data_size_request_metrics( + cloud_account_id="account-1", + cloud_resource_ids=["bucket-abandoned-1"], + start_date=START_DATE_30_DAYS, + days_threshold=30 + ) + + assert "bucket-abandoned-1" in result + assert result["bucket-abandoned-1"][GET_OBJECT_KEY] == 0 + assert result["bucket-abandoned-1"][PUT_OBJECT_KEY] == 0 + + def test_getobject_only(self, mod_base): + """GetObject operations only.""" + mod = mod_base + raw_expenses = [ + { + "_id": { + "_id": "bucket-read-only-1", + "operation": "GetObject" + }, + "total_usage": 150.0 + } + ] + # Mock MongoDB client - only what this test needs + mock_mongo = Mock() + mock_mongo.restapi.raw_expenses.aggregate = Mock(return_value=raw_expenses) + mod._mongo_client = mock_mongo + + result = mod._get_data_size_request_metrics( + cloud_account_id="account-1", + cloud_resource_ids=["bucket-read-only-1"], + start_date=START_DATE_30_DAYS, + days_threshold=30 + ) + + assert 
"bucket-read-only-1" in result
+        assert result["bucket-read-only-1"][GET_OBJECT_KEY] is True
+        assert result["bucket-read-only-1"][PUT_OBJECT_KEY] is False
+
+    def test_putobject_only(self, mod_base):
+        """PutObject operations only."""
+        mod = mod_base
+        raw_expenses = [
+            {
+                "_id": {
+                    "_id": "bucket-write-only-1",
+                    "operation": "PutObject"
+                },
+                "total_usage": 150.0
+            }
+        ]
+        # Mock MongoDB client - only what this test needs
+        mock_mongo = Mock()
+        mock_mongo.restapi.raw_expenses.aggregate = Mock(return_value=raw_expenses)
+        mod._mongo_client = mock_mongo
+
+        result = mod._get_data_size_request_metrics(
+            cloud_account_id="account-1",
+            cloud_resource_ids=["bucket-write-only-1"],
+            start_date=START_DATE_30_DAYS,
+            days_threshold=30
+        )
+
+        assert "bucket-write-only-1" in result
+        assert result["bucket-write-only-1"][GET_OBJECT_KEY] is False
+        assert result["bucket-write-only-1"][PUT_OBJECT_KEY] is True
+
+    def test_putobject_and_getobject(self, mod_base):
+        """PutObject and GetObject operations."""
+        mod = mod_base
+        raw_expenses = [
+            {
+                "_id": {
+                    "_id": "bucket-active-only-1",
+                    "operation": "PutObject"
+                },
+                "total_usage": 150.0
+            },
+            {
+                "_id": {
+                    "_id": "bucket-active-only-1",
+                    "operation": "GetObject"
+                },
+                "total_usage": 100.0
+            }
+        ]
+        # Mock MongoDB client - only what this test needs
+        mock_mongo = Mock()
+        mock_mongo.restapi.raw_expenses.aggregate = Mock(return_value=raw_expenses)
+        mod._mongo_client = mock_mongo
+
+        result = mod._get_data_size_request_metrics(
+            cloud_account_id="account-1",
+            cloud_resource_ids=["bucket-active-only-1"],
+            start_date=START_DATE_30_DAYS,
+            days_threshold=30
+        )
+
+        assert "bucket-active-only-1" in result
+        assert result["bucket-active-only-1"][GET_OBJECT_KEY] is True
+        assert result["bucket-active-only-1"][PUT_OBJECT_KEY] is True
+
+class TestIntegration:
+    """End-to-end `_get` behavior focusing on recommendation inclusion/exclusion."""
+
+    def test_abandoned_bucket_is_recommended(self, mod_base):
+        """Zero Get/Put -> 
abandoned -> recommended (saving > 0).""" + mod = mod_base + # No operations + mod._mongo_client.restapi.raw_expenses.aggregate = Mock(return_value=[]) + # Active bucket + bucket = copy.deepcopy(RESOURCE_BUCKET) + bucket["cloud_resource_id"] = "bucket-abandoned-1" + mod.get_active_resources = Mock(return_value={"account-1": [bucket]}) + + result = mod._get() + + assert len(result) == 1 + assert result[0]["cloud_resource_id"] == "bucket-abandoned-1" + assert result[0]["get_object_count"] is False + assert result[0]["put_object_count"] is False + assert result[0]["saving"] == 10.5 + + def test_active_bucket_not_recommended(self, mod_base): + """Any Get/Put > 0 -> not abandoned -> not recommended.""" + mod = mod_base + raw_expenses = [ + { + "_id": {"_id": "bucket-active-1", "operation": "GetObject"}, + "total_usage": 10.0, + } + ] + mod._mongo_client.restapi.raw_expenses.aggregate = Mock(return_value=raw_expenses) + + bucket = copy.deepcopy(RESOURCE_BUCKET) + bucket["cloud_resource_id"] = "bucket-active-1" + mod.get_active_resources = Mock(return_value={"account-1": [bucket]}) + + result = mod._get() + + assert len(result) == 0 + + def test_zero_saving_not_recommended(self, mod_base): + """Zero ops but zero saving -> skip recommendation.""" + mod = mod_base + mod._mongo_client.restapi.raw_expenses.aggregate = Mock(return_value=[]) + mod.get_month_saving_by_daily_avg_expenses = Mock(return_value={"bucket-resource-1": 0}) + + bucket = copy.deepcopy(RESOURCE_BUCKET) + bucket["cloud_resource_id"] = "bucket-abandoned-1" + mod.get_active_resources = Mock(return_value={"account-1": [bucket]}) + + result = mod._get() + + assert len(result) == 0 + + def test_operations_not_get_or_put_object_are_recommended(self, mod_base): + """Any operations not GetObject or PutObject are recommended.""" + mod = mod_base + raw_expenses = [ + { + "_id": {"_id": "bucket-active-list-operations-1", "operation": "ListObjects"}, + "total_usage": 10.0, + } + ] + 
mod._mongo_client.restapi.raw_expenses.aggregate = Mock(return_value=raw_expenses) + + bucket = copy.deepcopy(RESOURCE_BUCKET) + bucket["cloud_resource_id"] = "bucket-active-list-operations-1" + mod.get_active_resources = Mock(return_value={"account-1": [bucket]}) + + result = mod._get() + + assert len(result) == 1 + assert result[0]["cloud_resource_id"] == "bucket-active-list-operations-1" + assert result[0]["get_object_count"] is False + assert result[0]["put_object_count"] is False + assert result[0]["saving"] == 10.5 + + diff --git a/ngui/ui/src/components/SideModalManager/SideModals/recommendations/AbandonedS3BucketsModal.tsx b/ngui/ui/src/components/SideModalManager/SideModals/recommendations/AbandonedS3BucketsModal.tsx index 2faf7c72e..a6e4cf9fc 100644 --- a/ngui/ui/src/components/SideModalManager/SideModals/recommendations/AbandonedS3BucketsModal.tsx +++ b/ngui/ui/src/components/SideModalManager/SideModals/recommendations/AbandonedS3BucketsModal.tsx @@ -1,4 +1,5 @@ import { FormProvider } from "react-hook-form"; +import { FormattedMessage } from "react-intl"; import { SETTINGS_TYPE_SUCCESS_MESSAGE, COMMON_YEAR_LENGTH } from "utils/constants"; import { lessOrEqual } from "utils/validation"; import BaseSideModal from "../BaseSideModal"; @@ -9,10 +10,7 @@ import TextWithInlineInput from "./components/TextWithInlineInput"; import { useCommonSettingsData, useFormWithValuesFromOptions } from "./hooks"; const VALUE_KEYS = Object.freeze({ - [THRESHOLD_INPUT_NAMES.DAYS_THRESHOLD]: "days_threshold", - [THRESHOLD_INPUT_NAMES.DATA_SIZE_THRESHOLD]: "data_size_threshold", - [THRESHOLD_INPUT_NAMES.TIER_1_REQUESTS_QUANTITY_THRESHOLD]: "tier_1_request_quantity_threshold", - [THRESHOLD_INPUT_NAMES.TIER_2_REQUESTS_QUANTITY_THRESHOLD]: "tier_2_request_quantity_threshold" + [THRESHOLD_INPUT_NAMES.DAYS_THRESHOLD]: "days_threshold" }); const AbandonedS3BucketsForm = ({ recommendationType, onSuccess }) => { @@ -32,31 +30,7 @@ const AbandonedS3BucketsForm = ({ recommendationType, 
onSuccess }) => { lessOrEqualValidation={lessOrEqual(COMMON_YEAR_LENGTH)} name={THRESHOLD_INPUT_NAMES.DAYS_THRESHOLD} /> - + + + + ), - accessorKey: "tier_1_request_quantity" + accessorKey: "get_object_count" }, { header: ( - - + + ), - accessorKey: "tier_2_request_quantity" + accessorKey: "put_object_count" }, detectedAt({ headerDataTestId: "lbl_s3_abandoned_buckets_detected_at" }), possibleMonthlySavings({ @@ -61,14 +53,9 @@ class AbandonedS3Buckets extends BaseRecommendation { descriptionMessageId = "abandonedS3BucketsDescription"; get descriptionMessageValues() { - const { - days_threshold: daysThreshold, - data_size_threshold: dataSizeAvg, - tier_1_request_quantity_threshold: tier1RequestsQuantity, - tier_2_request_quantity_threshold: tier2RequestsQuantity - } = this.options; - - return { daysThreshold, dataSizeAvg, tier1RequestsQuantity, tier2RequestsQuantity }; + const { days_threshold: daysThreshold } = this.options; + + return { daysThreshold }; } emptyMessageId = "noAbandonedS3Buckets"; diff --git a/ngui/ui/src/translations/en-US/app.json b/ngui/ui/src/translations/en-US/app.json index 390f18e3f..5ede9acf0 100644 --- a/ngui/ui/src/translations/en-US/app.json +++ b/ngui/ui/src/translations/en-US/app.json @@ -34,8 +34,8 @@ "abandonedNebiusS3BucketsDescription": "Some of your active Nebius S3 buckets have been detected as abandoned (average data size has been less than {dataSizeAvg} {dataSizeAvg, plural,\n =1 {megabyte}\n other {megabytes}}, GET requests quantity has been less than {getRequestsQuantity}, POST requests quantity has been less than {postRequestsQuantity}, PUT requests quantity has been less than {putRequestsQuantity}, HEAD requests quantity has been less than {headRequestsQuantity}, OPTIONS requests quantity has been less than {optionsRequestsQuantity}, and DELETE requests quantity has been less than {deleteRequestsQuantity} for the last {daysThreshold} {daysThreshold, plural,\n =1 {day}\n other {days}\n}). 
Consider their deletion to reduce expenses.", "abandonedNebiusS3BucketsResourceRecommendation": "This Nebius S3 bucket is detected as abandoned. Consider its termination to reduce expenses.", "abandonedS3Buckets": "Abandoned Amazon S3 buckets", - "abandonedS3BucketsDescription": "Some of your active Amazon S3 buckets have been detected as abandoned (average data size has been less than {dataSizeAvg} {dataSizeAvg, plural,\n =1 {megabyte}\n other {megabytes}}, Tier1 requests quantity has been less than {tier1RequestsQuantity}, and GET requests quantity has been less than {tier2RequestsQuantity} for the last {daysThreshold} {daysThreshold, plural,\n =1 {day}\n other {days}\n}). Consider their deletion to reduce expenses.", - "abandonedS3BucketsResourceRecommendation": "This Amazon S3 bucket is detected as abandoned. Consider its termination to reduce expenses.", + "abandonedS3BucketsDescription": "Some of your active Amazon S3 buckets have recorded no GET or PUT requests for the last {daysThreshold} {daysThreshold, plural,\n =1 {day}\n other {days}\n}. Consider their deletion to reduce expenses.", + "abandonedS3BucketsResourceRecommendation": "This Amazon S3 bucket has had no GET or PUT requests recently. 
Consider its termination to reduce expenses.", "abandonedS3BucketsTitle": "{value} {value, plural,\n =1 {abandoned Amazon S3 bucket}\n other {abandoned Amazon S3 buckets}}.", "abortConditions": "Abort conditions", "abortIndividualRunIfItsDurationExceeds": "Abort individual run if its duration exceeds", @@ -2328,10 +2328,8 @@ "thresholds.abandonedNebiusS3Buckets.optionsRequestsQuantityThreshold": "its OPTIONS requests quantity has been less than {input}", "thresholds.abandonedNebiusS3Buckets.postRequestsQuantityThreshold": "its POST requests quantity has been less than {input}", "thresholds.abandonedNebiusS3Buckets.putRequestsQuantityThreshold": "its PUT requests quantity has been less than {input}", - "thresholds.abandonedS3Buckets.dataSizeThreshold": "its average data size has been less than {input} megabytes", - "thresholds.abandonedS3Buckets.getRequestsQuantityThreshold": "its GET requests quantity has been less than {input}", - "thresholds.abandonedS3Buckets.intro": "Consider an Amazon S3 bucket abandoned if for the last {input} days", - "thresholds.abandonedS3Buckets.tier1RequestsQuantityThreshold": "its Tier1 requests quantity has been less than {input}", + "thresholds.abandonedS3Buckets.activityDescription": "Both GET and PUT request counts must remain zero during this period.", + "thresholds.abandonedS3Buckets.intro": "Consider an Amazon S3 bucket abandoned if for the last {input} days it has had no GET or PUT requests.", "thresholds.cvosAgreementOpportunities": "Cover instances with Committed Volume of Services if they have been detected as sustainable for {input} days", "thresholds.inactiveConsoleUsers": "Consider IAM user console access password inactive if it has not been used for the last {input} days.", "thresholds.inactiveUsers": "Consider IAM user as inactive if it does not have any recorded access activity for the last {input} days.",