From f03f51f3f6035a8a73b864bb41debd18f97cf6df Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Mon, 31 Mar 2025 12:25:32 -0700 Subject: [PATCH 01/23] Add Excluded Locations Feature --- sdk/cosmos/azure-cosmos/azure/cosmos/_base.py | 1 + .../azure/cosmos/_cosmos_client_connection.py | 10 ++ .../azure/cosmos/_global_endpoint_manager.py | 4 +- .../azure/cosmos/_location_cache.py | 101 +++++++++--- .../azure/cosmos/_request_object.py | 25 ++- .../azure/cosmos/aio/_container.py | 30 ++++ .../aio/_cosmos_client_connection_async.py | 10 ++ .../aio/_global_endpoint_manager_async.py | 6 +- .../azure-cosmos/azure/cosmos/container.py | 30 ++++ .../azure/cosmos/cosmos_client.py | 4 + .../azure-cosmos/azure/cosmos/documents.py | 8 + .../samples/excluded_locations.py | 110 +++++++++++++ .../azure-cosmos/tests/test_health_check.py | 6 +- .../tests/test_health_check_async.py | 12 +- .../azure-cosmos/tests/test_location_cache.py | 148 +++++++++++++++++- .../tests/test_retry_policy_async.py | 1 + 16 files changed, 462 insertions(+), 44 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/samples/excluded_locations.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py index 654b23c5d71f..bcfc611456ec 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py @@ -63,6 +63,7 @@ 'priority': 'priorityLevel', 'no_response': 'responsePayloadOnWriteDisabled', 'max_item_count': 'maxItemCount', + 'excluded_locations': 'excludedLocations', } # Cosmos resource ID validation regex breakdown: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 3934c23bcf99..d64da38defb1 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -2044,6 +2044,7 @@ def PatchItem( documents._OperationType.Patch, options) # Patch will use WriteEndpoint since it uses PUT operation request_params = RequestObject(resource_type, documents._OperationType.Patch) + request_params.set_excluded_location_from_options(options) request_data = {} if options.get("filterPredicate"): request_data["condition"] = options.get("filterPredicate") @@ -2132,6 +2133,7 @@ def _Batch( headers = base.GetHeaders(self, initial_headers, "post", path, collection_id, "docs", documents._OperationType.Batch, options) request_params = RequestObject("docs", documents._OperationType.Batch) + request_params.set_excluded_location_from_options(options) return cast( Tuple[List[Dict[str, Any]], CaseInsensitiveDict], self.__Post(path, request_params, batch_operations, headers, **kwargs) @@ -2192,6 +2194,7 @@ def DeleteAllItemsByPartitionKey( headers = base.GetHeaders(self, self.default_headers, "post", path, collection_id, "partitionkey", documents._OperationType.Delete, options) request_params = RequestObject("partitionkey", documents._OperationType.Delete) + request_params.set_excluded_location_from_options(options) _, last_response_headers = self.__Post( path=path, request_params=request_params, @@ -2647,6 +2650,7 @@ def Create( # Create will use WriteEndpoint since it uses POST operation request_params = RequestObject(typ, documents._OperationType.Create) + request_params.set_excluded_location_from_options(options) result, last_response_headers = self.__Post(path, request_params, body, headers, **kwargs) self.last_response_headers = last_response_headers @@ -2693,6 +2697,7 @@ def 
Upsert( # Upsert will use WriteEndpoint since it uses POST operation request_params = RequestObject(typ, documents._OperationType.Upsert) + request_params.set_excluded_location_from_options(options) result, last_response_headers = self.__Post(path, request_params, body, headers, **kwargs) self.last_response_headers = last_response_headers # update session for write request @@ -2736,6 +2741,7 @@ def Replace( options) # Replace will use WriteEndpoint since it uses PUT operation request_params = RequestObject(typ, documents._OperationType.Replace) + request_params.set_excluded_location_from_options(options) result, last_response_headers = self.__Put(path, request_params, resource, headers, **kwargs) self.last_response_headers = last_response_headers @@ -2777,6 +2783,7 @@ def Read( headers = base.GetHeaders(self, initial_headers, "get", path, id, typ, documents._OperationType.Read, options) # Read will use ReadEndpoint since it uses GET operation request_params = RequestObject(typ, documents._OperationType.Read) + request_params.set_excluded_location_from_options(options) result, last_response_headers = self.__Get(path, request_params, headers, **kwargs) self.last_response_headers = last_response_headers if response_hook: @@ -2816,6 +2823,7 @@ def DeleteResource( options) # Delete will use WriteEndpoint since it uses DELETE operation request_params = RequestObject(typ, documents._OperationType.Delete) + request_params.set_excluded_location_from_options(options) result, last_response_headers = self.__Delete(path, request_params, headers, **kwargs) self.last_response_headers = last_response_headers @@ -3052,6 +3060,7 @@ def __GetBodiesFromQueryResult(result: Dict[str, Any]) -> List[Dict[str, Any]]: resource_type, documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.ReadFeed ) + request_params.set_excluded_location_from_options(options) headers = base.GetHeaders( self, initial_headers, @@ -3090,6 +3099,7 @@ def __GetBodiesFromQueryResult(result: Dict[str, Any]) -> List[Dict[str, Any]]: # Query operations will use ReadEndpoint even though it uses POST(for regular query operations) request_params = RequestObject(resource_type, documents._OperationType.SqlQuery) + request_params.set_excluded_location_from_options(options) req_headers = base.GetHeaders( self, initial_headers, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py index e167871dd4a5..944b684e392b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py @@ -50,10 +50,8 @@ def __init__(self, client): self.DefaultEndpoint = client.url_connection self.refresh_time_interval_in_ms = self.get_refresh_time_interval_in_ms_stub() self.location_cache = LocationCache( - self.PreferredLocations, self.DefaultEndpoint, - self.EnableEndpointDiscovery, - client.connection_policy.UseMultipleWriteLocations + client.connection_policy ) self.refresh_needed = False self.refresh_lock = threading.RLock() diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_location_cache.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_location_cache.py index 96651d5c8b7f..02b293e29b4b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_location_cache.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_location_cache.py @@ -25,12 +25,13 @@ import collections import logging import time -from typing import Set +from typing import Set, Mapping, List from urllib.parse import urlparse 
from . import documents from . import http_constants from .documents import _OperationType +from ._request_object import RequestObject # pylint: disable=protected-access @@ -113,7 +114,10 @@ def get_endpoints_by_location(new_locations, except Exception as e: raise e - return endpoints_by_location, parsed_locations + # Also store a hash map of endpoints for each location + locations_by_endpoints = {value.get_primary(): key for key, value in endpoints_by_location.items()} + + return endpoints_by_location, locations_by_endpoints, parsed_locations def add_endpoint_if_preferred(endpoint: str, preferred_endpoints: Set[str], endpoints: Set[str]) -> bool: if endpoint in preferred_endpoints: @@ -150,6 +154,21 @@ def _get_health_check_endpoints( return endpoints +def _get_applicable_regional_endpoints(endpoints: List[RegionalRoutingContext], + location_name_by_endpoint: Mapping[str, str], + fall_back_endpoint: RegionalRoutingContext, + exclude_location_list: List[str]) -> List[RegionalRoutingContext]: + # filter endpoints by excluded locations + applicable_endpoints = [] + for endpoint in endpoints: + if location_name_by_endpoint.get(endpoint.get_primary()) not in exclude_location_list: + applicable_endpoints.append(endpoint) + + # if endpoint is empty add fallback endpoint + if not applicable_endpoints: + applicable_endpoints.append(fall_back_endpoint) + + return applicable_endpoints class LocationCache(object): # pylint: disable=too-many-public-methods,too-many-instance-attributes def current_time_millis(self): @@ -157,15 +176,10 @@ def current_time_millis(self): def __init__( self, - preferred_locations, default_endpoint, - enable_endpoint_discovery, - use_multiple_write_locations, + connection_policy, ): - self.preferred_locations = preferred_locations self.default_regional_routing_context = RegionalRoutingContext(default_endpoint, default_endpoint) - self.enable_endpoint_discovery = enable_endpoint_discovery - self.use_multiple_write_locations = use_multiple_write_locations self.enable_multiple_writable_locations = False self.write_regional_routing_contexts = [self.default_regional_routing_context] self.read_regional_routing_contexts = [self.default_regional_routing_context] @@ -173,8 +187,11 @@ def __init__( self.last_cache_update_time_stamp = 0 self.account_read_regional_routing_contexts_by_location = {} # pylint: disable=name-too-long self.account_write_regional_routing_contexts_by_location = {} # pylint: disable=name-too-long + self.account_locations_by_read_regional_routing_context = {} # pylint: disable=name-too-long + self.account_locations_by_write_regional_routing_context = {} # pylint: disable=name-too-long self.account_write_locations = [] self.account_read_locations = [] + self.connection_policy = connection_policy def get_write_regional_routing_contexts(self): return self.write_regional_routing_contexts @@ -207,6 +224,44 @@ def get_ordered_write_locations(self): def get_ordered_read_locations(self): return self.account_read_locations + def _get_configured_excluded_locations(self, request: RequestObject): + # If excluded locations were configured on request, use request level excluded locations. 
+ excluded_locations = request.excluded_locations + if excluded_locations is None: + # If excluded locations were only configured on client(connection_policy), use client level + excluded_locations = self.connection_policy.ExcludedLocations + return excluded_locations + + def _get_applicable_read_regional_endpoints(self, request: RequestObject): + # Get configured excluded locations + excluded_locations = self._get_configured_excluded_locations(request) + + # If excluded locations were configured, return filtered regional endpoints by excluded locations. + if excluded_locations: + return _get_applicable_regional_endpoints( + self.get_read_regional_routing_contexts(), + self.account_locations_by_read_regional_routing_context, + self.get_write_regional_routing_contexts()[0], + excluded_locations) + + # Else, return all regional endpoints + return self.get_read_regional_routing_contexts() + + def _get_applicable_write_regional_endpoints(self, request: RequestObject): + # Get configured excluded locations + excluded_locations = self._get_configured_excluded_locations(request) + + # If excluded locations were configured, return filtered regional endpoints by excluded locations. + if excluded_locations: + return _get_applicable_regional_endpoints( + self.get_write_regional_routing_contexts(), + self.account_locations_by_write_regional_routing_context, + self.default_regional_routing_context, + excluded_locations) + + # Else, return all regional endpoints + return self.get_write_regional_routing_contexts() + def resolve_service_endpoint(self, request): if request.location_endpoint_to_route: return request.location_endpoint_to_route @@ -227,7 +282,7 @@ def resolve_service_endpoint(self, request): # For non-document resource types in case of client can use multiple write locations # or when client cannot use multiple write locations, flip-flop between the # first and the second writable region in DatabaseAccount (for manual failover) - if self.enable_endpoint_discovery and self.account_write_locations: + if self.connection_policy.EnableEndpointDiscovery and self.account_write_locations: location_index = min(location_index % 2, len(self.account_write_locations) - 1) write_location = self.account_write_locations[location_index] if (self.account_write_regional_routing_contexts_by_location @@ -247,9 +302,9 @@ def resolve_service_endpoint(self, request): return self.default_regional_routing_context.get_primary() regional_routing_contexts = ( - self.get_write_regional_routing_contexts() + self._get_applicable_write_regional_endpoints(request) if documents._OperationType.IsWriteOperation(request.operation_type) - else self.get_read_regional_routing_contexts() + else self._get_applicable_read_regional_endpoints(request) ) regional_routing_context = regional_routing_contexts[location_index % len(regional_routing_contexts)] if ( @@ -263,12 +318,14 @@ def resolve_service_endpoint(self, request): return regional_routing_context.get_primary() def should_refresh_endpoints(self): # pylint: disable=too-many-return-statements - most_preferred_location = self.preferred_locations[0] if self.preferred_locations else None + most_preferred_location = self.connection_policy.PreferredLocations[0] \ + if self.connection_policy.PreferredLocations else None # we should schedule refresh in background if we are unable to target the user's most preferredLocation. 
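# --- Illustrative sketch (not part of the SDK) --------------------------------------------------
# The filtering performed by _get_applicable_regional_endpoints above reduces to a small rule:
# drop every endpoint whose region name is excluded, and fall back to a designated endpoint when
# nothing survives. The function name, URLs, and region names below are assumptions for the demo.
from typing import Dict, List

def filter_by_excluded_locations(endpoints: List[str],
                                 location_by_endpoint: Dict[str, str],
                                 fallback: str,
                                 excluded_locations: List[str]) -> List[str]:
    # Keep only endpoints whose region is not in the exclusion list.
    applicable = [ep for ep in endpoints
                  if location_by_endpoint.get(ep) not in excluded_locations]
    # If every endpoint was excluded, keep the fallback so requests can still be routed.
    return applicable or [fallback]

endpoints = ["https://acct-westus3.documents.azure.com:443/",
             "https://acct-eastus2.documents.azure.com:443/"]
regions = {endpoints[0]: "West US 3", endpoints[1]: "East US 2"}
print(filter_by_excluded_locations(endpoints, regions, endpoints[0], ["West US 3"]))
# ['https://acct-eastus2.documents.azure.com:443/']
print(filter_by_excluded_locations(endpoints, regions, endpoints[0], ["West US 3", "East US 2"]))
# ['https://acct-westus3.documents.azure.com:443/']  (fallback endpoint)
# --------------------------------------------------------------------------------------------------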
- if self.enable_endpoint_discovery: + if self.connection_policy.EnableEndpointDiscovery: - should_refresh = self.use_multiple_write_locations and not self.enable_multiple_writable_locations + should_refresh = (self.connection_policy.UseMultipleWriteLocations + and not self.enable_multiple_writable_locations) if (most_preferred_location and most_preferred_location in self.account_read_regional_routing_contexts_by_location): @@ -358,25 +415,27 @@ def update_location_cache(self, write_locations=None, read_locations=None, enabl if enable_multiple_writable_locations: self.enable_multiple_writable_locations = enable_multiple_writable_locations - if self.enable_endpoint_discovery: + if self.connection_policy.EnableEndpointDiscovery: if read_locations: (self.account_read_regional_routing_contexts_by_location, + self.account_locations_by_read_regional_routing_context, self.account_read_locations) = get_endpoints_by_location( read_locations, self.account_read_regional_routing_contexts_by_location, self.default_regional_routing_context, False, - self.use_multiple_write_locations + self.connection_policy.UseMultipleWriteLocations ) if write_locations: (self.account_write_regional_routing_contexts_by_location, + self.account_locations_by_write_regional_routing_context, self.account_write_locations) = get_endpoints_by_location( write_locations, self.account_write_regional_routing_contexts_by_location, self.default_regional_routing_context, True, - self.use_multiple_write_locations + self.connection_policy.UseMultipleWriteLocations ) self.write_regional_routing_contexts = self.get_preferred_regional_routing_contexts( @@ -399,18 +458,18 @@ def get_preferred_regional_routing_contexts( regional_endpoints = [] # if enableEndpointDiscovery is false, we always use the defaultEndpoint that # user passed in during documentClient init - if self.enable_endpoint_discovery and endpoints_by_location: # pylint: disable=too-many-nested-blocks + if self.connection_policy.EnableEndpointDiscovery and endpoints_by_location: # pylint: disable=too-many-nested-blocks if ( self.can_use_multiple_write_locations() or expected_available_operation == EndpointOperationType.ReadType ): unavailable_endpoints = [] - if self.preferred_locations: + if self.connection_policy.PreferredLocations: # When client can not use multiple write locations, preferred locations # list should only be used determining read endpoints order. If client # can use multiple write locations, preferred locations list should be # used for determining both read and write endpoints order. 
- for location in self.preferred_locations: + for location in self.connection_policy.PreferredLocations: regional_endpoint = endpoints_by_location[location] if location in endpoints_by_location \ else None if regional_endpoint: @@ -436,7 +495,7 @@ def get_preferred_regional_routing_contexts( return regional_endpoints def can_use_multiple_write_locations(self): - return self.use_multiple_write_locations and self.enable_multiple_writable_locations + return self.connection_policy.UseMultipleWriteLocations and self.enable_multiple_writable_locations def can_use_multiple_write_locations_for_request(self, request): # pylint: disable=name-too-long return self.can_use_multiple_write_locations() and ( diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py index a220c6af42c2..94805934ce74 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py @@ -21,7 +21,8 @@ """Represents a request object. """ -from typing import Optional +from typing import Optional, Mapping, Any + class RequestObject(object): def __init__(self, resource_type: str, operation_type: str, endpoint_override: Optional[str] = None) -> None: @@ -33,6 +34,7 @@ def __init__(self, resource_type: str, operation_type: str, endpoint_override: O self.location_index_to_route: Optional[int] = None self.location_endpoint_to_route: Optional[str] = None self.last_routed_location_endpoint_within_region: Optional[str] = None + self.excluded_locations = None def route_to_location_with_preferred_location_flag( # pylint: disable=name-too-long self, @@ -52,3 +54,24 @@ def clear_route_to_location(self) -> None: self.location_index_to_route = None self.use_preferred_locations = None self.location_endpoint_to_route = None + + def _can_set_excluded_location(self, options: Mapping[str, Any]) -> bool: + # If resource types for requests are not one of the followings, excluded locations cannot be set + if self.resource_type.lower() not in ['docs', 'documents', 'partitionkey']: + return False + + # If 'excludedLocations' wasn't in the options, excluded locations cannot be set + if (options is None + or 'excludedLocations' not in options): + return False + + # The 'excludedLocations' cannot be None + if options['excludedLocations'] is None: + raise ValueError("Excluded locations cannot be None. " + "If you want to remove all excluded locations, try passing an empty list.") + + return True + + def set_excluded_location_from_options(self, options: Mapping[str, Any]) -> None: + if self._can_set_excluded_location(options): + self.excluded_locations = options['excludedLocations'] diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_container.py index 0142e215f318..590f43331652 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_container.py @@ -224,6 +224,8 @@ async def create_item( :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present. :keyword str session_token: Token for use with Session consistency. :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. 
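# --- Illustrative sketch (not part of the SDK) --------------------------------------------------
# How the per-request option reaches RequestObject via set_excluded_location_from_options defined
# above. The private _request_object module is imported here purely for illustration; application
# code should pass excluded_locations= to the public client/container methods instead.
from azure.cosmos._request_object import RequestObject
from azure.cosmos.documents import _OperationType

doc_request = RequestObject("docs", _OperationType.Read)
doc_request.set_excluded_location_from_options({"excludedLocations": ["West US"]})
assert doc_request.excluded_locations == ["West US"]

# Non-document resource types are skipped silently, so the attribute stays None.
offer_request = RequestObject("offers", _OperationType.Read)
offer_request.set_excluded_location_from_options({"excludedLocations": ["West US"]})
assert offer_request.excluded_locations is None

# An explicit None is rejected; pass an empty list to clear client-level exclusions instead.
try:
    doc_request.set_excluded_location_from_options({"excludedLocations": None})
except ValueError:
    pass
# --------------------------------------------------------------------------------------------------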
:keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :keyword Literal["High", "Low"] priority: Priority based execution allows users to set a priority for each @@ -303,6 +305,8 @@ async def read_item( :keyword Literal["High", "Low"] priority: Priority based execution allows users to set a priority for each request. Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item couldn't be retrieved. :returns: A CosmosDict representing the retrieved item. :rtype: ~azure.cosmos.CosmosDict[str, Any] @@ -361,6 +365,8 @@ def read_all_items( :keyword Literal["High", "Low"] priority: Priority based execution allows users to set a priority for each request. Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :returns: An AsyncItemPaged of items (dicts). :rtype: AsyncItemPaged[Dict[str, Any]] """ @@ -441,6 +447,8 @@ def query_items( :keyword Literal["High", "Low"] priority: Priority based execution allows users to set a priority for each request. Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :returns: An AsyncItemPaged of items (dicts). :rtype: AsyncItemPaged[Dict[str, Any]] @@ -537,6 +545,8 @@ def query_items_change_feed( ALL_VERSIONS_AND_DELETES: Query all versions and deleted items from either `start_time='Now'` or 'continuation' token. :paramtype mode: Literal["LatestVersion", "AllVersionsAndDeletes"] + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :returns: An AsyncItemPaged of items (dicts). @@ -575,6 +585,8 @@ def query_items_change_feed( ALL_VERSIONS_AND_DELETES: Query all versions and deleted items from either `start_time='Now'` or 'continuation' token. :paramtype mode: Literal["LatestVersion", "AllVersionsAndDeletes"] + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. 
:paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :returns: An AsyncItemPaged of items (dicts). @@ -601,6 +613,8 @@ def query_items_change_feed( request. Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. :paramtype priority: Literal["High", "Low"] + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :returns: An AsyncItemPaged of items (dicts). @@ -639,6 +653,8 @@ def query_items_change_feed( ALL_VERSIONS_AND_DELETES: Query all versions and deleted items from either `start_time='Now'` or 'continuation' token. :paramtype mode: Literal["LatestVersion", "AllVersionsAndDeletes"] + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :returns: An AsyncItemPaged of items (dicts). @@ -675,6 +691,8 @@ def query_items_change_feed( # pylint: disable=unused-argument ALL_VERSIONS_AND_DELETES: Query all versions and deleted items from either `start_time='Now'` or 'continuation' token. :paramtype mode: Literal["LatestVersion", "AllVersionsAndDeletes"] + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :returns: An AsyncItemPaged of items (dicts). @@ -748,6 +766,8 @@ async def upsert_item( :keyword bool no_response: Indicates whether service should be instructed to skip sending response payloads. When not specified explicitly here, the default value will be determined from client-level options. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item could not be upserted. :returns: A CosmosDict representing the upserted item. The dict will be empty if `no_response` is specified. @@ -830,6 +850,8 @@ async def replace_item( :keyword bool no_response: Indicates whether service should be instructed to skip sending response payloads. When not specified explicitly here, the default value will be determined from client-level options. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The replace operation failed or the item with given id does not exist. :returns: A CosmosDict representing the item after replace went through. 
The dict will be empty if `no_response` @@ -906,6 +928,8 @@ async def patch_item( :keyword bool no_response: Indicates whether service should be instructed to skip sending response payloads. When not specified explicitly here, the default value will be determined from client-level options. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The patch operations failed or the item with given id does not exist. :returns: A CosmosDict representing the item after the patch operations went through. The dict will be empty if @@ -973,6 +997,8 @@ async def delete_item( :keyword Literal["High", "Low"] priority: Priority based execution allows users to set a priority for each request. Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], None], None] :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The item wasn't deleted successfully. @@ -1223,6 +1249,8 @@ async def delete_all_items_by_partition_key( :keyword str pre_trigger_include: trigger id to be used as pre operation trigger. :keyword str post_trigger_include: trigger id to be used as post operation trigger. :keyword str session_token: Token for use with Session consistency. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword Callable response_hook: A callable invoked with the response metadata. :rtype: None """ @@ -1278,6 +1306,8 @@ async def execute_item_batch( :keyword Literal["High", "Low"] priority: Priority based execution allows users to set a priority for each request. Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword Callable response_hook: A callable invoked with the response metadata. :returns: A CosmosList representing the items after the batch operations went through. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The batch failed to execute. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 49219533a7e6..9008b46bb1c1 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -768,6 +768,7 @@ async def Create( # Create will use WriteEndpoint since it uses POST operation request_params = _request_object.RequestObject(typ, documents._OperationType.Create) + request_params.set_excluded_location_from_options(options) result, last_response_headers = await self.__Post(path, request_params, body, headers, **kwargs) self.last_response_headers = last_response_headers @@ -907,6 +908,7 @@ async def Upsert( # Upsert will use WriteEndpoint since it uses POST operation request_params = _request_object.RequestObject(typ, documents._OperationType.Upsert) + request_params.set_excluded_location_from_options(options) result, last_response_headers = await self.__Post(path, request_params, body, headers, **kwargs) self.last_response_headers = last_response_headers # update session for write request @@ -1208,6 +1210,7 @@ async def Read( options) # Read will use ReadEndpoint since it uses GET operation request_params = _request_object.RequestObject(typ, documents._OperationType.Read) + request_params.set_excluded_location_from_options(options) result, last_response_headers = await self.__Get(path, request_params, headers, **kwargs) self.last_response_headers = last_response_headers if response_hook: @@ -1466,6 +1469,7 @@ async def PatchItem( documents._OperationType.Patch, options) # Patch will use WriteEndpoint since it uses PUT operation request_params = _request_object.RequestObject(typ, documents._OperationType.Patch) + request_params.set_excluded_location_from_options(options) request_data = {} if options.get("filterPredicate"): request_data["condition"] = options.get("filterPredicate") @@ -1570,6 +1574,7 @@ async def Replace( options) # Replace will use WriteEndpoint since it uses PUT operation request_params = _request_object.RequestObject(typ, documents._OperationType.Replace) + request_params.set_excluded_location_from_options(options) result, last_response_headers = await self.__Put(path, request_params, resource, headers, **kwargs) self.last_response_headers = last_response_headers @@ -1893,6 +1898,7 @@ async def DeleteResource( options) # Delete will use WriteEndpoint since it uses DELETE operation request_params = _request_object.RequestObject(typ, documents._OperationType.Delete) + request_params.set_excluded_location_from_options(options) result, last_response_headers = await self.__Delete(path, request_params, headers, **kwargs) self.last_response_headers = last_response_headers @@ -2006,6 +2012,7 @@ async def _Batch( headers = base.GetHeaders(self, initial_headers, "post", path, collection_id, "docs", documents._OperationType.Batch, options) request_params = _request_object.RequestObject("docs", documents._OperationType.Batch) + request_params.set_excluded_location_from_options(options) result = await self.__Post(path, request_params, batch_operations, headers, **kwargs) return cast(Tuple[List[Dict[str, Any]], CaseInsensitiveDict], result) @@ -2861,6 +2868,7 @@ def __GetBodiesFromQueryResult(result: Dict[str, Any]) -> List[Dict[str, Any]]: typ, documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.ReadFeed ) + request_params.set_excluded_location_from_options(options) 
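# --- Illustrative sketch (not part of the SDK) --------------------------------------------------
# End-to-end view of the request-level option through the public async API documented above.
# The account URL, key, and database/container names are placeholders.
import asyncio
from azure.cosmos.aio import CosmosClient

async def main():
    async with CosmosClient("<account-url>", credential="<master-key>",
                            preferred_locations=["West US 3", "East US 2"],
                            excluded_locations=["West US 3"]) as client:
        container = client.get_database_client("<database>").get_container_client("<container>")
        # The request-level list replaces the client-level one for this call only.
        await container.read_item("Item_0", partition_key="Item_0",
                                  excluded_locations=["East US 2"])

asyncio.run(main())
# --------------------------------------------------------------------------------------------------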
headers = base.GetHeaders(self, initial_headers, "get", path, id_, typ, request_params.operation_type, options, partition_key_range_id) @@ -2890,6 +2898,7 @@ def __GetBodiesFromQueryResult(result: Dict[str, Any]) -> List[Dict[str, Any]]: # Query operations will use ReadEndpoint even though it uses POST(for regular query operations) request_params = _request_object.RequestObject(typ, documents._OperationType.SqlQuery) + request_params.set_excluded_location_from_options(options) req_headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, request_params.operation_type, options, partition_key_range_id) @@ -3259,6 +3268,7 @@ async def DeleteAllItemsByPartitionKey( headers = base.GetHeaders(self, initial_headers, "post", path, collection_id, "partitionkey", documents._OperationType.Delete, options) request_params = _request_object.RequestObject("partitionkey", documents._OperationType.Delete) + request_params.set_excluded_location_from_options(options) _, last_response_headers = await self.__Post(path=path, request_params=request_params, req_headers=headers, body=None, **kwargs) self.last_response_headers = last_response_headers diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py index 4d00a7ef5629..f576e97d8e0b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -25,7 +25,7 @@ import asyncio # pylint: disable=do-not-import-asyncio import logging -from asyncio import CancelledError +from asyncio import CancelledError # pylint: disable=do-not-import-asyncio from typing import Tuple from azure.core.exceptions import AzureError @@ -53,10 +53,8 @@ def __init__(self, client): self.DefaultEndpoint = client.url_connection self.refresh_time_interval_in_ms = self.get_refresh_time_interval_in_ms_stub() self.location_cache = LocationCache( - self.PreferredLocations, self.DefaultEndpoint, - self.EnableEndpointDiscovery, - client.connection_policy.UseMultipleWriteLocations + client.connection_policy ) self.startup = True self.refresh_task = None diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index efa5e7c09a50..a815c9110471 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -233,6 +233,8 @@ def read_item( # pylint:disable=docstring-missing-param :keyword Literal["High", "Low"] priority: Priority based execution allows users to set a priority for each request. Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :returns: A CosmosDict representing the item to be retrieved. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item couldn't be retrieved. :rtype: ~azure.cosmos.CosmosDict[str, Any] @@ -298,6 +300,8 @@ def read_all_items( # pylint:disable=docstring-missing-param :keyword Literal["High", "Low"] priority: Priority based execution allows users to set a priority for each request. 
Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :returns: An Iterable of items (dicts). :rtype: Iterable[Dict[str, Any]] """ @@ -364,6 +368,8 @@ def query_items_change_feed( ALL_VERSIONS_AND_DELETES: Query all versions and deleted items from either `start_time='Now'` or 'continuation' token. :paramtype mode: Literal["LatestVersion", "AllVersionsAndDeletes"] + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :returns: An Iterable of items (dicts). @@ -403,6 +409,8 @@ def query_items_change_feed( ALL_VERSIONS_AND_DELETES: Query all versions and deleted items from either `start_time='Now'` or 'continuation' token. :paramtype mode: Literal["LatestVersion", "AllVersionsAndDeletes"] + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :returns: An Iterable of items (dicts). @@ -429,6 +437,8 @@ def query_items_change_feed( request. Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. :paramtype priority: Literal["High", "Low"] + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :returns: An Iterable of items (dicts). @@ -466,6 +476,8 @@ def query_items_change_feed( ALL_VERSIONS_AND_DELETES: Query all versions and deleted items from either `start_time='Now'` or 'continuation' token. :paramtype mode: Literal["LatestVersion", "AllVersionsAndDeletes"] + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :returns: An Iterable of items (dicts). @@ -501,6 +513,8 @@ def query_items_change_feed( ALL_VERSIONS_AND_DELETES: Query all versions and deleted items from either `start_time='Now'` or 'continuation' token. :paramtype mode: Literal["LatestVersion", "AllVersionsAndDeletes"] + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. 
The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], Dict[str, Any]], None] :param Any args: args @@ -601,6 +615,8 @@ def query_items( # pylint:disable=docstring-missing-param :keyword bool populate_index_metrics: Used to obtain the index metrics to understand how the query engine used existing indexes and how it could use potential new indexes. Please note that this options will incur overhead, so it should be enabled only when debugging slow queries. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :returns: An Iterable of items (dicts). :rtype: ItemPaged[Dict[str, Any]] @@ -716,6 +732,8 @@ def replace_item( # pylint:disable=docstring-missing-param :keyword bool no_response: Indicates whether service should be instructed to skip sending response payloads. When not specified explicitly here, the default value will be determined from kwargs or when also not specified there from client-level kwargs. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The replace operation failed or the item with given id does not exist. :returns: A CosmosDict representing the item after replace went through. The dict will be empty if `no_response` @@ -790,6 +808,8 @@ def upsert_item( # pylint:disable=docstring-missing-param :keyword bool no_response: Indicates whether service should be instructed to skip sending response payloads. When not specified explicitly here, the default value will be determined from kwargs or when also not specified there from client-level kwargs. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item could not be upserted. :returns: A CosmosDict representing the upserted item. The dict will be empty if `no_response` is specified. :rtype: ~azure.cosmos.CosmosDict[str, Any] @@ -879,6 +899,8 @@ def create_item( # pylint:disable=docstring-missing-param :keyword bool no_response: Indicates whether service should be instructed to skip sending response payloads. When not specified explicitly here, the default value will be determined from kwargs or when also not specified there from client-level kwargs. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. :returns: A CosmosDict representing the new item. The dict will be empty if `no_response` is specified. :rtype: ~azure.cosmos.CosmosDict[str, Any] @@ -970,6 +992,8 @@ def patch_item( :keyword bool no_response: Indicates whether service should be instructed to skip sending response payloads. 
When not specified explicitly here, the default value will be determined from kwargs or when also not specified there from client-level kwargs. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The patch operations failed or the item with given id does not exist. :returns: A CosmosDict representing the item after the patch operations went through. The dict will be empty @@ -1030,6 +1054,8 @@ def execute_item_batch( :keyword Literal["High", "Low"] priority: Priority based execution allows users to set a priority for each request. Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: [Callable[[Mapping[str, str], List[Dict[str, Any]]], None] :returns: A CosmosList representing the items after the batch operations went through. @@ -1102,6 +1128,8 @@ def delete_item( # pylint:disable=docstring-missing-param :keyword Literal["High", "Low"] priority: Priority based execution allows users to set a priority for each request. Once the user has reached their provisioned throughput, low priority requests are throttled before high priority requests start getting throttled. Feature must first be enabled at the account level. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. :paramtype response_hook: Callable[[Mapping[str, str], None], None] :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The item wasn't deleted successfully. @@ -1377,6 +1405,8 @@ def delete_all_items_by_partition_key( :keyword str pre_trigger_include: trigger id to be used as pre operation trigger. :keyword str post_trigger_include: trigger id to be used as post operation trigger. :keyword str session_token: Token for use with Session consistency. + :keyword list[str] excluded_locations: Excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword response_hook: A callable invoked with the response metadata. 
:paramtype response_hook: Callable[[Mapping[str, str], None], None] = None, :rtype: None diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 10543f97c47b..b7a6ea94bd2b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -93,6 +93,8 @@ def _build_connection_policy(kwargs: Dict[str, Any]) -> ConnectionPolicy: policy.ProxyConfiguration = kwargs.pop('proxy_config', policy.ProxyConfiguration) policy.EnableEndpointDiscovery = kwargs.pop('enable_endpoint_discovery', policy.EnableEndpointDiscovery) policy.PreferredLocations = kwargs.pop('preferred_locations', policy.PreferredLocations) + # TODO: Consider storing callback method instead, such as 'Supplier' in JAVA SDK + policy.ExcludedLocations = kwargs.pop('excluded_locations', policy.ExcludedLocations) policy.UseMultipleWriteLocations = kwargs.pop('multiple_write_locations', policy.UseMultipleWriteLocations) # SSL config @@ -181,6 +183,8 @@ class CosmosClient: # pylint: disable=client-accepts-api-version-keyword :keyword bool enable_endpoint_discovery: Enable endpoint discovery for geo-replicated database accounts. (Default: True) :keyword list[str] preferred_locations: The preferred locations for geo-replicated database accounts. + :keyword list[str] excluded_locations: The excluded locations to be skipped from preferred locations. The locations + in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US' and so on. :keyword bool enable_diagnostics_logging: Enable the CosmosHttpLogging policy. Must be used along with a logger to work. :keyword ~logging.Logger logger: Logger to be used for collecting request diagnostics. Can be passed in at client diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py index 40fbed24451f..9e04829be52f 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py @@ -308,6 +308,13 @@ class ConnectionPolicy: # pylint: disable=too-many-instance-attributes locations in this list are specified as the names of the azure Cosmos locations like, 'West US', 'East US', 'Central India' and so on. :vartype PreferredLocations: List[str] + :ivar ExcludedLocations: + Gets or sets the excluded locations for geo-replicated database + accounts. When ExcludedLocations is non-empty, the client will skip this + set of locations from the final location evaluation. The locations in + this list are specified as the names of the azure Cosmos locations like, + 'West US', 'East US', 'Central India' and so on. + :vartype ExcludedLocations: ~CosmosExcludedLocations :ivar RetryOptions: Gets or sets the retry options to be applied to all requests when retrying. 
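A quick illustration of the client-level path wired up in _build_connection_policy above; the connection values are placeholders, and the sample file added below walks through the full scenario.

from azure.cosmos import CosmosClient

client = CosmosClient(
    "<account-url>",
    "<master-key>",
    preferred_locations=["West US 3", "West US", "East US 2"],
    # Lands in ConnectionPolicy.ExcludedLocations and applies to every request from this client.
    excluded_locations=["West US 3", "West US"],
)
# Reads are served from 'East US 2', the only preferred location left after filtering.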
@@ -347,6 +354,7 @@ def __init__(self) -> None:
         self.ProxyConfiguration: Optional[ProxyConfiguration] = None
         self.EnableEndpointDiscovery: bool = True
         self.PreferredLocations: List[str] = []
+        self.ExcludedLocations: List[str] = []
         self.RetryOptions: RetryOptions = RetryOptions()
         self.DisableSSLVerification: bool = False
         self.UseMultipleWriteLocations: bool = False
diff --git a/sdk/cosmos/azure-cosmos/samples/excluded_locations.py b/sdk/cosmos/azure-cosmos/samples/excluded_locations.py
new file mode 100644
index 000000000000..06228c1a8cea
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos/samples/excluded_locations.py
@@ -0,0 +1,110 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See LICENSE.txt in the project root for
+# license information.
+# -------------------------------------------------------------------------
+from azure.cosmos import CosmosClient
+from azure.cosmos.partition_key import PartitionKey
+import config
+
+# ----------------------------------------------------------------------------------------------------------
+# Prerequisites -
+#
+# 1. An Azure Cosmos account -
+#    https://learn.microsoft.com/azure/cosmos-db/create-sql-api-python#create-a-database-account
+#
+# 2. Microsoft Azure Cosmos
+#    pip install azure-cosmos>=4.3.0b4
+# ----------------------------------------------------------------------------------------------------------
+# Sample - demonstrates how to use excluded locations at the client level and at the request level
+# ----------------------------------------------------------------------------------------------------------
+# Note:
+# This sample creates a Container in your database account.
+# Each time a Container is created the account will be billed for 1 hour of usage based on
+# the provisioned throughput (RU/s) of that account.
+# ----------------------------------------------------------------------------------------------------------
+
+HOST = config.settings["host"]
+MASTER_KEY = config.settings["master_key"]
+
+TENANT_ID = config.settings["tenant_id"]
+CLIENT_ID = config.settings["client_id"]
+CLIENT_SECRET = config.settings["client_secret"]
+
+DATABASE_ID = config.settings["database_id"]
+CONTAINER_ID = config.settings["container_id"]
+PARTITION_KEY = PartitionKey(path="/id")
+
+
+def get_test_item(num):
+    test_item = {
+        'id': 'Item_' + str(num),
+        'test_object': True,
+        'lastName': 'Smith'
+    }
+    return test_item
+
+def clean_up_db(client):
+    try:
+        client.delete_database(DATABASE_ID)
+    except Exception as e:
+        pass
+
+def excluded_locations_client_level_sample():
+    preferred_locations = ['West US 3', 'West US', 'East US 2']
+    excluded_locations = ['West US 3', 'West US']
+    client = CosmosClient(
+        HOST,
+        MASTER_KEY,
+        preferred_locations=preferred_locations,
+        excluded_locations=excluded_locations
+    )
+    clean_up_db(client)
+
+    db = client.create_database(DATABASE_ID)
+    container = db.create_container(id=CONTAINER_ID, partition_key=PARTITION_KEY)
+
+    # For write operations with a single-write-region account, the write endpoint will be the default endpoint,
+    # since preferred_locations and excluded_locations are not applied to write operations
+    container.create_item(get_test_item(0))
+
+    # For read operations, the read endpoints will be 'preferred_locations' - 'excluded_locations'.
+    # In our sample, ['West US 3', 'West US', 'East US 2'] - ['West US 3', 'West US'] => ['East US 2'],
+    # therefore 'East US 2' will be the read endpoint, and items will be read from the 'East US 2' location
+    item = container.read_item(item='Item_0', partition_key='Item_0')
+
+    clean_up_db(client)
+
+def excluded_locations_request_level_sample():
+    preferred_locations = ['West US 3', 'West US', 'East US 2']
+    excluded_locations_on_client = ['West US 3', 'West US']
+    excluded_locations_on_request = ['West US 3']
+    client = CosmosClient(
+        HOST,
+        MASTER_KEY,
+        preferred_locations=preferred_locations,
+        excluded_locations=excluded_locations_on_client
+    )
+    clean_up_db(client)
+
+    db = client.create_database(DATABASE_ID)
+    container = db.create_container(id=CONTAINER_ID, partition_key=PARTITION_KEY)
+
+    # For write operations with a single-write-region account, the write endpoint will be the default endpoint,
+    # since preferred_locations and excluded_locations are not applied to write operations
+    container.create_item(get_test_item(0))
+
+    # For read operations, the read endpoints will be 'preferred_locations' - 'excluded_locations'.
+    # However, in our sample, since `excluded_locations` was passed with the read request, the client-level
+    # `excluded_locations` is replaced with the locations from the request, ['West US 3']. The
+    # `excluded_locations` on the request always takes precedence.
+    # With the excluded_locations on the request, the read endpoints will be ['West US', 'East US 2']:
+    # ['West US 3', 'West US', 'East US 2'] - ['West US 3'] => ['West US', 'East US 2']
+    # Therefore, items will be read from the 'West US' or 'East US 2' location
+    item = container.read_item(item='Item_0', partition_key='Item_0', excluded_locations=excluded_locations_on_request)
+
+    clean_up_db(client)
+
+if __name__ == "__main__":
+    excluded_locations_client_level_sample()
+    excluded_locations_request_level_sample()
diff --git a/sdk/cosmos/azure-cosmos/tests/test_health_check.py b/sdk/cosmos/azure-cosmos/tests/test_health_check.py
index 0d313e6c911c..75db9deacc41 100644
--- a/sdk/cosmos/azure-cosmos/tests/test_health_check.py
+++ b/sdk/cosmos/azure-cosmos/tests/test_health_check.py
@@ -126,14 +126,14 @@ def test_health_check_timeouts_on_unavailable_endpoints(self, setup):
         locational_endpoint = _location_cache.LocationCache.GetLocationalEndpoint(TestHealthCheck.host, REGION_1)
         setup[COLLECTION].client_connection._global_endpoint_manager.location_cache.mark_endpoint_unavailable_for_read(
             locational_endpoint, True)
-        self.original_preferred_locations = setup[COLLECTION].client_connection._global_endpoint_manager.location_cache.preferred_locations
-        setup[COLLECTION].client_connection._global_endpoint_manager.location_cache.preferred_locations = REGIONS
+        self.original_preferred_locations = setup[COLLECTION].client_connection.connection_policy.PreferredLocations
+        setup[COLLECTION].client_connection.connection_policy.PreferredLocations = REGIONS
         try:
             setup[COLLECTION].create_item(body={'id': 'item' + str(uuid.uuid4()), 'pk': 'pk'})
         finally:
             _global_endpoint_manager._GlobalEndpointManager._GetDatabaseAccountStub = self.original_getDatabaseAccountStub
             _cosmos_client_connection.CosmosClientConnection._GetDatabaseAccountCheck = self.original_getDatabaseAccountCheck
-            setup[COLLECTION].client_connection._global_endpoint_manager.location_cache.preferred_locations = self.original_preferred_locations
+            setup[COLLECTION].client_connection.connection_policy.PreferredLocations = self.original_preferred_locations

     class MockGetDatabaseAccountCheck(object):
         def 
__init__(self, client_connection=None, endpoint_unavailable=False): diff --git a/sdk/cosmos/azure-cosmos/tests/test_health_check_async.py b/sdk/cosmos/azure-cosmos/tests/test_health_check_async.py index ae2bf13fd8a7..a92eca0dd778 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_health_check_async.py +++ b/sdk/cosmos/azure-cosmos/tests/test_health_check_async.py @@ -153,8 +153,8 @@ async def test_health_check_success(self, setup, preferred_location, use_write_g # checks the background health check works as expected when all endpoints healthy self.original_getDatabaseAccountStub = _global_endpoint_manager_async._GlobalEndpointManager._GetDatabaseAccountStub self.original_getDatabaseAccountCheck = _cosmos_client_connection_async.CosmosClientConnection._GetDatabaseAccountCheck - self.original_preferred_locations = setup[COLLECTION].client_connection._global_endpoint_manager.location_cache.preferred_locations - setup[COLLECTION].client_connection._global_endpoint_manager.location_cache.preferred_locations = preferred_location + self.original_preferred_locations = setup[COLLECTION].client_connection.connection_policy.PreferredLocations + setup[COLLECTION].client_connection.connection_policy.PreferredLocations = preferred_location mock_get_database_account_check = self.MockGetDatabaseAccountCheck() _global_endpoint_manager_async._GlobalEndpointManager._GetDatabaseAccountStub = ( self.MockGetDatabaseAccount(REGIONS, use_write_global_endpoint, use_read_global_endpoint)) @@ -168,7 +168,7 @@ async def test_health_check_success(self, setup, preferred_location, use_write_g finally: _global_endpoint_manager_async._GlobalEndpointManager._GetDatabaseAccountStub = self.original_getDatabaseAccountStub _cosmos_client_connection_async.CosmosClientConnection._GetDatabaseAccountCheck = self.original_getDatabaseAccountCheck - setup[COLLECTION].client_connection._global_endpoint_manager.location_cache.preferred_locations = self.original_preferred_locations + setup[COLLECTION].client_connection.connection_policy.PreferredLocations = self.original_preferred_locations expected_regional_routing_contexts = [] locational_endpoint = _location_cache.LocationCache.GetLocationalEndpoint(self.host, REGION_1) @@ -189,8 +189,8 @@ async def test_health_check_failure(self, setup, preferred_location, use_write_g self.original_getDatabaseAccountStub = _global_endpoint_manager_async._GlobalEndpointManager._GetDatabaseAccountStub _global_endpoint_manager_async._GlobalEndpointManager._GetDatabaseAccountStub = ( self.MockGetDatabaseAccount(REGIONS, use_write_global_endpoint, use_read_global_endpoint)) - self.original_preferred_locations = setup[COLLECTION].client_connection._global_endpoint_manager.location_cache.preferred_locations - setup[COLLECTION].client_connection._global_endpoint_manager.location_cache.preferred_locations = preferred_location + self.original_preferred_locations = setup[COLLECTION].client_connection.connection_policy.PreferredLocations + setup[COLLECTION].client_connection.connection_policy.PreferredLocations = preferred_location try: setup[COLLECTION].client_connection._global_endpoint_manager.startup = False @@ -201,7 +201,7 @@ async def test_health_check_failure(self, setup, preferred_location, use_write_g await asyncio.sleep(1) finally: _global_endpoint_manager_async._GlobalEndpointManager._GetDatabaseAccountStub = self.original_getDatabaseAccountStub - setup[COLLECTION].client_connection._global_endpoint_manager.location_cache.preferred_locations = self.original_preferred_locations + 
setup[COLLECTION].client_connection.connection_policy.PreferredLocations = self.original_preferred_locations if not use_write_global_endpoint: num_unavailable_endpoints = len(REGIONS) diff --git a/sdk/cosmos/azure-cosmos/tests/test_location_cache.py b/sdk/cosmos/azure-cosmos/tests/test_location_cache.py index a957094f1790..f65a1f1a3d21 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_location_cache.py +++ b/sdk/cosmos/azure-cosmos/tests/test_location_cache.py @@ -3,8 +3,10 @@ import time import unittest +from typing import Mapping, Any import pytest +from azure.cosmos import documents from azure.cosmos.documents import DatabaseAccount, _OperationType from azure.cosmos.http_constants import ResourceType @@ -35,15 +37,15 @@ def create_database_account(enable_multiple_writable_locations): return db_acc -def refresh_location_cache(preferred_locations, use_multiple_write_locations): - lc = LocationCache(preferred_locations=preferred_locations, - default_endpoint=default_endpoint, - enable_endpoint_discovery=True, - use_multiple_write_locations=use_multiple_write_locations) +def refresh_location_cache(preferred_locations, use_multiple_write_locations, connection_policy=documents.ConnectionPolicy()): + connection_policy.PreferredLocations = preferred_locations + connection_policy.UseMultipleWriteLocations = use_multiple_write_locations + lc = LocationCache(default_endpoint=default_endpoint, + connection_policy=connection_policy) return lc @pytest.mark.cosmosEmulator -class TestLocationCache(unittest.TestCase): +class TestLocationCache: def test_mark_endpoint_unavailable(self): lc = refresh_location_cache([], False) @@ -136,6 +138,140 @@ def test_resolve_request_endpoint_preferred_regions(self): assert read_resolved == write_resolved assert read_resolved == default_endpoint + @pytest.mark.parametrize("test_type",["OnClient", "OnRequest", "OnBoth"]) + def test_get_applicable_regional_endpoints_excluded_regions(self, test_type): + # Init test data + if test_type == "OnClient": + excluded_locations_on_client_list = [ + [location1_name], + [location1_name, location2_name], + [location1_name, location2_name, location3_name], + [location4_name], + [], + ] + excluded_locations_on_requests_list = [None] * 5 + elif test_type == "OnRequest": + excluded_locations_on_client_list = [[]] * 5 + excluded_locations_on_requests_list = [ + [location1_name], + [location1_name, location2_name], + [location1_name, location2_name, location3_name], + [location4_name], + [], + ] + else: + excluded_locations_on_client_list = [ + [location1_name], + [location1_name, location2_name, location3_name], + [location1_name, location2_name], + [location2_name], + [location1_name, location2_name, location3_name], + ] + excluded_locations_on_requests_list = [ + [location1_name], + [location1_name, location2_name], + [location1_name, location2_name, location3_name], + [location4_name], + [], + ] + + expected_read_endpoints_list = [ + [location2_endpoint], + [location1_endpoint], + [location1_endpoint], + [location1_endpoint, location2_endpoint], + [location1_endpoint, location2_endpoint], + ] + expected_write_endpoints_list = [ + [location2_endpoint, location3_endpoint], + [location3_endpoint], + [default_endpoint], + [location1_endpoint, location2_endpoint, location3_endpoint], + [location1_endpoint, location2_endpoint, location3_endpoint], + ] + + # Loop over each test cases + for excluded_locations_on_client, excluded_locations_on_requests, expected_read_endpoints, expected_write_endpoints in zip(excluded_locations_on_client_list, 
excluded_locations_on_requests_list, expected_read_endpoints_list, expected_write_endpoints_list): + # Init excluded_locations in ConnectionPolicy + connection_policy = documents.ConnectionPolicy() + connection_policy.ExcludedLocations = excluded_locations_on_client + + # Init location_cache + location_cache = refresh_location_cache([location1_name, location2_name, location3_name], True, + connection_policy) + database_account = create_database_account(True) + location_cache.perform_on_database_account_read(database_account) + + # Init requests and set excluded regions on requests + write_doc_request = RequestObject(ResourceType.Document, _OperationType.Create) + write_doc_request.excluded_locations = excluded_locations_on_requests + read_doc_request = RequestObject(ResourceType.Document, _OperationType.Read) + read_doc_request.excluded_locations = excluded_locations_on_requests + + # Test if read endpoints were correctly filtered on client level + read_doc_endpoint = location_cache._get_applicable_read_regional_endpoints(read_doc_request) + read_doc_endpoint = [regional_endpoint.get_primary() for regional_endpoint in read_doc_endpoint] + assert read_doc_endpoint == expected_read_endpoints + + # Test if write endpoints were correctly filtered on client level + write_doc_endpoint = location_cache._get_applicable_write_regional_endpoints(write_doc_request) + write_doc_endpoint = [regional_endpoint.get_primary() for regional_endpoint in write_doc_endpoint] + assert write_doc_endpoint == expected_write_endpoints + + def test_set_excluded_locations_for_requests(self): + # Init excluded_locations in ConnectionPolicy + excluded_locations_on_client = [location1_name, location2_name] + connection_policy = documents.ConnectionPolicy() + connection_policy.ExcludedLocations = excluded_locations_on_client + + # Init location_cache + location_cache = refresh_location_cache([location1_name, location2_name, location3_name], True, + connection_policy) + database_account = create_database_account(True) + location_cache.perform_on_database_account_read(database_account) + + # Test setting excluded locations + excluded_locations = [location1_name] + options: Mapping[str, Any] = {"excludedLocations": excluded_locations} + + expected_excluded_locations = excluded_locations + read_doc_request = RequestObject(ResourceType.Document, _OperationType.Create) + read_doc_request.set_excluded_location_from_options(options) + actual_excluded_locations = read_doc_request.excluded_locations + assert actual_excluded_locations == expected_excluded_locations + + expected_read_endpoints = [location2_endpoint] + read_doc_endpoint = location_cache._get_applicable_read_regional_endpoints(read_doc_request) + read_doc_endpoint = [regional_endpoint.get_primary() for regional_endpoint in read_doc_endpoint] + assert read_doc_endpoint == expected_read_endpoints + + + # Test setting excluded locations with invalid resource types + expected_excluded_locations = None + for resource_type in [ResourceType.Offer, ResourceType.Conflict]: + options: Mapping[str, Any] = {"excludedLocations": [location1_name]} + read_doc_request = RequestObject(resource_type, _OperationType.Create) + read_doc_request.set_excluded_location_from_options(options) + actual_excluded_locations = read_doc_request.excluded_locations + assert actual_excluded_locations == expected_excluded_locations + + expected_read_endpoints = [location1_endpoint] + read_doc_endpoint = location_cache._get_applicable_read_regional_endpoints(read_doc_request) + read_doc_endpoint = 
[regional_endpoint.get_primary() for regional_endpoint in read_doc_endpoint] + assert read_doc_endpoint == expected_read_endpoints + + + + # Test setting excluded locations with None value + expected_error_message = ("Excluded locations cannot be None. " + "If you want to remove all excluded locations, try passing an empty list.") + with pytest.raises(ValueError) as e: + options: Mapping[str, Any] = {"excludedLocations": None} + doc_request = RequestObject(ResourceType.Document, _OperationType.Create) + doc_request.set_excluded_location_from_options(options) + assert str( + e.value) == expected_error_message + if __name__ == "__main__": unittest.main() diff --git a/sdk/cosmos/azure-cosmos/tests/test_retry_policy_async.py b/sdk/cosmos/azure-cosmos/tests/test_retry_policy_async.py index be1683d1504d..4faef31c9495 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_retry_policy_async.py +++ b/sdk/cosmos/azure-cosmos/tests/test_retry_policy_async.py @@ -42,6 +42,7 @@ def __init__(self) -> None: self.ProxyConfiguration: Optional[ProxyConfiguration] = None self.EnableEndpointDiscovery: bool = True self.PreferredLocations: List[str] = [] + self.ExcludedLocations = None self.RetryOptions: RetryOptions = RetryOptions() self.DisableSSLVerification: bool = False self.UseMultipleWriteLocations: bool = False From 5bb9f1fb166a5a9803af8287eac54b506da3c8b5 Mon Sep 17 00:00:00 2001 From: Kushagra Thapar Date: Thu, 3 Apr 2025 14:47:32 -0700 Subject: [PATCH 02/23] Added multi-region tests --- sdk/cosmos/live-platform-matrix.json | 37 ++++++++++++++++++++++++++++ sdk/cosmos/test-resources.bicep | 6 +++++ 2 files changed, 43 insertions(+) diff --git a/sdk/cosmos/live-platform-matrix.json b/sdk/cosmos/live-platform-matrix.json index 485a15ca92e8..bca59256d05d 100644 --- a/sdk/cosmos/live-platform-matrix.json +++ b/sdk/cosmos/live-platform-matrix.json @@ -88,6 +88,43 @@ "TestMarkArgument": "cosmosLong" } } + }, + { + "WindowsConfig": { + "Windows2022_38_multi_region": { + "OSVmImage": "env:WINDOWSVMIMAGE", + "Pool": "env:WINDOWSPOOL", + "PythonVersion": "3.8", + "CoverageArg": "--disablecov", + "TestSamples": "false", + "TestMarkArgument": "cosmosMultiRegion", + "ArmConfig": { + "ArmTemplateParameters": "@{ enableMultipleRegions = $true }" + } + }, + "Windows2022_310_multi_region": { + "OSVmImage": "env:WINDOWSVMIMAGE", + "Pool": "env:WINDOWSPOOL", + "PythonVersion": "3.10", + "CoverageArg": "--disablecov", + "TestSamples": "false", + "TestMarkArgument": "cosmosMultiRegion", + "ArmConfig": { + "ArmTemplateParameters": "@{ enableMultipleRegions = $true }" + } + }, + "Windows2022_312_multi_region": { + "OSVmImage": "env:WINDOWSVMIMAGE", + "Pool": "env:WINDOWSPOOL", + "PythonVersion": "3.12", + "CoverageArg": "--disablecov", + "TestSamples": "false", + "TestMarkArgument": "cosmosMultiRegion", + "ArmConfig": { + "ArmTemplateParameters": "@{ enableMultipleRegions = $true }" + } + } + } } ] } diff --git a/sdk/cosmos/test-resources.bicep b/sdk/cosmos/test-resources.bicep index 17d88b0be92a..61588a526eed 100644 --- a/sdk/cosmos/test-resources.bicep +++ b/sdk/cosmos/test-resources.bicep @@ -41,6 +41,12 @@ var multiRegionConfiguration = [ failoverPriority: 1 isZoneRedundant: false } + { + locationName: 'West US 2' + provisioningState: 'Succeeded' + failoverPriority: 2 + isZoneRedundant: false + } ] var locationsConfiguration = (enableMultipleRegions ? 
multiRegionConfiguration : singleRegionConfiguration) var roleDefinitionId = guid(baseName, 'roleDefinitionId') From 996217ae3fec5740cf0bc3eb180d2ba6af725953 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Thu, 3 Apr 2025 15:08:49 -0700 Subject: [PATCH 03/23] Fix _AddPartitionKey to pass options to sub methods --- .../azure/cosmos/_cosmos_client_connection.py | 10 +++++++--- .../azure-cosmos/azure/cosmos/_request_object.py | 2 +- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index d64da38defb1..acc9ac0010af 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -3265,7 +3265,7 @@ def _AddPartitionKey( options: Mapping[str, Any] ) -> Dict[str, Any]: collection_link = base.TrimBeginningAndEndingSlashes(collection_link) - partitionKeyDefinition = self._get_partition_key_definition(collection_link) + partitionKeyDefinition = self._get_partition_key_definition(collection_link, options) new_options = dict(options) # If the collection doesn't have a partition key definition, skip it as it's a legacy collection if partitionKeyDefinition: @@ -3367,7 +3367,11 @@ def _UpdateSessionIfRequired( # update session self.session.update_session(response_result, response_headers) - def _get_partition_key_definition(self, collection_link: str) -> Optional[Dict[str, Any]]: + def _get_partition_key_definition( + self, + collection_link: str, + options: Mapping[str, Any] + ) -> Optional[Dict[str, Any]]: partition_key_definition: Optional[Dict[str, Any]] # If the document collection link is present in the cache, then use the cached partitionkey definition if collection_link in self.__container_properties_cache: @@ -3375,7 +3379,7 @@ def _get_partition_key_definition(self, collection_link: str) -> Optional[Dict[s partition_key_definition = cached_container.get("partitionKey") # Else read the collection from backend and add it to the cache else: - container = self.ReadContainer(collection_link) + container = self.ReadContainer(collection_link, options) partition_key_definition = container.get("partitionKey") self.__container_properties_cache[collection_link] = _set_properties_cache(container) return partition_key_definition diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py index 94805934ce74..185aa1d89cb8 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py @@ -57,7 +57,7 @@ def clear_route_to_location(self) -> None: def _can_set_excluded_location(self, options: Mapping[str, Any]) -> bool: # If resource types for requests are not one of the followings, excluded locations cannot be set - if self.resource_type.lower() not in ['docs', 'documents', 'partitionkey']: + if self.resource_type.lower() not in ['docs', 'documents', 'partitionkey', 'colls']: return False # If 'excludedLocations' wasn't in the options, excluded locations cannot be set From 41fc9176bec2687e41bc80e7f5254763754c0930 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Thu, 3 Apr 2025 15:10:39 -0700 Subject: [PATCH 04/23] Added initial live tests --- .../tests/test_excluded_locations.py | 478 ++++++++++++++++++ 1 file changed, 478 insertions(+) create mode 100644 sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py diff --git 
a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py new file mode 100644 index 000000000000..01d1e9e9cf7e --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py @@ -0,0 +1,478 @@ +# The MIT License (MIT) +# Copyright (c) Microsoft Corporation. All rights reserved. + +import logging +import unittest +import uuid +import test_config +import pytest + +import azure.cosmos.cosmos_client as cosmos_client +from azure.cosmos.partition_key import PartitionKey +from azure.cosmos.exceptions import CosmosResourceNotFoundError + + +class MockHandler(logging.Handler): + def __init__(self): + super(MockHandler, self).__init__() + self.messages = [] + + def reset(self): + self.messages = [] + + def emit(self, record): + self.messages.append(record.msg) + +MOCK_HANDLER = MockHandler() +CONFIG = test_config.TestConfig() +HOST = CONFIG.host +KEY = CONFIG.masterKey +DATABASE_ID = CONFIG.TEST_DATABASE_ID +CONTAINER_ID = CONFIG.TEST_SINGLE_PARTITION_CONTAINER_ID +PARTITION_KEY = CONFIG.TEST_CONTAINER_PARTITION_KEY +ITEM_ID = 'doc1' +ITEM_PK_VALUE = 'pk' +TEST_ITEM = {'id': ITEM_ID, PARTITION_KEY: ITEM_PK_VALUE} + +# L0 = "Default" +# L1 = "West US 3" +# L2 = "West US" +# L3 = "East US 2" +# L4 = "Central US" + +L0 = "Default" +L1 = "East US 2" +L2 = "East US" +L3 = "West US 2" +L4 = "Central US" + +CLIENT_ONLY_TEST_DATA = [ + # preferred_locations, client_excluded_locations, excluded_locations_request + # 0. No excluded location + [[L1, L2, L3], [], None], + # 1. Single excluded location + [[L1, L2, L3], [L1], None], + # 2. Multiple excluded locations + [[L1, L2, L3], [L1, L2], None], + # 3. Exclude all locations + [[L1, L2, L3], [L1, L2, L3], None], + # 4. Exclude a location not in preferred locations + [[L1, L2, L3], [L4], None], +] + +CLIENT_AND_REQUEST_TEST_DATA = [ + # preferred_locations, client_excluded_locations, excluded_locations_request + # 0. No client excluded locations + a request excluded location + [[L1, L2, L3], [], [L1]], + # 1. The same client and request excluded location + [[L1, L2, L3], [L1], [L1]], + # 2. Less request excluded locations + [[L1, L2, L3], [L1, L2], [L1]], + # 3. More request excluded locations + [[L1, L2, L3], [L1], [L1, L2]], + # 4. All locations were excluded + [[L1, L2, L3], [L1, L2, L3], [L1, L2, L3]], + # 5. No common excluded locations + [[L1, L2, L3], [L1], [L2, L3]], + # 6. Reqeust excluded location not in preferred locations + [[L1, L2, L3], [L1, L2, L3], [L4]], + # 7. 
Empty excluded locations, remove all client excluded locations + [[L1, L2, L3], [L1, L2], []], +] + +ALL_INPUT_TEST_DATA = CLIENT_ONLY_TEST_DATA + CLIENT_AND_REQUEST_TEST_DATA +# ALL_INPUT_TEST_DATA = CLIENT_ONLY_TEST_DATA +# ALL_INPUT_TEST_DATA = CLIENT_AND_REQUEST_TEST_DATA + +def read_item_test_data(): + client_only_output_data = [ + [L1], # 0 + [L2], # 1 + [L3], # 2 + [L1], # 3 + [L1] # 4 + ] + client_and_request_output_data = [ + [L2], # 0 + [L2], # 1 + [L2], # 2 + [L3], # 3 + [L1], # 4 + [L1], # 5 + [L1], # 6 + [L1], # 7 + ] + all_output_test_data = client_only_output_data + client_and_request_output_data + + all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] + return all_test_data + +def query_items_change_feed_test_data(): + client_only_output_data = [ + [L1, L1, L1], #0 + [L2, L2, L2], #1 + [L3, L3, L3], #2 + [L1, L1, L1], #3 + [L1, L1, L1] #4 + ] + client_and_request_output_data = [ + [L1, L1, L2], #0 + [L2, L2, L2], #1 + [L3, L3, L2], #2 + [L2, L2, L3], #3 + [L1, L1, L1], #4 + [L2, L2, L1], #5 + [L1, L1, L1], #6 + [L3, L3, L1], #7 + ] + all_output_test_data = client_only_output_data + client_and_request_output_data + + all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] + return all_test_data + +def replace_item_test_data(): + client_only_output_data = [ + [L1, L1], #0 + [L2, L2], #1 + [L3, L3], #2 + [L1, L0], #3 + [L1, L1] #4 + ] + client_and_request_output_data = [ + [L2, L2], #0 + [L2, L2], #1 + [L2, L2], #2 + [L3, L3], #3 + [L1, L0], #4 + [L1, L1], #5 + [L1, L1], #6 + [L1, L1], #7 + ] + all_output_test_data = client_only_output_data + client_and_request_output_data + + all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] + return all_test_data + +def patch_item_test_data(): + client_only_output_data = [ + [L1], #0 + [L2], #1 + [L3], #2 + [L0], #3 + [L1] #4 + ] + client_and_request_output_data = [ + [L2], #0 + [L2], #1 + [L2], #2 + [L3], #3 + [L0], #4 + [L1], #5 + [L1], #6 + [L1], #7 + ] + all_output_test_data = client_only_output_data + client_and_request_output_data + + all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] + return all_test_data + +@pytest.fixture(scope="class", autouse=True) +def setup_and_teardown(): + print("Setup: This runs before any tests") + logger = logging.getLogger("azure") + logger.addHandler(MOCK_HANDLER) + logger.setLevel(logging.DEBUG) + + container = cosmos_client.CosmosClient(HOST, KEY).get_database_client(DATABASE_ID).get_container_client(CONTAINER_ID) + container.create_item(body=TEST_ITEM) + + yield + # Code to run after tests + print("Teardown: This runs after all tests") + +@pytest.mark.cosmosMultiRegion +class TestExcludedLocations: + def _init_container(self, preferred_locations, client_excluded_locations, multiple_write_locations = True): + client = cosmos_client.CosmosClient(HOST, KEY, + preferred_locations=preferred_locations, + excluded_locations=client_excluded_locations, + multiple_write_locations=multiple_write_locations) + db = client.get_database_client(DATABASE_ID) + container = db.get_container_client(CONTAINER_ID) + MOCK_HANDLER.reset() + + return client, db, container + + def _verify_endpoint(self, client, expected_locations): + # get mapping for locations + location_mapping = (client.client_connection._global_endpoint_manager. 
+ location_cache.account_locations_by_write_regional_routing_context) + default_endpoint = (client.client_connection._global_endpoint_manager. + location_cache.default_regional_routing_context.get_primary()) + + # get Request URL + msgs = MOCK_HANDLER.messages + req_urls = [url.replace("Request URL: '", "") for url in msgs if 'Request URL:' in url] + + # get location + actual_locations = [] + for req_url in req_urls: + if req_url.startswith(default_endpoint): + actual_locations.append(L0) + else: + for endpoint in location_mapping: + if req_url.startswith(endpoint): + location = location_mapping[endpoint] + actual_locations.append(location) + break + + assert actual_locations == expected_locations + + @pytest.mark.parametrize('test_data', read_item_test_data()) + def test_read_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + # Client setup + client, db, container = self._init_container(preferred_locations, client_excluded_locations) + + # API call: read_item + if request_excluded_locations is None: + container.read_item(ITEM_ID, ITEM_PK_VALUE) + else: + container.read_item(ITEM_ID, ITEM_PK_VALUE, excluded_locations=request_excluded_locations) + + # Verify endpoint locations + self._verify_endpoint(client, expected_locations) + + @pytest.mark.parametrize('test_data', read_item_test_data()) + def test_read_all_items(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + # Client setup + client, db, container = self._init_container(preferred_locations, client_excluded_locations) + + # API call: read_all_items + if request_excluded_locations is None: + list(container.read_all_items()) + else: + list(container.read_all_items(excluded_locations=request_excluded_locations)) + + # Verify endpoint locations + self._verify_endpoint(client, expected_locations) + + @pytest.mark.parametrize('test_data', read_item_test_data()) + def test_query_items(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + # Client setup and create an item + client, db, container = self._init_container(preferred_locations, client_excluded_locations) + + # API call: query_items + if request_excluded_locations is None: + list(container.query_items(None)) + else: + list(container.query_items(None, excluded_locations=request_excluded_locations)) + + # Verify endpoint locations + self._verify_endpoint(client, expected_locations) + + @pytest.mark.parametrize('test_data', query_items_change_feed_test_data()) + def test_query_items_change_feed(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + + # Client setup and create an item + client, db, container = self._init_container(preferred_locations, client_excluded_locations) + + # API call: query_items_change_feed + if request_excluded_locations is None: + list(container.query_items_change_feed()) + else: + list(container.query_items_change_feed(excluded_locations=request_excluded_locations)) + + # Verify endpoint locations + self._verify_endpoint(client, expected_locations) + + + @pytest.mark.parametrize('test_data', replace_item_test_data()) + def test_replace_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, 
request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup and create an item + client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + + # API call: replace_item + if request_excluded_locations is None: + container.replace_item(ITEM_ID, body=TEST_ITEM) + else: + container.replace_item(ITEM_ID, body=TEST_ITEM, excluded_locations=request_excluded_locations) + + # Verify endpoint locations + if multiple_write_locations: + self._verify_endpoint(client, expected_locations) + else: + self._verify_endpoint(client, [expected_locations[0], L1]) + + @pytest.mark.parametrize('test_data', replace_item_test_data()) + def test_upsert_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup and create an item + client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + + # API call: upsert_item + body = {'pk': 'pk', 'id': f'doc2-{str(uuid.uuid4())}'} + if request_excluded_locations is None: + container.upsert_item(body=body) + else: + container.upsert_item(body=body, excluded_locations=request_excluded_locations) + + # get location from mock_handler + if multiple_write_locations: + self._verify_endpoint(client, expected_locations) + else: + self._verify_endpoint(client, [expected_locations[0], L1]) + + @pytest.mark.parametrize('test_data', replace_item_test_data()) + def test_create_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup and create an item + client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + + # API call: create_item + body = {'pk': 'pk', 'id': f'doc2-{str(uuid.uuid4())}'} + if request_excluded_locations is None: + container.create_item(body=body) + else: + container.create_item(body=body, excluded_locations=request_excluded_locations) + + # get location from mock_handler + if multiple_write_locations: + self._verify_endpoint(client, expected_locations) + else: + self._verify_endpoint(client, [expected_locations[0], L1]) + + @pytest.mark.parametrize('test_data', patch_item_test_data()) + def test_patch_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup and create an item + client, db, container = self._init_container(preferred_locations, client_excluded_locations, + multiple_write_locations) + + # API call: patch_item + operations = [ + {"op": "add", "path": "/test_data", "value": f'Data-{str(uuid.uuid4())}'}, + ] + if request_excluded_locations is None: + container.patch_item(item=ITEM_ID, partition_key=ITEM_PK_VALUE, + patch_operations=operations) + else: + container.patch_item(item=ITEM_ID, partition_key=ITEM_PK_VALUE, + patch_operations=operations, + excluded_locations=request_excluded_locations) + + # get location from mock_handler + if multiple_write_locations: + self._verify_endpoint(client, expected_locations) + else: + self._verify_endpoint(client, [L1]) + + @pytest.mark.parametrize('test_data', patch_item_test_data()) 
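+ # Note (editorial comment): a transactional batch is a document write, so it is expected to resolve to the + # same regional endpoints as patch_item, which is why this test can reuse patch_item_test_data() above.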
+ def test_execute_item_batch(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup and create an item + client, db, container = self._init_container(preferred_locations, client_excluded_locations, + multiple_write_locations) + + # API call: execute_item_batch + batch_operations = [] + for i in range(3): + batch_operations.append(("create", ({"id": f'Doc-{str(uuid.uuid4())}', PARTITION_KEY: ITEM_PK_VALUE},))) + + if request_excluded_locations is None: + container.execute_item_batch(batch_operations=batch_operations, + partition_key=ITEM_PK_VALUE,) + else: + container.execute_item_batch(batch_operations=batch_operations, + partition_key=ITEM_PK_VALUE, + excluded_locations=request_excluded_locations) + + # get location from mock_handler + if multiple_write_locations: + self._verify_endpoint(client, expected_locations) + else: + self._verify_endpoint(client, [L1]) + + @pytest.mark.parametrize('test_data', patch_item_test_data()) + def test_delete_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup + client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + + #create before delete + item_id = f'doc2-{str(uuid.uuid4())}' + container.create_item(body={PARTITION_KEY: ITEM_PK_VALUE, 'id': item_id}) + MOCK_HANDLER.reset() + + # API call: read_item + if request_excluded_locations is None: + container.delete_item(item_id, ITEM_PK_VALUE) + else: + container.delete_item(item_id, ITEM_PK_VALUE, excluded_locations=request_excluded_locations) + + # Verify endpoint locations + if multiple_write_locations: + self._verify_endpoint(client, expected_locations) + else: + self._verify_endpoint(client, [L1]) + + # TODO: enable this test once we figure out how to enable delete_all_items_by_partition_key feature + # @pytest.mark.parametrize('test_data', patch_item_test_data()) + # def test_delete_all_items_by_partition_key(self, test_data): + # # Init test variables + # preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + # + # for multiple_write_locations in [True, False]: + # # Client setup + # client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + # + # #create before delete + # item_id = f'doc2-{str(uuid.uuid4())}' + # pk_value = f'temp_partition_key_value-{str(uuid.uuid4())}' + # container.create_item(body={PARTITION_KEY: pk_value, 'id': item_id}) + # MOCK_HANDLER.reset() + # + # # API call: read_item + # if request_excluded_locations is None: + # container.delete_all_items_by_partition_key(pk_value) + # else: + # container.delete_all_items_by_partition_key(pk_value, excluded_locations=request_excluded_locations) + # + # # Verify endpoint locations + # if multiple_write_locations: + # self._verify_endpoint(client, expected_locations) + # else: + # self._verify_endpoint(client, [L1]) + +if __name__ == "__main__": + unittest.main() From 07b8f39aeaba9a68e7656777bb0732655a20bf4a Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Thu, 3 Apr 2025 15:28:38 -0700 Subject: [PATCH 05/23] Updated live-platform-matrix for multi-region tests --- sdk/cosmos/live-platform-matrix.json | 14 +++++++------- 1 file 
changed, 7 insertions(+), 7 deletions(-) diff --git a/sdk/cosmos/live-platform-matrix.json b/sdk/cosmos/live-platform-matrix.json index bca59256d05d..b3242623be78 100644 --- a/sdk/cosmos/live-platform-matrix.json +++ b/sdk/cosmos/live-platform-matrix.json @@ -86,11 +86,7 @@ "CoverageArg": "--disablecov", "TestSamples": "false", "TestMarkArgument": "cosmosLong" - } - } - }, - { - "WindowsConfig": { + }, "Windows2022_38_multi_region": { "OSVmImage": "env:WINDOWSVMIMAGE", "Pool": "env:WINDOWSPOOL", @@ -110,7 +106,9 @@ "TestSamples": "false", "TestMarkArgument": "cosmosMultiRegion", "ArmConfig": { - "ArmTemplateParameters": "@{ enableMultipleRegions = $true }" + "MultiMaster_MultiRegion": { + "ArmTemplateParameters": "@{ enableMultipleRegions = $true }" + } } }, "Windows2022_312_multi_region": { @@ -121,7 +119,9 @@ "TestSamples": "false", "TestMarkArgument": "cosmosMultiRegion", "ArmConfig": { - "ArmTemplateParameters": "@{ enableMultipleRegions = $true }" + "MultiMaster_MultiRegion": { + "ArmTemplateParameters": "@{ enableMultipleRegions = $true }" + } } } } From 8495c5139742060f74301dd0441c8e5b4fff787a Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Fri, 4 Apr 2025 10:34:42 -0700 Subject: [PATCH 06/23] Add cosmosQuery mark to TestQuery --- sdk/cosmos/azure-cosmos/tests/test_query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_query.py b/sdk/cosmos/azure-cosmos/tests/test_query.py index 28262aa0f7e3..2a99263ed457 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_query.py +++ b/sdk/cosmos/azure-cosmos/tests/test_query.py @@ -17,7 +17,7 @@ from azure.cosmos.documents import _DistinctType from azure.cosmos.partition_key import PartitionKey - +@pytest.mark.cosmosQuery class TestQuery(unittest.TestCase): """Test to ensure escaping of non-ascii characters from partition key""" From b29980c0ed0feda7b3fb04adb5d4560bd813ccd8 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Fri, 4 Apr 2025 10:35:05 -0700 Subject: [PATCH 07/23] Correct spelling --- sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py index 01d1e9e9cf7e..7a93e6e89ec7 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py @@ -74,7 +74,7 @@ def emit(self, record): [[L1, L2, L3], [L1, L2, L3], [L1, L2, L3]], # 5. No common excluded locations [[L1, L2, L3], [L1], [L2, L3]], - # 6. Reqeust excluded location not in preferred locations + # 6. Request excluded location not in preferred locations [[L1, L2, L3], [L1, L2, L3], [L4]], # 7. 
Empty excluded locations, remove all client excluded locations [[L1, L2, L3], [L1, L2], []], From 5e79172c2da25361e80a098222b712542b1b19ea Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Fri, 4 Apr 2025 10:35:37 -0700 Subject: [PATCH 08/23] Fixed live platform matrix syntax --- sdk/cosmos/live-platform-matrix.json | 44 ++++++++-------------------- 1 file changed, 13 insertions(+), 31 deletions(-) diff --git a/sdk/cosmos/live-platform-matrix.json b/sdk/cosmos/live-platform-matrix.json index b3242623be78..494c5fc62cea 100644 --- a/sdk/cosmos/live-platform-matrix.json +++ b/sdk/cosmos/live-platform-matrix.json @@ -86,43 +86,25 @@ "CoverageArg": "--disablecov", "TestSamples": "false", "TestMarkArgument": "cosmosLong" - }, + } + } + }, + { + "DESIRED_CONSISTENCIES": "[\"Session\"]", + "ACCOUNT_CONSISTENCY": "Session", + "ArmConfig": { + "MultiMaster_MultiRegion": { + "ArmTemplateParameters": "@{ enableMultipleWriteLocations = $true; defaultConsistencyLevel = 'Session'; enableMultipleRegions = $true }" + } + }, + "WindowsConfig": { "Windows2022_38_multi_region": { "OSVmImage": "env:WINDOWSVMIMAGE", "Pool": "env:WINDOWSPOOL", "PythonVersion": "3.8", "CoverageArg": "--disablecov", "TestSamples": "false", - "TestMarkArgument": "cosmosMultiRegion", - "ArmConfig": { - "ArmTemplateParameters": "@{ enableMultipleRegions = $true }" - } - }, - "Windows2022_310_multi_region": { - "OSVmImage": "env:WINDOWSVMIMAGE", - "Pool": "env:WINDOWSPOOL", - "PythonVersion": "3.10", - "CoverageArg": "--disablecov", - "TestSamples": "false", - "TestMarkArgument": "cosmosMultiRegion", - "ArmConfig": { - "MultiMaster_MultiRegion": { - "ArmTemplateParameters": "@{ enableMultipleRegions = $true }" - } - } - }, - "Windows2022_312_multi_region": { - "OSVmImage": "env:WINDOWSVMIMAGE", - "Pool": "env:WINDOWSPOOL", - "PythonVersion": "3.12", - "CoverageArg": "--disablecov", - "TestSamples": "false", - "TestMarkArgument": "cosmosMultiRegion", - "ArmConfig": { - "MultiMaster_MultiRegion": { - "ArmTemplateParameters": "@{ enableMultipleRegions = $true }" - } - } + "TestMarkArgument": "cosmosMultiRegion" } } } From fd40cd724873ad9ab46520a18304a5a900fde7b1 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Fri, 4 Apr 2025 11:42:08 -0700 Subject: [PATCH 09/23] Changed Multi-regions --- .../tests/test_excluded_locations.py | 18 +++++++++--------- sdk/cosmos/live-platform-matrix.json | 6 ++---- sdk/cosmos/test-resources.bicep | 6 +++--- 3 files changed, 14 insertions(+), 16 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py index 7a93e6e89ec7..2d17bad85ba5 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py @@ -34,18 +34,18 @@ def emit(self, record): ITEM_PK_VALUE = 'pk' TEST_ITEM = {'id': ITEM_ID, PARTITION_KEY: ITEM_PK_VALUE} -# L0 = "Default" -# L1 = "West US 3" -# L2 = "West US" -# L3 = "East US 2" -# L4 = "Central US" - L0 = "Default" -L1 = "East US 2" -L2 = "East US" -L3 = "West US 2" +L1 = "West US 3" +L2 = "West US" +L3 = "East US 2" L4 = "Central US" +# L0 = "Default" +# L1 = "East US 2" +# L2 = "East US" +# L3 = "West US 2" +# L4 = "Central US" + CLIENT_ONLY_TEST_DATA = [ # preferred_locations, client_excluded_locations, excluded_locations_request # 0. 
No excluded location diff --git a/sdk/cosmos/live-platform-matrix.json b/sdk/cosmos/live-platform-matrix.json index 494c5fc62cea..7a02486a8827 100644 --- a/sdk/cosmos/live-platform-matrix.json +++ b/sdk/cosmos/live-platform-matrix.json @@ -90,18 +90,16 @@ } }, { - "DESIRED_CONSISTENCIES": "[\"Session\"]", - "ACCOUNT_CONSISTENCY": "Session", "ArmConfig": { "MultiMaster_MultiRegion": { "ArmTemplateParameters": "@{ enableMultipleWriteLocations = $true; defaultConsistencyLevel = 'Session'; enableMultipleRegions = $true }" } }, "WindowsConfig": { - "Windows2022_38_multi_region": { + "Windows2022_312": { "OSVmImage": "env:WINDOWSVMIMAGE", "Pool": "env:WINDOWSPOOL", - "PythonVersion": "3.8", + "PythonVersion": "3.12", "CoverageArg": "--disablecov", "TestSamples": "false", "TestMarkArgument": "cosmosMultiRegion" diff --git a/sdk/cosmos/test-resources.bicep b/sdk/cosmos/test-resources.bicep index 61588a526eed..b05dead26737 100644 --- a/sdk/cosmos/test-resources.bicep +++ b/sdk/cosmos/test-resources.bicep @@ -30,19 +30,19 @@ var singleRegionConfiguration = [ ] var multiRegionConfiguration = [ { - locationName: 'East US 2' + locationName: 'West US 3' provisioningState: 'Succeeded' failoverPriority: 0 isZoneRedundant: false } { - locationName: 'East US' + locationName: 'West US' provisioningState: 'Succeeded' failoverPriority: 1 isZoneRedundant: false } { - locationName: 'West US 2' + locationName: 'East US 2' provisioningState: 'Succeeded' failoverPriority: 2 isZoneRedundant: false From 29305f46c2109abcceb80bd5e1b6737a124669c5 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Mon, 7 Apr 2025 15:58:01 -0700 Subject: [PATCH 10/23] Added client level ExcludedLocation for async --- sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py index 683f16288cd3..647f6d59f615 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py @@ -84,6 +84,7 @@ def _build_connection_policy(kwargs: Dict[str, Any]) -> ConnectionPolicy: policy.ProxyConfiguration = kwargs.pop('proxy_config', policy.ProxyConfiguration) policy.EnableEndpointDiscovery = kwargs.pop('enable_endpoint_discovery', policy.EnableEndpointDiscovery) policy.PreferredLocations = kwargs.pop('preferred_locations', policy.PreferredLocations) + policy.ExcludedLocations = kwargs.pop('excluded_locations', policy.ExcludedLocations) policy.UseMultipleWriteLocations = kwargs.pop('multiple_write_locations', policy.UseMultipleWriteLocations) # SSL config From c77b4e726b8ab2d7c7f6426c21ef7afb1e7b10e4 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Mon, 7 Apr 2025 16:39:55 -0700 Subject: [PATCH 11/23] Update Live test settings --- sdk/cosmos/live-platform-matrix.json | 10 +++++----- sdk/cosmos/test-resources.bicep | 6 ------ 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/sdk/cosmos/live-platform-matrix.json b/sdk/cosmos/live-platform-matrix.json index 7a02486a8827..dc3ad3c32e17 100644 --- a/sdk/cosmos/live-platform-matrix.json +++ b/sdk/cosmos/live-platform-matrix.json @@ -90,11 +90,6 @@ } }, { - "ArmConfig": { - "MultiMaster_MultiRegion": { - "ArmTemplateParameters": "@{ enableMultipleWriteLocations = $true; defaultConsistencyLevel = 'Session'; enableMultipleRegions = $true }" - } - }, "WindowsConfig": { "Windows2022_312": { "OSVmImage": "env:WINDOWSVMIMAGE", @@ -104,6 +99,11 @@ "TestSamples": "false", 
"TestMarkArgument": "cosmosMultiRegion" } + }, + "ArmConfig": { + "MultiMaster_MultiRegion": { + "ArmTemplateParameters": "@{ enableMultipleWriteLocations = $true; defaultConsistencyLevel = 'Session'; enableMultipleRegions = $true }" + } } } ] diff --git a/sdk/cosmos/test-resources.bicep b/sdk/cosmos/test-resources.bicep index b05dead26737..88abe955f8d8 100644 --- a/sdk/cosmos/test-resources.bicep +++ b/sdk/cosmos/test-resources.bicep @@ -41,12 +41,6 @@ var multiRegionConfiguration = [ failoverPriority: 1 isZoneRedundant: false } - { - locationName: 'East US 2' - provisioningState: 'Succeeded' - failoverPriority: 2 - isZoneRedundant: false - } ] var locationsConfiguration = (enableMultipleRegions ? multiRegionConfiguration : singleRegionConfiguration) var roleDefinitionId = guid(baseName, 'roleDefinitionId') From d82fa74255e899556f48f8797ff8afbe7ad595bc Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Mon, 7 Apr 2025 16:40:32 -0700 Subject: [PATCH 12/23] Added Async tests --- .../tests/test_excluded_locations.py | 76 ++- .../tests/test_excluded_locations_async.py | 470 ++++++++++++++++++ 2 files changed, 504 insertions(+), 42 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py index 2d17bad85ba5..13e9ba713653 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py @@ -38,46 +38,42 @@ def emit(self, record): L1 = "West US 3" L2 = "West US" L3 = "East US 2" -L4 = "Central US" # L0 = "Default" # L1 = "East US 2" # L2 = "East US" # L3 = "West US 2" -# L4 = "Central US" CLIENT_ONLY_TEST_DATA = [ # preferred_locations, client_excluded_locations, excluded_locations_request # 0. No excluded location - [[L1, L2, L3], [], None], + [[L1, L2], [], None], # 1. Single excluded location - [[L1, L2, L3], [L1], None], - # 2. Multiple excluded locations - [[L1, L2, L3], [L1, L2], None], - # 3. Exclude all locations - [[L1, L2, L3], [L1, L2, L3], None], - # 4. Exclude a location not in preferred locations - [[L1, L2, L3], [L4], None], + [[L1, L2], [L1], None], + # 2. Exclude all locations + [[L1, L2], [L1, L2], None], + # 3. Exclude a location not in preferred locations + [[L1, L2], [L3], None], ] CLIENT_AND_REQUEST_TEST_DATA = [ # preferred_locations, client_excluded_locations, excluded_locations_request # 0. No client excluded locations + a request excluded location - [[L1, L2, L3], [], [L1]], + [[L1, L2], [], [L1]], # 1. The same client and request excluded location - [[L1, L2, L3], [L1], [L1]], + [[L1, L2], [L1], [L1]], # 2. Less request excluded locations - [[L1, L2, L3], [L1, L2], [L1]], + [[L1, L2], [L1, L2], [L1]], # 3. More request excluded locations - [[L1, L2, L3], [L1], [L1, L2]], + [[L1, L2], [L1], [L1, L2]], # 4. All locations were excluded - [[L1, L2, L3], [L1, L2, L3], [L1, L2, L3]], + [[L1, L2], [L1, L2], [L1, L2]], # 5. No common excluded locations - [[L1, L2, L3], [L1], [L2, L3]], + [[L1, L2], [L1], [L2]], # 6. Request excluded location not in preferred locations - [[L1, L2, L3], [L1, L2, L3], [L4]], + [[L1, L2], [L1, L2], [L3]], # 7. 
Empty excluded locations, remove all client excluded locations - [[L1, L2, L3], [L1, L2], []], + [[L1, L2], [L1, L2], []], ] ALL_INPUT_TEST_DATA = CLIENT_ONLY_TEST_DATA + CLIENT_AND_REQUEST_TEST_DATA @@ -88,15 +84,14 @@ def read_item_test_data(): client_only_output_data = [ [L1], # 0 [L2], # 1 - [L3], # 2 + [L1], # 2 [L1], # 3 - [L1] # 4 ] client_and_request_output_data = [ [L2], # 0 [L2], # 1 [L2], # 2 - [L3], # 3 + [L1], # 3 [L1], # 4 [L1], # 5 [L1], # 6 @@ -109,21 +104,20 @@ def read_item_test_data(): def query_items_change_feed_test_data(): client_only_output_data = [ - [L1, L1, L1], #0 - [L2, L2, L2], #1 - [L3, L3, L3], #2 - [L1, L1, L1], #3 - [L1, L1, L1] #4 + [L1, L1, L1, L1], #0 + [L2, L2, L2, L2], #1 + [L1, L1, L1, L1], #2 + [L1, L1, L1, L1] #3 ] client_and_request_output_data = [ - [L1, L1, L2], #0 - [L2, L2, L2], #1 - [L3, L3, L2], #2 - [L2, L2, L3], #3 - [L1, L1, L1], #4 - [L2, L2, L1], #5 - [L1, L1, L1], #6 - [L3, L3, L1], #7 + [L1, L1, L2, L2], #0 + [L2, L2, L2, L2], #1 + [L1, L1, L2, L2], #2 + [L2, L2, L1, L1], #3 + [L1, L1, L1, L1], #4 + [L2, L2, L1, L1], #5 + [L1, L1, L1, L1], #6 + [L1, L1, L1, L1], #7 ] all_output_test_data = client_only_output_data + client_and_request_output_data @@ -134,15 +128,14 @@ def replace_item_test_data(): client_only_output_data = [ [L1, L1], #0 [L2, L2], #1 - [L3, L3], #2 - [L1, L0], #3 - [L1, L1] #4 + [L1, L0], #2 + [L1, L1] #3 ] client_and_request_output_data = [ [L2, L2], #0 [L2, L2], #1 [L2, L2], #2 - [L3, L3], #3 + [L1, L0], #3 [L1, L0], #4 [L1, L1], #5 [L1, L1], #6 @@ -157,7 +150,6 @@ def patch_item_test_data(): client_only_output_data = [ [L1], #0 [L2], #1 - [L3], #2 [L0], #3 [L1] #4 ] @@ -165,7 +157,7 @@ def patch_item_test_data(): [L2], #0 [L2], #1 [L2], #2 - [L3], #3 + [L0], #3 [L0], #4 [L1], #5 [L1], #6 @@ -290,9 +282,9 @@ def test_query_items_change_feed(self, test_data): # API call: query_items_change_feed if request_excluded_locations is None: - list(container.query_items_change_feed()) + items = list(container.query_items_change_feed(start_time="Beginning")) else: - list(container.query_items_change_feed(excluded_locations=request_excluded_locations)) + items = list(container.query_items_change_feed(start_time="Beginning", excluded_locations=request_excluded_locations)) # Verify endpoint locations self._verify_endpoint(client, expected_locations) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py new file mode 100644 index 000000000000..7564071de4f9 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py @@ -0,0 +1,470 @@ +# The MIT License (MIT) +# Copyright (c) Microsoft Corporation. All rights reserved. 
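+# Async counterpart of test_excluded_locations.py: each parametrized case builds an aio CosmosClient with +# client-level excluded_locations, optionally passes request-level excluded_locations to the container +# operation, and verifies which regional endpoint served each request by matching the 'Request URL:' debug +# logs captured by MockHandler against the location cache's endpoint-to-region mapping. +# Editorial sketch (not SDK API), assuming the semantics shown in the samples: the effective regions are +# roughly [r for r in preferred_locations if r not in (request_excluded if request_excluded is not None +# else client_excluded)]; when every preferred region is excluded, reads fall back to the first preferred +# region while multi-region writes fall back to the default (global) endpoint.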
+ +import logging +import unittest +import uuid +import test_config +import pytest +import pytest_asyncio + +from azure.cosmos.aio import CosmosClient +from azure.cosmos.partition_key import PartitionKey + + +class MockHandler(logging.Handler): + def __init__(self): + super(MockHandler, self).__init__() + self.messages = [] + + def reset(self): + self.messages = [] + + def emit(self, record): + self.messages.append(record.msg) + +MOCK_HANDLER = MockHandler() +CONFIG = test_config.TestConfig() +HOST = CONFIG.host +KEY = CONFIG.masterKey +DATABASE_ID = CONFIG.TEST_DATABASE_ID +CONTAINER_ID = CONFIG.TEST_SINGLE_PARTITION_CONTAINER_ID +PARTITION_KEY = CONFIG.TEST_CONTAINER_PARTITION_KEY +ITEM_ID = 'doc1' +ITEM_PK_VALUE = 'pk' +TEST_ITEM = {'id': ITEM_ID, PARTITION_KEY: ITEM_PK_VALUE} + +L0 = "Default" +L1 = "West US 3" +L2 = "West US" +L3 = "East US 2" + +# L0 = "Default" +# L1 = "East US 2" +# L2 = "East US" +# L3 = "West US 2" + +CLIENT_ONLY_TEST_DATA = [ + # preferred_locations, client_excluded_locations, excluded_locations_request + # 0. No excluded location + [[L1, L2], [], None], + # 1. Single excluded location + [[L1, L2], [L1], None], + # 2. Exclude all locations + [[L1, L2], [L1, L2], None], + # 3. Exclude a location not in preferred locations + [[L1, L2], [L3], None], +] + +CLIENT_AND_REQUEST_TEST_DATA = [ + # preferred_locations, client_excluded_locations, excluded_locations_request + # 0. No client excluded locations + a request excluded location + [[L1, L2], [], [L1]], + # 1. The same client and request excluded location + [[L1, L2], [L1], [L1]], + # 2. Less request excluded locations + [[L1, L2], [L1, L2], [L1]], + # 3. More request excluded locations + [[L1, L2], [L1], [L1, L2]], + # 4. All locations were excluded + [[L1, L2], [L1, L2], [L1, L2]], + # 5. No common excluded locations + [[L1, L2], [L1], [L2, L3]], + # 6. Request excluded location not in preferred locations + [[L1, L2], [L1, L2], [L3]], + # 7. 
Empty excluded locations, remove all client excluded locations + [[L1, L2], [L1, L2], []], +] + +ALL_INPUT_TEST_DATA = CLIENT_ONLY_TEST_DATA + CLIENT_AND_REQUEST_TEST_DATA + +def read_item_test_data(): + client_only_output_data = [ + [L1], # 0 + [L2], # 1 + [L1], # 2 + [L1] # 3 + ] + client_and_request_output_data = [ + [L2], # 0 + [L2], # 1 + [L2], # 2 + [L1], # 3 + [L1], # 4 + [L1], # 5 + [L1], # 6 + [L1], # 7 + ] + all_output_test_data = client_only_output_data + client_and_request_output_data + + all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] + return all_test_data + +def query_items_change_feed_test_data(): + client_only_output_data = [ + [L1, L1, L1], #0 + [L2, L2, L2], #1 + [L1, L1, L1], #2 + [L1, L1, L1] #3 + ] + client_and_request_output_data = [ + [L1, L2, L2], #0 + [L2, L2, L2], #1 + [L1, L2, L2], #2 + [L2, L1, L1], #3 + [L1, L1, L1], #4 + [L2, L1, L1], #5 + [L1, L1, L1], #6 + [L1, L1, L1], #7 + ] + all_output_test_data = client_only_output_data + client_and_request_output_data + + all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] + return all_test_data + +def replace_item_test_data(): + client_only_output_data = [ + [L1], #0 + [L2], #1 + [L0], #2 + [L1] #3 + ] + client_and_request_output_data = [ + [L2], #0 + [L2], #1 + [L2], #2 + [L0], #3 + [L0], #4 + [L1], #5 + [L1], #6 + [L1], #7 + ] + all_output_test_data = client_only_output_data + client_and_request_output_data + + all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] + return all_test_data + +def patch_item_test_data(): + client_only_output_data = [ + [L1], #0 + [L2], #1 + [L0], #2 + [L1] #3 + ] + client_and_request_output_data = [ + [L2], #0 + [L2], #1 + [L2], #2 + [L0], #3 + [L0], #4 + [L1], #5 + [L1], #6 + [L1], #7 + ] + all_output_test_data = client_only_output_data + client_and_request_output_data + + all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] + return all_test_data + +@pytest_asyncio.fixture(scope="class", autouse=True) +async def setup_and_teardown(): + print("Setup: This runs before any tests") + logger = logging.getLogger("azure") + logger.addHandler(MOCK_HANDLER) + logger.setLevel(logging.DEBUG) + + test_client = CosmosClient(HOST, KEY) + container = test_client.get_database_client(DATABASE_ID).get_container_client(CONTAINER_ID) + await container.create_item(body=TEST_ITEM) + + yield + await test_client.close() + +@pytest.mark.cosmosMultiRegion +@pytest.mark.asyncio +@pytest.mark.usefixtures("setup_and_teardown") +class TestExcludedLocations: + async def _init_container(self, preferred_locations, client_excluded_locations, multiple_write_locations = True): + client = CosmosClient(HOST, KEY, + preferred_locations=preferred_locations, + excluded_locations=client_excluded_locations, + multiple_write_locations=multiple_write_locations) + db = await client.create_database_if_not_exists(DATABASE_ID) + container = await db.create_container_if_not_exists(CONTAINER_ID, PartitionKey(path='/' + PARTITION_KEY, kind='Hash')) + MOCK_HANDLER.reset() + + return client, db, container + + async def _verify_endpoint(self, client, expected_locations): + # get mapping for locations + location_mapping = (client.client_connection._global_endpoint_manager. 
+ location_cache.account_locations_by_write_regional_routing_context) + default_endpoint = (client.client_connection._global_endpoint_manager. + location_cache.default_regional_routing_context.get_primary()) + + # get Request URL + msgs = MOCK_HANDLER.messages + req_urls = [url.replace("Request URL: '", "") for url in msgs if 'Request URL:' in url] + + # get location + actual_locations = [] + for req_url in req_urls: + if req_url.startswith(default_endpoint): + actual_locations.append(L0) + else: + for endpoint in location_mapping: + if req_url.startswith(endpoint): + location = location_mapping[endpoint] + actual_locations.append(location) + break + + assert actual_locations == expected_locations + + @pytest.mark.parametrize('test_data', read_item_test_data()) + async def test_read_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + # Client setup + client, db, container = await self._init_container(preferred_locations, client_excluded_locations) + + # API call: read_item + if request_excluded_locations is None: + await container.read_item(ITEM_ID, ITEM_PK_VALUE) + else: + await container.read_item(ITEM_ID, ITEM_PK_VALUE, excluded_locations=request_excluded_locations) + + # Verify endpoint locations + await self._verify_endpoint(client, expected_locations) + + @pytest.mark.parametrize('test_data', read_item_test_data()) + async def test_read_all_items(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + # Client setup + client, db, container = await self._init_container(preferred_locations, client_excluded_locations) + + # API call: read_all_items + if request_excluded_locations is None: + all_items = [item async for item in container.read_all_items()] + else: + all_items = [item async for item in container.read_all_items(excluded_locations=request_excluded_locations)] + + # Verify endpoint locations + await self._verify_endpoint(client, expected_locations) + + @pytest.mark.parametrize('test_data', read_item_test_data()) + async def test_query_items(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + # Client setup and create an item + client, db, container = await self._init_container(preferred_locations, client_excluded_locations) + + # API call: query_items + if request_excluded_locations is None: + all_items = [item async for item in container.query_items(None)] + else: + all_items = [item async for item in container.query_items(None, excluded_locations=request_excluded_locations)] + + # Verify endpoint locations + await self._verify_endpoint(client, expected_locations) + + @pytest.mark.parametrize('test_data', query_items_change_feed_test_data()) + async def test_query_items_change_feed(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + + # Client setup and create an item + client, db, container = await self._init_container(preferred_locations, client_excluded_locations) + + # API call: query_items_change_feed + if request_excluded_locations is None: + all_items = [item async for item in container.query_items_change_feed(start_time="Beginning")] + else: + all_items = [item async for item in container.query_items_change_feed(start_time="Beginning", 
excluded_locations=request_excluded_locations)] + + # Verify endpoint locations + await self._verify_endpoint(client, expected_locations) + + + @pytest.mark.parametrize('test_data', replace_item_test_data()) + async def test_replace_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup and create an item + client, db, container = await self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + + # API call: replace_item + if request_excluded_locations is None: + await container.replace_item(ITEM_ID, body=TEST_ITEM) + else: + await container.replace_item(ITEM_ID, body=TEST_ITEM, excluded_locations=request_excluded_locations) + + # Verify endpoint locations + if multiple_write_locations: + await self._verify_endpoint(client, expected_locations) + else: + await self._verify_endpoint(client, [L1]) + + @pytest.mark.parametrize('test_data', replace_item_test_data()) + async def test_upsert_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup and create an item + client, db, container = await self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + + # API call: upsert_item + body = {'pk': 'pk', 'id': f'doc2-{str(uuid.uuid4())}'} + if request_excluded_locations is None: + await container.upsert_item(body=body) + else: + await container.upsert_item(body=body, excluded_locations=request_excluded_locations) + + # get location from mock_handler + if multiple_write_locations: + await self._verify_endpoint(client, expected_locations) + else: + await self._verify_endpoint(client, [L1]) + + @pytest.mark.parametrize('test_data', replace_item_test_data()) + async def test_create_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup and create an item + client, db, container = await self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + + # API call: create_item + body = {'pk': 'pk', 'id': f'doc2-{str(uuid.uuid4())}'} + if request_excluded_locations is None: + await container.create_item(body=body) + else: + await container.create_item(body=body, excluded_locations=request_excluded_locations) + + # get location from mock_handler + if multiple_write_locations: + await self._verify_endpoint(client, expected_locations) + else: + await self._verify_endpoint(client, [L1]) + + @pytest.mark.parametrize('test_data', patch_item_test_data()) + async def test_patch_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup and create an item + client, db, container = await self._init_container(preferred_locations, client_excluded_locations, + multiple_write_locations) + + # API call: patch_item + operations = [ + {"op": "add", "path": "/test_data", "value": f'Data-{str(uuid.uuid4())}'}, + ] + if request_excluded_locations is None: + await container.patch_item(item=ITEM_ID, partition_key=ITEM_PK_VALUE, + patch_operations=operations) + 
else: + await container.patch_item(item=ITEM_ID, partition_key=ITEM_PK_VALUE, + patch_operations=operations, + excluded_locations=request_excluded_locations) + + # get location from mock_handler + if multiple_write_locations: + await self._verify_endpoint(client, expected_locations) + else: + await self._verify_endpoint(client, [L1]) + + @pytest.mark.parametrize('test_data', patch_item_test_data()) + async def test_execute_item_batch(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup and create an item + client, db, container = await self._init_container(preferred_locations, client_excluded_locations, + multiple_write_locations) + + # API call: execute_item_batch + batch_operations = [] + for i in range(3): + batch_operations.append(("create", ({"id": f'Doc-{str(uuid.uuid4())}', PARTITION_KEY: ITEM_PK_VALUE},))) + + if request_excluded_locations is None: + await container.execute_item_batch(batch_operations=batch_operations, + partition_key=ITEM_PK_VALUE,) + else: + await container.execute_item_batch(batch_operations=batch_operations, + partition_key=ITEM_PK_VALUE, + excluded_locations=request_excluded_locations) + + # get location from mock_handler + if multiple_write_locations: + await self._verify_endpoint(client, expected_locations) + else: + await self._verify_endpoint(client, [L1]) + + @pytest.mark.parametrize('test_data', patch_item_test_data()) + async def test_delete_item(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for multiple_write_locations in [True, False]: + # Client setup + client, db, container = await self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + + #create before delete + item_id = f'doc2-{str(uuid.uuid4())}' + await container.create_item(body={PARTITION_KEY: ITEM_PK_VALUE, 'id': item_id}) + MOCK_HANDLER.reset() + + # API call: read_item + if request_excluded_locations is None: + await container.delete_item(item_id, ITEM_PK_VALUE) + else: + await container.delete_item(item_id, ITEM_PK_VALUE, excluded_locations=request_excluded_locations) + + # Verify endpoint locations + if multiple_write_locations: + await self._verify_endpoint(client, expected_locations) + else: + await self._verify_endpoint(client, [L1]) + + # TODO: enable this test once we figure out how to enable delete_all_items_by_partition_key feature + # @pytest.mark.parametrize('test_data', patch_item_test_data()) + # def test_delete_all_items_by_partition_key(self, test_data): + # # Init test variables + # preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + # + # for multiple_write_locations in [True, False]: + # # Client setup + # client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + # + # #create before delete + # item_id = f'doc2-{str(uuid.uuid4())}' + # pk_value = f'temp_partition_key_value-{str(uuid.uuid4())}' + # container.create_item(body={PARTITION_KEY: pk_value, 'id': item_id}) + # MOCK_HANDLER.reset() + # + # # API call: read_item + # if request_excluded_locations is None: + # container.delete_all_items_by_partition_key(pk_value) + # else: + # container.delete_all_items_by_partition_key(pk_value, excluded_locations=request_excluded_locations) + # + # # 
Verify endpoint locations + # if multiple_write_locations: + # self._verify_endpoint(client, expected_locations) + # else: + # self._verify_endpoint(client, [L1]) + +if __name__ == "__main__": + unittest.main() From 56108892418867fb21d6c7dad05c6ca0a2fbf982 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Mon, 7 Apr 2025 16:55:49 -0700 Subject: [PATCH 13/23] Add more live tests for all other Python versions --- sdk/cosmos/live-platform-matrix.json | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/sdk/cosmos/live-platform-matrix.json b/sdk/cosmos/live-platform-matrix.json index dc3ad3c32e17..6763c1c06562 100644 --- a/sdk/cosmos/live-platform-matrix.json +++ b/sdk/cosmos/live-platform-matrix.json @@ -91,6 +91,22 @@ }, { "WindowsConfig": { + "Windows2022_38": { + "OSVmImage": "env:WINDOWSVMIMAGE", + "Pool": "env:WINDOWSPOOL", + "PythonVersion": "3.8", + "CoverageArg": "--disablecov", + "TestSamples": "false", + "TestMarkArgument": "cosmosMultiRegion" + }, + "Windows2022_310": { + "OSVmImage": "env:WINDOWSVMIMAGE", + "Pool": "env:WINDOWSPOOL", + "PythonVersion": "3.10", + "CoverageArg": "--disablecov", + "TestSamples": "false", + "TestMarkArgument": "cosmosMultiRegion" + }, "Windows2022_312": { "OSVmImage": "env:WINDOWSVMIMAGE", "Pool": "env:WINDOWSPOOL", From f4cb8b3ba9c1507793af77547281741e221b7af1 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Mon, 7 Apr 2025 17:07:29 -0700 Subject: [PATCH 14/23] Fix Async test failure --- sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py | 2 +- sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py index 13e9ba713653..2159c6c97425 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py @@ -176,7 +176,7 @@ def setup_and_teardown(): logger.setLevel(logging.DEBUG) container = cosmos_client.CosmosClient(HOST, KEY).get_database_client(DATABASE_ID).get_container_client(CONTAINER_ID) - container.create_item(body=TEST_ITEM) + container.upsert_item(body=TEST_ITEM) yield # Code to run after tests diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py index 7564071de4f9..b0079e753039 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py @@ -175,7 +175,7 @@ async def setup_and_teardown(): test_client = CosmosClient(HOST, KEY) container = test_client.get_database_client(DATABASE_ID).get_container_client(CONTAINER_ID) - await container.create_item(body=TEST_ITEM) + await container.upsert_item(body=TEST_ITEM) yield await test_client.close() From 4f081681f2cd87a103bf65edd4b61a12283cfcb7 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Tue, 8 Apr 2025 10:23:28 -0700 Subject: [PATCH 15/23] Fix live test failures --- .../tests/test_excluded_locations.py | 18 +++-- .../tests/test_excluded_locations_async.py | 66 ++++++++++--------- 2 files changed, 46 insertions(+), 38 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py index 2159c6c97425..9af367303107 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py @@ -168,6 +168,12 @@ def 
patch_item_test_data(): all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] return all_test_data +def _create_item_with_excluded_locations(container, body, excluded_locations): + if excluded_locations is None: + container.create_item(body=body) + else: + container.create_item(body=body, excluded_locations=excluded_locations) + @pytest.fixture(scope="class", autouse=True) def setup_and_teardown(): print("Setup: This runs before any tests") @@ -344,10 +350,7 @@ def test_create_item(self, test_data): # API call: create_item body = {'pk': 'pk', 'id': f'doc2-{str(uuid.uuid4())}'} - if request_excluded_locations is None: - container.create_item(body=body) - else: - container.create_item(body=body, excluded_locations=request_excluded_locations) + _create_item_with_excluded_locations(container, body, request_excluded_locations) # get location from mock_handler if multiple_write_locations: @@ -421,12 +424,13 @@ def test_delete_item(self, test_data): # Client setup client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) - #create before delete + # create before delete item_id = f'doc2-{str(uuid.uuid4())}' - container.create_item(body={PARTITION_KEY: ITEM_PK_VALUE, 'id': item_id}) + body = {PARTITION_KEY: ITEM_PK_VALUE, 'id': item_id} + _create_item_with_excluded_locations(container, body, request_excluded_locations) MOCK_HANDLER.reset() - # API call: read_item + # API call: delete_item if request_excluded_locations is None: container.delete_item(item_id, ITEM_PK_VALUE) else: diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py index b0079e753039..dd6ce3776f68 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py @@ -80,20 +80,20 @@ def emit(self, record): def read_item_test_data(): client_only_output_data = [ - [L1], # 0 - [L2], # 1 - [L1], # 2 - [L1] # 3 + [L1, L1], # 0 + [L2, L2], # 1 + [L1, L1], # 2 + [L1, L1], # 3 ] client_and_request_output_data = [ - [L2], # 0 - [L2], # 1 - [L2], # 2 - [L1], # 3 - [L1], # 4 - [L1], # 5 - [L1], # 6 - [L1], # 7 + [L2, L2], # 0 + [L2, L2], # 1 + [L2, L2], # 2 + [L1, L1], # 3 + [L1, L1], # 4 + [L1, L1], # 5 + [L1, L1], # 6 + [L1, L1], # 7 ] all_output_test_data = client_only_output_data + client_and_request_output_data @@ -102,20 +102,20 @@ def read_item_test_data(): def query_items_change_feed_test_data(): client_only_output_data = [ - [L1, L1, L1], #0 - [L2, L2, L2], #1 - [L1, L1, L1], #2 - [L1, L1, L1] #3 + [L1, L1, L1, L1], #0 + [L2, L2, L2, L2], #1 + [L1, L1, L1, L1], #2 + [L1, L1, L1, L1] #3 ] client_and_request_output_data = [ - [L1, L2, L2], #0 - [L2, L2, L2], #1 - [L1, L2, L2], #2 - [L2, L1, L1], #3 - [L1, L1, L1], #4 - [L2, L1, L1], #5 - [L1, L1, L1], #6 - [L1, L1, L1], #7 + [L1, L2, L2, L2], #0 + [L2, L2, L2, L2], #1 + [L1, L2, L2, L2], #2 + [L2, L1, L1, L1], #3 + [L1, L1, L1, L1], #4 + [L2, L1, L1, L1], #5 + [L1, L1, L1, L1], #6 + [L1, L1, L1, L1], #7 ] all_output_test_data = client_only_output_data + client_and_request_output_data @@ -166,6 +166,12 @@ def patch_item_test_data(): all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] return all_test_data +async def _create_item_with_excluded_locations(container, body, excluded_locations): + if excluded_locations is None: + await 
container.create_item(body=body) + else: + await container.create_item(body=body, excluded_locations=excluded_locations) + @pytest_asyncio.fixture(scope="class", autouse=True) async def setup_and_teardown(): print("Setup: This runs before any tests") @@ -344,10 +350,7 @@ async def test_create_item(self, test_data): # API call: create_item body = {'pk': 'pk', 'id': f'doc2-{str(uuid.uuid4())}'} - if request_excluded_locations is None: - await container.create_item(body=body) - else: - await container.create_item(body=body, excluded_locations=request_excluded_locations) + await _create_item_with_excluded_locations(container, body, request_excluded_locations) # get location from mock_handler if multiple_write_locations: @@ -421,12 +424,13 @@ async def test_delete_item(self, test_data): # Client setup client, db, container = await self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) - #create before delete + # create before delete item_id = f'doc2-{str(uuid.uuid4())}' - await container.create_item(body={PARTITION_KEY: ITEM_PK_VALUE, 'id': item_id}) + body = {PARTITION_KEY: ITEM_PK_VALUE, 'id': item_id} + await _create_item_with_excluded_locations(container, body, request_excluded_locations) MOCK_HANDLER.reset() - # API call: read_item + # API call: delete_item if request_excluded_locations is None: await container.delete_item(item_id, ITEM_PK_VALUE) else: From 4e2fd6b691478cd75efaf2d62f65b82f5cc23416 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Tue, 8 Apr 2025 10:46:37 -0700 Subject: [PATCH 16/23] Fix live test failures --- .../tests/test_excluded_locations_async.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py index dd6ce3776f68..4a39b6a78c2c 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py @@ -80,20 +80,20 @@ def emit(self, record): def read_item_test_data(): client_only_output_data = [ - [L1, L1], # 0 - [L2, L2], # 1 - [L1, L1], # 2 - [L1, L1], # 3 + [L1], # 0 + [L2], # 1 + [L1], # 2 + [L1], # 3 ] client_and_request_output_data = [ - [L2, L2], # 0 - [L2, L2], # 1 - [L2, L2], # 2 - [L1, L1], # 3 - [L1, L1], # 4 - [L1, L1], # 5 - [L1, L1], # 6 - [L1, L1], # 7 + [L2], # 0 + [L2], # 1 + [L2], # 2 + [L1], # 3 + [L1], # 4 + [L1], # 5 + [L1], # 6 + [L1], # 7 ] all_output_test_data = client_only_output_data + client_and_request_output_data From e0dab2977be9f6519644aa15ef5569ddd14ad83c Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Tue, 8 Apr 2025 11:11:30 -0700 Subject: [PATCH 17/23] Fix live test failures --- .../tests/test_excluded_locations_async.py | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py index 4a39b6a78c2c..e5c1dcfeed26 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py @@ -100,6 +100,28 @@ def read_item_test_data(): all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] return all_test_data +def read_all_item_test_data(): + client_only_output_data = [ + [L1, L1], # 0 + [L2, L2], # 1 + [L1, L1], # 2 + [L1, L1], # 3 + ] + client_and_request_output_data = [ + 
[L2, L2], # 0 + [L2, L2], # 1 + [L2, L2], # 2 + [L1, L1], # 3 + [L1, L1], # 4 + [L1, L1], # 5 + [L1, L1], # 6 + [L1, L1], # 7 + ] + all_output_test_data = client_only_output_data + client_and_request_output_data + + all_test_data = [input_data + [output_data] for input_data, output_data in zip(ALL_INPUT_TEST_DATA, all_output_test_data)] + return all_test_data + def query_items_change_feed_test_data(): client_only_output_data = [ [L1, L1, L1, L1], #0 @@ -243,7 +265,7 @@ async def test_read_item(self, test_data): # Verify endpoint locations await self._verify_endpoint(client, expected_locations) - @pytest.mark.parametrize('test_data', read_item_test_data()) + @pytest.mark.parametrize('test_data', read_all_item_test_data()) async def test_read_all_items(self, test_data): # Init test variables preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data @@ -260,7 +282,7 @@ async def test_read_all_items(self, test_data): # Verify endpoint locations await self._verify_endpoint(client, expected_locations) - @pytest.mark.parametrize('test_data', read_item_test_data()) + @pytest.mark.parametrize('test_data', read_all_item_test_data()) async def test_query_items(self, test_data): # Init test variables preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data From 798c12f513f2822e8e270ffc00111d05f57fdd28 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Tue, 8 Apr 2025 11:38:36 -0700 Subject: [PATCH 18/23] Add test_delete_all_items_by_partition_key --- .../tests/test_excluded_locations.py | 55 ++++++++++--------- 1 file changed, 28 insertions(+), 27 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py index 9af367303107..0a63136700c0 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py @@ -442,33 +442,34 @@ def test_delete_item(self, test_data): else: self._verify_endpoint(client, [L1]) - # TODO: enable this test once we figure out how to enable delete_all_items_by_partition_key feature - # @pytest.mark.parametrize('test_data', patch_item_test_data()) - # def test_delete_all_items_by_partition_key(self, test_data): - # # Init test variables - # preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data - # - # for multiple_write_locations in [True, False]: - # # Client setup - # client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) - # - # #create before delete - # item_id = f'doc2-{str(uuid.uuid4())}' - # pk_value = f'temp_partition_key_value-{str(uuid.uuid4())}' - # container.create_item(body={PARTITION_KEY: pk_value, 'id': item_id}) - # MOCK_HANDLER.reset() - # - # # API call: read_item - # if request_excluded_locations is None: - # container.delete_all_items_by_partition_key(pk_value) - # else: - # container.delete_all_items_by_partition_key(pk_value, excluded_locations=request_excluded_locations) - # - # # Verify endpoint locations - # if multiple_write_locations: - # self._verify_endpoint(client, expected_locations) - # else: - # self._verify_endpoint(client, [L1]) + @pytest.mark.parametrize('test_data', patch_item_test_data()) + def test_delete_all_items_by_partition_key(self, test_data): + # Init test variables + preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data + + for 
multiple_write_locations in [True, False]: + # Client setup + client, db, container = self._init_container(preferred_locations, client_excluded_locations, + multiple_write_locations) + + # create before delete + item_id = f'doc2-{str(uuid.uuid4())}' + pk_value = f'temp_partition_key_value-{str(uuid.uuid4())}' + body = {PARTITION_KEY: pk_value, 'id': item_id} + _create_item_with_excluded_locations(container, body, request_excluded_locations) + MOCK_HANDLER.reset() + + # API call: delete_item + if request_excluded_locations is None: + container.delete_all_items_by_partition_key(pk_value) + else: + container.delete_all_items_by_partition_key(pk_value, excluded_locations=request_excluded_locations) + + # Verify endpoint locations + if multiple_write_locations: + self._verify_endpoint(client, expected_locations) + else: + self._verify_endpoint(client, [L1]) if __name__ == "__main__": unittest.main() From 2c5b8fce52682014b4214f930a29f2b97b13e91c Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Tue, 8 Apr 2025 15:01:29 -0700 Subject: [PATCH 19/23] Remove test_delete_all_items_by_partition_key --- .../tests/test_excluded_locations.py | 29 ------------------- .../tests/test_excluded_locations_async.py | 28 ------------------ 2 files changed, 57 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py index 0a63136700c0..d99f9a3b3fda 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py @@ -442,34 +442,5 @@ def test_delete_item(self, test_data): else: self._verify_endpoint(client, [L1]) - @pytest.mark.parametrize('test_data', patch_item_test_data()) - def test_delete_all_items_by_partition_key(self, test_data): - # Init test variables - preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data - - for multiple_write_locations in [True, False]: - # Client setup - client, db, container = self._init_container(preferred_locations, client_excluded_locations, - multiple_write_locations) - - # create before delete - item_id = f'doc2-{str(uuid.uuid4())}' - pk_value = f'temp_partition_key_value-{str(uuid.uuid4())}' - body = {PARTITION_KEY: pk_value, 'id': item_id} - _create_item_with_excluded_locations(container, body, request_excluded_locations) - MOCK_HANDLER.reset() - - # API call: delete_item - if request_excluded_locations is None: - container.delete_all_items_by_partition_key(pk_value) - else: - container.delete_all_items_by_partition_key(pk_value, excluded_locations=request_excluded_locations) - - # Verify endpoint locations - if multiple_write_locations: - self._verify_endpoint(client, expected_locations) - else: - self._verify_endpoint(client, [L1]) - if __name__ == "__main__": unittest.main() diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py index e5c1dcfeed26..109f9ff7207a 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py @@ -464,33 +464,5 @@ async def test_delete_item(self, test_data): else: await self._verify_endpoint(client, [L1]) - # TODO: enable this test once we figure out how to enable delete_all_items_by_partition_key feature - # @pytest.mark.parametrize('test_data', patch_item_test_data()) - # def test_delete_all_items_by_partition_key(self, test_data): - # # Init test variables - # 
preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data - # - # for multiple_write_locations in [True, False]: - # # Client setup - # client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) - # - # #create before delete - # item_id = f'doc2-{str(uuid.uuid4())}' - # pk_value = f'temp_partition_key_value-{str(uuid.uuid4())}' - # container.create_item(body={PARTITION_KEY: pk_value, 'id': item_id}) - # MOCK_HANDLER.reset() - # - # # API call: read_item - # if request_excluded_locations is None: - # container.delete_all_items_by_partition_key(pk_value) - # else: - # container.delete_all_items_by_partition_key(pk_value, excluded_locations=request_excluded_locations) - # - # # Verify endpoint locations - # if multiple_write_locations: - # self._verify_endpoint(client, expected_locations) - # else: - # self._verify_endpoint(client, [L1]) - if __name__ == "__main__": unittest.main() From 2b9b58fbc885781cda6c039bb231853d6c652b79 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Wed, 9 Apr 2025 19:40:18 -0700 Subject: [PATCH 20/23] Added missing doc for excluded_locations in async client --- sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py index 647f6d59f615..e5e526670629 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py @@ -162,6 +162,7 @@ class CosmosClient: # pylint: disable=client-accepts-api-version-keyword :keyword bool enable_endpoint_discovery: Enable endpoint discovery for geo-replicated database accounts. (Default: True) :keyword list[str] preferred_locations: The preferred locations for geo-replicated database accounts. + :keyword list[str] excluded_locations: The excluded locations to be skipped from preferred locations. The locations :keyword bool enable_diagnostics_logging: Enable the CosmosHttpLogging policy. Must be used along with a logger to work. :keyword ~logging.Logger logger: Logger to be used for collecting request diagnostics. 
Can be passed in at client From eead7506de94d82e907a4c95b2173b26ff171afc Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Wed, 9 Apr 2025 19:40:57 -0700 Subject: [PATCH 21/23] Remove duplicate functions --- .../tests/test_excluded_locations.py | 123 +++++++++--------- .../tests/test_excluded_locations_async.py | 101 ++++++-------- 2 files changed, 99 insertions(+), 125 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py index d99f9a3b3fda..49b7f0553871 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py @@ -188,51 +188,50 @@ def setup_and_teardown(): # Code to run after tests print("Teardown: This runs after all tests") -@pytest.mark.cosmosMultiRegion -class TestExcludedLocations: - def _init_container(self, preferred_locations, client_excluded_locations, multiple_write_locations = True): - client = cosmos_client.CosmosClient(HOST, KEY, - preferred_locations=preferred_locations, - excluded_locations=client_excluded_locations, - multiple_write_locations=multiple_write_locations) - db = client.get_database_client(DATABASE_ID) - container = db.get_container_client(CONTAINER_ID) - MOCK_HANDLER.reset() - - return client, db, container - - def _verify_endpoint(self, client, expected_locations): - # get mapping for locations - location_mapping = (client.client_connection._global_endpoint_manager. - location_cache.account_locations_by_write_regional_routing_context) - default_endpoint = (client.client_connection._global_endpoint_manager. - location_cache.default_regional_routing_context.get_primary()) - - # get Request URL - msgs = MOCK_HANDLER.messages - req_urls = [url.replace("Request URL: '", "") for url in msgs if 'Request URL:' in url] - - # get location - actual_locations = [] - for req_url in req_urls: - if req_url.startswith(default_endpoint): - actual_locations.append(L0) - else: - for endpoint in location_mapping: - if req_url.startswith(endpoint): - location = location_mapping[endpoint] - actual_locations.append(location) - break +def _init_container(preferred_locations, client_excluded_locations, multiple_write_locations = True): + client = cosmos_client.CosmosClient(HOST, KEY, + preferred_locations=preferred_locations, + excluded_locations=client_excluded_locations, + multiple_write_locations=multiple_write_locations) + db = client.get_database_client(DATABASE_ID) + container = db.get_container_client(CONTAINER_ID) + MOCK_HANDLER.reset() + + return client, db, container + +def _verify_endpoint(messages, client, expected_locations): + # get mapping for locations + location_mapping = (client.client_connection._global_endpoint_manager. + location_cache.account_locations_by_write_regional_routing_context) + default_endpoint = (client.client_connection._global_endpoint_manager. 
+ location_cache.default_regional_routing_context.get_primary()) + + # get Request URL + req_urls = [url.replace("Request URL: '", "") for url in messages if 'Request URL:' in url] + + # get location + actual_locations = [] + for req_url in req_urls: + if req_url.startswith(default_endpoint): + actual_locations.append(L0) + else: + for endpoint in location_mapping: + if req_url.startswith(endpoint): + location = location_mapping[endpoint] + actual_locations.append(location) + break - assert actual_locations == expected_locations + assert actual_locations == expected_locations +@pytest.mark.cosmosMultiRegion +class TestExcludedLocations: @pytest.mark.parametrize('test_data', read_item_test_data()) def test_read_item(self, test_data): # Init test variables preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data # Client setup - client, db, container = self._init_container(preferred_locations, client_excluded_locations) + client, db, container = _init_container(preferred_locations, client_excluded_locations) # API call: read_item if request_excluded_locations is None: @@ -241,7 +240,7 @@ def test_read_item(self, test_data): container.read_item(ITEM_ID, ITEM_PK_VALUE, excluded_locations=request_excluded_locations) # Verify endpoint locations - self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) @pytest.mark.parametrize('test_data', read_item_test_data()) def test_read_all_items(self, test_data): @@ -249,7 +248,7 @@ def test_read_all_items(self, test_data): preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data # Client setup - client, db, container = self._init_container(preferred_locations, client_excluded_locations) + client, db, container = _init_container(preferred_locations, client_excluded_locations) # API call: read_all_items if request_excluded_locations is None: @@ -258,7 +257,7 @@ def test_read_all_items(self, test_data): list(container.read_all_items(excluded_locations=request_excluded_locations)) # Verify endpoint locations - self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) @pytest.mark.parametrize('test_data', read_item_test_data()) def test_query_items(self, test_data): @@ -266,7 +265,7 @@ def test_query_items(self, test_data): preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data # Client setup and create an item - client, db, container = self._init_container(preferred_locations, client_excluded_locations) + client, db, container = _init_container(preferred_locations, client_excluded_locations) # API call: query_items if request_excluded_locations is None: @@ -275,7 +274,7 @@ def test_query_items(self, test_data): list(container.query_items(None, excluded_locations=request_excluded_locations)) # Verify endpoint locations - self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) @pytest.mark.parametrize('test_data', query_items_change_feed_test_data()) def test_query_items_change_feed(self, test_data): @@ -284,7 +283,7 @@ def test_query_items_change_feed(self, test_data): # Client setup and create an item - client, db, container = self._init_container(preferred_locations, client_excluded_locations) + client, db, container = _init_container(preferred_locations, client_excluded_locations) # API call: query_items_change_feed if 
request_excluded_locations is None: @@ -293,7 +292,7 @@ def test_query_items_change_feed(self, test_data): items = list(container.query_items_change_feed(start_time="Beginning", excluded_locations=request_excluded_locations)) # Verify endpoint locations - self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) @pytest.mark.parametrize('test_data', replace_item_test_data()) @@ -303,7 +302,7 @@ def test_replace_item(self, test_data): for multiple_write_locations in [True, False]: # Client setup and create an item - client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + client, db, container = _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # API call: replace_item if request_excluded_locations is None: @@ -313,9 +312,9 @@ def test_replace_item(self, test_data): # Verify endpoint locations if multiple_write_locations: - self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - self._verify_endpoint(client, [expected_locations[0], L1]) + _verify_endpoint(client, [expected_locations[0], L1]) @pytest.mark.parametrize('test_data', replace_item_test_data()) def test_upsert_item(self, test_data): @@ -324,7 +323,7 @@ def test_upsert_item(self, test_data): for multiple_write_locations in [True, False]: # Client setup and create an item - client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + client, db, container = _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # API call: upsert_item body = {'pk': 'pk', 'id': f'doc2-{str(uuid.uuid4())}'} @@ -335,9 +334,9 @@ def test_upsert_item(self, test_data): # get location from mock_handler if multiple_write_locations: - self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - self._verify_endpoint(client, [expected_locations[0], L1]) + _verify_endpoint(client, [expected_locations[0], L1]) @pytest.mark.parametrize('test_data', replace_item_test_data()) def test_create_item(self, test_data): @@ -346,7 +345,7 @@ def test_create_item(self, test_data): for multiple_write_locations in [True, False]: # Client setup and create an item - client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + client, db, container = _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # API call: create_item body = {'pk': 'pk', 'id': f'doc2-{str(uuid.uuid4())}'} @@ -354,9 +353,9 @@ def test_create_item(self, test_data): # get location from mock_handler if multiple_write_locations: - self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - self._verify_endpoint(client, [expected_locations[0], L1]) + _verify_endpoint(client, [expected_locations[0], L1]) @pytest.mark.parametrize('test_data', patch_item_test_data()) def test_patch_item(self, test_data): @@ -365,7 +364,7 @@ def test_patch_item(self, test_data): for multiple_write_locations in [True, False]: # Client setup and create an item - client, db, container = self._init_container(preferred_locations, client_excluded_locations, + client, db, container = _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # API 
call: patch_item @@ -382,9 +381,9 @@ def test_patch_item(self, test_data): # get location from mock_handler if multiple_write_locations: - self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - self._verify_endpoint(client, [L1]) + _verify_endpoint(client, [L1]) @pytest.mark.parametrize('test_data', patch_item_test_data()) def test_execute_item_batch(self, test_data): @@ -393,7 +392,7 @@ def test_execute_item_batch(self, test_data): for multiple_write_locations in [True, False]: # Client setup and create an item - client, db, container = self._init_container(preferred_locations, client_excluded_locations, + client, db, container = _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # API call: execute_item_batch @@ -411,9 +410,9 @@ def test_execute_item_batch(self, test_data): # get location from mock_handler if multiple_write_locations: - self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - self._verify_endpoint(client, [L1]) + _verify_endpoint(client, [L1]) @pytest.mark.parametrize('test_data', patch_item_test_data()) def test_delete_item(self, test_data): @@ -422,7 +421,7 @@ def test_delete_item(self, test_data): for multiple_write_locations in [True, False]: # Client setup - client, db, container = self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + client, db, container = _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # create before delete item_id = f'doc2-{str(uuid.uuid4())}' @@ -438,9 +437,9 @@ def test_delete_item(self, test_data): # Verify endpoint locations if multiple_write_locations: - self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - self._verify_endpoint(client, [L1]) + _verify_endpoint(client, [L1]) if __name__ == "__main__": unittest.main() diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py index 109f9ff7207a..c2cbc307e2eb 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py @@ -10,7 +10,7 @@ from azure.cosmos.aio import CosmosClient from azure.cosmos.partition_key import PartitionKey - +from tests.test_excluded_locations import _verify_endpoint class MockHandler(logging.Handler): def __init__(self): @@ -208,53 +208,28 @@ async def setup_and_teardown(): yield await test_client.close() +async def _init_container(preferred_locations, client_excluded_locations, multiple_write_locations = True): + client = CosmosClient(HOST, KEY, + preferred_locations=preferred_locations, + excluded_locations=client_excluded_locations, + multiple_write_locations=multiple_write_locations) + db = await client.create_database_if_not_exists(DATABASE_ID) + container = await db.create_container_if_not_exists(CONTAINER_ID, PartitionKey(path='/' + PARTITION_KEY, kind='Hash')) + MOCK_HANDLER.reset() + + return client, db, container + @pytest.mark.cosmosMultiRegion @pytest.mark.asyncio @pytest.mark.usefixtures("setup_and_teardown") class TestExcludedLocations: - async def _init_container(self, preferred_locations, client_excluded_locations, multiple_write_locations = True): - client = CosmosClient(HOST, KEY, - preferred_locations=preferred_locations, - 
excluded_locations=client_excluded_locations, - multiple_write_locations=multiple_write_locations) - db = await client.create_database_if_not_exists(DATABASE_ID) - container = await db.create_container_if_not_exists(CONTAINER_ID, PartitionKey(path='/' + PARTITION_KEY, kind='Hash')) - MOCK_HANDLER.reset() - - return client, db, container - - async def _verify_endpoint(self, client, expected_locations): - # get mapping for locations - location_mapping = (client.client_connection._global_endpoint_manager. - location_cache.account_locations_by_write_regional_routing_context) - default_endpoint = (client.client_connection._global_endpoint_manager. - location_cache.default_regional_routing_context.get_primary()) - - # get Request URL - msgs = MOCK_HANDLER.messages - req_urls = [url.replace("Request URL: '", "") for url in msgs if 'Request URL:' in url] - - # get location - actual_locations = [] - for req_url in req_urls: - if req_url.startswith(default_endpoint): - actual_locations.append(L0) - else: - for endpoint in location_mapping: - if req_url.startswith(endpoint): - location = location_mapping[endpoint] - actual_locations.append(location) - break - - assert actual_locations == expected_locations - @pytest.mark.parametrize('test_data', read_item_test_data()) async def test_read_item(self, test_data): # Init test variables preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data # Client setup - client, db, container = await self._init_container(preferred_locations, client_excluded_locations) + client, db, container = await _init_container(preferred_locations, client_excluded_locations) # API call: read_item if request_excluded_locations is None: @@ -263,7 +238,7 @@ async def test_read_item(self, test_data): await container.read_item(ITEM_ID, ITEM_PK_VALUE, excluded_locations=request_excluded_locations) # Verify endpoint locations - await self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) @pytest.mark.parametrize('test_data', read_all_item_test_data()) async def test_read_all_items(self, test_data): @@ -271,7 +246,7 @@ async def test_read_all_items(self, test_data): preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data # Client setup - client, db, container = await self._init_container(preferred_locations, client_excluded_locations) + client, db, container = await _init_container(preferred_locations, client_excluded_locations) # API call: read_all_items if request_excluded_locations is None: @@ -280,7 +255,7 @@ async def test_read_all_items(self, test_data): all_items = [item async for item in container.read_all_items(excluded_locations=request_excluded_locations)] # Verify endpoint locations - await self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) @pytest.mark.parametrize('test_data', read_all_item_test_data()) async def test_query_items(self, test_data): @@ -288,7 +263,7 @@ async def test_query_items(self, test_data): preferred_locations, client_excluded_locations, request_excluded_locations, expected_locations = test_data # Client setup and create an item - client, db, container = await self._init_container(preferred_locations, client_excluded_locations) + client, db, container = await _init_container(preferred_locations, client_excluded_locations) # API call: query_items if request_excluded_locations is None: @@ -297,7 +272,7 @@ async def 
test_query_items(self, test_data): all_items = [item async for item in container.query_items(None, excluded_locations=request_excluded_locations)] # Verify endpoint locations - await self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) @pytest.mark.parametrize('test_data', query_items_change_feed_test_data()) async def test_query_items_change_feed(self, test_data): @@ -306,7 +281,7 @@ async def test_query_items_change_feed(self, test_data): # Client setup and create an item - client, db, container = await self._init_container(preferred_locations, client_excluded_locations) + client, db, container = await _init_container(preferred_locations, client_excluded_locations) # API call: query_items_change_feed if request_excluded_locations is None: @@ -315,7 +290,7 @@ async def test_query_items_change_feed(self, test_data): all_items = [item async for item in container.query_items_change_feed(start_time="Beginning", excluded_locations=request_excluded_locations)] # Verify endpoint locations - await self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) @pytest.mark.parametrize('test_data', replace_item_test_data()) @@ -325,7 +300,7 @@ async def test_replace_item(self, test_data): for multiple_write_locations in [True, False]: # Client setup and create an item - client, db, container = await self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + client, db, container = await _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # API call: replace_item if request_excluded_locations is None: @@ -335,9 +310,9 @@ async def test_replace_item(self, test_data): # Verify endpoint locations if multiple_write_locations: - await self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - await self._verify_endpoint(client, [L1]) + _verify_endpoint(client, [L1]) @pytest.mark.parametrize('test_data', replace_item_test_data()) async def test_upsert_item(self, test_data): @@ -346,7 +321,7 @@ async def test_upsert_item(self, test_data): for multiple_write_locations in [True, False]: # Client setup and create an item - client, db, container = await self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + client, db, container = await _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # API call: upsert_item body = {'pk': 'pk', 'id': f'doc2-{str(uuid.uuid4())}'} @@ -357,9 +332,9 @@ async def test_upsert_item(self, test_data): # get location from mock_handler if multiple_write_locations: - await self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - await self._verify_endpoint(client, [L1]) + _verify_endpoint(client, [L1]) @pytest.mark.parametrize('test_data', replace_item_test_data()) async def test_create_item(self, test_data): @@ -368,7 +343,7 @@ async def test_create_item(self, test_data): for multiple_write_locations in [True, False]: # Client setup and create an item - client, db, container = await self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + client, db, container = await _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # API call: create_item body = {'pk': 'pk', 'id': f'doc2-{str(uuid.uuid4())}'} 
@@ -376,9 +351,9 @@ async def test_create_item(self, test_data): # get location from mock_handler if multiple_write_locations: - await self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - await self._verify_endpoint(client, [L1]) + _verify_endpoint(client, [L1]) @pytest.mark.parametrize('test_data', patch_item_test_data()) async def test_patch_item(self, test_data): @@ -387,7 +362,7 @@ async def test_patch_item(self, test_data): for multiple_write_locations in [True, False]: # Client setup and create an item - client, db, container = await self._init_container(preferred_locations, client_excluded_locations, + client, db, container = await _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # API call: patch_item @@ -404,9 +379,9 @@ async def test_patch_item(self, test_data): # get location from mock_handler if multiple_write_locations: - await self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - await self._verify_endpoint(client, [L1]) + _verify_endpoint(client, [L1]) @pytest.mark.parametrize('test_data', patch_item_test_data()) async def test_execute_item_batch(self, test_data): @@ -415,7 +390,7 @@ async def test_execute_item_batch(self, test_data): for multiple_write_locations in [True, False]: # Client setup and create an item - client, db, container = await self._init_container(preferred_locations, client_excluded_locations, + client, db, container = await _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # API call: execute_item_batch @@ -433,9 +408,9 @@ async def test_execute_item_batch(self, test_data): # get location from mock_handler if multiple_write_locations: - await self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - await self._verify_endpoint(client, [L1]) + _verify_endpoint(client, [L1]) @pytest.mark.parametrize('test_data', patch_item_test_data()) async def test_delete_item(self, test_data): @@ -444,7 +419,7 @@ async def test_delete_item(self, test_data): for multiple_write_locations in [True, False]: # Client setup - client, db, container = await self._init_container(preferred_locations, client_excluded_locations, multiple_write_locations) + client, db, container = await _init_container(preferred_locations, client_excluded_locations, multiple_write_locations) # create before delete item_id = f'doc2-{str(uuid.uuid4())}' @@ -460,9 +435,9 @@ async def test_delete_item(self, test_data): # Verify endpoint locations if multiple_write_locations: - await self._verify_endpoint(client, expected_locations) + _verify_endpoint(MOCK_HANDLER.messages, client, expected_locations) else: - await self._verify_endpoint(client, [L1]) + _verify_endpoint(client, [L1]) if __name__ == "__main__": unittest.main() From 67d312efff5de5060279a9a471cc502bc585a3f8 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Wed, 9 Apr 2025 19:42:38 -0700 Subject: [PATCH 22/23] test emulator --- sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py | 8 ++++++++ .../azure-cosmos/tests/test_excluded_locations_async.py | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py index 49b7f0553871..5bd852abca77 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py +++ 
b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations.py @@ -441,5 +441,13 @@ def test_delete_item(self, test_data): else: _verify_endpoint(client, [L1]) +@pytest.mark.cosmosEmulator +class TestExcludedLocationsEmulator: + def test_emulator_for_excluded_region(self): + expected_locations = [L1] + actual_locations = [L1] + + assert expected_locations == actual_locations + if __name__ == "__main__": unittest.main() diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py index c2cbc307e2eb..3acc8ef1f938 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py @@ -439,5 +439,13 @@ async def test_delete_item(self, test_data): else: _verify_endpoint(client, [L1]) +@pytest.mark.cosmosEmulator +class TestExcludedLocationsEmulator: + def test_emulator_for_excluded_region(self): + expected_locations = [L1] + actual_locations = [L1] + + assert expected_locations == actual_locations + if __name__ == "__main__": unittest.main() From ea7f18970a9e9d79184ab8039e5b572a95a9c4b8 Mon Sep 17 00:00:00 2001 From: Allen Kim Date: Thu, 10 Apr 2025 01:17:32 -0700 Subject: [PATCH 23/23] Fix import --- sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py index 3acc8ef1f938..ffa5eda22d6f 100644 --- a/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py +++ b/sdk/cosmos/azure-cosmos/tests/test_excluded_locations_async.py @@ -10,7 +10,7 @@ from azure.cosmos.aio import CosmosClient from azure.cosmos.partition_key import PartitionKey -from tests.test_excluded_locations import _verify_endpoint +from test_excluded_locations import _verify_endpoint class MockHandler(logging.Handler): def __init__(self):
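
For reference, a minimal usage sketch of the excluded_locations option that the tests in this series exercise, at both client level and per-request level. This is an illustration only: the account endpoint, key, database/container/item identifiers, and the region names "West US 3" and "East US" are placeholders rather than values taken from the patches above, and actual routing depends on the regions configured for the account.

from azure.cosmos import CosmosClient

# Placeholder account settings -- substitute a real endpoint, key, and regions.
HOST = "https://<cosmos-account>.documents.azure.com:443/"
KEY = "<account-key>"

# Client-level exclusion: requests skip "West US 3" and fall back to the next
# preferred location.
client = CosmosClient(
    HOST,
    KEY,
    preferred_locations=["West US 3", "East US"],
    excluded_locations=["West US 3"],
)
container = client.get_database_client("<database-id>").get_container_client("<container-id>")

# Per-request exclusion: the list passed on the call applies to that request,
# as exercised by the request-level test cases above.
item = container.read_item("<item-id>", "<partition-key-value>", excluded_locations=["East US"])
container.create_item(
    body={"id": "<new-item-id>", "pk": "<partition-key-value>"},
    excluded_locations=["West US 3"],
)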