diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/dev.md b/sdk/cosmos/azure-cosmos/tests/workloads/dev.md
new file mode 100644
index 000000000000..3df91a1e78c7
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos/tests/workloads/dev.md
@@ -0,0 +1,41 @@
+## SDK Scale Testing
+This directory contains the scale testing workloads for the SDK. The workloads are designed to test the performance
+and scalability of the SDK under various conditions. There are different types of workloads, and each creates a log
+file when run. The log files are named in the format `--.log`.
+
+### Setup VM
+1. Create a VM in Azure with the following configuration:
+   - 8 vCPUs
+   - 32 GB RAM
+   - Ubuntu
+   - Accelerated networking
+2. Fork and clone this repository
+3. Go to the azure-cosmos folder
+   - `cd azure-sdk-for-python/sdk/cosmos/azure-cosmos`
+4. Install the required packages and create a virtual environment
+   - `sudo apt-get update`
+   - `sudo apt-get install python3-pip`
+   - `sudo apt-get install python3.12-venv`
+   - `python3 -m venv azure-cosmosdb-sdk-environment`
+   - `source azure-cosmosdb-sdk-environment/bin/activate`
+   - `pip install -r dev_requirements.txt`
+5. Check out the branch with the changes to test.
+6. Install azure-cosmos
+   - `pip install .`
+7. Go to the workloads folder
+   - `cd tests/workloads`
+8. Fill out the relevant configs in `workload_configs.py`: key, host, etc.
+9. Install Envoy proxy: https://www.envoyproxy.io/docs/envoy/latest/start/install
+10. Update `envoy_simple_config.yaml` with the correct account info. Replace `<>` with the account name.
+11. Go to the envoy folder and start Envoy
+    - `cd envoy`
+    - `mkdir logs`
+    - `envoy -c envoy_simple_config.yaml --log-level debug --log-path logs/debug.txt`
+12. Run the setup workload to create the database and container and insert data
+    - `python3 initial-setup.py`
+13. Run the scale workloads
+    - `./run_workloads.sh <num_runs>`
+
+### Monitor Run
+- `ps -eaf | grep "python3"` to see the running processes
+- `tail -f <log file>` to see the logs in real time
diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/envoy/envoy_complex_config.yaml b/sdk/cosmos/azure-cosmos/tests/workloads/envoy/envoy_complex_config.yaml
new file mode 100644
index 000000000000..c745c2386b96
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos/tests/workloads/envoy/envoy_complex_config.yaml
@@ -0,0 +1,372 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
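+#
+# Overview of this config: the admin listener on :8080 exposes /ready and /metrics
+# (rewritten to the Envoy admin endpoint on 127.0.0.1:9091), and the main listener
+# on :5100 routes the per-region "account*" Cosmos DB hostnames and www.heise.de to
+# their own clusters ("account" appears to be a placeholder for the account name).
+# The very short route/connect timeouts on the "account" and "account-eastus2euap"
+# routes look intended to force request timeouts and connection failures, while
+# "account-centraluseuap" and "heise" keep relaxed 300s/5s limits.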
+# cspell:disable +static_resources: + listeners: + - name: admin-listener + address: + socket_address: + protocol: TCP + address: "0.0.0.0" + port_value: 8080 + filter_chains: + - filters: + - name: envoy.filters.network.http_connection_manager + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager + proxy_status_config: + set_recommended_response_code: true + use_node_id: true + stat_prefix: ingress_http + route_config: + name: local_route + virtual_hosts: + - name: local_service + domains: + - "*" + routes: + - match: + path: "/ready" + route: + cluster: admin_port_cluster + - match: + path: "/metrics" + route: + cluster: admin_port_cluster + prefix_rewrite: "/stats/prometheus" + - match: + prefix: "/" + direct_response: + status: 404 + http_filters: + - name: envoy.filters.http.router + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router + # Allow passing `x-envoy-expected-rq-timeout-ms` + respect_expected_rq_timeout: true + - name: main-listener + address: + socket_address: + address: 0.0.0.0 + port_value: 5100 + filter_chains: + - filters: + - name: envoy.filters.network.http_connection_manager + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager + access_log: + - name: envoy.access_loggers.stream + typed_config: + "@type": type.googleapis.com/envoy.extensions.access_loggers.stream.v3.StdoutAccessLog + log_format: + json_format: + access_log_type: "%ACCESS_LOG_TYPE%" + bytes_received: "%BYTES_RECEIVED%" + bytes_sent: "%BYTES_SENT%" + connection_termination_details: "%CONNECTION_TERMINATION_DETAILS%" + downstream_transport_failure_reason: "%DOWNSTREAM_TRANSPORT_FAILURE_REASON%" + duration: "%DURATION%" + protocol: "%PROTOCOL%" + req_authority: "%REQ(:AUTHORITY)%" + req_duration: "%REQUEST_DURATION%" + req_line: "%REQ(X-ENVOY-ORIGINAL-METHOD?:METHOD)% %REQ(:SCHEME)%://%REQ(:AUTHORITY)%%REQ(X-ENVOY-ORIGINAL-PATH?:PATH)% %PROTOCOL%" + req_method: "%REQ(:METHOD)%" + req_content_length: "%REQ(CONTENT-LENGTH)%" + req_path: "%REQ(X-ENVOY-ORIGINAL-PATH?:PATH)%" + request_id: "%REQ(X-REQUEST-ID)%" + req_user_agent: "%REQ(USER-AGENT)%" + resp_cache_control: "%RESP(CACHE-CONTROL)%" + resp_code_details: "%RESPONSE_CODE_DETAILS%" + resp_code: "%RESPONSE_CODE%" + resp_content_type: "%RESP(CONTENT-TYPE)%" + resp_flags: "%RESPONSE_FLAGS%" + resp_tx_duration: "%RESPONSE_TX_DURATION%" + resp_upstream_service_time: "%RESP(X-ENVOY-UPSTREAM-SERVICE-TIME)%" + start_time: "%START_TIME%" + upstream_host: "%UPSTREAM_HOST%" + upstream_peer_tls_issuer: "%UPSTREAM_PEER_ISSUER%" + upstream_peer_tls_subject: "%UPSTREAM_PEER_SUBJECT%" + upstream_remote_address: "%UPSTREAM_REMOTE_ADDRESS%" + upstream_request_attempt_count: "%UPSTREAM_REQUEST_ATTEMPT_COUNT%" + upstream_tls_cipher: "%UPSTREAM_TLS_CIPHER%" + upstream_transport_failure_reason: "%UPSTREAM_TRANSPORT_FAILURE_REASON%" + x_datadog_parent_id: "%REQ(X-DATADOG-PARENT-ID)%" + x_datadog_sampling_priority: "%REQ(X-DATADOG-SAMPLING-PRIORITY)%" + x_datadog_trace_id: "%REQ(X-DATADOG-TRACE-ID)%" + stat_prefix: router_http + scheme_header_transformation: + scheme_to_overwrite: https + strip_any_host_port: true + proxy_status_config: + set_recommended_response_code: true + use_node_id: true + common_http_protocol_options: + max_connection_duration: 900s + drain_timeout: 330s + stream_idle_timeout: 30s + route_config: + name: local_route + response_headers_to_add: + - header: + key: 
"via" + value: "%ENVIRONMENT(POD_NAME)%" + virtual_hosts: + - name: local_account + domains: + - account.documents.azure.com + routes: + - match: + prefix: "/" + route: + cluster: account + timeout: 0.01s + retry_policy: + retry_on: "connect-failure,refused-stream,reset" + per_try_timeout: 0.01s + num_retries: 3 + - name: local_account-eastus2euap + domains: + - account-eastus2euap.documents.azure.com + routes: + - match: + prefix: "/" + route: + cluster: account-eastus2euap + timeout: 0.01s + retry_policy: + retry_on: "connect-failure,refused-stream,reset" + per_try_timeout: 0.01s + num_retries: 3 + - name: local_account-centraluseuap + domains: + - account-centraluseuap.documents.azure.com + routes: + - match: + prefix: "/" + route: + cluster: account-centraluseuap + timeout: 300s + retry_policy: + retry_on: "connect-failure,refused-stream,reset" + per_try_timeout: 300s + num_retries: 3 + - name: local_heise + domains: + - www.heise.de + routes: + - match: + prefix: "/" + route: + cluster: heise + timeout: 300s + retry_policy: + retry_on: "connect-failure,refused-stream,reset" + per_try_timeout: 300s + num_retries: 3 + http_filters: + - name: envoy.filters.http.router + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router + clusters: + - name: account-eastus2euap + type: STRICT_DNS + lb_policy: LEAST_REQUEST + dns_lookup_family: V4_ONLY + connect_timeout: 0.005s + load_assignment: + cluster_name: account-eastus2euap + endpoints: + - lb_endpoints: + - endpoint: + address: + socket_address: + protocol: TCP + address: account-eastus2euap.documents.documents.azure.com + port_value: 443 + circuit_breakers: + thresholds: + priority: DEFAULT + max_connections: 10000 + max_pending_requests: 10000 + max_requests: 10000 + retry_budget: + budget_percent: + value: 10.0 + min_retry_concurrency: 10 + track_remaining: true + typed_extension_protocol_options: + envoy.extensions.upstreams.http.v3.HttpProtocolOptions: + "@type": type.googleapis.com/envoy.extensions.upstreams.http.v3.HttpProtocolOptions + common_http_protocol_options: + max_connection_duration: 120s + upstream_http_protocol_options: + auto_sni: true + auto_san_validation: true + auto_config: + http2_protocol_options: + max_concurrent_streams: 3 + initial_stream_window_size: 1048576 + transport_socket: + name: envoy.transport_sockets.tls # required to communicate in HTTPS + typed_config: + "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext + common_tls_context: + alpn_protocols: ["http/1.1"] + validation_context: + trust_chain_verification: ACCEPT_UNTRUSTED + - name: account + type: STRICT_DNS + lb_policy: LEAST_REQUEST + dns_lookup_family: V4_ONLY + connect_timeout: 0.001s + load_assignment: + cluster_name: account + endpoints: + - lb_endpoints: + - endpoint: + address: + socket_address: + protocol: TCP + address: account.documents.documents.azure.com + port_value: 443 + circuit_breakers: + thresholds: + priority: DEFAULT + max_connections: 10000 + max_pending_requests: 10000 + max_requests: 10000 + retry_budget: + budget_percent: + value: 10.0 + min_retry_concurrency: 10 + track_remaining: true + typed_extension_protocol_options: + envoy.extensions.upstreams.http.v3.HttpProtocolOptions: + "@type": type.googleapis.com/envoy.extensions.upstreams.http.v3.HttpProtocolOptions + common_http_protocol_options: + max_connection_duration: 120s + upstream_http_protocol_options: + auto_sni: true + auto_san_validation: true + auto_config: + http2_protocol_options: + 
max_concurrent_streams: 3 + initial_stream_window_size: 1048576 + transport_socket: + name: envoy.transport_sockets.tls # required to communicate in HTTPS + typed_config: + "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext + common_tls_context: + alpn_protocols: ["http/1.1"] + validation_context: + trust_chain_verification: ACCEPT_UNTRUSTED + - name: account-centraluseuap + type: STRICT_DNS + lb_policy: LEAST_REQUEST + dns_lookup_family: V4_ONLY + connect_timeout: 5s + load_assignment: + cluster_name: account-centraluseuap + endpoints: + - lb_endpoints: + - endpoint: + address: + socket_address: + protocol: TCP + address: account-centraluseuap.documents.documents.azure.com + port_value: 443 + circuit_breakers: + thresholds: + priority: DEFAULT + max_connections: 10000 + max_pending_requests: 10000 + max_requests: 10000 + retry_budget: + budget_percent: + value: 10.0 + min_retry_concurrency: 10 + track_remaining: true + typed_extension_protocol_options: + envoy.extensions.upstreams.http.v3.HttpProtocolOptions: + "@type": type.googleapis.com/envoy.extensions.upstreams.http.v3.HttpProtocolOptions + common_http_protocol_options: + max_connection_duration: 120s + upstream_http_protocol_options: + auto_sni: true + auto_san_validation: true + auto_config: + http2_protocol_options: + max_concurrent_streams: 3 + initial_stream_window_size: 1048576 + transport_socket: + name: envoy.transport_sockets.tls # required to communicate in HTTPS + typed_config: + "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext + common_tls_context: + alpn_protocols: ["http/1.1"] + validation_context: + trust_chain_verification: ACCEPT_UNTRUSTED + - name: heise + type: STRICT_DNS + dns_lookup_family: V4_ONLY + lb_policy: LEAST_REQUEST + connect_timeout: 5s + load_assignment: + cluster_name: heise + endpoints: + - lb_endpoints: + - endpoint: + address: + socket_address: + protocol: TCP + address: www.heise.de + port_value: 443 + circuit_breakers: + thresholds: + priority: DEFAULT + max_connections: 10000 + max_pending_requests: 10000 + max_requests: 10000 + retry_budget: + budget_percent: + value: 10.0 + min_retry_concurrency: 10 + track_remaining: true + typed_extension_protocol_options: + envoy.extensions.upstreams.http.v3.HttpProtocolOptions: + "@type": type.googleapis.com/envoy.extensions.upstreams.http.v3.HttpProtocolOptions + common_http_protocol_options: + max_connection_duration: 120s + upstream_http_protocol_options: + auto_sni: true + auto_san_validation: true + auto_config: + http2_protocol_options: + max_concurrent_streams: 3 + initial_stream_window_size: 1048576 + transport_socket: + name: envoy.transport_sockets.tls # required to communicate in HTTPS + typed_config: + "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext + common_tls_context: + alpn_protocols: ["http/1.1"] + validation_context: + trust_chain_verification: ACCEPT_UNTRUSTED + - name: admin_port_cluster + connect_timeout: 0.25s + type: LOGICAL_DNS + load_assignment: + cluster_name: admin_port_cluster + endpoints: + - lb_endpoints: + - endpoint: + address: + socket_address: + protocol: TCP + address: "127.0.0.1" + port_value: 9091 +admin: + address: + socket_address: + address: 127.0.0.1 + port_value: 9091 diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/envoy/envoy_simple_config.yaml b/sdk/cosmos/azure-cosmos/tests/workloads/envoy/envoy_simple_config.yaml new file mode 100644 index 000000000000..669595eb42c0 --- /dev/null +++ 
b/sdk/cosmos/azure-cosmos/tests/workloads/envoy/envoy_simple_config.yaml @@ -0,0 +1,53 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +static_resources: + listeners: + - name: listener_0 + address: + socket_address: + address: 0.0.0.0 + port_value: 5100 + filter_chains: + - filters: + - name: envoy.filters.network.http_connection_manager + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager + stat_prefix: ingress_http + access_log: + - name: envoy.access_loggers.stdout + typed_config: + "@type": type.googleapis.com/envoy.extensions.access_loggers.stream.v3.StdoutAccessLog + http_filters: + - name: envoy.filters.http.router + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router + route_config: + name: local_route + virtual_hosts: + - name: local_service + domains: ["*"] + routes: + - match: + prefix: "/" + route: + host_rewrite_literal: <>.documents.azure.com + cluster: <> + + clusters: + - name: <> + type: LOGICAL_DNS + dns_lookup_family: V4_ONLY + load_assignment: + cluster_name: <> + endpoints: + - lb_endpoints: + - endpoint: + address: + socket_address: + address: <>.documents.azure.com + port_value: 443 + transport_socket: + name: envoy.transport_sockets.tls + typed_config: + "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext + sni: www.envoyproxy.io diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/get-database-account-call.py b/sdk/cosmos/azure-cosmos/tests/workloads/get-database-account-call.py new file mode 100644 index 000000000000..40d6ca2d2811 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/workloads/get-database-account-call.py @@ -0,0 +1,43 @@ +# The MIT License (MIT) +# Copyright (c) Microsoft Corporation. All rights reserved. +import os +import sys + +from workload_utils import create_logger +from workload_configs import COSMOS_URI, COSMOS_KEY, PREFERRED_LOCATIONS + +sys.path.append(r"./") + +from azure.cosmos.aio import CosmosClient as AsyncClient +import asyncio + +import time +from datetime import datetime + + +async def run_workload(client_id: str): + async with AsyncClient(COSMOS_URI, COSMOS_KEY, preferred_locations=PREFERRED_LOCATIONS, + enable_diagnostics_logging=True, logger=logger, + user_agent=client_id + "-" + datetime.now().strftime( + "%Y%m%d-%H%M%S")) as client: + await asyncio.sleep(1) + + while True: + try: + database_account = await client._get_database_account() + logger.info("%s - Database account - writable locations: %s", + datetime.now().strftime("%Y%m%d-%H%M%S"), + database_account.WritableLocations) + logger.info("%s - Database account - readable locations: %s", + datetime.now().strftime("%Y%m%d-%H%M%S"), + database_account.ReadableLocations) + time.sleep(1) + except Exception as e: + logger.error(e) + raise e + + +if __name__ == "__main__": + file_name = os.path.basename(__file__) + prefix, logger = create_logger(file_name) + asyncio.run(run_workload(prefix)) diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/initial-setup.py b/sdk/cosmos/azure-cosmos/tests/workloads/initial-setup.py new file mode 100644 index 000000000000..5c7d9a87e797 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/workloads/initial-setup.py @@ -0,0 +1,44 @@ +# The MIT License (MIT) +# Copyright (c) Microsoft Corporation. All rights reserved. 
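+#
+# Setup workload: creates COSMOS_DATABASE and COSMOS_CONTAINER (partition key path
+# "/" + PARTITION_KEY, offer throughput THROUGHPUT) if they do not exist, then
+# concurrently upserts NUMBER_OF_LOGICAL_PARTITIONS + 1 seed items of the form
+# {"id": "test-<i>", "pk": "pk-<i>"}.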
+import os +import sys + +from azure.cosmos import PartitionKey, ThroughputProperties +from workload_utils import create_logger +from workload_configs import COSMOS_URI, COSMOS_KEY, PREFERRED_LOCATIONS, COSMOS_CONTAINER, COSMOS_DATABASE, \ + NUMBER_OF_LOGICAL_PARTITIONS, PARTITION_KEY, THROUGHPUT + +sys.path.append(r"./") + +from azure.cosmos.aio import CosmosClient as AsyncClient +import asyncio + +from datetime import datetime + +async def write_item_concurrently_initial(container, num_upserts): + tasks = [] + for i in range(num_upserts): + tasks.append(container.upsert_item({"id": "test-" + str(i), "pk": "pk-" + str(i)})) + await asyncio.gather(*tasks) + + +async def run_workload(client_id: str): + async with AsyncClient(COSMOS_URI, COSMOS_KEY, preferred_locations=PREFERRED_LOCATIONS, + enable_diagnostics_logging=True, logger=logger, + user_agent=str(client_id) + "-" + datetime.now().strftime("%Y%m%d-%H%M%S")) as client: + db = await client.create_database_if_not_exists(COSMOS_DATABASE) + cont = await db.create_container_if_not_exists(COSMOS_CONTAINER, PartitionKey("/" + PARTITION_KEY), + offer_throughput=ThroughputProperties(THROUGHPUT)) + await asyncio.sleep(1) + + try: + await write_item_concurrently_initial(cont, NUMBER_OF_LOGICAL_PARTITIONS + 1) # Number of concurrent upserts + except Exception as e: + logger.error(e) + raise e + + +if __name__ == "__main__": + file_name = os.path.basename(__file__) + prefix, logger = create_logger(file_name) + asyncio.run(run_workload(prefix)) diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/r_proxy_workload.py b/sdk/cosmos/azure-cosmos/tests/workloads/r_proxy_workload.py new file mode 100644 index 000000000000..2ca80f7a7592 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/workloads/r_proxy_workload.py @@ -0,0 +1,48 @@ +# The MIT License (MIT) +# Copyright (c) Microsoft Corporation. All rights reserved. 
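+#
+# Read/query workload routed through the Envoy proxy: wraps an aiohttp session
+# (trust_env=True) in an AioHttpTransport, points the async client at
+# COSMOS_PROXY_URI, and loops forever issuing CONCURRENT_REQUESTS point reads and
+# CONCURRENT_QUERIES queries per iteration.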
+import os +import sys + +import aiohttp + +from azure.cosmos import documents +from workload_utils import create_logger, read_item_concurrently, query_items_concurrently +from workload_configs import COSMOS_KEY, PREFERRED_LOCATIONS, CONCURRENT_REQUESTS, COSMOS_PROXY_URI, COSMOS_CONTAINER, \ + COSMOS_DATABASE, CONCURRENT_QUERIES +from workload_configs import USE_MULTIPLE_WRITABLE_LOCATIONS + +sys.path.append(r"/") + +from azure.cosmos.aio import CosmosClient as AsyncClient +from azure.core.pipeline.transport import AioHttpTransport +import asyncio + +from datetime import datetime + +async def run_workload(client_id, client_logger): + async with aiohttp.ClientSession(trust_env=True) as proxied_aio_http_session: + connectionPolicy = documents.ConnectionPolicy() + connectionPolicy.UseMultipleWriteLocations = USE_MULTIPLE_WRITABLE_LOCATIONS + + transport = AioHttpTransport(session=proxied_aio_http_session, session_owner=False) + async with AsyncClient(COSMOS_PROXY_URI, COSMOS_KEY, preferred_locations=PREFERRED_LOCATIONS, + enable_diagnostics_logging=True, logger=client_logger, transport=transport, + user_agent=str(client_id) + "-" + datetime.now().strftime( + "%Y%m%d-%H%M%S"), connection_policy=connectionPolicy) as client: + db = client.get_database_client(COSMOS_DATABASE) + cont = db.get_container_client(COSMOS_CONTAINER) + await asyncio.sleep(1) + + while True: + try: + await read_item_concurrently(cont, CONCURRENT_REQUESTS) + await query_items_concurrently(cont, CONCURRENT_QUERIES) + except Exception as e: + client_logger.info("Exception in application layer") + client_logger.error(e) + + +if __name__ == "__main__": + file_name = os.path.basename(__file__) + prefix, logger = create_logger(file_name) + asyncio.run(run_workload(prefix, logger)) diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_proxy_workload.py b/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_proxy_workload.py new file mode 100644 index 000000000000..f1605cfbda2b --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_proxy_workload.py @@ -0,0 +1,49 @@ +# The MIT License (MIT) +# Copyright (c) Microsoft Corporation. All rights reserved. 
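+#
+# Write/read/query workload routed through the Envoy proxy: same proxied
+# AioHttpTransport setup as r_proxy_workload.py, looping forever over
+# CONCURRENT_REQUESTS upserts, CONCURRENT_REQUESTS point reads and
+# CONCURRENT_QUERIES queries per iteration.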
+import os +import sys + +import aiohttp + +from azure.cosmos import documents +from workload_utils import create_logger, upsert_item_concurrently, read_item_concurrently, query_items_concurrently +from workload_configs import COSMOS_KEY, PREFERRED_LOCATIONS, USE_MULTIPLE_WRITABLE_LOCATIONS, \ + CONCURRENT_REQUESTS, COSMOS_PROXY_URI, COSMOS_CONTAINER, COSMOS_DATABASE, CONCURRENT_QUERIES + +sys.path.append(r"/") + +from azure.cosmos.aio import CosmosClient as AsyncClient +from azure.core.pipeline.transport import AioHttpTransport +import asyncio + +import time +from datetime import datetime + +async def run_workload(client_id, client_logger): + connectionPolicy = documents.ConnectionPolicy() + connectionPolicy.UseMultipleWriteLocations = USE_MULTIPLE_WRITABLE_LOCATIONS + async with aiohttp.ClientSession(trust_env=True) as proxied_aio_http_session: + + transport = AioHttpTransport(session=proxied_aio_http_session, session_owner=False) + async with AsyncClient(COSMOS_PROXY_URI, COSMOS_KEY, preferred_locations=PREFERRED_LOCATIONS, + enable_diagnostics_logging=True, logger=client_logger, transport=transport, + user_agent=str(client_id) + "-" + datetime.now().strftime( + "%Y%m%d-%H%M%S"), connection_policy=connectionPolicy) as client: + db = client.get_database_client(COSMOS_DATABASE) + cont = db.get_container_client(COSMOS_CONTAINER) + time.sleep(1) + + while True: + try: + await upsert_item_concurrently(cont, CONCURRENT_REQUESTS) + await read_item_concurrently(cont, CONCURRENT_REQUESTS) + await query_items_concurrently(cont, CONCURRENT_QUERIES) + except Exception as e: + client_logger.info("Exception in application layer") + client_logger.error(e) + + +if __name__ == "__main__": + file_name = os.path.basename(__file__) + prefix, logger = create_logger(file_name) + asyncio.run(run_workload(prefix, logger)) diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_with_incorrect_client_workload.py b/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_with_incorrect_client_workload.py new file mode 100644 index 000000000000..6c5f66c19d01 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_with_incorrect_client_workload.py @@ -0,0 +1,44 @@ +# The MIT License (MIT) +# Copyright (c) Microsoft Corporation. All rights reserved. 
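+#
+# Write/read/query workload that exercises client misuse: the async CosmosClient is
+# created without "async with" and is never closed, which appears to be the
+# "incorrect client" scenario this workload is meant to cover; the request loop
+# itself matches r_w_q_workload.py.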
+import os +import sys + +from azure.cosmos import documents +from workload_utils import upsert_item_concurrently, read_item_concurrently, query_items_concurrently, create_logger +from workload_configs import (COSMOS_URI, COSMOS_KEY, PREFERRED_LOCATIONS, + USE_MULTIPLE_WRITABLE_LOCATIONS, + CONCURRENT_REQUESTS, COSMOS_DATABASE, COSMOS_CONTAINER, + CONCURRENT_QUERIES) +sys.path.append(r"/") + +from azure.cosmos.aio import CosmosClient as AsyncClient +import asyncio + +from datetime import datetime + +async def run_workload(client_id, client_logger): + connectionPolicy = documents.ConnectionPolicy() + connectionPolicy.UseMultipleWriteLocations = USE_MULTIPLE_WRITABLE_LOCATIONS + client = AsyncClient(COSMOS_URI, COSMOS_KEY, + enable_diagnostics_logging=True, logger=client_logger, + user_agent=str(client_id) + "-" + datetime.now().strftime( + "%Y%m%d-%H%M%S"), preferred_locations=PREFERRED_LOCATIONS, + connection_policy=connectionPolicy) + db = client.get_database_client(COSMOS_DATABASE) + cont = db.get_container_client(COSMOS_CONTAINER) + await asyncio.sleep(1) + + while True: + try: + await upsert_item_concurrently(cont, CONCURRENT_REQUESTS) + await read_item_concurrently(cont, CONCURRENT_REQUESTS) + await query_items_concurrently(cont, CONCURRENT_QUERIES) + except Exception as e: + client_logger.info("Exception in application layer") + client_logger.error(e) + + +if __name__ == "__main__": + file_name = os.path.basename(__file__) + prefix, logger = create_logger(file_name) + asyncio.run(run_workload(prefix, logger)) diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_workload.py b/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_workload.py new file mode 100644 index 000000000000..2665c36e644c --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_workload.py @@ -0,0 +1,42 @@ +# The MIT License (MIT) +# Copyright (c) Microsoft Corporation. All rights reserved. 
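+#
+# Baseline write/read/query workload against the account endpoint: loops forever
+# issuing CONCURRENT_REQUESTS upserts, CONCURRENT_REQUESTS point reads and
+# CONCURRENT_QUERIES queries per iteration with the async CosmosClient.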
+import os +import sys + +from azure.cosmos import documents +from workload_utils import upsert_item_concurrently, read_item_concurrently, query_items_concurrently, create_logger +from workload_configs import (COSMOS_URI, COSMOS_KEY, PREFERRED_LOCATIONS, USE_MULTIPLE_WRITABLE_LOCATIONS, + CONCURRENT_REQUESTS, COSMOS_DATABASE, COSMOS_CONTAINER, CONCURRENT_QUERIES) +sys.path.append(r"/") + +from azure.cosmos.aio import CosmosClient as AsyncClient +import asyncio + +from datetime import datetime + +async def run_workload(client_id, client_logger): + connectionPolicy = documents.ConnectionPolicy() + connectionPolicy.UseMultipleWriteLocations = USE_MULTIPLE_WRITABLE_LOCATIONS + async with AsyncClient(COSMOS_URI, COSMOS_KEY, + enable_diagnostics_logging=True, logger=client_logger, + user_agent=str(client_id) + "-" + datetime.now().strftime( + "%Y%m%d-%H%M%S"), preferred_locations=PREFERRED_LOCATIONS, + connection_policy=connectionPolicy) as client: + db = client.get_database_client(COSMOS_DATABASE) + cont = db.get_container_client(COSMOS_CONTAINER) + await asyncio.sleep(1) + + while True: + try: + await upsert_item_concurrently(cont, CONCURRENT_REQUESTS) + await read_item_concurrently(cont, CONCURRENT_REQUESTS) + await query_items_concurrently(cont, CONCURRENT_QUERIES) + except Exception as e: + client_logger.info("Exception in application layer") + client_logger.error(e) + + +if __name__ == "__main__": + file_name = os.path.basename(__file__) + prefix, logger = create_logger(file_name) + asyncio.run(run_workload(prefix, logger)) diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_workload_sync.py b/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_workload_sync.py new file mode 100644 index 000000000000..e25130388085 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/workloads/r_w_q_workload_sync.py @@ -0,0 +1,68 @@ +# The MIT License (MIT) +# Copyright (c) Microsoft Corporation. All rights reserved. 
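+#
+# Synchronous-client variant of the write/read/query workload: uses the sync
+# CosmosClient and performs CONCURRENT_REQUESTS upserts and point reads plus
+# CONCURRENT_QUERIES single-item queries sequentially on every loop iteration.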
+import os +import sys + +from workload_utils import create_logger, get_random_item +from workload_configs import (COSMOS_URI, COSMOS_KEY, PREFERRED_LOCATIONS, USE_MULTIPLE_WRITABLE_LOCATIONS, + CONCURRENT_REQUESTS, COSMOS_DATABASE, COSMOS_CONTAINER, CONCURRENT_QUERIES, + PARTITION_KEY) + +sys.path.append(r"/") + +from azure.cosmos import CosmosClient, documents + +import time +from datetime import datetime + +def upsert_item(container, num_upserts): + for _ in range(num_upserts): + container.upsert_item(get_random_item(), etag=None, match_condition=None) + + +def read_item(container, num_reads): + for _ in range(num_reads): + item = get_random_item() + container.read_item(item["id"], item[PARTITION_KEY], etag=None, match_condition=None) + + +def query_items(container, num_queries): + for _ in range(num_queries): + perform_query(container) + + +def perform_query(container): + random_item = get_random_item() + results = container.query_items(query="SELECT * FROM c where c.id=@id and c.pk=@pk", + parameters=[{"name": "@id", "value": random_item["id"]}, + {"name": "@pk", "value": random_item["pk"]}], + partition_key=random_item[PARTITION_KEY]) + items = [item for item in results] + + +def run_workload(client_id, client_logger): + connectionPolicy = documents.ConnectionPolicy() + connectionPolicy.UseMultipleWriteLocations = USE_MULTIPLE_WRITABLE_LOCATIONS + with CosmosClient(COSMOS_URI, COSMOS_KEY, + enable_diagnostics_logging=True, logger=client_logger, + user_agent=str(client_id) + "-" + datetime.now().strftime( + "%Y%m%d-%H%M%S"), preferred_locations=PREFERRED_LOCATIONS, + connection_policy=connectionPolicy) as client: + db = client.get_database_client(COSMOS_DATABASE) + cont = db.get_container_client(COSMOS_CONTAINER) + time.sleep(1) + + while True: + try: + upsert_item(cont, CONCURRENT_REQUESTS) + read_item(cont, CONCURRENT_REQUESTS) + query_items(cont, CONCURRENT_QUERIES) + except Exception as e: + client_logger.info("Exception in application layer") + client_logger.error(e) + + +if __name__ == "__main__": + file_name = os.path.basename(__file__) + prefix, logger = create_logger(file_name) + run_workload(prefix, logger) diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/r_workload.py b/sdk/cosmos/azure-cosmos/tests/workloads/r_workload.py new file mode 100644 index 000000000000..d09af5bea52f --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/workloads/r_workload.py @@ -0,0 +1,43 @@ +# The MIT License (MIT) +# Copyright (c) Microsoft Corporation. All rights reserved. 
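+#
+# Read/query-only workload against the account endpoint: loops forever issuing
+# CONCURRENT_REQUESTS point reads and CONCURRENT_QUERIES queries per iteration
+# with the async CosmosClient.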
+import os +import sys + +from azure.cosmos import documents +from workload_utils import create_logger, read_item_concurrently, query_items_concurrently +from workload_configs import (COSMOS_URI, COSMOS_KEY, PREFERRED_LOCATIONS, USE_MULTIPLE_WRITABLE_LOCATIONS, + CONCURRENT_REQUESTS, COSMOS_CONTAINER, COSMOS_DATABASE, CONCURRENT_QUERIES) + +sys.path.append(r"/") + +from azure.cosmos.aio import CosmosClient as AsyncClient +import asyncio + +from datetime import datetime + +async def run_workload(client_id, client_logger): + + connectionPolicy = documents.ConnectionPolicy() + connectionPolicy.UseMultipleWriteLocations = USE_MULTIPLE_WRITABLE_LOCATIONS + async with AsyncClient(COSMOS_URI, COSMOS_KEY, + enable_diagnostics_logging=True, logger=client_logger, + user_agent=str(client_id) + "-" + datetime.now().strftime( + "%Y%m%d-%H%M%S"), preferred_locations=PREFERRED_LOCATIONS, + connection_policy=connectionPolicy) as client: + db = client.get_database_client(COSMOS_DATABASE) + cont = db.get_container_client(COSMOS_CONTAINER) + await asyncio.sleep(1) + + while True: + try: + await read_item_concurrently(cont, CONCURRENT_REQUESTS) + await query_items_concurrently(cont, CONCURRENT_QUERIES) + except Exception as e: + client_logger.info("Exception in application layer") + client_logger.error(e) + + +if __name__ == "__main__": + file_name = os.path.basename(__file__) + prefix, logger = create_logger(file_name) + asyncio.run(run_workload(prefix, logger)) diff --git a/sdk/cosmos/azure-cosmos/tests/workloads/run_workloads.sh b/sdk/cosmos/azure-cosmos/tests/workloads/run_workloads.sh new file mode 100755 index 000000000000..d1f050ec6797 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/tests/workloads/run_workloads.sh @@ -0,0 +1,14 @@ +#!/bin/bash +if [ $# -eq 0 ]; then + echo "Usage: $0 num_runs" + exit 1 +fi + +num_runs=$1 + +# Loop over each Python file in the current directory ending with _workload.py +for file in ./*_workload.py; do + for (( i=0; i