Merged
Changes from 8 commits
2 changes: 2 additions & 0 deletions .flake8
@@ -12,3 +12,5 @@ exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,.venv,*/cd

# F401 - Unused imports -- this is the only way to have a file-wide rule exception
per-file-ignores =
# We utilize * imports on test files here to dynamically collect test cases
conftest.py: F401,F403
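
For context, the exempted pattern is a conftest.py that wildcard-imports test modules so pytest can collect the cases dynamically; a minimal sketch follows, where the imported module name is purely illustrative and does not come from this PR.

# conftest.py -- illustrative sketch only; "integ_test_cases" is a hypothetical module name.
# flake8 would normally report F403 for the wildcard import and F401 for the imported names
# (they look unused to static analysis); the per-file-ignores entry above silences both.
from integ_test_cases import *  # pytest then discovers the collected test cases at runtime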
@@ -192,19 +192,21 @@ public static class DocParams implements TransformerParams {
public String getTransformerConfigParameterArgPrefix() {
return DOC_CONFIG_PARAMETER_ARG_PREFIX;
}
private static final String DOC_CONFIG_PARAMETER_ARG_PREFIX = "doc-";
private static final String DOC_CONFIG_PARAMETER_ARG_PREFIX = "doc";

@Parameter(
required = false,
names = "--" + DOC_CONFIG_PARAMETER_ARG_PREFIX + "transformer-config-base64",
names = { "--" + DOC_CONFIG_PARAMETER_ARG_PREFIX + "-transformer-config-base64",
"--" + DOC_CONFIG_PARAMETER_ARG_PREFIX + "TransformerConfigBase64" },
arity = 1,
description = "Configuration of doc transformers. The same contents as --doc-transformer-config but " +
"Base64 encoded so that the configuration is easier to pass as a command line parameter.")
private String transformerConfigEncoded;

@Parameter(
required = false,
names = "--" + DOC_CONFIG_PARAMETER_ARG_PREFIX + "transformer-config",
names = { "--" + DOC_CONFIG_PARAMETER_ARG_PREFIX + "-transformer-config",
"--" + DOC_CONFIG_PARAMETER_ARG_PREFIX + "TransformerConfig" },
arity = 1,
description = "Configuration of doc transformers. Either as a string that identifies the "
+ "transformer that should be run (with default settings) or as json to specify options "
@@ -215,7 +217,8 @@ public String getTransformerConfigParameterArgPrefix() {

@Parameter(
required = false,
names = "--" + DOC_CONFIG_PARAMETER_ARG_PREFIX + "transformer-config-file",
names = { "--" + DOC_CONFIG_PARAMETER_ARG_PREFIX + "-transformer-config-file",
"--" + DOC_CONFIG_PARAMETER_ARG_PREFIX + "TransformerConfigFile" },
arity = 1,
description = "Path to the JSON configuration file of doc transformers.")
private String transformerConfigFile;
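In practice, each of the doc transformer options now accepts either spelling on the command line, for example --doc-transformer-config-base64 <encoded> or --docTransformerConfigBase64 <encoded>, and likewise --doc-transformer-config / --docTransformerConfig and --doc-transformer-config-file / --docTransformerConfigFile.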
@@ -89,6 +89,7 @@ def __init__(self, config: Dict, client_options: Optional[ClientOptions] = None)
raise ValueError("Invalid config file for cluster", v.errors)

self.endpoint = config["endpoint"]
self.version = config.get("version", None)
self.allow_insecure = config.get("allow_insecure", False) if self.endpoint.startswith(
"https") else config.get("allow_insecure", True)
if 'no_auth' in config:
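A minimal sketch of a config dict that exercises the new optional field follows; apart from the keys visible in this hunk (endpoint, allow_insecure, no_auth, version), the values are assumptions, and the version string is assumed to follow the ES_x.y / OS_x.y convention introduced elsewhere in this change.

# Hypothetical cluster config -- only the keys shown in this diff are known to exist.
cluster_config = {
    "endpoint": "https://target-cluster:9200",
    "allow_insecure": False,   # defaults to False for https endpoints, True otherwise
    "no_auth": None,           # auth handling is outside this hunk
    "version": "OS_2.x",       # optional; Cluster.version stays None when omitted
}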
@@ -8,10 +8,11 @@
from console_link.models.command_result import CommandResult
from console_link.models.metadata import Metadata
from console_link.cli import Context
from common_operations import (get_document, create_document, create_index, check_doc_counts_match,
EXPECTED_BENCHMARK_DOCS)
from .common_utils import EXPECTED_BENCHMARK_DOCS
from .default_operations import DefaultOperationsLibrary

logger = logging.getLogger(__name__)
ops = DefaultOperationsLibrary()


def preload_data(source_cluster: Cluster, target_cluster: Cluster):
@@ -29,9 +30,9 @@ def preload_data(source_cluster: Cluster, target_cluster: Cluster):
# test_backfill_0001
index_name = f"test_backfill_0001_{pytest.unique_id}"
doc_id = "backfill_0001_doc"
create_index(cluster=source_cluster, index_name=index_name)
create_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id,
expected_status_code=HTTPStatus.CREATED)
ops.create_index(cluster=source_cluster, index_name=index_name)
ops.create_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id,
expected_status_code=HTTPStatus.CREATED)

# test_backfill_0002
run_test_benchmarks(source_cluster)
@@ -85,23 +86,23 @@ def test_backfill_0001_single_document(self):
target_cluster: Cluster = pytest.console_env.target_cluster

# Assert preloaded document exists
get_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id, test_case=self)
ops.get_document(cluster=source_cluster, index_name=index_name, doc_id=doc_id, test_case=self)

# TODO Determine when backfill is completed

get_document(cluster=target_cluster, index_name=index_name, doc_id=doc_id, max_attempts=30, delay=30.0,
test_case=self)
ops.get_document(cluster=target_cluster, index_name=index_name, doc_id=doc_id, max_attempts=30, delay=30.0,
test_case=self)

def test_backfill_0002_sample_benchmarks(self):
source_cluster: Cluster = pytest.console_env.source_cluster
target_cluster: Cluster = pytest.console_env.target_cluster

# Confirm documents on source
check_doc_counts_match(cluster=source_cluster, expected_index_details=EXPECTED_BENCHMARK_DOCS,
test_case=self)
ops.check_doc_counts_match(cluster=source_cluster, expected_index_details=EXPECTED_BENCHMARK_DOCS,
test_case=self)

# TODO Determine when backfill is completed

# Confirm documents on target after backfill
check_doc_counts_match(cluster=target_cluster, expected_index_details=EXPECTED_BENCHMARK_DOCS,
max_attempts=30, delay=30.0, test_case=self)
ops.check_doc_counts_match(cluster=target_cluster, expected_index_details=EXPECTED_BENCHMARK_DOCS,
max_attempts=30, delay=30.0, test_case=self)
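
The retry arguments on the target-side checks (max_attempts=30, delay=30.0) give the backfill up to roughly 15 minutes to surface the expected documents before the assertions fail.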
@@ -0,0 +1,40 @@
import re


class ClusterVersion:
    pattern = re.compile(r"^(ES|OS)_([0-9]+)\.([0-9]+|x|X)$")

    def __init__(self, version_str: str):
        match = self.pattern.match(version_str)
        if not match:
            raise ValueError(f"Invalid version format: {version_str}. Cluster versions must be in format ES_x.y or "
                             f"OS_x.y, where y is a number or 'x' for any minor version.")

        self.cluster_type = match.group(1)
        self.major_version = int(match.group(2))

        minor_version = match.group(3)
        if minor_version.lower() == 'x':
            self.minor_version = 'x'
        else:
            self.minor_version = int(minor_version)

    def __str__(self):
        return f"{self.cluster_type}_{self.major_version}.{self.minor_version}"


ElasticsearchV5_X = ClusterVersion("ES_5.x")
ElasticsearchV6_X = ClusterVersion("ES_6.x")
ElasticsearchV7_X = ClusterVersion("ES_7.x")
OpensearchV1_X = ClusterVersion("OS_1.x")
OpensearchV2_X = ClusterVersion("OS_2.x")


def is_incoming_version_supported(limiting_version: ClusterVersion, incoming_version: ClusterVersion):
    if (limiting_version.cluster_type == incoming_version.cluster_type and
            limiting_version.major_version == incoming_version.major_version):
        if isinstance(limiting_version.minor_version, str):
            return True
        else:
            return limiting_version.minor_version == incoming_version.minor_version
    return False
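
A short usage sketch of the helpers above; the behavior follows directly from this file, though the module path to import these names from is not shown in the diff.

# Assumes ClusterVersion, is_incoming_version_supported, and the *_X constants are in scope.
# A wildcard minor version on the limiting side accepts any minor of the same engine/major.
assert is_incoming_version_supported(ElasticsearchV7_X, ClusterVersion("ES_7.10"))
# When the limiting version pins a minor, the minors must match exactly.
assert not is_incoming_version_supported(ClusterVersion("OS_2.11"), ClusterVersion("OS_2.9"))
# A different engine or major version is never considered supported.
assert not is_incoming_version_supported(OpensearchV2_X, ClusterVersion("ES_7.10"))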

This file was deleted.
