diff --git a/src/ansys/dpf/core/available_result.py b/src/ansys/dpf/core/available_result.py index 4168456ffa..3b08784298 100644 --- a/src/ansys/dpf/core/available_result.py +++ b/src/ansys/dpf/core/available_result.py @@ -3,6 +3,7 @@ =============== """ +from typing import List from warnings import warn from ansys.dpf.core.common import _remove_spaces, _make_as_function_name, natures from enum import Enum, unique @@ -250,6 +251,14 @@ def qualifier_labels(self) -> dict: """Returns a dictionary of available labels for each available qualifier.""" return self._qualifier_labels + @property + def qualifier_combinations(self) -> List[dict]: + """Returns the list of valid qualifier combinations for this result. + + Each combination is a dictionary which can be used for a result request. + """ + return [q.__dict__() for q in self.qualifiers] + _result_properties = { "S": {"location": "ElementalNodal", "scripting_name": "stress"}, diff --git a/src/ansys/dpf/core/examples/downloads.py b/src/ansys/dpf/core/examples/downloads.py index bbdde79534..c2a5cba3d1 100644 --- a/src/ansys/dpf/core/examples/downloads.py +++ b/src/ansys/dpf/core/examples/downloads.py @@ -10,7 +10,7 @@ EXAMPLE_REPO = "https://github.com/ansys/example-data/raw/master/" -def delete_downloads(): +def delete_downloads(verbose=True): """Delete all downloaded examples to free space or update the files""" from ansys.dpf.core import LOCAL_DOWNLOADED_EXAMPLES_PATH, examples @@ -34,7 +34,8 @@ def delete_downloads(): if not os.path.join(root, name) in not_to_remove: try: os.remove(os.path.join(root, name)) - print(f"deleting {os.path.join(root, name)}") + if verbose: + print(f"deleting {os.path.join(root, name)}") except Exception as e: warnings.warn( f"couldn't delete {os.path.join(root, name)} with error:\n {e.args}" @@ -43,7 +44,8 @@ def delete_downloads(): if len(dirs) == 0 and len(files) == 0: try: os.rmdir(root) - print(f"deleting {root}") + if verbose: + print(f"deleting {root}") except Exception as e: warnings.warn(f"couldn't delete {root} with error:\n {e.args}") diff --git a/src/ansys/dpf/core/result_info.py b/src/ansys/dpf/core/result_info.py index d860fe9627..6db4a936d2 100644 --- a/src/ansys/dpf/core/result_info.py +++ b/src/ansys/dpf/core/result_info.py @@ -127,8 +127,23 @@ def __str__(self): ] txt += "{0:^4} {1:^2} {2:<30}".format(*line) + "\n" + if self._server.meet_version("7.0"): + qualifiers_labels = self.available_qualifier_labels + if len(qualifiers_labels) > 0: + txt += "Available qualifier labels:\n" + for label in qualifiers_labels: + label_support = self.qualifier_label_support(label) + names_field = label_support.string_field_support_by_property("names") + label_names = names_field.data_as_list + label_values = names_field.scoping.ids.tolist() + txt += f" - {label}: " + for i, value in enumerate(label_values): + label_values[i] = label_names[i] + f" ({value})" + txt += f"{', '.join(label_values)}\n" + return txt except Exception as e: + raise e from ansys.dpf.core.core import _description return _description(self._internal_obj, self._server) diff --git a/tests/slow/test_remoteworkflow.py b/tests/slow/test_remoteworkflow.py index 86ff1050c8..09a762cab9 100644 --- a/tests/slow/test_remoteworkflow.py +++ b/tests/slow/test_remoteworkflow.py @@ -16,6 +16,7 @@ def test_simple_remote_workflow(simple_bar, local_server): data_sources1 = core.DataSources(simple_bar) wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement(data_sources=data_sources1) average = core.operators.math.norm_fc(op) @@ -23,6 +24,7 
@@ def test_simple_remote_workflow(simple_bar, local_server): wf.set_output_name("out", average.outputs.fields_container) local_wf = core.Workflow() + local_wf.progress_bar = False min_max = ops.min_max.min_max_fc() local_wf.add_operator(min_max) local_wf.set_input_name("in", min_max.inputs.fields_container) @@ -53,6 +55,7 @@ def test_multi_process_remote_workflow(): for i in files: data_sources1 = core.DataSources(files[i]) wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement(data_sources=data_sources1) average = core.operators.math.norm_fc(op) @@ -75,6 +78,7 @@ def test_multi_process_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -95,6 +99,7 @@ def test_multi_process_remote_workflow(): def test_multi_process_connect_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -121,6 +126,7 @@ def test_multi_process_connect_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -141,6 +147,7 @@ def test_multi_process_connect_remote_workflow(): def test_multi_process_connect_operator_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -168,6 +175,7 @@ def test_multi_process_connect_operator_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -188,6 +196,7 @@ def test_multi_process_connect_operator_remote_workflow(): def test_multi_process_getoutput_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -214,6 +223,7 @@ def test_multi_process_getoutput_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -236,6 +246,7 @@ def test_multi_process_getoutput_remote_workflow(): def test_multi_process_chain_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -262,6 +273,7 @@ def test_multi_process_chain_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -293,6 +305,7 @@ def test_multi_process_chain_remote_workflow(): ) def test_remote_workflow_info(local_server): wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -318,6 +331,7 @@ def test_multi_process_local_remote_local_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False average = 
core.operators.math.norm_fc() wf.add_operators([average]) @@ -340,6 +354,7 @@ def test_multi_process_local_remote_local_remote_workflow(): remote_workflow = remote_workflow_prov.get_output(0, core.types.workflow) first_wf = core.Workflow() + first_wf.progress_bar = False op = ops.result.displacement() first_wf.add_operator(op) first_wf.set_input_name("data_sources", op.inputs.data_sources) @@ -351,6 +366,7 @@ def test_multi_process_local_remote_local_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -374,6 +390,7 @@ def test_multi_process_transparent_api_remote_workflow(): for i in files: data_sources1 = core.DataSources(files[i], server=local_servers[i]) wf = core.Workflow(server=local_servers[i]) + wf.progress_bar = False op = ops.result.displacement(data_sources=data_sources1, server=local_servers[i]) average = core.operators.math.norm_fc(op, server=local_servers[i]) @@ -383,6 +400,7 @@ def test_multi_process_transparent_api_remote_workflow(): workflows.append(wf) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -406,6 +424,7 @@ def test_multi_process_with_names_transparent_api_remote_workflow(): for i in files: data_sources1 = core.DataSources(files[i], server=local_servers[i]) wf = core.Workflow(server=local_servers[i]) + wf.progress_bar = False op = ops.result.displacement(data_sources=data_sources1, server=local_servers[i]) average = core.operators.math.norm_fc(op, server=local_servers[i]) @@ -415,6 +434,7 @@ def test_multi_process_with_names_transparent_api_remote_workflow(): workflows.append(wf) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -437,6 +457,7 @@ def test_multi_process_transparent_api_connect_local_datasources_remote_workflow workflows = [] for i in files: wf = core.Workflow(server=local_servers[i]) + wf.progress_bar = False op = ops.result.displacement(server=local_servers[i]) average = core.operators.math.norm_fc(op, server=local_servers[i]) @@ -446,6 +467,7 @@ def test_multi_process_transparent_api_connect_local_datasources_remote_workflow workflows.append(wf) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -470,6 +492,7 @@ def test_multi_process_transparent_api_connect_local_op_remote_workflow(): workflows = [] for i in files: wf = core.Workflow(server=local_servers[i]) + wf.progress_bar = False op = ops.result.displacement(server=local_servers[i]) average = core.operators.math.norm_fc(op, server=local_servers[i]) @@ -479,6 +502,7 @@ def test_multi_process_transparent_api_connect_local_op_remote_workflow(): workflows.append(wf) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -502,6 +526,7 @@ def test_multi_process_transparent_api_connect_local_op_remote_workflow(): def test_multi_process_transparent_api_create_on_local_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = 
core.operators.math.norm_fc(op) @@ -510,6 +535,7 @@ def test_multi_process_transparent_api_create_on_local_remote_workflow(): wf.set_input_name("ds", op.inputs.data_sources) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -533,6 +559,7 @@ def test_multi_process_transparent_api_create_on_local_remote_workflow(): def test_multi_process_transparent_api_create_on_local_remote_ith_address_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) diff --git a/tests/test_animator.py b/tests/test_animator.py index 95b0d99f5a..52fe305898 100644 --- a/tests/test_animator.py +++ b/tests/test_animator.py @@ -69,6 +69,7 @@ def test_animator_animate(displacement_fields): loop_over_field.unit = frequencies.unit wf = Workflow() + wf.progress_bar = False extract_field_op = dpf.operators.utility.extract_field(displacement_fields) wf.set_input_name("loop_over", extract_field_op.inputs.indices) wf.set_output_name("to_render", extract_field_op.outputs.field) @@ -85,6 +86,7 @@ def test_animator_animate_raise_wrong_scale_factor(remove_gifs, displacement_fie loop_over_field.unit = frequencies.unit wf = Workflow() + wf.progress_bar = False extract_field_op = dpf.operators.utility.extract_field(displacement_fields) wf.set_input_name("loop_over", extract_field_op.inputs.indices) wf.set_output_name("to_render", extract_field_op.outputs.field) diff --git a/tests/test_cff.py b/tests/test_cff.py index 80cc641fcd..860d6b7b5b 100644 --- a/tests/test_cff.py +++ b/tests/test_cff.py @@ -22,7 +22,7 @@ def test_cff_model(server_type, fluent_multi_species): ) def test_results_cfx(cfx_heating_coil, server_type): model = dpf.Model(cfx_heating_coil(server=server_type), server=server_type) - print(model) + # print(model) result_names = [ "specific_heat", "epsilon", @@ -55,7 +55,7 @@ def test_results_cfx(cfx_heating_coil, server_type): ) def test_results_fluent(fluent_mixing_elbow_steady_state, server_type): model = dpf.Model(fluent_mixing_elbow_steady_state(server=server_type), server=server_type) - print(model) + # print(model) result_names = [ "epsilon", "enthalpy", diff --git a/tests/test_codegeneration.py b/tests/test_codegeneration.py index a5c62b0ea9..9882edf75b 100644 --- a/tests/test_codegeneration.py +++ b/tests/test_codegeneration.py @@ -286,7 +286,7 @@ def test_generated_operator_set_config(): inpt2.unit = "m" conf = op.math.add.default_config() - print(conf) + # print(conf) conf.set_work_by_index_option(True) op1 = op.math.add(config=conf) op1.inputs.fieldA.connect(inpt) diff --git a/tests/test_custom_type_field.py b/tests/test_custom_type_field.py index f349b50b9a..ce9f6821d0 100644 --- a/tests/test_custom_type_field.py +++ b/tests/test_custom_type_field.py @@ -27,7 +27,7 @@ def test_set_get_data_custom_type_field(server_type): data[i] = np.byte(b"2") field.data = data assert np.allclose(field.data, data) - print(field.data) + # print(field.data) @conftest.raises_for_servers_version_under("5.0") @@ -96,7 +96,7 @@ def test_set_get_field_def_custom_type_field(server_type): copy = field.field_definition - print(copy.dimensionality) + # print(copy.dimensionality) assert copy.dimensionality == dpf.core.Dimensionality([3], dpf.core.natures.vector) assert copy.location == core.locations.elemental assert copy.name == "thing" diff --git a/tests/test_cyclic_support.py 
b/tests/test_cyclic_support.py index 5a90d57aa2..ed14d07d32 100644 --- a/tests/test_cyclic_support.py +++ b/tests/test_cyclic_support.py @@ -117,6 +117,7 @@ def test_cyc_support_from_to_workflow(cyclic_lin_rst, server_type): cyc_support = result_info.cyclic_support op = dpf.operators.metadata.cyclic_mesh_expansion(server=server_type) wf = dpf.Workflow(server=server_type) + wf.progress_bar = False wf.set_input_name("sup", op.inputs.cyclic_support) wf.set_output_name("sup", op.outputs.cyclic_support) wf.connect("sup", cyc_support) diff --git a/tests/test_elements.py b/tests/test_elements.py index e59cd6f52b..4395cca9d2 100644 --- a/tests/test_elements.py +++ b/tests/test_elements.py @@ -136,7 +136,7 @@ def test_no_element_descriptor(): # descriptor = dpf.element_types.descriptor(89) # assert not descriptor descriptor = dpf.element_types.descriptor(dpf.element_types.General) - print(descriptor) + # print(descriptor) unknown_shape = "unknown_shape" assert descriptor.shape == unknown_shape assert dpf.element_types.descriptor(dpf.element_types.General).shape == unknown_shape diff --git a/tests/test_examples.py b/tests/test_examples.py index 0098ef13f1..493dd87ff2 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -136,7 +136,7 @@ def test_find_examples(example, server_type_remote_process): def test_delete_downloaded_files(): path = examples.download_multi_stage_cyclic_result(return_local_path=True) assert os.path.exists(path) - examples.delete_downloads() + examples.delete_downloads(verbose=False) assert not os.path.exists(path) path = examples.download_multi_stage_cyclic_result(return_local_path=True) assert os.path.exists(path) diff --git a/tests/test_factories.py b/tests/test_factories.py index 087b52923f..def4cbcc6a 100644 --- a/tests/test_factories.py +++ b/tests/test_factories.py @@ -271,7 +271,7 @@ def test_elemental_scoping(): def test_named_selection_scoping(model_with_ns): model = Model(model_with_ns) - print(model.metadata.available_named_selections) + # print(model.metadata.available_named_selections) scop = mesh_scoping_factory.named_selection_scoping("SELECTION", model) assert scop is not None assert len(scop.ids) != 0 diff --git a/tests/test_geometry.py b/tests/test_geometry.py index 83d5853fb8..2d732c3da7 100644 --- a/tests/test_geometry.py +++ b/tests/test_geometry.py @@ -28,7 +28,7 @@ def test_create_points(): points = rng.random((n_points, 3)) points = create_points(points) points.plot() - print(points) + # print(points) assert points.dimension == 3 assert len(points) == points.n_points == n_points @@ -57,7 +57,7 @@ def test_create_line_from_points(points_param): info += f"Starting point: {np.array(points[0])}\n" info += f"Ending point: {np.array(points[1])}\n" info += f"Line discretized with {line.n_points} points\n" - assert print(line) == print(info) + assert str(line) == str(info) assert line.length == np.linalg.norm(points) diff = np.array(points[1]) - np.array(points[0]) assert all(line.direction) == all(diff / np.linalg.norm(diff)) @@ -297,7 +297,7 @@ def test_plane_discretization(component): info_discretization += f" Height (y-dir): {height}\n" info_discretization += f" Num cells x-dir: {n_cells_x}\n" info_discretization += f" Num cells y-dir: {n_cells_y}\n" - assert print(plane) == print(info_discretization) + assert str(plane) == str(info_discretization) plane_mapping_data = [ diff --git a/tests/test_launcher.py b/tests/test_launcher.py index 21900fc18a..8299999736 100644 --- a/tests/test_launcher.py +++ b/tests/test_launcher.py @@ -184,8 +184,8 @@ 
def test_launch_server_full_path(self, server_config): else: path = os.path.join(ansys_path, "aisol", "bin", "linx64") - print("trying to launch on ", path) - print(os.listdir(path)) + # print("trying to launch on ", path) + # print(os.listdir(path)) server = core.start_local_server(as_global=False, ansys_path=path, config=server_config) assert "server_port" in server.info diff --git a/tests/test_operator.py b/tests/test_operator.py index 90e2ac6d9c..b83597844f 100644 --- a/tests/test_operator.py +++ b/tests/test_operator.py @@ -90,7 +90,7 @@ def test_connect_bool_operator(server_type): def test_print_operator(): op = dpf.core.Operator("S") - print(op) + assert str(op) def test_connect_get_out_all_types_operator(server_type): diff --git a/tests/test_pathsupport.py b/tests/test_pathsupport.py index 19fd16d8d2..d862f9745b 100644 --- a/tests/test_pathsupport.py +++ b/tests/test_pathsupport.py @@ -27,7 +27,7 @@ def test_print_data_sources_path(allkindofcomplexity): path = Path(allkindofcomplexity) data_sources = dpf.core.DataSources() data_sources.set_result_file_path(path) - print(data_sources) + assert str(data_sources) assert data_sources.result_key == "rst" assert len(data_sources.result_files) == 1 assert os.path.normpath(data_sources.result_files[0]) == os.path.normpath(allkindofcomplexity) diff --git a/tests/test_propertyfield.py b/tests/test_propertyfield.py index b2e85eb4bc..16dc15b23c 100644 --- a/tests/test_propertyfield.py +++ b/tests/test_propertyfield.py @@ -132,6 +132,7 @@ def test_getoutput_property_field_workflow(simple_bar): op.inputs.mesh.connect(mesh) wf = dpf.core.Workflow() + wf.progress_bar = False wf.add_operator(op) wf.set_output_name("field_out", op, 3) @@ -145,6 +146,7 @@ def test_connect_property_field_workflow(): op = dpf.core.operators.utility.forward() wf = dpf.core.Workflow() + wf.progress_bar = False wf.add_operator(op) wf.set_input_name("field_in", op, 0) wf.connect("field_in", f_vec) @@ -195,6 +197,7 @@ def test_mutable_data_property_field(server_clayer, simple_bar): op.inputs.mesh.connect(mesh) wf = dpf.core.Workflow(server=server_clayer) + wf.progress_bar = False wf.add_operator(op) wf.set_output_name("field_out", op, 3) @@ -244,7 +247,7 @@ def test_print_property_field(server_type): scop = core.Scoping(ids=list_ids, location=locations.nodal, server=server_type) pfield.scoping = scop pfield.data = [1, 2, 4, 6, 7] - print(pfield) + # print(pfield) assert "Property Field" in str(pfield) assert "5" in str(pfield) assert "Nodal" in str(pfield) diff --git a/tests/test_python_plugins.py b/tests/test_python_plugins.py index 602ac75cbe..714145cbe6 100644 --- a/tests/test_python_plugins.py +++ b/tests/test_python_plugins.py @@ -183,6 +183,7 @@ def test_data_sources(server_type_remote_process, testfiles_dir): def test_workflow(server_type_remote_process, testfiles_dir): load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir) f = dpf.Workflow(server=server_type_remote_process) + f.progress_bar = False op = dpf.Operator("custom_forward_workflow", server=server_type_remote_process) op.connect(0, f) assert op.get_output(0, dpf.types.workflow) is not None diff --git a/tests/test_remote_workflow.py b/tests/test_remote_workflow.py index 25b1d01466..929fd206ca 100644 --- a/tests/test_remote_workflow.py +++ b/tests/test_remote_workflow.py @@ -19,6 +19,7 @@ def test_simple_remote_workflow(simple_bar, local_server): data_sources1 = core.DataSources(simple_bar) wf = core.Workflow() + wf.progress_bar = False op = 
ops.result.displacement(data_sources=data_sources1) average = core.operators.math.norm_fc(op) @@ -26,6 +27,7 @@ def test_simple_remote_workflow(simple_bar, local_server): wf.set_output_name("out", average.outputs.fields_container) local_wf = core.Workflow() + local_wf.progress_bar = False min_max = ops.min_max.min_max_fc() local_wf.add_operator(min_max) local_wf.set_input_name("in", min_max.inputs.fields_container) @@ -58,6 +60,7 @@ def test_multi_process_remote_workflow(): for i in files: data_sources1 = core.DataSources(files[i]) wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement(data_sources=data_sources1) average = core.operators.math.norm_fc(op) @@ -80,6 +83,7 @@ def test_multi_process_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -102,6 +106,7 @@ def test_multi_process_remote_workflow(): def test_multi_process_connect_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -128,6 +133,7 @@ def test_multi_process_connect_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -150,6 +156,7 @@ def test_multi_process_connect_remote_workflow(): def test_multi_process_connect_operator_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -177,6 +184,7 @@ def test_multi_process_connect_operator_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -199,6 +207,7 @@ def test_multi_process_connect_operator_remote_workflow(): def test_multi_process_getoutput_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -225,6 +234,7 @@ def test_multi_process_getoutput_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -248,6 +258,7 @@ def test_multi_process_getoutput_remote_workflow(): def test_multi_process_chain_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -274,6 +285,7 @@ def test_multi_process_chain_remote_workflow(): workflows.append(remote_workflow) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -307,6 +319,7 @@ def test_multi_process_chain_remote_workflow(): ) def test_remote_workflow_info(local_server): wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -335,6 +348,7 @@ def 
test_multi_process_local_remote_local_remote_workflow(server_type_remote_pro files = examples.download_distributed_files() wf = core.Workflow(server=server_type_remote_process) + wf.progress_bar = False average = core.operators.math.norm_fc(server=server_type_remote_process) wf.add_operators([average]) @@ -359,6 +373,7 @@ def test_multi_process_local_remote_local_remote_workflow(server_type_remote_pro remote_workflow = remote_workflow_prov.get_output(0, core.types.workflow) first_wf = core.Workflow(server=server_type_remote_process) + first_wf.progress_bar = False op = ops.result.displacement(server=server_type_remote_process) first_wf.add_operator(op) first_wf.set_input_name("data_sources", op.inputs.data_sources) @@ -370,6 +385,7 @@ def test_multi_process_local_remote_local_remote_workflow(server_type_remote_pro workflows.append(remote_workflow) local_wf = core.Workflow(server=server_type_remote_process) + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers(server=server_type_remote_process) min_max = ops.min_max.min_max_fc(merge, server=server_type_remote_process) local_wf.add_operator(merge) @@ -392,6 +408,7 @@ def test_multi_process_transparent_api_remote_workflow(): for i in files: data_sources1 = core.DataSources(files[i], server=local_servers[i]) wf = core.Workflow(server=local_servers[i]) + wf.progress_bar = False op = ops.result.displacement(data_sources=data_sources1, server=local_servers[i]) average = core.operators.math.norm_fc(op, server=local_servers[i]) @@ -401,6 +418,7 @@ def test_multi_process_transparent_api_remote_workflow(): workflows.append(wf) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -423,6 +441,7 @@ def test_multi_process_with_names_transparent_api_remote_workflow(): for i in files: data_sources1 = core.DataSources(files[i], server=local_servers[i]) wf = core.Workflow(server=local_servers[i]) + wf.progress_bar = False op = ops.result.displacement(data_sources=data_sources1, server=local_servers[i]) average = core.operators.math.norm_fc(op, server=local_servers[i]) @@ -432,6 +451,7 @@ def test_multi_process_with_names_transparent_api_remote_workflow(): workflows.append(wf) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -456,6 +476,7 @@ def test_multi_process_transparent_api_connect_local_datasources_remote_workflow workflows = [] for i in files: wf = core.Workflow(server=local_servers[i]) + wf.progress_bar = False op = ops.result.displacement(server=local_servers[i]) average = core.operators.math.norm_fc(op, server=local_servers[i]) @@ -465,6 +486,7 @@ def test_multi_process_transparent_api_connect_local_datasources_remote_workflow workflows.append(wf) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -491,6 +513,7 @@ def test_multi_process_transparent_api_connect_local_op_remote_workflow(): workflows = [] for i in files: wf = core.Workflow(server=local_servers[i]) + wf.progress_bar = False op = ops.result.displacement(server=local_servers[i]) average = core.operators.math.norm_fc(op, server=local_servers[i]) @@ -500,6 +523,7 @@ def test_multi_process_transparent_api_connect_local_op_remote_workflow(): workflows.append(wf) local_wf = core.Workflow() + 
local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -526,6 +550,7 @@ def test_multi_process_transparent_api_connect_local_op_remote_workflow(): def test_multi_process_transparent_api_create_on_local_remote_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -534,6 +559,7 @@ def test_multi_process_transparent_api_create_on_local_remote_workflow(): wf.set_input_name("ds", op.inputs.data_sources) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) @@ -556,6 +582,7 @@ def test_multi_process_transparent_api_create_on_local_remote_workflow(): def test_multi_process_transparent_api_create_on_local_remote_ith_address_workflow(): files = examples.download_distributed_files() wf = core.Workflow() + wf.progress_bar = False op = ops.result.displacement() average = core.operators.math.norm_fc(op) @@ -564,6 +591,7 @@ def test_multi_process_transparent_api_create_on_local_remote_ith_address_workfl wf.set_input_name("ds", op.inputs.data_sources) local_wf = core.Workflow() + local_wf.progress_bar = False merge = ops.utility.merge_fields_containers() min_max = ops.min_max.min_max_fc(merge) local_wf.add_operator(merge) diff --git a/tests/test_resultinfo.py b/tests/test_resultinfo.py index acc9b48a98..4d2dff30ba 100644 --- a/tests/test_resultinfo.py +++ b/tests/test_resultinfo.py @@ -1,5 +1,4 @@ import pytest -import platform from ansys import dpf from ansys.dpf.core import Model @@ -82,12 +81,11 @@ def test_repr_available_results_list(model): assert dpf.core.result_info.available_result.AvailableResult.__name__ in str(ar) -@pytest.mark.skipif(platform.system() == "Linux", reason="CFF not available for Linux InProcess.") @pytest.mark.skipif( not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0, reason="Available with CFF starting 7.0" ) -def test_print_available_result_with_qualifiers(cfx_heating_coil): - model = Model(cfx_heating_coil()) +def test_print_available_result_with_qualifiers(cfx_heating_coil, server_type): + model = Model(cfx_heating_coil(server=server_type), server=server_type) ref = """DPF Result ---------- specific_heat @@ -100,10 +98,40 @@ def test_print_available_result_with_qualifiers(cfx_heating_coil): Available qualifier labels:""" # noqa: E501 ref2 = "'phase': 2" ref3 = "'zone': 5" - got = str(model.metadata.result_info.available_results[0]) + ar = model.metadata.result_info.available_results[0] + got = str(ar) assert ref in got assert ref2 in got assert ref3 in got + assert len(ar.qualifier_combinations) == 20 + + +@pytest.mark.skipif( + not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0, reason="Available with CFF starting 7.0" +) +def test_print_result_info_with_qualifiers(cfx_heating_coil, server_type): + model = Model(cfx_heating_coil(server=server_type), server=server_type) + ref = """Static analysis +Unit system: SI: m, kg, N, s, V, A, K +Physics Type: Fluid +Available results: + - specific_heat: Nodal Specific Heat + - epsilon: Nodal Epsilon + - enthalpy: Nodal Enthalpy + - turbulent_kinetic_energy: Nodal Turbulent Kinetic Energy + - thermal_conductivity: Nodal Thermal Conductivity + - dynamic_viscosity: Nodal Dynamic Viscosity + - turbulent_viscosity: Nodal Turbulent Viscosity + - static_pressure: Nodal Static Pressure + - total_pressure: Nodal 
Total Pressure + - density: Nodal Density + - entropy: Nodal Entropy + - wall_shear_stress: Nodal Wall Shear Stress + - temperature: Nodal Temperature + - total_temperature: Nodal Total Temperature + - velocity: Nodal Velocity +Available qualifier labels:""" # noqa + assert ref in str(model.metadata.result_info) @pytest.mark.skipif(True, reason="Used to test memory leaks") diff --git a/tests/test_scoping.py b/tests/test_scoping.py index 63ba0b82da..29dfa72bf6 100644 --- a/tests/test_scoping.py +++ b/tests/test_scoping.py @@ -115,7 +115,7 @@ def test_print_scoping(): scop = Scoping() ids = [1, 2, 3, 5, 8, 9, 10] scop.ids = ids - print(scop) + assert str(scop) def test_documentation_string_on_scoping(server_type): diff --git a/tests/test_server.py b/tests/test_server.py index 8fb5347cbf..97a736e683 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -73,7 +73,7 @@ def reset_server_config(): def test__global_server(self, server_config): set_server_configuration(server_config) - print(dpf.core.SERVER_CONFIGURATION) + # print(dpf.core.SERVER_CONFIGURATION) shutdown_all_session_servers() _global_server() assert has_local_server() @@ -84,7 +84,7 @@ def test_set_server_configuration(self, server_config): def test_start_local_server(self, server_config): set_server_configuration(server_config) - print(dpf.core.SERVER_CONFIGURATION) + # print(dpf.core.SERVER_CONFIGURATION) start_local_server(timeout=20) assert has_local_server() shutdown_all_session_servers() @@ -98,7 +98,7 @@ def test_start_local_server_with_config(self, server_config): def test_shutdown_all_session_servers(self, server_config): set_server_configuration(server_config) - print(dpf.core.SERVER_CONFIGURATION) + # print(dpf.core.SERVER_CONFIGURATION) start_local_server(timeout=10.0) shutdown_all_session_servers() assert not has_local_server() diff --git a/tests/test_session.py b/tests/test_session.py index d7fbb3ecba..c272592b89 100644 --- a/tests/test_session.py +++ b/tests/test_session.py @@ -35,6 +35,7 @@ def test_logging(tmpdir, server_type): server_type.session.handle_events_with_file_logger(log_path, 2) wf = core.Workflow(server=server_type) + wf.progress_bar = False model = core.Model(result_file, server=server_type) stress = model.results.stress() to_nodal = core.operators.averaging.to_nodal_fc(stress, server=server_type) @@ -51,6 +52,7 @@ def test_logging(tmpdir, server_type): file_size = os.path.getsize(download_log_path) wf = core.Workflow(server=server_type) + wf.progress_bar = False model = core.Model(result_file, server=server_type) stress = model.results.stress() to_nodal = core.operators.averaging.to_nodal_fc(stress, server=server_type) @@ -74,6 +76,7 @@ def test_logging_remote(tmpdir, server_type_remote_process): server_type_remote_process.session.start_emitting_rpc_log() wf = core.Workflow(server=server_type_remote_process) + wf.progress_bar = False model = core.Model(result_file, server=server_type_remote_process) stress = model.results.stress() to_nodal = core.operators.averaging.to_nodal_fc(stress, server=server_type_remote_process) @@ -90,6 +93,7 @@ def test_logging_remote(tmpdir, server_type_remote_process): file_size = os.path.getsize(download_log_path) wf = core.Workflow(server=server_type_remote_process) + wf.progress_bar = False model = core.Model(result_file, server=server_type_remote_process) stress = model.results.stress() to_nodal = core.operators.averaging.to_nodal_fc(stress, server=server_type_remote_process) diff --git
a/tests/test_stringfield.py b/tests/test_stringfield.py index 819779af1a..8cb6e6df79 100644 --- a/tests/test_stringfield.py +++ b/tests/test_stringfield.py @@ -94,19 +94,19 @@ def test_print_string_vector(server_type): data.append("bla") field.data = data d = field.data - print(d) + # print(d) assert "['bla', 'bla', 'bla'" in d.__str__() data = [] for _ in range(0, 2): data.append("bla") field.data = data d = field.data - print(d) + # print(d) assert "['bla', 'bla']" in d.__str__() data = [] field.data = data d = field.data - print(d) + # print(d) d.__str__() diff --git a/tests/test_timefreqsupport.py b/tests/test_timefreqsupport.py index 9472f447f6..2b1cb3100f 100644 --- a/tests/test_timefreqsupport.py +++ b/tests/test_timefreqsupport.py @@ -285,6 +285,7 @@ def test_workflow_connect_get_output_time_freq_support(velocity_acceleration): model = Model(velocity_acceleration) tf = model.metadata.time_freq_support wf = dpf.core.Workflow() + wf.progress_bar = False op = dpf.core.operators.utility.forward() wf.set_input_name("tf", op, 0) wf.set_output_name("tf", op, 0) diff --git a/tests/test_workflow.py b/tests/test_workflow.py index 7ba05541b5..ac93bfa996 100644 --- a/tests/test_workflow.py +++ b/tests/test_workflow.py @@ -14,6 +14,7 @@ def test_create_workflow(server_type): def test_connect_field_workflow(server_type): wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False op = dpf.core.Operator("min_max", server=server_type) inpt = dpf.core.Field(nentities=3, server=server_type) data = [1, 2, 3, 4, 5, 6, 7, 8, 9] @@ -33,6 +34,7 @@ def test_connect_field_workflow(server_type): assert np.allclose(f_out.data, [7.0, 8.0, 9.0]) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.set_input_name("field", op.inputs.field) wf.set_output_name("min", op.outputs.field_min) wf.set_output_name("max", op.outputs.field_max) @@ -45,6 +47,7 @@ def test_connect_field_workflow(server_type): def test_connect_list_workflow(velocity_acceleration, server_type): wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False model = dpf.core.Model(velocity_acceleration, server=server_type) op = model.operator("U") wf.add_operator(op) @@ -63,6 +66,7 @@ def test_connect_list_workflow(velocity_acceleration, server_type): def test_connect_fieldscontainer_workflow(server_type): wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False op = dpf.core.Operator("min_max_fc", server=server_type) wf.add_operator(op) fc = dpf.core.FieldsContainer(server=server_type) @@ -77,6 +81,7 @@ def test_connect_fieldscontainer_workflow(server_type): fc.add_field(mscop, field) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.set_input_name("fields_container", op, 0) wf.set_output_name("field", op, 0) wf.connect("fields_container", fc) @@ -86,6 +91,7 @@ def test_connect_fieldscontainer_workflow(server_type): def test_connect_fieldscontainer_2_workflow(server_type): wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False op = dpf.core.Operator("min_max_fc", server=server_type) wf.add_operator(op) fc = dpf.core.FieldsContainer(server=server_type) @@ -100,6 +106,7 @@ def test_connect_fieldscontainer_2_workflow(server_type): fc.add_field(mscop, field) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.set_input_name("fields_container", op.inputs.fields_container) wf.set_output_name("field", op.outputs.field_min) wf.connect("fields_container", fc) @@ -110,11 +117,13 @@ def test_connect_fieldscontainer_2_workflow(server_type): def 
test_connect_bool_workflow(server_type): op = dpf.core.Operator("S", server=server_type) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operator(op) wf.set_input_name("bool", op, 5) wf.connect("bool", True) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operator(op) wf.set_input_name("bool", op.inputs.bool_rotate_to_global) wf.connect("bool", True) @@ -133,6 +142,7 @@ def test_connect_scoping_workflow(server_type): scop2.ids = list(range(1, 5)) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operator(op) wf.set_input_name("field", op, 0) wf.connect("field", field) @@ -157,6 +167,7 @@ def test_connect_scoping_2_workflow(server_type): scop2.ids = list(range(1, 5)) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operator(op) wf.set_input_name("field", op.inputs.fields) wf.connect("field", field) @@ -174,6 +185,7 @@ def test_connect_datasources_workflow(fields_container_csv, server_type): data_sources.set_result_file_path(fields_container_csv) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operator(op) wf.set_input_name("data_sources", op, 4) wf.connect("data_sources", data_sources) @@ -183,6 +195,7 @@ def test_connect_datasources_workflow(fields_container_csv, server_type): assert len(f_out.get_available_ids_for_label()) == 4 wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operator(op) wf.set_input_name("data_sources", op.inputs.data_sources) wf.connect("data_sources", data_sources) @@ -204,6 +217,7 @@ def test_connect_operator_workflow(server_type): op2 = dpf.core.Operator("component_selector", server=server_type) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operator(op2) wf.set_input_name("fields_container", op2, 0) wf.set_input_name("comp", op2, 1) @@ -227,6 +241,7 @@ def test_connect_operator_2_workflow(server_type): op2 = dpf.core.Operator("component_selector", server=server_type) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operator(op2) wf.set_input_name("field", op2.inputs.field) wf.set_input_name("comp", op2.inputs.component_number) @@ -250,6 +265,7 @@ def test_output_mesh_workflow(cyclic_lin_rst, cyclic_ds, server_type): expand = model.operator("cyclic_expansion") wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operators([support, expand]) wf.set_input_name("support", expand.inputs.cyclic_support) wf.set_input_name("fields", expand.inputs.fields_container) @@ -284,6 +300,7 @@ def test_outputs_bool_workflow(server_type): op = dpf.core.Operator("AreFieldsIdentical", server=server_type) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operators([op]) wf.set_input_name("fieldA", op.inputs.fieldA) wf.set_input_name("fieldB", op.inputs.fieldB) @@ -299,6 +316,7 @@ def test_outputs_bool_workflow(server_type): def test_connect_get_output_int_list_workflow(server_type): d = list(range(0, 1000000)) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False op = dpf.core.operators.utility.forward(d, server=server_type) wf.add_operators([op]) wf.set_input_name("in", op, 0) @@ -311,6 +329,7 @@ def test_connect_get_output_int_list_workflow(server_type): def test_connect_get_output_double_list_workflow(server_type): d = list(np.ones(500000)) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False op = dpf.core.operators.utility.forward(d, server=server_type) wf.add_operators([op]) 
wf.set_input_name("in", op, 0) @@ -325,6 +344,7 @@ def test_connect_get_output_double_list_workflow(server_type): ) def test_connect_label_space_workflow(server_type): wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False op = dpf.core.operators.utility.forward(server=server_type) wf.add_operators([op]) wf.set_input_name("in", op, 0) @@ -335,6 +355,7 @@ def test_connect_label_space_workflow(server_type): @conftest.raises_for_servers_version_under("5.0") def test_connect_get_output_string_field_workflow(server_type): wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False op = dpf.core.operators.utility.forward(server=server_type) wf.add_operators([op]) wf.set_input_name("in", op, 0) @@ -349,6 +370,7 @@ def test_connect_get_output_string_field_workflow(server_type): @conftest.raises_for_servers_version_under("5.0") def test_connect_get_output_custom_type_field_workflow(server_type): wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False op = dpf.core.operators.utility.forward(server=server_type) wf.add_operators([op]) wf.set_input_name("in", op, 0) @@ -369,6 +391,7 @@ def test_inputs_outputs_inputs_outputs_scopings_container_workflow( op = dpf.core.Operator("scoping::by_property", server=server_type) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operators([op]) wf.set_input_name("mesh", op.inputs.mesh) wf.set_input_name("prop", op.inputs.label1) @@ -380,6 +403,7 @@ def test_inputs_outputs_inputs_outputs_scopings_container_workflow( op = dpf.core.Operator("forward", server=server_type) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operators([op]) wf.set_input_name("a", op, 0) wf.set_output_name("a", op, 0) @@ -395,6 +419,7 @@ def test_inputs_outputs_inputs_outputs_meshes_container_workflow(allkindofcomple op = dpf.core.Operator("split_mesh", server=server_type) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operators([op]) wf.set_input_name("mesh", op.inputs.mesh) wf.set_input_name("prop", op.inputs.property) @@ -406,6 +431,7 @@ def test_inputs_outputs_inputs_outputs_meshes_container_workflow(allkindofcomple op = dpf.core.Operator("forward", server=server_type) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False wf.add_operators([op]) wf.set_input_name("a", op, 0) wf.set_output_name("a", op, 0) @@ -419,6 +445,7 @@ def test_inputs_outputs_inputs_outputs_meshes_container_workflow(allkindofcomple def test_connect_get_output_data_tree_operator(server_type): d = dpf.core.DataTree({"name": "Paul"}, server=server_type) wf = dpf.core.Workflow(server=server_type) + wf.progress_bar = False op = dpf.core.operators.utility.forward(server=server_type) wf.set_input_name("in", op.inputs.any) wf.set_output_name("out", op.outputs.any)
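A minimal usage sketch of the qualifier APIs touched above (AvailableResult.qualifier_combinations, ResultInfo.available_qualifier_labels, qualifier_label_support), assuming a DPF server at version 7.0 or later and a locally available CFX result file; the file path below is hypothetical and the example values in comments come from the test expectations in this patch ('phase': 2, 'zone': 5):

    from ansys.dpf import core as dpf

    # Hypothetical path to a CFX/CFF result file exposing "phase" and "zone" qualifiers.
    model = dpf.Model(r"D:\data\cfx_heating_coil.res")
    result_info = model.metadata.result_info

    # New property: every valid qualifier combination for a result, each one a dict
    # that can accompany a result request.
    specific_heat = result_info.available_results[0]
    for combination in specific_heat.qualifier_combinations:
        print(combination)  # e.g. {'phase': 2, 'zone': 5}

    # Qualifier labels and their display names, mirroring what ResultInfo.__str__
    # now appends for servers >= 7.0.
    for label in result_info.available_qualifier_labels:
        support = result_info.qualifier_label_support(label)
        names = support.string_field_support_by_property("names")
        print(label, dict(zip(names.scoping.ids.tolist(), names.data_as_list)))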
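The test updates also lean on two quieter-output switches shown in this patch: setting Workflow.progress_bar to False before running a workflow, and calling delete_downloads(verbose=False) to clean example files without printing each deletion. A short sketch of that pattern, assuming a local DPF server can be started and the example download succeeds:

    import os
    from ansys.dpf import core as dpf
    from ansys.dpf.core import examples

    # Fetch an example result file locally (same helper the tests use).
    path = examples.download_multi_stage_cyclic_result(return_local_path=True)
    assert os.path.exists(path)

    # Tiny forwarding workflow; disabling the progress bar keeps test logs clean.
    wf = dpf.Workflow()
    wf.progress_bar = False
    op = dpf.operators.utility.forward()
    wf.add_operator(op)
    wf.set_input_name("in", op, 0)
    wf.set_output_name("out", op, 0)

    field = dpf.Field(nentities=3)
    field.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]
    wf.connect("in", field)
    print(wf.get_output("out", dpf.types.field).data)

    # Remove downloaded example files without the per-file "deleting ..." messages.
    examples.delete_downloads(verbose=False)
    assert not os.path.exists(path)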