
Commit e5f0a8c

Merge branch 'release_24.0' into release_24.1
2 parents: b39c3b2 + ee5f807

File tree

7 files changed: +127 -39 lines

.github/workflows/pr-title-update.yml

Lines changed: 10 additions & 8 deletions

@@ -2,26 +2,28 @@ name: Update PR title
 
 on:
   pull_request_target:
-    types: [opened, edited]
-    branches:
-      - "release_**"
+    types: [opened, edited, reopened]
 
 jobs:
   update-title:
+    if: github.event.action != 'edited' || github.event.changes.base.ref.from != ''
     runs-on: ubuntu-latest
     permissions:
       pull-requests: write
     steps:
-      - uses: actions/checkout@v4
       - name: Update PR title
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           PR_NUMBER: ${{ github.event.pull_request.number }}
           TARGET_BRANCH: "${{ github.base_ref }}"
           PR_TITLE: "${{ github.event.pull_request.title }}"
+          REPO: "${{ github.repository }}"
         run: |
-          VERSION=$(echo $TARGET_BRANCH | grep -oP '\d+\.\d+')
-          if [[ -n "$VERSION" && ! "$PR_TITLE" =~ ^\[$VERSION\] ]]; then
-            NEW_TITLE="[$VERSION] $PR_TITLE"
-            gh pr edit $PR_NUMBER --title "$NEW_TITLE"
+          VERSION=$(echo $TARGET_BRANCH | grep -oP '^release_\K\d+.\d+$' || true)
+          NEW_TITLE=$(echo "$PR_TITLE" | sed -E "s/\[[0-9]+\.[0-9]+\] //")
+          if [[ -n "$VERSION" ]]; then
+            NEW_TITLE="[$VERSION] $NEW_TITLE"
+          fi
+          if [[ "$NEW_TITLE" != "$PR_TITLE" ]]; then
+            gh pr edit $PR_NUMBER --repo "$REPO" --title "$NEW_TITLE"
           fi
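
Note on the workflow above: the reworked run script normalizes the title in two steps: strip any existing "[X.Y]" prefix, then re-add a prefix only when the target branch matches release_<major>.<minor> ("|| true" keeps the step from failing when grep finds no match, since the runner shell exits on error), and gh pr edit only fires when the computed title actually differs. A minimal Python sketch of the same normalization (a hypothetical helper, not part of this commit):

    import re

    def new_pr_title(target_branch: str, pr_title: str) -> str:
        # grep -oP '^release_\K\d+.\d+$' -- note the unescaped ".", which matches any character
        match = re.fullmatch(r"release_(\d+.\d+)", target_branch)
        # sed -E "s/\[[0-9]+\.[0-9]+\] //" -- drop a previously added version prefix
        title = re.sub(r"\[[0-9]+\.[0-9]+\] ", "", pr_title, count=1)
        if match:
            title = f"[{match.group(1)}] {title}"
        return title

    assert new_pr_title("release_24.1", "Fix bug") == "[24.1] Fix bug"
    assert new_pr_title("release_24.1", "[24.0] Fix bug") == "[24.1] Fix bug"  # retarget updates the prefix
    assert new_pr_title("dev", "[24.0] Fix bug") == "Fix bug"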

lib/galaxy/managers/workflows.py

Lines changed: 4 additions & 3 deletions

@@ -598,7 +598,7 @@ def read_workflow_from_path(self, app, user, path, allow_in_directory=None) -> m
         import_options = ImportOptions()
         import_options.deduplicate_subworkflows = True
         as_dict = python_to_workflow(as_dict, galaxy_interface, workflow_directory=None, import_options=import_options)
-        raw_description = RawWorkflowDescription(as_dict, path)
+        raw_description = RawWorkflowDescription(as_dict)
         created_workflow = self.build_workflow_from_raw_description(trans, raw_description, WorkflowCreateOptions())
         return created_workflow.workflow

@@ -937,8 +937,9 @@ def to_format_2(wf_dict, **kwds):
         return wf_dict
 
     def _sync_stored_workflow(self, trans, stored_workflow):
-        workflow_path = stored_workflow.from_path
-        self.store_workflow_to_path(workflow_path, stored_workflow, stored_workflow.latest_workflow, trans=trans)
+        if trans.user_is_admin:
+            workflow_path = stored_workflow.from_path
+            self.store_workflow_to_path(workflow_path, stored_workflow, stored_workflow.latest_workflow, trans=trans)
 
     def store_workflow_artifacts(self, directory, filename_base, workflow, **kwd):
         modern_workflow_path = os.path.join(directory, f"{filename_base}.gxwf.yml")
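
Note: _sync_stored_workflow writes the latest workflow version back to the file it was originally loaded from (stored_workflow.from_path); the added guard limits that filesystem write-back to admin users, making the method a no-op for everyone else. A minimal sketch of the guarded behavior, with hypothetical stand-ins for the Galaxy objects:

    class Trans:  # hypothetical stand-in for Galaxy's transaction object
        def __init__(self, user_is_admin: bool):
            self.user_is_admin = user_is_admin

    def sync_stored_workflow(trans: Trans, from_path: str, write_file) -> None:
        # After the change, only admins trigger the write-back to the source path.
        if trans.user_is_admin:
            write_file(from_path)

    written = []
    sync_stored_workflow(Trans(user_is_admin=False), "/data/wf.gxwf.yml", written.append)
    sync_stored_workflow(Trans(user_is_admin=True), "/data/wf.gxwf.yml", written.append)
    assert written == ["/data/wf.gxwf.yml"]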

lib/galaxy/model/__init__.py

Lines changed: 13 additions & 6 deletions

@@ -2630,7 +2630,7 @@ class PostJobAction(Base, RepresentById):
     workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id"), index=True)
     action_type: Mapped[str] = mapped_column(String(255))
     output_name: Mapped[Optional[str]] = mapped_column(String(255))
-    action_arguments: Mapped[Optional[bytes]] = mapped_column(MutableJSONType)
+    _action_arguments: Mapped[Optional[bytes]] = mapped_column(MutableJSONType)
     workflow_step: Mapped[Optional["WorkflowStep"]] = relationship(
         back_populates="post_job_actions",
         primaryjoin=(lambda: WorkflowStep.id == PostJobAction.workflow_step_id),

@@ -2643,6 +2643,18 @@ def __init__(self, action_type, workflow_step=None, output_name=None, action_arg
         self.workflow_step = workflow_step
         ensure_object_added_to_session(self, object_in_session=workflow_step)
 
+    @property
+    def action_arguments(self):
+        if self.action_type in ("HideDatasetAction", "DeleteIntermediatesAction") and self._action_arguments is True:
+            # Fix up broken workflows resulting from imports with gxformat2 <= 0.20.0
+            return {}
+        else:
+            return self._action_arguments
+
+    @action_arguments.setter
+    def action_arguments(self, value: Dict[str, Any]):
+        self._action_arguments = value
+
 
 class PostJobActionAssociation(Base, RepresentById):
     __tablename__ = "post_job_action_association"

@@ -6900,11 +6912,6 @@ class HistoryDatasetCollectionAssociation(
         primaryjoin=copied_from_history_dataset_collection_association_id == id,
         remote_side=[id],
         uselist=False,
-        back_populates="copied_to_history_dataset_collection_association",
-    )
-    copied_to_history_dataset_collection_association = relationship(
-        "HistoryDatasetCollectionAssociation",
-        back_populates="copied_from_history_dataset_collection_association",
     )
     implicit_input_collections: Mapped[List["ImplicitlyCreatedDatasetCollectionInput"]] = relationship(
         primaryjoin=(
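
Note: mapping the column to a private _action_arguments attribute and exposing action_arguments as a property lets the model repair broken legacy values on read (gxformat2 <= 0.20.0 could import "true" where a dict of arguments was expected) without a schema migration. A self-contained sketch of the pattern, assuming SQLAlchemy 2.0 and hypothetical names; unlike the commit, it pins the column name explicitly:

    from typing import Any, Dict, Optional

    from sqlalchemy import JSON, String
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class PJA(Base):  # hypothetical stand-in for PostJobAction
        __tablename__ = "pja"
        id: Mapped[int] = mapped_column(primary_key=True)
        action_type: Mapped[str] = mapped_column(String(255))
        # the column keeps its database name while the Python attribute is renamed
        _action_arguments: Mapped[Optional[dict]] = mapped_column("action_arguments", JSON)

        @property
        def action_arguments(self) -> Optional[Dict[str, Any]]:
            if self._action_arguments is True:  # broken legacy import
                return {}
            return self._action_arguments

        @action_arguments.setter
        def action_arguments(self, value: Dict[str, Any]) -> None:
            self._action_arguments = value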

lib/galaxy/model/store/__init__.py

Lines changed: 22 additions & 18 deletions

@@ -999,14 +999,16 @@ def _import_collection_copied_associations(
         # sense.
         hdca_copied_from_sinks = object_import_tracker.hdca_copied_from_sinks
         if copied_from_object_key in object_import_tracker.hdcas_by_key:
-            hdca.copied_from_history_dataset_collection_association = object_import_tracker.hdcas_by_key[
-                copied_from_object_key
-            ]
+            source_hdca = object_import_tracker.hdcas_by_key[copied_from_object_key]
+            if source_hdca is not hdca:
+                # We may not have the copied source, in which case the first included HDCA in the chain
+                # acts as the source, so here we make sure we don't create a cycle.
+                hdca.copied_from_history_dataset_collection_association = source_hdca
         else:
             if copied_from_object_key in hdca_copied_from_sinks:
-                hdca.copied_from_history_dataset_collection_association = object_import_tracker.hdcas_by_key[
-                    hdca_copied_from_sinks[copied_from_object_key]
-                ]
+                source_hdca = object_import_tracker.hdcas_by_key[hdca_copied_from_sinks[copied_from_object_key]]
+                if source_hdca is not hdca:
+                    hdca.copied_from_history_dataset_collection_association = source_hdca
             else:
                 hdca_copied_from_sinks[copied_from_object_key] = dataset_collection_key

@@ -1072,7 +1074,7 @@ def attach_workflow_step(imported_object, attrs):
         for step_attrs in invocation_attrs["steps"]:
             imported_invocation_step = model.WorkflowInvocationStep()
             imported_invocation_step.workflow_invocation = imported_invocation
-            ensure_object_added_to_session(imported_invocation, session=self.sa_session)
+            ensure_object_added_to_session(imported_invocation_step, session=self.sa_session)
             attach_workflow_step(imported_invocation_step, step_attrs)
             restore_times(imported_invocation_step, step_attrs)
             imported_invocation_step.action = step_attrs["action"]

@@ -1926,12 +1928,14 @@ def __init__(
         self.export_files = export_files
         self.included_datasets: Dict[model.DatasetInstance, Tuple[model.DatasetInstance, bool]] = {}
         self.dataset_implicit_conversions: Dict[model.DatasetInstance, model.ImplicitlyConvertedDatasetAssociation] = {}
-        self.included_collections: List[Union[model.DatasetCollection, model.HistoryDatasetCollectionAssociation]] = []
+        self.included_collections: Dict[
+            Union[model.DatasetCollection, model.HistoryDatasetCollectionAssociation],
+            Union[model.DatasetCollection, model.HistoryDatasetCollectionAssociation],
+        ] = {}
         self.included_libraries: List[model.Library] = []
         self.included_library_folders: List[model.LibraryFolder] = []
         self.included_invocations: List[model.WorkflowInvocation] = []
         self.collection_datasets: Set[int] = set()
-        self.collections_attrs: List[Union[model.DatasetCollection, model.HistoryDatasetCollectionAssociation]] = []
         self.dataset_id_to_path: Dict[int, Tuple[Optional[str], Optional[str]]] = {}
 
         self.job_output_dataset_associations: Dict[int, Dict[str, model.DatasetInstance]] = {}

@@ -2298,8 +2302,7 @@ def export_collection(
     def add_dataset_collection(
         self, collection: Union[model.DatasetCollection, model.HistoryDatasetCollectionAssociation]
     ) -> None:
-        self.collections_attrs.append(collection)
-        self.included_collections.append(collection)
+        self.included_collections[collection] = collection
 
     def add_implicit_conversion_dataset(
         self,

@@ -2362,7 +2365,7 @@ def to_json(attributes):
 
         collections_attrs_filename = os.path.join(export_directory, ATTRS_FILENAME_COLLECTIONS)
         with open(collections_attrs_filename, "w") as collections_attrs_out:
-            collections_attrs_out.write(to_json(self.collections_attrs))
+            collections_attrs_out.write(to_json(self.included_collections.values()))
 
         conversions_attrs_filename = os.path.join(export_directory, ATTRS_FILENAME_CONVERSIONS)
         with open(conversions_attrs_filename, "w") as conversions_attrs_out:

@@ -2383,12 +2386,12 @@ def to_json(attributes):
         #
 
         # Get all jobs associated with included HDAs.
-        jobs_dict: Dict[str, model.Job] = {}
+        jobs_dict: Dict[int, model.Job] = {}
         implicit_collection_jobs_dict = {}
 
         def record_job(job):
-            if not job:
-                # No viable job.
+            if not job or job.id in jobs_dict:
+                # No viable job or job already recorded.
                 return
 
             jobs_dict[job.id] = job

@@ -2414,10 +2417,11 @@ def record_associated_jobs(obj):
             )
             job_hda = hda
             while job_hda.copied_from_history_dataset_association:  # should this check library datasets as well?
+                # record job (if one exists) even if dataset was copied
+                # copy could have been created manually through UI/API or using database operation tool,
+                # in which case we have a relevant job to export.
+                record_associated_jobs(job_hda)
                 job_hda = job_hda.copied_from_history_dataset_association
-                if not job_hda.creating_job_associations:
-                    # No viable HDA found.
-                    continue
 
             record_associated_jobs(job_hda)
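
Note: two deduplication fixes in this file share one idea. included_collections becomes a plain dict used as an insertion-ordered set (replacing the parallel collections_attrs list), and record_job now skips jobs already keyed by job.id, so re-visited HDAs in a copy chain do not duplicate job records. A small sketch of the dict-as-ordered-set idiom:

    # Python dicts preserve insertion order (3.7+), so keying items by themselves
    # deduplicates while keeping the original export order.
    included = {}
    for item in ["hdca_a", "hdca_b", "hdca_a"]:  # stand-ins for collection objects
        included[item] = item
    assert list(included.values()) == ["hdca_a", "hdca_b"]

This relies on the keys being hashable; SQLAlchemy ORM instances hash by object identity by default, which is what is wanted here.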

lib/galaxy/tools/__init__.py

Lines changed: 3 additions & 4 deletions

@@ -3786,14 +3786,13 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
         final_sorted_identifiers = [
             element.element_identifier for element in elements1 if element.element_identifier in old_elements2_dict
         ]
-        # Raise Exception if it is empty
         if len(final_sorted_identifiers) == 0:
             # Create empty collections:
             output_collections.create_collection(
-                next(iter(self.outputs.values())), "output1", elements={}, propagate_hda_tags=False
+                self.outputs["output1"], "output1", elements={}, propagate_hda_tags=False
             )
             output_collections.create_collection(
-                next(iter(self.outputs.values())), "output2", elements={}, propagate_hda_tags=False
+                self.outputs["output2"], "output2", elements={}, propagate_hda_tags=False
             )
             return

@@ -3811,7 +3810,7 @@ def output_with_selected_identifiers(old_elements_dict, output_label):
             self._add_datasets_to_history(history, new_elements.values())
             # Create collections:
             output_collections.create_collection(
-                next(iter(self.outputs.values())), output_label, elements=new_elements, propagate_hda_tags=False
+                self.outputs[output_label], output_label, elements=new_elements, propagate_hda_tags=False
             )
 
         # Create outputs:
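
Note: next(iter(self.outputs.values())) always yields the tool's first declared output, so with two outputs, "output2" was previously created from "output1"'s output definition and inherited its collection type; indexing self.outputs by label picks the matching definition. A sketch of the failure mode:

    # Hypothetical label -> output-definition mapping with two declared outputs.
    outputs = {"output1": "list", "output2": "list:paired"}
    first = next(iter(outputs.values()))
    assert first == "list"                      # what both outputs used to receive
    assert outputs["output2"] == "list:paired"  # per-label lookup after the fix

The new tool test below exercises exactly this: output2 must come back as list:paired rather than list.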

lib/galaxy/tools/harmonize_two_collections_list.xml

Lines changed: 31 additions & 0 deletions

@@ -171,6 +171,37 @@
             <output_collection name="output1" type="list" count="0"/>
             <output_collection name="output2" type="list" count="0"/>
         </test>
+        <!-- test that collection types get propagated to outputs correctly -->
+        <test>
+            <param name="input1">
+                <collection type="list">
+                    <element name="element_1" value="simple_line.txt" />
+                </collection>
+            </param>
+            <param name="input2">
+                <collection type="list:paired">
+                    <element name="element_1">
+                        <collection type="paired">
+                            <element name="forward" value="1.fastqsanger" ftype="fastqsanger" />
+                            <element name="reverse" value="1.fastqsanger" ftype="fastqsanger" />
+                        </collection>
+                    </element>
+                </collection>
+            </param>
+            <output_collection name="output1" type="list" count="1">
+                <element name="element_1">
+                    <assert_contents>
+                        <has_text_matching expression="^This is a line of text.\n$" />
+                    </assert_contents>
+                </element>
+            </output_collection>
+            <output_collection name="output2" type="list:paired" count="1">
+                <element name="element_1">
+                    <element name="forward" file="1.fastqsanger" ftype="fastqsanger" />
+                    <element name="reverse" file="1.fastqsanger" ftype="fastqsanger" />
+                </element>
+            </output_collection>
+        </test>
     </tests>
     <help><![CDATA[
 
test/integration/test_workflow_tasks.py

Lines changed: 44 additions & 0 deletions

@@ -17,6 +17,7 @@
 from galaxy_test.base import api_asserts
 from galaxy_test.base.api import UsesCeleryTasks
 from galaxy_test.base.populators import (
+    DatasetCollectionPopulator,
     DatasetPopulator,
     RunJobsSummary,
     WorkflowPopulator,

@@ -27,6 +28,7 @@
 
 class TestWorkflowTasksIntegration(PosixFileSourceSetup, IntegrationTestCase, UsesCeleryTasks, RunsWorkflowFixtures):
     dataset_populator: DatasetPopulator
+    dataset_collection_populator: DatasetCollectionPopulator
     framework_tool_and_types = True
 
     @classmethod

@@ -37,6 +39,7 @@ def handle_galaxy_config_kwds(cls, config):
     def setUp(self):
         super().setUp()
         self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
+        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
         self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
         self._write_file_fixtures()

@@ -124,6 +127,47 @@ def test_export_import_invocation_with_step_parameter(self):
         invocation_details = self._export_and_import_workflow_invocation(summary, use_uris)
         self._rerun_imported_workflow(summary, invocation_details)
 
+    def test_export_import_invocation_with_copied_hdca_and_database_operation_tool(self):
+        with self.dataset_populator.test_history() as history_id:
+            self.dataset_collection_populator.create_list_in_history(history_id=history_id, wait=True).json()
+            new_history = self.dataset_populator.copy_history(history_id=history_id).json()
+            copied_collection = self.dataset_populator.get_history_collection_details(new_history["id"])
+            workflow_id = self.workflow_populator.upload_yaml_workflow(
+                """class: GalaxyWorkflow
+inputs:
+  input:
+    type: collection
+    collection_type: list
+steps:
+  extract_dataset:
+    tool_id: __EXTRACT_DATASET__
+    in:
+      input:
+        source: input
+"""
+            )
+            inputs = {"input": {"src": "hdca", "id": copied_collection["id"]}}
+            workflow_request = {"history": f"hist_id={new_history['id']}", "inputs_by": "name", "inputs": inputs}
+            invocation = self.workflow_populator.invoke_workflow_raw(
+                workflow_id, workflow_request, assert_ok=True
+            ).json()
+            invocation_id = invocation["id"]
+            self.workflow_populator.wait_for_invocation_and_jobs(history_id, workflow_id, invocation_id)
+            jobs = self.workflow_populator.get_invocation_jobs(invocation_id)
+            summary = RunJobsSummary(
+                history_id=history_id,
+                workflow_id=workflow_id,
+                invocation_id=invocation["id"],
+                inputs=inputs,
+                jobs=jobs,
+                invocation=invocation,
+                workflow_request=workflow_request,
+            )
+            imported_invocation_details = self._export_and_import_workflow_invocation(summary)
+            original_contents = self.dataset_populator.get_history_contents(new_history["id"])
+            contents = self.dataset_populator.get_history_contents(imported_invocation_details["history_id"])
+            assert len(contents) == len(original_contents) == 5
+
     def _export_and_import_workflow_invocation(
         self, summary: RunJobsSummary, use_uris: bool = True, model_store_format="tgz"
     ) -> Dict[str, Any]:
