Skip to content

Commit b9f12b1

Browse files
Author: Nissan Pow (committed)
feat: add --dump-manifests flag to argo-workflows create
Output all Kubernetes manifests as JSON without connecting to the cluster or uploading a code package. Reuses export_all_json() to produce a dict with workflow_template, cron_workflow, and sensor keys. Suitable for GitOps workflows with kubectl apply, kustomize, or ArgoCD.
1 parent a397396 commit b9f12b1

1 file changed

Lines changed: 35 additions & 24 deletions

File tree

metaflow/plugins/argo/argo_workflows_cli.py

Lines changed: 35 additions & 24 deletions
Original file line number | Diff line number | Diff line change
@@ -707,13 +707,15 @@ def make_flow(
707707
enable_error_msg_capture,
708708
workflow_title,
709709
workflow_description,
710+
dump_manifests=False,
710711
):
711-
# TODO: Make this check less specific to Amazon S3 as we introduce
712-
# support for more cloud object stores.
713-
if obj.flow_datastore.TYPE not in ("azure", "gs", "s3"):
714-
raise MetaflowException(
715-
"Argo Workflows requires --datastore=s3 or --datastore=azure or --datastore=gs"
716-
)
712+
if not dump_manifests:
713+
# TODO: Make this check less specific to Amazon S3 as we introduce
714+
# support for more cloud object stores.
715+
if obj.flow_datastore.TYPE not in ("azure", "gs", "s3"):
716+
raise MetaflowException(
717+
"Argo Workflows requires --datastore=s3 or --datastore=azure or --datastore=gs"
718+
)
717719

718720
if (notify_on_error or notify_on_success) and not (
719721
notify_slack_webhook_url
@@ -754,30 +756,39 @@ def make_flow(
754756
)
755757
obj.graph = obj.flow._graph
756758

757-
# Save the code package in the flow datastore so that both user code and
758-
# metaflow package can be retrieved during workflow execution.
759-
obj.package = MetaflowPackage(
760-
obj.flow,
761-
obj.environment,
762-
obj.echo,
763-
suffixes=obj.package_suffixes,
764-
flow_datastore=obj.flow_datastore if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE else None,
765-
)
766-
767-
# This blocks until the package is created
768-
if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE:
769-
package_url = obj.package.package_url()
770-
package_sha = obj.package.package_sha()
759+
if dump_manifests:
760+
# Skip code package upload; use placeholders for manifest inspection.
761+
package_url = "__PLACEHOLDER_CODE_PACKAGE_URL__"
762+
package_sha = "__PLACEHOLDER_CODE_PACKAGE_SHA__"
763+
package_metadata = json.dumps({"version": 0})
771764
else:
772-
package_url, package_sha = obj.flow_datastore.save_data(
773-
[obj.package.blob], len_hint=1
774-
)[0]
765+
# Save the code package in the flow datastore so that both user code and
766+
# metaflow package can be retrieved during workflow execution.
767+
obj.package = MetaflowPackage(
768+
obj.flow,
769+
obj.environment,
770+
obj.echo,
771+
suffixes=obj.package_suffixes,
772+
flow_datastore=(
773+
obj.flow_datastore if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE else None
774+
),
775+
)
776+
777+
# This blocks until the package is created
778+
if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE:
779+
package_url = obj.package.package_url()
780+
package_sha = obj.package.package_sha()
781+
else:
782+
package_url, package_sha = obj.flow_datastore.save_data(
783+
[obj.package.blob], len_hint=1
784+
)[0]
785+
package_metadata = obj.package.package_metadata
775786

776787
return ArgoWorkflows(
777788
name,
778789
obj.graph,
779790
obj.flow,
780-
obj.package.package_metadata,
791+
package_metadata,
781792
package_sha,
782793
package_url,
783794
token,

0 commit comments

Comments (0)