Skip to content

Commit 75e6cc3

Browse files
committed
remove: odh-release for inference-scheduler repo
Signed-off-by: Wen Zhou <wenzhou@redhat.com>
1 parent 3265945 commit 75e6cc3

File tree

1 file changed

+32
-45
lines changed

1 file changed

+32
-45
lines changed

.github/workflows/odh-release.yaml

Lines changed: 32 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -4,19 +4,18 @@ on:
44
workflow_dispatch:
55
inputs:
66
repository:
7-
description: 'Target repository (odh-model-controller, kserve, llm-d-inference-scheduler)'
7+
description: 'Target repository (odh-model-controller, kserve)'
88
required: true
99
type: choice
1010
options:
1111
- odh-model-controller
12-
- llm-d-inference-scheduler
1312
- kserve
1413
tag_name: # The new release tag to be created and used as the search value.
1514
description: 'New release tag (e.g., odh-v2.35)'
1615
required: true
1716
type: string
1817
next_tag_name: # The next tag to replace the current one in the Konflux files.
19-
description: 'Next development tag (e.g., odh-v2.36) - not required for llm-d-inference-scheduler'
18+
description: 'Next development tag (e.g., odh-v2.36)'
2019
required: false
2120
type: string
2221
target_branch:
@@ -89,13 +88,12 @@ jobs:
8988
errors.append(f"Tag '{tag_name}' does not match expected format 'odh-vX.Y' or 'odh-vX.Y.Z'")
9089
9190
# next_tag validation only required for repos that need Tekton bumping
92-
if repository != "llm-d-inference-scheduler":
93-
if not next_tag:
94-
errors.append("next_tag_name is required for this repository")
95-
elif not re.match(tag_pattern, next_tag):
96-
errors.append(f"Next tag '{next_tag}' does not match expected format 'odh-vX.Y' or 'odh-vX.Y.Z'")
97-
elif tag_name == next_tag:
98-
errors.append(f"tag_name and next_tag_name must be different (both are '{tag_name}')")
91+
if not next_tag:
92+
errors.append("next_tag_name is required for this repository")
93+
elif not re.match(tag_pattern, next_tag):
94+
errors.append(f"Next tag '{next_tag}' does not match expected format 'odh-vX.Y' or 'odh-vX.Y.Z'")
95+
elif tag_name == next_tag:
96+
errors.append(f"tag_name and next_tag_name must be different (both are '{tag_name}')")
9997
10098
# Fail fast if validation errors exist
10199
if errors:
@@ -136,38 +134,27 @@ jobs:
136134
import glob
137135
138136
repository = "${{ github.event.inputs.repository }}"
139-
140-
# Skip file discovery for llm-d-inference-scheduler
141-
if repository == "llm-d-inference-scheduler":
142-
print("Skipping file discovery for llm-d-inference-scheduler")
143-
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
144-
f.write(f"params_env_files=[]\n")
145-
f.write(f"tekton_files=[]\n")
146-
f.write(f"has_params_env=false\n")
147-
f.write(f"has_tekton=false\n")
148-
print("✓ Discovery skipped - no files to update for this repository")
149-
else:
150-
# Discover params.env files under config/
151-
params_env_files = glob.glob('config/**/params.env', recursive=True)
152-
153-
# Discover YAML files under .tekton/
154-
tekton_files = glob.glob('.tekton/**/*.yaml', recursive=True) + \
155-
glob.glob('.tekton/**/*.yml', recursive=True)
156-
157-
# Output results
158-
print(f"Found params.env files: {params_env_files}")
159-
print(f"Found tekton files: {tekton_files}")
160-
161-
# Write outputs
162-
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
163-
f.write(f"params_env_files={json.dumps(params_env_files)}\n")
164-
f.write(f"tekton_files={json.dumps(tekton_files)}\n")
165-
f.write(f"has_params_env={'true' if params_env_files else 'false'}\n")
166-
f.write(f"has_tekton={'true' if tekton_files else 'false'}\n")
167-
168-
print(f"\n✓ Discovery complete")
169-
print(f" params.env files: {len(params_env_files)}")
170-
print(f" Tekton files: {len(tekton_files)}")
137+
# Discover params.env files under config/
138+
params_env_files = glob.glob('config/**/params.env', recursive=True)
139+
140+
# Discover YAML files under .tekton/
141+
tekton_files = glob.glob('.tekton/**/*.yaml', recursive=True) + \
142+
glob.glob('.tekton/**/*.yml', recursive=True)
143+
144+
# Output results
145+
print(f"Found params.env files: {params_env_files}")
146+
print(f"Found tekton files: {tekton_files}")
147+
148+
# Write outputs
149+
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
150+
f.write(f"params_env_files={json.dumps(params_env_files)}\n")
151+
f.write(f"tekton_files={json.dumps(tekton_files)}\n")
152+
f.write(f"has_params_env={'true' if params_env_files else 'false'}\n")
153+
f.write(f"has_tekton={'true' if tekton_files else 'false'}\n")
154+
155+
print(f"\n✓ Discovery complete")
156+
print(f" params.env files: {len(params_env_files)}")
157+
print(f" Tekton files: {len(tekton_files)}")
171158
shell: python
172159

173160
create-release:
@@ -218,9 +205,9 @@ jobs:
218205
r'([\w.-]+(?:\.[\w.-]+)*/[\w.-]+/(?:odh-model-controller|odh-model-serving-api|mlserver)):((?:odh-model-serving-api-)?)(fast|v[\d.]+-latest|release-v[\d.]+|odh-v[\d.]+(?:-EA\d)?)'
219206
)
220207
else:
221-
# For kserve and others: update all matching images except llm-d-* images
208+
# For kserve and others: update all matching images
222209
image_pattern = re.compile(
223-
r'([\w.-]+(?:\.[\w.-]+)*/[\w.-]+/(?!llm-d-inference-scheduler|llm-d-routing-sidecar)[\w.-]+):()(fast|v[\d.]+-latest|release-v[\d.]+|odh-v[\d.]+(?:-EA\d)?)'
210+
r'([\w.-]+(?:\.[\w.-]+)*/[\w.-]+/[\w.-]+):()(fast|v[\d.]+-latest|release-v[\d.]+|odh-v[\d.]+(?:-EA\d)?)'
224211
)
225212
226213
updated_files = []
@@ -295,7 +282,7 @@ jobs:
295282
name: Bump Tekton tags for next release
296283
runs-on: ubuntu-latest
297284
needs: [validate-and-check, create-release]
298-
if: always() && needs.create-release.result == 'success' && needs.validate-and-check.outputs.has_tekton == 'true' && !(github.event.inputs.repository == 'llm-d-inference-scheduler' && github.event.inputs.next_tag_name == '')
285+
if: always() && needs.create-release.result == 'success' && needs.validate-and-check.outputs.has_tekton == 'true'
299286
outputs:
300287
pr_url: ${{ steps.create_pr.outputs.pr_url }}
301288
steps:

0 commit comments

Comments (0)