53 changes: 23 additions & 30 deletions .github/workflows/build.yaml
@@ -17,30 +17,29 @@ jobs:
fail-fast: false
matrix:
python-version:
- 3.9
- '3.9'
- '3.10'
- '3.11'
- '3.12'
- '3.13'
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
python-version: ${{ matrix.python-version }}
# Install dev dependencies
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install -r test_requirements.txt
run: uv sync --locked
- name: Pre-commit checks
run: pre-commit run --all-files
run: uv run pre-commit run --all-files
- name: Test with pytest
# Create the ssh key file once for all testing
run: |
ssh-keygen -t ecdsa -m PEM -N '' -f /tmp/buildrunner-test-id_rsa
pytest -v -m "not serial" --numprocesses=auto --junitxml=test-reports/non-serial-test-results.xml
pytest -v -m "serial" --junitxml=test-reports/serial-test-results.xml
python scripts/combine_xml.py test-reports/serial-test-results.xml test-reports/non-serial-test-results.xml > test-reports/test-result.xml
uv run pytest -v -m "not serial" --numprocesses=auto --junitxml=test-reports/non-serial-test-results.xml
uv run pytest -v -m "serial" --junitxml=test-reports/serial-test-results.xml
uv run python scripts/combine_xml.py test-reports/serial-test-results.xml test-reports/non-serial-test-results.xml > test-reports/test-result.xml
- name: Publish test results
uses: EnricoMi/publish-unit-test-result-action/linux@v2
if: always()
@@ -60,15 +59,15 @@ jobs:
with:
# Fetch all history instead of the latest commit
fetch-depth: 0
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
python-version: 3.9
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Write version file
run: python scripts/write-version.py
run: uv sync --locked
- name: Get current version
id: version-number
run: echo "CURRENT_VERSION=$( python -c 'from buildrunner.version import __version__; print(__version__)' )" >> $GITHUB_OUTPUT
run: echo "CURRENT_VERSION=$(uv version --short).$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
- name: Print current version
run: echo CURRENT_VERSION ${{ steps.version-number.outputs.CURRENT_VERSION }}
tag-commit:
@@ -91,24 +90,18 @@ jobs:
with:
# Fetch all history instead of the latest commit
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v2
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
python-version: 3.9
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install wheel build
- name: Remove version file
# This is just in case something else created it, destroy it to get a fresh version
run: rm -f buildrunner/version.py
- name: Write version file
run: python scripts/write-version.py
run: uv sync --locked
- name: Set version
run: uv version --no-sync "$(uv version --short).$(git rev-list --count HEAD)"
- name: Build
run: python -m build
run: uv build
- name: Check upload
run: pip install twine && twine check dist/*
run: uv run --with twine twine check dist/*
- name: Publish to PyPi
uses: pypa/gh-action-pypi-publish@release/v1
# Only publish on pushes to main
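Note on the versioning steps above: the workflow now derives the release version at build time instead of writing a checked-in version file. A minimal local sketch of the same computation (assuming uv is installed and the clone has full history, matching fetch-depth: 0):

    uv version --short                                          # base version from pyproject.toml, e.g. 3.17
    git rev-list --count HEAD                                   # commit count used as the final version component
    echo "$(uv version --short).$(git rev-list --count HEAD)"   # the value stored in CURRENT_VERSION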
2 changes: 0 additions & 2 deletions .gitignore
@@ -16,8 +16,6 @@ venv*
.venv
.act*

buildrunner/version.py

buildrunner.results
curator.results

2 changes: 0 additions & 2 deletions .hgignore
@@ -11,7 +11,5 @@ dist

.eggs

buildrunner/version.py

buildrunner.results
curator.results
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -1,10 +1,10 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.7
rev: v0.12.7
hooks:
# Run the linter.
- id: ruff
- id: ruff-check
args: [ --fix ]
# Run the formatter.
- id: ruff-format
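With the repository now managed by uv, the hooks above can be exercised locally the same way the updated workflow does (a sketch, assuming dev dependencies have been synced):

    uv run pre-commit run --all-files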
2 changes: 1 addition & 1 deletion .python-version
@@ -1 +1 @@
3.11.4
3.11
1 change: 0 additions & 1 deletion BASE_VERSION

This file was deleted.

7 changes: 7 additions & 0 deletions CHANGELOG.rst
@@ -8,6 +8,13 @@
3.0
###

* 3.17

* Add python 3.13 support
* Migrate to uv for dependency management

* ... undocumented versions, see GitHub tagged releases ...

* 3.0

* Add python 3.11 support
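For context, the "Migrate to uv" entry above maps onto a small set of commands used throughout this diff (a summary sketch, not an exhaustive list):

    uv sync --locked        # install locked runtime and dev dependencies
    uv run pytest           # run tools inside the project environment
    uv version --short      # read the project version from pyproject.toml
    uv build                # build the sdist and wheel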
35 changes: 13 additions & 22 deletions Dockerfile
@@ -1,7 +1,6 @@
FROM python:3.11-bookworm

ENV BUILDRUNNER_CONTAINER 1
ENV PIP_DEFAULT_TIMEOUT 60

# Install the docker client for multiplatform builds
RUN apt update && \
@@ -31,26 +30,18 @@ RUN \
python3-dev && \
apt clean all

# Install requirements first and then buildrunner itself for better docker image layer caching
# HACK - For some reason, 'python3 setup.py install' produces an error with 'jaraco-classes' package
# but replacing it with 'jaraco.classes' in the requirements.txt works. ¯\_(ツ)_/¯
COPY *requirements.txt /tmp/setup/
RUN \
cd /tmp/setup && \
python3 -m pip install -U pip && \
sed -i s/jaraco-classes/jaraco.classes/ requirements.txt && \
python3 -m pip install \
-r requirements.txt && \
python3 -m pip install \
-r test_requirements.txt && \
rm -rf /tmp/setup
COPY . /buildrunner-source
RUN \
cd /buildrunner-source && \
sed -i s/jaraco-classes/jaraco.classes/ requirements.txt && \
python3 scripts/write-version.py && \
pip install . && \
rm -rf /buildrunner-source
WORKDIR /app

# Install uv
COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/

# Install dependencies first to leverage Docker cache
COPY pyproject.toml uv.lock README.rst /app/
RUN uv sync --locked --no-install-project --no-dev

# Install the project separately for optimal layer caching
COPY buildrunner /app/buildrunner
RUN uv sync --locked --no-dev

# The following will install docker-engine. It's not needed for the container to run,
# but was very helpful during development
@@ -63,5 +54,5 @@ RUN \
# apt-get update; \
# apt-get -y install docker-engine

ENTRYPOINT ["/usr/local/bin/buildrunner"]
ENTRYPOINT ["uv", "run", "buildrunner"]
CMD []
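The split into two uv sync calls above is a layer-caching pattern: dependency installation is keyed only on pyproject.toml and uv.lock, so source-only edits do not re-resolve dependencies. A rough way to observe this locally (the image tag is illustrative, not part of this PR):

    docker build -t buildrunner-uv-sketch .      # first build populates the layer cache
    # touch only files under buildrunner/, then rebuild:
    docker build -t buildrunner-uv-sketch .      # the --no-install-project sync layer is reported as
                                                 # CACHED; only the final project sync re-runs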
2 changes: 0 additions & 2 deletions MANIFEST.in

This file was deleted.

5 changes: 3 additions & 2 deletions README.rst
@@ -37,7 +37,7 @@ See `docs/installation <docs/installation.rst>`_.
Development
============

See `docs/development <docs/development.rst>`_.
See `docs/development <docs/development.rst>`__.

Global Configuration
====================
@@ -1202,7 +1202,8 @@ The test suite is located in the `tests subdirectory <tests>`_. These tests are
on every PR build and every build.

The test suite can be invoked manually from the top of the source directory by using
``pytest`` after installing all of the requirements and test requirements with ``pip``.
``uv run pytest`` after installing dependencies with ``uv sync``. See
`docs/development <docs/development.rst>`__ for more information.


.. Links
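The local invocation the README section above refers to mirrors the CI steps in build.yaml earlier in this diff; a sketch, assuming uv is installed and commands are run from the repository root:

    uv sync --locked                                                    # install dependencies from uv.lock
    ssh-keygen -t ecdsa -m PEM -N '' -f /tmp/buildrunner-test-id_rsa    # key file the test suite expects
    uv run pytest -v -m "not serial" --numprocesses=auto
    uv run pytest -v -m "serial"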
20 changes: 9 additions & 11 deletions buildrunner/config/__init__.py
@@ -203,17 +203,15 @@ def _get_config_context(
"BUILDRUNNER_STEPS": steps_to_run,
}
if vcs:
context.update(
{
"VCSINFO_NAME": str(vcs.name),
"VCSINFO_BRANCH": str(vcs.branch),
"VCSINFO_NUMBER": str(vcs.number),
"VCSINFO_ID": str(vcs.id),
"VCSINFO_SHORT_ID": str(vcs.id)[:7],
"VCSINFO_MODIFIED": str(vcs.modified),
"VCSINFO_RELEASE": str(vcs.release),
}
)
context.update({
"VCSINFO_NAME": str(vcs.name),
"VCSINFO_BRANCH": str(vcs.branch),
"VCSINFO_NUMBER": str(vcs.number),
"VCSINFO_ID": str(vcs.id),
"VCSINFO_SHORT_ID": str(vcs.id)[:7],
"VCSINFO_MODIFIED": str(vcs.modified),
"VCSINFO_RELEASE": str(vcs.release),
})

# Add the global env vars before any other context vars
for cur_context in contexts:
14 changes: 6 additions & 8 deletions buildrunner/config/fetch/github.py
@@ -34,14 +34,12 @@ def _fetch_file(parsed_url, gh_config: GithubModel):

auth = (username, gh_config.app_token)
url = "/".join(
_clean_nones(
[
endpoint,
version,
"users",
username,
]
)
_clean_nones([
endpoint,
version,
"users",
username,
])
)
resp = requests.get(url, auth=auth, timeout=180)
if resp.status_code != 200:
1 change: 1 addition & 0 deletions buildrunner/config/fetch/http.py
@@ -5,6 +5,7 @@
NOTICE: Adobe permits you to use, modify, and distribute this file in accordance
with the terms of the Adobe license agreement accompanying it.
"""

from typing import Optional

from ..models import GlobalConfig
28 changes: 12 additions & 16 deletions buildrunner/config/loader.py
@@ -251,22 +251,18 @@ def _fetch_template(
jtemplate = jenv.from_string(contents)

config_context = copy.deepcopy(env)
config_context.update(
{
"CONFIG_FILE": cfg_file,
"CONFIG_DIR": os.path.dirname(cfg_file),
"read_yaml_file": functools.partial(
jinja_context.read_yaml_file, env, _log_generated_file, log_file
),
"raise": jinja_context.raise_exception_jinja,
"strftime": functools.partial(jinja_context.strftime, build_time),
"env": os.environ,
# This is stored after the initial env is set
"DOCKER_REGISTRY": global_config.docker_registry
if global_config
else None,
}
)
config_context.update({
"CONFIG_FILE": cfg_file,
"CONFIG_DIR": os.path.dirname(cfg_file),
"read_yaml_file": functools.partial(
jinja_context.read_yaml_file, env, _log_generated_file, log_file
),
"raise": jinja_context.raise_exception_jinja,
"strftime": functools.partial(jinja_context.strftime, build_time),
"env": os.environ,
# This is stored after the initial env is set
"DOCKER_REGISTRY": global_config.docker_registry if global_config else None,
})

if ctx:
config_context.update(ctx)
6 changes: 3 additions & 3 deletions buildrunner/docker/daemon.py
@@ -85,9 +85,9 @@ def start(self):
volumes=_volumes,
host_config=self.docker_client.create_host_config(binds=_binds),
labels=self.container_labels,
networking_config=self.docker_client.create_networking_config(
{self.network: self.docker_client.create_endpoint_config()}
)
networking_config=self.docker_client.create_networking_config({
self.network: self.docker_client.create_endpoint_config()
})
if self.network
else None,
)["Id"]
2 changes: 1 addition & 1 deletion buildrunner/docker/image_info.py
@@ -35,7 +35,7 @@ def image_refs(self) -> List[str]:
return [f"{self.repo}:{tag}" for tag in self.tags]

def __str__(self) -> str:
return f'{self.repo}:{",".join(self.tags)}'
return f"{self.repo}:{','.join(self.tags)}"


class BuiltTaggedImage(BaseModel):