diff --git a/scripts/koji/koji_garbage_collector.sh b/scripts/koji/koji_garbage_collector.sh
deleted file mode 100755
index f0a8c0a..0000000
--- a/scripts/koji/koji_garbage_collector.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash
-
-# This script removes old scratch builds and also untags old builds whose only tag is built-by-xcp-ng,
-# then runs koji-gc so that they can be put in the trashcan if nothing references them anymore,
-# and then be deleted at the next run.
-
-# *** remove old scratch builds ***
-
-TOPDIR=/mnt/koji
-TIMEARG="+30"
-IFS=$'\n'
-SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-
-# koji-gc is able to flag builds for deletion, based on koji-gc policies defined in /etc/koji-gc/koji-gc.conf,
-# but the presence of the "built-by-xcp-ng" tag prevents it from doing so.
-# Consequently, we first look for such builds and untag them.
-echo
-echo "*** untag_lone_builds.py ***"
-su -l kojiadmin -c "$SCRIPT_DIR/untag_lone_builds.py"
-
-# delete builds that were moved to the trashcan tag in a previous pass
-echo
-echo "*** koji-gc --action=delete ***"
-su -l kojiadmin -c "koji-gc --action=delete"
-
-# mark newly untagged builds for deletion (trashcan tag)
-echo
-echo "*** koji-gc --action=trash ***"
-su -l kojiadmin -c "koji-gc --action=trash"
-
-echo
-echo "*** Remove old scratch builds ***"
-
-# we completely remove those that are old enough
-# scratch directories are /mnt/koji/scratch/$username/task_$taskid/
-# note that $username might contain a slash (e.g. host principals)
-cd $TOPDIR/scratch/
-for x in $(find $TOPDIR/scratch/ -mindepth 2 -type d -name 'task_*' -prune -mtime $TIMEARG); do
-    find "$x" -xdev "!" -type d "!" -name "*.deleted" -printf '%s\t %p\n' -delete -exec touch {}.deleted \; -exec chown apache.apache {}.deleted \;
-done
-
-echo
-echo "*** Remove old tasks ***"
-
-# for tasks, try to remove as a unit
-for x in $(find "$TOPDIR"/work/tasks/ -mindepth 2 -maxdepth 2 -type d -mtime $TIMEARG); do
-    # delete broken symlinks (these will be links to deleted scratch builds) but leave other symlinks alone
-    find "$x" -xdev -xtype l -printf '%s\t %p\n' -delete -exec touch {}.deleted \; -exec chown apache.apache {}.deleted \;
-    # Delete SRPMs from buildSRPMFromSCM tasks.
-    # There is a slight loss of information because the SRPM that is stored in the
-    # packages/ directory is the one coming from the buildArch task.
-    # In our case, they should be strictly equivalent, though.
-    find "$x" -xdev -type f -name "*.src.rpm" -printf '%s\t %p\n' -delete -exec touch {}.deleted \; -exec chown apache.apache {}.deleted \;
-done
-
-# for anything else, just remove old stuff
-# but don't remove the top level dirs (e.g. cli-build)
-#for x in $(find "$TOPDIR"/work -maxdepth 1 -mindepth 1 \! -name tasks); do
-#    find "$x" -xdev '!' -type d -mtime $TIMEARG -print
-#    find "$x" -xdev '!' -type d -mtime $TIMEARG -print0 | xargs -0 -r rm -f
-#    find "$x" -xdev -depth -mindepth 1 -type d -empty -print0 | xargs -0 -r rmdir
-#    find "$x" -xdev -depth -mindepth 1 -type d -empty -print
-#done
-
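The ordering above is deliberate, as the header comment hints: `koji-gc --action=delete` runs before `--action=trash`, so a build untagged by untag_lone_builds.py is only trashed during this run and deleted at the next run at the earliest, leaving one full cycle to catch mistakes. A toy model of that two-pass lifecycle (pure illustration, not koji-gc's actual code):

    def gc_run(builds):
        # pass 1: delete whatever an earlier run already trashed
        for b in builds:
            if b['trashed']:
                b['deleted'] = True
        # pass 2: trash builds that no longer carry any tag
        for b in builds:
            if not b['tags'] and not b['trashed']:
                b['trashed'] = True

    build = {'tags': [], 'trashed': False, 'deleted': False}
    gc_run([build])
    assert build['trashed'] and not build['deleted']   # first run: trashed only
    gc_run([build])
    assert build['deleted']                            # second run: deleted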
diff --git a/scripts/koji/sync_repo_from_koji.py b/scripts/koji/sync_repo_from_koji.py
deleted file mode 100755
index 4ebea1c..0000000
--- a/scripts/koji/sync_repo_from_koji.py
+++ /dev/null
@@ -1,421 +0,0 @@
-#!/usr/bin/env python3
-
-import argparse
-import atexit
-import glob
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-from datetime import datetime
-
-USER_REPO_HTTPS = "https://koji.xcp-ng.org/repos/user/"
-
-RELEASE_VERSIONS = [
-    '7.6',
-    '8.0',
-    '8.1',
-    '8.2',
-    '8.3',
-]
-
-DEV_VERSIONS = [
-]
-
-VERSIONS = DEV_VERSIONS + RELEASE_VERSIONS
-
-# Not used; kept as a reminder, and for the unlikely case where we might need to update their repos again
-DEAD_TAGS = [
-    'v7.6-base',
-    'v7.6-updates',
-    'v7.6-testing',
-    'v8.0-base',
-    'v8.0-updates',
-    'v8.0-testing',
-    'v8.1-base',
-    'v8.1-updates',
-    'v8.1-testing',
-]
-
-TAGS = [
-    'v8.2-base',
-    'v8.2-updates',
-    'v8.2-candidates',
-    'v8.2-testing',
-    'v8.2-ci',
-    'v8.2-incoming',
-    'v8.2-lab',
-    'v8.3-base',
-    'v8.3-updates',
-    'v8.3-candidates',
-    'v8.3-testing',
-    'v8.3-ci',
-    'v8.3-incoming',
-    'v8.3-lab',
-]
-
-# tags in which we only keep the latest build for each package
-RELEASE_TAGS = [
-    'v7.6-base',
-    'v8.0-base',
-    'v8.1-base',
-    'v8.2-base',
-#    'v8.3-base', # special case: we have a history of pre-release builds that users might need for troubleshooting
-]
-
-# tags for which we want to export a stripped repo for offline updates
-OFFLINE_TAGS = [
-    'v8.2-updates',
-    'v8.2-v-linstor',
-    'v8.3-updates',
-    'v8.3-v-linstor',
-]
-
-# Additional "user" tags. For them, repos are generated at a different place.
-# Initialized empty: user tags are autodetected based on their name.
-U_TAGS = []
-
-# Additional V-tags (V stands for "Vates" or for "vendor"). For them, repos are also generated at a different place.
-# Initialized empty: V-tags are autodetected based on their name.
-V_TAGS = []
-
-KOJI_ROOT_DIR = '/mnt/koji'
-
-KEY_ID = "3fd3ac9e"
-
-def version_from_tag(tag):
-    matches = re.match(r'v(\d+\.\d+)', tag)
-    return matches.group(1)
-
-def repo_name_from_tag(tag):
-    version = version_from_tag(tag)
-    name = tag[len("v%s-" % version):]
-    if name.startswith('u-') or name.startswith('v-'):
-        name = name[2:]
-    return name
-
-def build_path_to_version(parent_dir, tag):
-    version = version_from_tag(tag)
-    major = version.split('.')[0]
-    return os.path.join(parent_dir, major, version)
-
-def build_path_to_repo(parent_dir, tag):
-    return os.path.join(build_path_to_version(parent_dir, tag), repo_name_from_tag(tag))
-
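To make the tag-name helpers above concrete, a few worked examples that follow directly from their definitions (the '/repo' root and the 'v8.3-u-mytag' user tag are invented for illustration; real user tags are autodetected with `koji list-tags 'v*.*-u-*'` further down):

    assert version_from_tag('v8.2-updates') == '8.2'
    assert repo_name_from_tag('v8.2-updates') == 'updates'
    assert repo_name_from_tag('v8.3-u-mytag') == 'mytag'   # 'u-'/'v-' prefix stripped
    assert build_path_to_repo('/repo', 'v8.2-updates') == '/repo/8/8.2/updates'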
-def sign_rpm(rpm):
-    # create a temporary work directory
-    tmpdir = tempfile.mkdtemp(prefix=rpm)
-    current_dir = os.getcwd()
-
-    try:
-        os.chdir(tmpdir)
-
-        # download from koji
-        subprocess.check_call(['koji', 'download-build', '--debuginfo', '--noprogress', '--rpm', rpm])
-
-        # sign: requires a sign-rpm executable or alias in the PATH
-        subprocess.check_call(['sign-rpm', rpm], stdout=subprocess.DEVNULL)
-
-        # import the signature into koji
-        subprocess.check_call(['koji', 'import-sig', rpm])
-
-    finally:
-        # clean up
-        os.chdir(current_dir)
-        shutil.rmtree(tmpdir)
-
-def write_repo(tag, dest_dir, tmp_root_dir, offline=False):
-    version = version_from_tag(tag)
-    repo_name = repo_name_from_tag(tag)
-
-    # Hack for 7.6, because koji only handles its updates and updates_testing repos:
-    if version == '7.6':
-        if repo_name == 'testing':
-            repo_name = 'updates_testing'
-        elif repo_name != 'updates':
-            raise Exception("Fatal: koji should not have any changes outside testing and updates for 7.6!")
-
-    path_to_repo = build_path_to_repo(dest_dir, tag)
-    path_to_tmp_repo = build_path_to_repo(tmp_root_dir, tag)
-
-    # remove the temporary repo if it exists
-    if os.path.isdir(path_to_tmp_repo):
-        shutil.rmtree(path_to_tmp_repo)
-
-    # create the empty structure
-    print("\n-- Copy the RPMs from %s to %s" % (KOJI_ROOT_DIR, path_to_tmp_repo))
-    os.makedirs(os.path.join(path_to_tmp_repo, 'x86_64/Packages'))
-    if not offline:
-        os.makedirs(os.path.join(path_to_tmp_repo, 'Source/SPackages'))
-
-    print("Link to latest dist-repo: %s" % os.readlink('%s/repos-dist/%s/latest' % (KOJI_ROOT_DIR, tag)))
-
-    # copy RPMs from koji
-    for f in glob.glob('%s/repos-dist/%s/latest/x86_64/Packages/*/*.rpm' % (KOJI_ROOT_DIR, tag)):
-        shutil.copy(f, os.path.join(path_to_tmp_repo, 'x86_64', 'Packages'))
-
-    if not offline:
-        # and source RPMs
-        for f in glob.glob('%s/repos-dist/%s/latest/src/Packages/*/*.rpm' % (KOJI_ROOT_DIR, tag)):
-            shutil.copy(f, os.path.join(path_to_tmp_repo, 'Source', 'SPackages'))
-
-    if offline:
-        # For offline update repos, in order to reduce the size, remove debuginfo packages
-        # and other big packages that are useless there.
-        delete_patterns = [
-            '*-debuginfo-*.rpm',
-            'xs-opam-repo-*.rpm', # big and only used for builds
-            'java-1.8.0-*.rpm', # old java, used to be pulled in by linstor
-        ]
-        for delete_pattern in delete_patterns:
-            subprocess.check_call([
-                'find', os.path.join(path_to_tmp_repo, 'x86_64', 'Packages'),
-                '-name', delete_pattern,
-                '-delete',
-            ])
-
-    # generate repodata and sign it
-    paths = [os.path.join(path_to_tmp_repo, 'x86_64')]
-    if not offline:
-        paths.append(os.path.join(path_to_tmp_repo, 'Source'))
-    for path in paths:
-        print("\n-- Generate repodata for %s" % path)
-        subprocess.check_call(['createrepo_c', path], stdout=subprocess.DEVNULL)
-        subprocess.check_call(['sign-file', os.path.join(path, 'repodata', 'repomd.xml')], stdout=subprocess.DEVNULL)
-
-    # Synchronize to our final repository:
-    # - add new RPMs
-    # - remove RPMs that are not present anymore (for tags in RELEASE_TAGS)
-    # - do NOT change the creation nor modification stamps for existing RPMs that have not been modified
-    #   (and usually there's no reason why they would have been modified without changing names)
-    #   => using -c and omitting -t
-    # - sync updated repodata
-    print("\n-- Syncing to final repository %s" % path_to_repo)
-    if not os.path.exists(path_to_repo):
-        os.makedirs(path_to_repo)
-    subprocess.check_call(['rsync', '-crlpi', '--delete-after', path_to_tmp_repo + '/', path_to_repo])
-    print()
-    shutil.rmtree(path_to_tmp_repo)
-
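The rsync flag choice at the end of write_repo matters enough that the comment spells it out; restated as a minimal, self-contained helper (the function name is invented), the semantics of each flag are:

    import subprocess

    def sync_repo(src, dst):
        # -c: compare by checksum, so unchanged RPMs are neither re-copied
        #     nor re-stamped on the destination (-t is deliberately omitted)
        # -r/-l/-p: recurse, preserve symlinks and permissions
        # -i: itemize the changes in the output
        # --delete-after: drop obsolete files only after new content is in place
        subprocess.check_call(['rsync', '-crlpi', '--delete-after', src + '/', dst])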
-def sign_unsigned_rpms(tag):
-    # get the list of RPMs not signed by us, by comparing the list of signed RPMs with the full list
-
-    # all RPMs for the tag
-    output = subprocess.check_output(['koji', 'list-tagged', tag, '--rpms']).decode()
-    rpms = set(output.strip().splitlines())
-
-    # only signed RPMs
-    # koji list-tagged v7.6-base --sigs | grep "^3fd3ac9e" | cut -c 10-
-    signed_rpms = set()
-    output = subprocess.check_output(['koji', 'list-tagged', tag, '--sigs']).decode()
-    for line in output.strip().splitlines():
-        try:
-            key, rpm = line.split(' ')
-        except ValueError:
-            # couldn't unpack the two values: no signature on this line
-            continue
-        if key == KEY_ID:
-            signed_rpms.add(rpm)
-
-    # diff and sort
-    unsigned_rpms = sorted(rpms.difference(signed_rpms))
-
-    if unsigned_rpms:
-        print("\nSigning unsigned RPMs first\n")
-
-    for rpm in unsigned_rpms:
-        sign_rpm(rpm + '.rpm')
-
-    for rpm in unsigned_rpms:
-        if rpm.endswith('.src'):
-            nvr = rpm[:-4]
-            # write the signed file to koji's own repositories
-            subprocess.check_call(['koji', 'write-signed-rpm', KEY_ID, nvr])
-
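The parsing in sign_unsigned_rpms assumes each `--sigs` line is "<key> <rpm>" with a single space, and that unsigned RPMs yield lines that don't split into exactly two fields (hence the ValueError path). For instance, with a made-up NVR:

    KEY_ID = "3fd3ac9e"                                       # same constant as in the script
    line = "3fd3ac9e xcp-emu-manager-1.1-1.xcpng8.2.x86_64"   # hypothetical --sigs line
    key, rpm = line.split(' ')
    assert key == KEY_ID and rpm.endswith('.x86_64')
    # sign_rpm() later appends '.rpm' to such names before downloading them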
-def atexit_remove_lock(lock_file):
-    os.unlink(lock_file)
-
-def main():
-    parser = argparse.ArgumentParser(description='Detect package changes in koji and update repository')
-    parser.add_argument('dest_dir', help='root directory of the destination repository')
-    parser.add_argument('u_dest_dir', help='root directory of the destination repository for user tags')
-    parser.add_argument('v_dest_dir', help='root directory of the destination repository for V-tags')
-    parser.add_argument('data_dir', help='directory where the script will write or read data from')
-    parser.add_argument('--quiet', action='store_true',
-                        help='do not output anything unless there are changes to be considered')
-    parser.add_argument('--modify-stable-base', action='store_true',
-                        help='allow modifying the base repository of a stable release')
-    args = parser.parse_args()
-    dest_dir = args.dest_dir
-    u_dest_dir = args.u_dest_dir
-    v_dest_dir = args.v_dest_dir
-    data_dir = args.data_dir
-    tmp_root_dir = os.path.join(data_dir, 'tmproot')
-    quiet = args.quiet
-
-    lock_file = os.path.join(data_dir, 'lock')
-
-    if os.path.exists(lock_file):
-        print("Lock file %s already exists. Aborting." % lock_file)
-        return
-    else:
-        open(lock_file, 'w').close()
-        atexit.register(atexit_remove_lock, lock_file)
-
-    global U_TAGS, V_TAGS
-    U_TAGS += subprocess.check_output(['koji', 'list-tags', 'v*.*-u-*']).decode().strip().splitlines()
-    V_TAGS += subprocess.check_output(['koji', 'list-tags', 'v*.*-v-*']).decode().strip().splitlines()
-
-    def dest_dir_for_tag(tag):
-        if tag in U_TAGS:
-            return u_dest_dir
-        if tag in V_TAGS:
-            return v_dest_dir
-        return dest_dir
-
-    def offline_repo_dir():
-        return os.path.join(v_dest_dir, 'offline')
-
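Note that the exists-then-create lock above leaves a small window between the check and the creation in which a second instance could pass the check too. If that ever matters, an atomic variant would look like this (a sketch assuming POSIX open semantics, not what the script actually did):

    import atexit
    import os

    def acquire_lock(lock_file):
        # O_CREAT | O_EXCL makes creation fail atomically if the file exists
        try:
            os.close(os.open(lock_file, os.O_CREAT | os.O_EXCL | os.O_WRONLY))
        except FileExistsError:
            return False
        atexit.register(os.unlink, lock_file)
        return True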
-    for version in VERSIONS:
-        for tag in TAGS + U_TAGS + V_TAGS:
-            if version_from_tag(tag) != version:
-                continue
-
-            needs_update = False
-
-            # get the current list of packages from koji for this tag
-            tag_builds_koji = subprocess.check_output(['koji', 'list-tagged', '--quiet', tag]).decode()
-
-            # read the latest known list of builds in the tag, if it exists
-            tag_builds_filepath = os.path.join(data_dir, "%s-builds.txt" % tag)
-            if os.path.exists(tag_builds_filepath):
-                with open(tag_builds_filepath, 'r') as f:
-                    tag_builds_txt = f.read()
-                if tag_builds_koji != tag_builds_txt:
-                    needs_update = True
-            else:
-                needs_update = True
-
-            msgs = ["\n*** %s" % tag]
-            if needs_update:
-                msgs.append("Repository update needed")
-
-                if tag in RELEASE_TAGS and version not in DEV_VERSIONS:
-                    if args.modify_stable_base:
-                        msgs.append("Modification of base repository for stable release %s " % version
-                                    + "allowed through the --modify-stable-base switch.")
-                    else:
-                        if not quiet:
-                            msgs.append("Not modifying base repository for stable release %s..."
-                                        % version)
-                            print('\n'.join(msgs))
-                        continue
-
-                print('\n'.join(msgs))
-
-                # sign the RPMs in the tag if needed
-                sign_unsigned_rpms(tag)
-
-                # export the RPMs from koji
-                print("\n-- Make koji write the repository for tag %s" % tag)
-                with_non_latest = [] if tag in RELEASE_TAGS else ['--non-latest']
-                sys.stdout.flush()
-                subprocess.check_call(['koji', 'dist-repo', tag, KEY_ID, '--with-src', '--noinherit'] + with_non_latest)
-
-                # write the repository to the appropriate destination directory for the tag
-                write_repo(tag, dest_dir_for_tag(tag), tmp_root_dir)
-
-                if tag in OFFLINE_TAGS:
-                    print("\n-- Make koji write the offline repository for tag %s" % tag)
-                    # Also generate a stripped repo for offline updates
-                    sys.stdout.flush()
-                    subprocess.check_call(['koji', 'dist-repo', tag, KEY_ID, '--noinherit'])
-                    write_repo(tag, offline_repo_dir(), tmp_root_dir, offline=True)
-
-                    # Wrap it up in a tarball
-                    offline_repo_path = build_path_to_repo(offline_repo_dir(), tag)
-                    offline_repo_path_parent = os.path.dirname(offline_repo_path)
-                    offline_tarball_path_prefix = os.path.join(
-                        offline_repo_path_parent,
-                        "xcpng-%s-offline-%s" % (version.replace('.', '_'), repo_name_from_tag(tag))
-                    )
-                    offline_tarball = "%s-%s.tar" % (offline_tarball_path_prefix, datetime.now().strftime("%Y%m%d"))
-                    print("\n-- Generate offline update tarball: %s" % offline_tarball)
-                    subprocess.check_call(['rm', '-f', offline_tarball])
-                    subprocess.check_call([
-                        'tar',
-                        '-cf', offline_tarball,
-                        '-C', offline_repo_path_parent,
-                        os.path.basename(offline_repo_path)
-                    ])
-
-                    # Point the "latest" symlink at the tarball
-                    latest_symlink = "%s-latest.tar" % offline_tarball_path_prefix
-                    if os.path.exists(latest_symlink):
-                        os.unlink(latest_symlink)
-                    # relative symlink
-                    os.symlink(os.path.basename(offline_tarball), latest_symlink)
-
-                    # And remove older tarballs
-                    tarballs = glob.glob("%s-*.tar" % offline_tarball_path_prefix)
-                    tarballs.remove(latest_symlink)
-                    tarballs_sorted_by_mtime = sorted(tarballs, key=os.path.getmtime, reverse=True)
-                    # Remove all but the latest three tarballs
-                    for old_tarball in tarballs_sorted_by_mtime[3:]:
-                        print("Removing old tarball: %s" % old_tarball)
-                        os.remove(old_tarball)
-
-                    # Update SHA256SUMS
-                    subprocess.check_call(
-                        'sha256sum *.tar > SHA256SUMS',
-                        shell=True,
-                        cwd=offline_repo_path_parent
-                    )
-
-                    # And sign it
-                    subprocess.check_call(
-                        ['sign-file', 'SHA256SUMS'],
-                        cwd=offline_repo_path_parent,
-                        stdout=subprocess.DEVNULL
-                    )
-
-                # update the stored build list
-                with open(tag_builds_filepath, 'w') as f:
-                    f.write(tag_builds_koji)
-            elif not quiet:
-                print('\n'.join(msgs))
-                print("Already up to date")
-
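Before the final section below writes the .repo files, a note on invocation: the four positional arguments map to directories roughly like these (all paths hypothetical, shown as the argv the script would receive):

    argv = ['sync_repo_from_koji.py',
            '/srv/mirror',         # dest_dir: main repositories
            '/srv/mirror-user',    # u_dest_dir: repositories for user tags
            '/srv/mirror-vates',   # v_dest_dir: V-tag repositories and offline/ tarballs
            '/var/lib/reposync',   # data_dir: lock file, tmproot/, <tag>-builds.txt
            '--quiet']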
-    # Write repo files for U_TAGS
-    for version in VERSIONS:
-        contents = "# User repositories from XCP-ng developers. Meant for testing and troubleshooting purposes.\n"
-        last_tag = None
-        for tag in U_TAGS:
-            if version_from_tag(tag) != version:
-                continue
-
-            last_tag = tag
-            repo_name = repo_name_from_tag(tag)
-            repo_path_https = build_path_to_repo(USER_REPO_HTTPS, tag)
-            contents += """[xcp-ng-{repo_name}]
-name=xcp-ng-{repo_name}
-baseurl={repo_path_https}/x86_64/
-enabled=0
-gpgcheck=1
-repo_gpgcheck=1
-metadata_expire=0
-gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-xcpng
-
-""".format(repo_name=repo_name, repo_path_https=repo_path_https)
-
-        if last_tag is not None:
-            repo_filename = os.path.join(
-                build_path_to_version(dest_dir_for_tag(last_tag), last_tag),
-                'xcpng-users.repo'
-            )
-            with open(repo_filename, 'w') as f:
-                f.write(contents)
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts/koji/untag_lone_builds.py b/scripts/koji/untag_lone_builds.py
deleted file mode 100755
index f01b26a..0000000
--- a/scripts/koji/untag_lone_builds.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python3
-
-import koji
-
-config = koji.read_config("koji")
-s = koji.ClientSession('https://kojihub.xcp-ng.org', config)
-s.ssl_login(config['cert'], None, config['serverca'])
-
-# We look for builds tagged with built-by-xcp-ng and no other tag.
-# These are builds that we don't need in our history and that we could
-# delete to recover some disk space.
-# To do so, we just untag them, so that koji-gc later marks them for deletion
-# if all the conditions for that are met.
-tag = 'built-by-xcp-ng'
-tagged = s.listTagged(tag)
-result = []
-with s.multicall() as m:
-    result = [m.listTags(binfo) for binfo in tagged]
-loners = []
-for binfo, tinfos in zip(tagged, result):
-    tags = [tinfo['name'] for tinfo in tinfos.result]
-    if len(tags) == 1:
-        loners.append((binfo['id'], binfo['nvr']))
-
-print("The following packages built by XCP-ng don't belong to any tag other than %s" % tag)
-print("and will be removed from it so that they can be garbage-collected later.")
-for build_id, nvr in loners:
-    print("Untagging build {} ({}) from {}.".format(build_id, nvr, tag))
-    s.untagBuild(tag, build_id)
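untag_lone_builds.py leans on koji's client-side multicall batching: inside the `with s.multicall() as m:` block, m.listTags() only queues calls and returns placeholder objects whose .result becomes available once the block exits, so the hub is hit with one batched request instead of one round-trip per build. A minimal standalone sketch of the same pattern (hub URL and tag invented):

    import koji

    session = koji.ClientSession('https://kojihub.example.org')
    builds = session.listTagged('some-tag')
    with session.multicall() as m:
        calls = [m.listTags(b) for b in builds]   # queued, not executed yet
    # the single batched request was sent when the block closed
    tags_per_build = [[t['name'] for t in c.result] for c in calls]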
diff --git a/scripts/koji/update_vendor_tags.py b/scripts/koji/update_vendor_tags.py
deleted file mode 100755
index 2df3ad8..0000000
--- a/scripts/koji/update_vendor_tags.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python3
-
-import argparse
-import json
-import os
-import re
-import subprocess
-from subprocess import DEVNULL
-
-XS_buildhosts = [
-    '1b68968c4e4e',
-    '1f202679a186',
-    'ebbce0ae9691',
-    'f1152ddb2921',
-    'f7d02093adae',
-    'ba090b49143f',
-    'a50e0282e69b',
-    'eff2d405bb69',
-    'b10e2867ff51',
-]
-
-XCPNG_buildhosts = [
-    'koji.xcp-ng.org'
-]
-
-ALLOWED_TAGS = [
-    'built-by-xcp-ng',
-    'built-by-xs',
-    'built-by-centos',
-    'built-by-epel',
-    'built-by-linbit'
-]
-
-def build_has_tag(build, tag):
-    out = subprocess.check_output(['koji', 'buildinfo', build]).decode()
-    matches = re.search(r'^Tags: .*\b%s\b' % tag, out, re.MULTILINE)
-    return matches is not None
-
-def update_vendor_tag_for_build(build, is_bootstrap=False):
-    # get the first binary RPM found for the build
-    output = subprocess.check_output(['koji', 'buildinfo', build]).decode()
-    srpm_path = ""
-    rpm_path = ""
-    for line in output.splitlines():
-        first_element = line.split()[0]
-        if re.match(r'.+/src/.+\.src\.rpm', first_element):
-            srpm_path = first_element
-        elif re.match(r'.+\.rpm', first_element):
-            rpm_path = first_element
-
-    if not rpm_path:
-        if is_bootstrap:
-            if not srpm_path:
-                raise Exception("No SRPM found for build %s" % build)
-            # accept the SRPM in place of the missing binary RPM
-            rpm_path = srpm_path
-        else:
-            raise Exception("No RPM found for build %s" % build)
-
-    # get vendor information
-    output = subprocess.check_output(
-        ['rpm', '-qp', rpm_path, '--qf', '%{vendor};;%{buildhost}'], stderr=DEVNULL
-    ).decode()
-    vendor, buildhost = output.split(';;')
-    match = re.search('/packages/([^/]+)/', rpm_path)
-    assert match is not None
-    package = match.group(1)
-
-    tag = None
-    if buildhost in XS_buildhosts:
-        tag = 'built-by-xs'
-    elif buildhost in XCPNG_buildhosts:
-        tag = 'built-by-xcp-ng'
-    elif vendor == 'Citrix Systems, Inc.':
-        tag = 'built-by-xs'
-    elif vendor in ('XCP-ng', 'XCP-ng community'):
-        tag = 'built-by-xcp-ng'
-    elif vendor == 'CentOS':
-        tag = 'built-by-centos'
-    elif vendor == 'Fedora Project':
-        tag = 'built-by-epel'
-
-    if tag is None and is_bootstrap:
-        tag = 'built-by-xcp-ng'
-
-    print("%s: %s, %s => %s" % (os.path.basename(rpm_path), vendor, buildhost, tag))
-
-    if tag is None:
-        # maybe the build already has a tag, in which case we won't do anything
-        for allowed_tag in ALLOWED_TAGS:
-            if build_has_tag(build, allowed_tag):
-                print("Build %s already has tag %s." % (build, allowed_tag))
-                return
-        # else raise
-        raise Exception("Vendor and buildhost unknown: %s, %s" % (vendor, buildhost))
-
-    subprocess.check_call(['koji', 'add-pkg', tag, package, '--owner=kojiadmin']) # otherwise we can't tag the build
-    # tag-build fails if the build already has the tag, so check first
-    if not build_has_tag(build, tag):
-        subprocess.check_call(['koji', 'tag-build', tag, build])
-    else:
-        print("Build %s already has tag %s." % (build, tag))
-
-def main():
-    parser = argparse.ArgumentParser(description='Update vendor tags for builds without one')
-    parser.add_argument('data_dir', help='directory where the script will write or read data from')
-    parser.add_argument('--bootstrap', action='store_true',
-                        help='accept unknown vendors and builds without a binary RPM')
-    parser.add_argument('--quiet', action='store_true',
-                        help='do not output anything unless there are changes to be considered')
-    args = parser.parse_args()
-    quiet = args.quiet
-
-    data_dir = os.path.join(args.data_dir, 'vendor_tags_update')
-    if os.path.isdir(args.data_dir) and not os.path.exists(data_dir):
-        print("Creating %s" % data_dir)
-        os.mkdir(data_dir)
-
-    # results in a dict similar to this: {"id": 2690, "ts": 1543249294.02143}
-    last_event = json.loads(subprocess.check_output(['koji', 'call', 'getLastEvent']).decode().replace("'", '"'))
-
-    # read the last known event from our data directory
-    last_sync_event_filepath = os.path.join(data_dir, 'last_sync_event')
-
-    if os.path.exists(last_sync_event_filepath):
-        with open(last_sync_event_filepath) as f:
-            last_sync_event = json.loads(f.read())
-
-        if last_sync_event == last_event:
-            if not quiet:
-                print("No update needed.")
-            return
-        else:
-            timestamp = last_sync_event['ts']
-    else:
-        timestamp = 0 # first update ever
-
-    # get the list of builds since the last event
-    output = subprocess.check_output(['koji', 'list-builds', '--quiet', '--state=COMPLETE',
-                                      '--type=rpm', '--after=%s' % timestamp]).decode()
-
-    for line in output.splitlines():
-        build = line.split()[0]
-        update_vendor_tag_for_build(build, args.bootstrap)
-
-    # store the last update event info
-    with open(last_sync_event_filepath, 'w') as f:
-        f.write(json.dumps(last_event))
-
-if __name__ == "__main__":
-    main()
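One fragile spot in update_vendor_tags.py: `koji call getLastEvent` prints a Python dict repr, and converting it to JSON with `.replace("'", '"')` breaks as soon as a value contains a quote. Since the output is a Python literal, ast.literal_eval parses it directly (a sketch; depending on the version, the koji CLI may also offer `koji call --json-output`):

    import ast
    import subprocess

    out = subprocess.check_output(['koji', 'call', 'getLastEvent']).decode()
    last_event = ast.literal_eval(out)   # e.g. {'id': 2690, 'ts': 1543249294.02143}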