|
| 1 | +#!/usr/bin/env python3 |
| 2 | + |
| 3 | +from pathlib import Path |
| 4 | +from fabric.api import prefix, run, settings, execute # type: ignore |
| 5 | +import sys |
| 6 | + |
| 7 | +import fabric_cfg |
| 8 | +from ci_variables import ci_env, remote_fsim_dir, remote_cy_dir |
| 9 | +from github_common import move_and_commit_gh_file |
| 10 | +from utils import print_last_firesim_log |
| 11 | + |
| 12 | +from typing import List, Tuple |
| 13 | + |
# Base URL for raw files in the GitHub repo that receives the built bitstreams;
# a "<commit-sha>/<file-name>" suffix is appended once files are committed.
URL_PREFIX = f"https://raw.githubusercontent.com/{ci_env['GH_ORG']}/{ci_env['GH_REPO']}"

# Default build directory used on hosts that do NOT request a unique build dir
# (see the per-host tuples in build_hosts below).
shared_build_dir = "/scratch/buildbot/FIRESIM_BUILD_DIR"

# Paths of FireSim build-recipe / hwdb files, relative to a Chipyard checkout.
from_chipyard_firesim_build_recipes = "sims/firesim-staging/sample_config_build_recipes.yaml"
from_chipyard_firesim_hwdb = ci_env['CHIPYARD_HWDB_PATH']
# this must point to build recipe in clone setup for firesim s.t. the makefrag it points to itself points to the working clone
setup_clone_firesim_build_recipes = f"{remote_cy_dir}/{from_chipyard_firesim_build_recipes}"
workspace_firesim_hwdb = f"{ci_env['GITHUB_WORKSPACE']}/{from_chipyard_firesim_hwdb}"
# Fail fast if the CI checkout layout is not what this script expects.
assert Path(setup_clone_firesim_build_recipes).exists()
assert Path(workspace_firesim_hwdb).exists()

# host assumptions:
# - firesim's machine-launch-script requirements are already installed (i.e. sudo scripts on all machines)
# - XILINX_VITIS, XILINX_XRT, XILINX_VIVADO are setup (in environment - LD_LIBRARY_PATH/PATH/etc)
# priority == roughly the more powerful and available
# tuple layout:
#   (ipaddr, buildtool:version, use unique build dir, unique build dir path,
#    priority (0 is highest) (unused by code but used to track which machine has most resources))
build_hosts = [
    ( "localhost", "vivado:2022.1", False, "", 0),
    ("buildbot1@as4", "vivado:2022.1", True, "/scratch/buildbot1/FIRESIM_BUILD_DIR", 0),
    ("buildbot2@as4", "vivado:2022.1", True, "/scratch/buildbot2/FIRESIM_BUILD_DIR", 0),
    ( "a17", "vitis:2022.1", False, "", 0),
    ("buildbot1@a17", "vitis:2022.1", True, "/scratch/buildbot1/FIRESIM_BUILD_DIR", 0),
    ("buildbot2@a17", "vitis:2021.1", True, "/scratch/buildbot2/FIRESIM_BUILD_DIR", 0),
    ("buildbot3@a17", "vitis:2021.1", True, "/scratch/buildbot3/FIRESIM_BUILD_DIR", 0),
    ("buildbot4@a17", "vitis:2021.1", True, "/scratch/buildbot4/FIRESIM_BUILD_DIR", 0),
    ( "firesim1", "vitis:2021.1", False, "", 1),
    ( "jktgz", "vivado:2023.1", False, "", 2),
    ( "jktqos", "vivado:2023.1", False, "", 2),
]
| 44 | + |
def positive_hash(obj) -> int:
    """Return a deterministic-within-one-run, non-negative hash of *obj*.

    Python's built-in hash() may be negative; reducing it modulo
    2**sys.hash_info.width maps it into [0, 2**width), which is safe to
    embed in generated config file names.
    """
    # NOTE: parameter renamed from `any`, which shadowed the builtin any().
    return hash(obj) % 2**sys.hash_info.width
| 47 | + |
# add builds to run into a config_build.yaml
def modify_config_build(in_config_build_yaml: str, out_config_build_yaml: str, hwdb_entries_to_gen: List[str]) -> None:
    """Derive *out_config_build_yaml* from *in_config_build_yaml*.

    Two rewrites are applied:
      1. comment out the sample "- midas..." builds and point
         default_build_dir at the shared build directory, then
      2. insert one "- <hwdb>" line per requested build under the
         uncommented "builds_to_run:" key.

    NOTE: fixed the original's unclosed file handles (now uses context
    managers) and dropped a needless `global shared_build_dir` (the name
    is only read, never rebound).
    """
    # First pass: comment out old lines / set the build dir.
    with open(in_config_build_yaml) as in_f:
        build_yaml_lines = in_f.read().split("\n")
    with open(out_config_build_yaml, "w") as byf:
        for line in build_yaml_lines:
            if "- midas" in line:
                # comment out midasexample lines
                byf.write("# " + line + '\n')
            elif 'default_build_dir:' in line:
                byf.write(line.replace('null', shared_build_dir) + '\n')
            else:
                byf.write(line + '\n')

    # Second pass (re-reads the file just written): add new builds to run.
    with open(out_config_build_yaml) as out_f:
        build_yaml_lines = out_f.read().split("\n")
    with open(out_config_build_yaml, "w") as byf:
        for line in build_yaml_lines:
            if "builds_to_run:" in line and "#" not in line:
                byf.write(line + '\n')
                # indent the inserted list entries 4 spaces past the key
                start_space_idx = line.index('b')
                for hwdb_to_gen in hwdb_entries_to_gen:
                    byf.write((' ' * (start_space_idx + 4)) + f"- {hwdb_to_gen}" + '\n')
            else:
                byf.write(line + '\n')
| 75 | + |
# add hosts for builds to run into a config_build.yaml
def add_host_list(in_build_yaml: str, out_build_yaml: str, hostlist: List[Tuple[str, bool, str]]) -> None:
    """Write *out_build_yaml* as *in_build_yaml* with the build farm hosts
    in *hostlist* inserted under the uncommented "build_farm_hosts:" key.

    Each hostlist tuple is (host, use_unique_build_dir, unique_build_dir);
    hosts with a unique build dir get a nested "override_build_dir:" entry.
    Any pre-existing uncommented "- localhost" entry is commented out.

    NOTE: fixed the original's unclosed file handles (now uses context
    managers).
    """
    with open(in_build_yaml) as in_f:
        build_yaml_lines = in_f.read().split("\n")
    with open(out_build_yaml, "w") as byf:
        for line in build_yaml_lines:
            if "build_farm_hosts:" in line and "#" not in line:
                byf.write(line + '\n')
                # indent entries 4 (and override keys 8) spaces past the key
                start_space_idx = line.index('b')
                for host, use_unique, unique_build_dir in hostlist:
                    if use_unique:
                        byf.write((' ' * (start_space_idx + 4)) + f"- {host}:" + '\n')
                        byf.write((' ' * (start_space_idx + 8)) + f"override_build_dir: {unique_build_dir}" + '\n')
                    else:
                        byf.write((' ' * (start_space_idx + 4)) + f"- {host}" + '\n')
            elif '- localhost' in line and '#' not in line:
                byf.write("# " + line + '\n')
            else:
                byf.write(line + '\n')
| 94 | + |
# replace hwdb entry in config_hwdb.yaml with a link
def replace_in_hwdb(hwdb_file: str, hwdb_entry_name: str, link: str) -> None:
    """Rewrite, in place, the "bitstream_tar:" value of *hwdb_entry_name*
    inside *hwdb_file* to point at *link*.

    Raises Exception if the line directly after the matched entry is not a
    bitstream_tar line, or if the entry's URL was never replaced.

    NOTE: fixed the original's unclosed file handles (now uses context
    managers); logic is otherwise unchanged.
    """
    # replace the sample hwdb's bit line only
    with open(hwdb_file) as in_f:
        sample_hwdb_lines = in_f.read().split('\n')

    with open(hwdb_file, "w") as sample_hwdb_file:
        match_bit = False
        for line in sample_hwdb_lines:
            # NOTE(review): this is a substring match on the first token, so an
            # entry name contained in another entry's key would also match —
            # assumes entry names are not substrings of one another; confirm.
            if hwdb_entry_name in line.strip().split(' ')[0].replace(':', ''):
                # hwdb entry matches key name
                match_bit = True
                sample_hwdb_file.write(line + '\n')
            elif match_bit:
                if "bitstream_tar:" in line.strip().split(' ')[0]:
                    # only replace this bit
                    match_bit = False

                    new_bit_line = f"    bitstream_tar: {link}"
                    print(f"Replacing {line.strip()} with {new_bit_line}")

                    # print out the bit line
                    sample_hwdb_file.write(new_bit_line + '\n')
                else:
                    raise Exception("::ERROR:: Something went wrong")
            else:
                # if no match print other lines
                sample_hwdb_file.write(line + '\n')

    if match_bit:
        raise Exception(f"::ERROR:: Unable to replace URL for {hwdb_entry_name} in {hwdb_file}")

    # strip newlines from end of file
    with open(hwdb_file, "r+") as sample_hwdb_file:
        content = sample_hwdb_file.read().rstrip('\n')
        sample_hwdb_file.seek(0)
        sample_hwdb_file.write(content)
        sample_hwdb_file.truncate()
| 134 | + |
def run_local_buildbitstreams():
    """Build all CI FPGA bitstreams on the local build farm and publish them.

    Runs `firesim buildbitstream` (via fabric, inside the FireSim checkout
    at remote_fsim_dir), commits each resulting bitstream tar to the GitHub
    repo configured in ci_env, and rewrites the workspace hwdb file so its
    bitstream_tar entries point at the uploaded URLs.
    """

    global workspace_firesim_hwdb

    # All commands below run from the FireSim checkout with its env sourced.
    with prefix(f"cd {remote_fsim_dir}"):
        with prefix('source sourceme-manager.sh --skip-ssh-setup'):

            def build_upload(build_yaml: str, hwdb_entries: List[str], platforms: List[str]) -> List[str]:
                # Run buildbitstream with build_yaml, then for each built hwdb
                # entry commit its bitstream tar to GitHub. Returns the raw
                # download links, in hwdb_entries order (zipped with platforms).
                global URL_PREFIX
                global setup_clone_firesim_build_recipes

                print(f"Printing {build_yaml}...")
                run(f"cat {build_yaml}")

                rc = 0
                with settings(warn_only=True):
                    # pty=False needed to avoid issues with screen -ls stalling in fabric
                    build_result = run(f"timeout 10h firesim buildbitstream -b {build_yaml} -r {setup_clone_firesim_build_recipes} --forceterminate", pty=False)
                    rc = build_result.return_code

                if rc != 0:
                    print(f"Buildbitstream failed.")
                    print_last_firesim_log(200)
                    raise Exception(f"Failed with code: {rc}")

                # buildbitstream drops one hwdb snippet per entry here
                hwdb_entry_dir = f"{remote_fsim_dir}/deploy/built-hwdb-entries"
                links = []

                for hwdb_entry_name, platform in zip(hwdb_entries, platforms):
                    hwdb_entry = f"{hwdb_entry_dir}/{hwdb_entry_name}"

                    print(f"Printing {hwdb_entry}...")
                    run(f"cat {hwdb_entry}")

                    with open(hwdb_entry, 'r') as hwdbef:
                        lines = hwdbef.readlines()
                        for line in lines:
                            if "bitstream_tar:" in line:
                                file_path = Path(line.strip().split(' ')[1].replace('file://', '')) # 2nd element (i.e. the path) (no URI)
                                file_name = f"{platform}/{hwdb_entry_name}.tar.gz"
                                # log the checksum so uploads can be audited later
                                run(f"shasum -a 256 {file_path}")
                                sha = move_and_commit_gh_file(file_path, file_name, f"{ci_env['GITHUB_WORKSPACE']}/{ci_env['GH_REPO']}", f"Committing files from {ci_env['GITHUB_REPOSITORY']}:{ci_env['GITHUB_SHA']}")
                                link = f"{URL_PREFIX}/{sha}/{file_name}"
                                print(f"Uploaded bitstream_tar for {hwdb_entry_name} to {link}")
                                links.append(link)
                                # only the first bitstream_tar line per entry matters
                                break

                return links


            def do_builds(batch_hwdbs, hwdb_file_to_replace):
                # Map each (hwdb, platform, buildtool:version) in batch_hwdbs to
                # a free build host with a matching tool version, run the batch,
                # and rewrite hwdb_file_to_replace with the uploaded links.
                assert len(build_hosts) >= len(batch_hwdbs), f"Need at least {len(batch_hwdbs)} build_hosts to run builds"

                # map hwdb tuple to build_hosts (first-come, first-served: each
                # host is used for at most one hwdb per batch)
                hwdb_2_host = {}
                for hwdb, platform, buildtool_version in batch_hwdbs:
                    for host_name, host_buildtool_version, host_use_unique, host_unique_build_dir, host_prio in build_hosts:
                        if host_buildtool_version == buildtool_version:
                            if not host_name in [h[0] for h in hwdb_2_host.values()]:
                                hwdb_2_host[hwdb] = (host_name, host_use_unique, host_unique_build_dir)
                                break

                assert len(hwdb_2_host) == len(batch_hwdbs), "Unable to map build_hosts to hwdb build"

                hwdbs_ordered = [hwdb[0] for hwdb in batch_hwdbs]
                platforms_ordered = [hwdb[1] for hwdb in batch_hwdbs]
                hosts_ordered = hwdb_2_host.values()

                print("Mappings")
                print(f"HWDBS: {hwdbs_ordered}")
                print(f"Platforms: {platforms_ordered}")
                print(f"build_hosts: {hosts_ordered}")

                # hash-derived names keep per-batch configs from colliding
                og_build_yaml = f"{remote_fsim_dir}/deploy/config_build.yaml"
                intermediate_build_yaml = f"{remote_fsim_dir}/deploy/config_build_{positive_hash(tuple(hwdbs_ordered))}.yaml"
                final_build_yaml = f"{remote_fsim_dir}/deploy/config_build_{positive_hash(tuple(hosts_ordered))}.yaml"

                modify_config_build(og_build_yaml, intermediate_build_yaml, hwdbs_ordered)
                add_host_list(intermediate_build_yaml, final_build_yaml, hosts_ordered)
                links = build_upload(final_build_yaml, hwdbs_ordered, platforms_ordered)
                for hwdb, link in zip(hwdbs_ordered, links):
                    replace_in_hwdb(hwdb_file_to_replace, hwdb, link)

                print(f"Printing {hwdb_file_to_replace}...")
                run(f"cat {hwdb_file_to_replace}")

                # wipe old data
                print("Cleaning old build directories")
                for host_name, host_use_unique, host_unique_build_dir in hosts_ordered:
                    if host_use_unique:
                        run(f"ssh {host_name} rm -rf {host_unique_build_dir}")
                    else:
                        run(f"ssh {host_name} rm -rf {shared_build_dir}")

            # note: next two statements can be duplicated to run different builds in phases
            # i.e. run 4 agfis in 1st phase, then 6 in next

            # order of following list roughly corresponds to build host to use.
            # i.e. if 1st hwdb in list wants a host with V0 of tools, it will get the 1st host with V0 of tools
            # in the build_hosts list

            # hwdb_entry_name, platform_name, buildtool:version
            batch_hwdbs_in = [
                # hwdb's to verify FPGA builds

                # TODO: disable due to not having xrt installed on localhost
                #("vitis_firesim_rocket_singlecore_no_nic", "vitis", "vitis:2022.1"),

                ("nitefury_firesim_rocket_singlecore_no_nic", "rhsresearch_nitefury_ii", "vitis:2022.1"),
                ("alveo_u200_firesim_rocket_singlecore_no_nic", "xilinx_alveo_u200", "vitis:2021.1"),
                ("alveo_u250_firesim_rocket_singlecore_no_nic", "xilinx_alveo_u250", "vitis:2021.1"),
                ("alveo_u280_firesim_rocket_singlecore_no_nic", "xilinx_alveo_u280", "vitis:2021.1"),

                # TODO: disable due to not having a license
                #("xilinx_vcu118_firesim_rocket_singlecore_4GB_no_nic", "xilinx_vcu118", "vivado:2023.1"),

                # extra hwdb's to run CI with
                ("alveo_u250_firesim_rocket_quadcore_no_nic", "xilinx_alveo_u250", "vivado:2022.1"),
                ("alveo_u250_firesim_boom_singlecore_no_nic", "xilinx_alveo_u250", "vivado:2022.1"),
                ("alveo_u250_firesim_rocket_singlecore_nic", "xilinx_alveo_u250", "vivado:2022.1"),

                # extra hwdb's
                ("alveo_u250_firesim_gemmini_rocket_singlecore_no_nic", "xilinx_alveo_u250", "vitis:2021.1"),
            ]

            # replace hwdb entries in workspace area
            do_builds(batch_hwdbs_in, workspace_firesim_hwdb)
| 263 | + |
if __name__ == "__main__":
    # fabric execute() runs the task from localhost; the individual builds
    # are then dispatched to the build hosts over ssh by firesim itself.
    execute(run_local_buildbitstreams, hosts=["localhost"])
0 commit comments