
Commit e3ecf6d

Merge remote-tracking branch 'origin/main' into obus_switch
2 parents: 3c657aa + bdf3723

260 files changed: +14737 / -3303 lines


.gitattributes

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
-conda-reqs/conda-lock-reqs/conda-requirements-esp-tools-linux-64.conda-lock.yml linguist-generated=true
+conda-reqs/conda-lock-reqs/conda-requirements-riscv-tools-linux-64-lean.conda-lock.yml linguist-generated=true
 conda-reqs/conda-lock-reqs/conda-requirements-riscv-tools-linux-64.conda-lock.yml linguist-generated=true

.github/PULL_REQUEST_TEMPLATE.md

Lines changed: 9 additions & 0 deletions
@@ -42,3 +42,12 @@ Provide a brief description of the PR immediately below this comment, if the tit
 - [ ] (If applicable) Did you add a test demonstrating the PR?
 <!-- Do this if this PR is a bugfix that should be applied to the latest release -->
 - [ ] (If applicable) Did you mark the PR as `Please Backport`?
+
+
+**CI Help**:
+Add the following labels to modify the CI for a set of features.
+Generally, an added label only affects subsequent changes to the PR (i.e. new commits, force pushing, closing/reopening).
+See `ci:*` for the full list of labels:
+- `ci:fpga-deploy` - Run FPGA-based E2E testing
+- `ci:local-fpga-buildbitstream-deploy` - Build local FPGA bitstreams for platforms that are released
+- `ci:disable` - Disable CI
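As a rough illustration (not part of this commit), a CI helper script could detect these labels from the GitHub Actions webhook payload exposed through GITHUB_EVENT_PATH, the same variable recorded by the ci_variables.py file added later in this commit. The script and function names below are hypothetical; only the payload structure (pull_request.labels[].name) is standard GitHub Actions behavior.

#!/usr/bin/env python3
# Hypothetical sketch: read the ci:* labels off the PR that triggered a workflow.
# Assumes the standard GitHub Actions webhook payload at GITHUB_EVENT_PATH.
import json
import os

def pr_ci_labels() -> set:
    event_path = os.environ.get('GITHUB_EVENT_PATH')
    if event_path is None:
        return set()  # not running under GitHub Actions
    with open(event_path) as f:
        event = json.load(f)
    labels = event.get('pull_request', {}).get('labels', [])
    return {label['name'] for label in labels if label['name'].startswith('ci:')}

if __name__ == "__main__":
    if 'ci:disable' in pr_ci_labels():
        print("CI disabled for this PR")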

.github/actions/run-tests/action.yml

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@ runs:
 - name: Init submodules (since only the RTL is cached)
   run: |
     conda activate ${{ env.conda-env-name-no-time }}-$(date --date "${{ env.workflow-timestamp }}" +%Y%m%d)
+    git submodule sync
     ./scripts/init-submodules-no-riscv-tools.sh
   shell: bash -leo pipefail {0}

Lines changed: 265 additions & 0 deletions
@@ -0,0 +1,265 @@
#!/usr/bin/env python3

from pathlib import Path
from fabric.api import prefix, run, settings, execute # type: ignore
import sys

import fabric_cfg
from ci_variables import ci_env, remote_fsim_dir, remote_cy_dir
from github_common import move_and_commit_gh_file
from utils import print_last_firesim_log

from typing import List, Tuple

URL_PREFIX = f"https://raw.githubusercontent.com/{ci_env['GH_ORG']}/{ci_env['GH_REPO']}"

shared_build_dir = "/scratch/buildbot/FIRESIM_BUILD_DIR"

from_chipyard_firesim_build_recipes = "sims/firesim-staging/sample_config_build_recipes.yaml"
from_chipyard_firesim_hwdb = ci_env['CHIPYARD_HWDB_PATH']
# this must point to the build recipe in the FireSim clone set up for CI, s.t. the makefrag it points to itself points to the working clone
setup_clone_firesim_build_recipes = f"{remote_cy_dir}/{from_chipyard_firesim_build_recipes}"
workspace_firesim_hwdb = f"{ci_env['GITHUB_WORKSPACE']}/{from_chipyard_firesim_hwdb}"
assert Path(setup_clone_firesim_build_recipes).exists()
assert Path(workspace_firesim_hwdb).exists()

# host assumptions:
# - firesim's machine-launch-script requirements are already installed (i.e. sudo scripts on all machines)
# - XILINX_VITIS, XILINX_XRT, XILINX_VIVADO are set up (in the environment - LD_LIBRARY_PATH/PATH/etc.)
# priority == roughly the more powerful and available
# (ipaddr, buildtool:version, use unique build dir, unique build dir path, priority (0 is highest; unused by the code but used to track which machine has the most resources))
build_hosts = [
    ( "localhost", "vivado:2022.1", False, "", 0),
    ("buildbot1@as4", "vivado:2022.1", True, "/scratch/buildbot1/FIRESIM_BUILD_DIR", 0),
    ("buildbot2@as4", "vivado:2022.1", True, "/scratch/buildbot2/FIRESIM_BUILD_DIR", 0),
    ( "a17", "vitis:2022.1", False, "", 0),
    ("buildbot1@a17", "vitis:2022.1", True, "/scratch/buildbot1/FIRESIM_BUILD_DIR", 0),
    ("buildbot2@a17", "vitis:2021.1", True, "/scratch/buildbot2/FIRESIM_BUILD_DIR", 0),
    ("buildbot3@a17", "vitis:2021.1", True, "/scratch/buildbot3/FIRESIM_BUILD_DIR", 0),
    ("buildbot4@a17", "vitis:2021.1", True, "/scratch/buildbot4/FIRESIM_BUILD_DIR", 0),
    ( "firesim1", "vitis:2021.1", False, "", 1),
    ( "jktgz", "vivado:2023.1", False, "", 2),
    ( "jktqos", "vivado:2023.1", False, "", 2),
]

def positive_hash(any) -> int:
    return hash(any) % 2**sys.hash_info.width

# add builds to run into a config_build.yaml
def modify_config_build(in_config_build_yaml, out_config_build_yaml, hwdb_entries_to_gen: List[str]) -> None:
    global shared_build_dir

    # comment out old lines
    build_yaml_lines = open(in_config_build_yaml).read().split("\n")
    with open(out_config_build_yaml, "w") as byf:
        for line in build_yaml_lines:
            if "- midas" in line:
                # comment out midasexample lines
                byf.write("# " + line + '\n')
            elif 'default_build_dir:' in line:
                byf.write(line.replace('null', shared_build_dir) + '\n')
            else:
                byf.write(line + '\n')

    # add new builds to run
    build_yaml_lines = open(out_config_build_yaml).read().split("\n")
    with open(out_config_build_yaml, "w") as byf:
        for line in build_yaml_lines:
            if "builds_to_run:" in line and not "#" in line:
                byf.write(line + '\n')
                start_space_idx = line.index('b')
                for hwdb_to_gen in hwdb_entries_to_gen:
                    byf.write((' ' * (start_space_idx + 4)) + f"- {hwdb_to_gen}" + '\n')
            else:
                byf.write(line + '\n')

# add hosts for builds to run into a config_build.yaml
def add_host_list(in_build_yaml: str, out_build_yaml: str, hostlist: List[Tuple[str, bool, str]]) -> None:
    build_yaml_lines = open(in_build_yaml).read().split("\n")
    with open(out_build_yaml, "w") as byf:
        for line in build_yaml_lines:
            if "build_farm_hosts:" in line and not "#" in line:
                byf.write(line + '\n')
                start_space_idx = line.index('b')
                for host, use_unique, unique_build_dir in hostlist:
                    if use_unique:
                        byf.write((' ' * (start_space_idx + 4)) + f"- {host}:" + '\n')
                        byf.write((' ' * (start_space_idx + 8)) + f"override_build_dir: {unique_build_dir}" + '\n')
                    else:
                        byf.write((' ' * (start_space_idx + 4)) + f"- {host}" + '\n')
            elif '- localhost' in line and not '#' in line:
                byf.write("# " + line + '\n')
            else:
                byf.write(line + '\n')

# replace hwdb entry in config_hwdb.yaml with a link
def replace_in_hwdb(hwdb_file: str, hwdb_entry_name: str, link: str) -> None:
    # replace the sample hwdb's bit line only
    sample_hwdb_lines = open(hwdb_file).read().split('\n')

    with open(hwdb_file, "w") as sample_hwdb_file:
        match_bit = False
        for line in sample_hwdb_lines:
            if hwdb_entry_name in line.strip().split(' ')[0].replace(':', ''):
                # hwdb entry matches key name
                match_bit = True
                sample_hwdb_file.write(line + '\n')
            elif match_bit == True:
                if ("bitstream_tar:" in line.strip().split(' ')[0]):
                    # only replace this bit
                    match_bit = False

                    new_bit_line = f"    bitstream_tar: {link}"
                    print(f"Replacing {line.strip()} with {new_bit_line}")

                    # print out the bit line
                    sample_hwdb_file.write(new_bit_line + '\n')
                else:
                    raise Exception("::ERROR:: Something went wrong")
            else:
                # if no match print other lines
                sample_hwdb_file.write(line + '\n')

    if match_bit == True:
        raise Exception(f"::ERROR:: Unable to replace URL for {hwdb_entry_name} in {hwdb_file}")

    # strip newlines from end of file
    with open(hwdb_file, "r+") as sample_hwdb_file:
        content = sample_hwdb_file.read()
        content = content.rstrip('\n')
        sample_hwdb_file.seek(0)

        sample_hwdb_file.write(content)
        sample_hwdb_file.truncate()

def run_local_buildbitstreams():
    """Runs local buildbitstreams"""

    global workspace_firesim_hwdb

    with prefix(f"cd {remote_fsim_dir}"):
        with prefix('source sourceme-manager.sh --skip-ssh-setup'):

            def build_upload(build_yaml: str, hwdb_entries: List[str], platforms: List[str]) -> List[str]:
                global URL_PREFIX
                global setup_clone_firesim_build_recipes

                print(f"Printing {build_yaml}...")
                run(f"cat {build_yaml}")

                rc = 0
                with settings(warn_only=True):
                    # pty=False needed to avoid issues with screen -ls stalling in fabric
                    build_result = run(f"timeout 10h firesim buildbitstream -b {build_yaml} -r {setup_clone_firesim_build_recipes} --forceterminate", pty=False)
                    rc = build_result.return_code

                if rc != 0:
                    print(f"Buildbitstream failed.")
                    print_last_firesim_log(200)
                    raise Exception(f"Failed with code: {rc}")

                hwdb_entry_dir = f"{remote_fsim_dir}/deploy/built-hwdb-entries"
                links = []

                for hwdb_entry_name, platform in zip(hwdb_entries, platforms):
                    hwdb_entry = f"{hwdb_entry_dir}/{hwdb_entry_name}"

                    print(f"Printing {hwdb_entry}...")
                    run(f"cat {hwdb_entry}")

                    with open(hwdb_entry, 'r') as hwdbef:
                        lines = hwdbef.readlines()
                        for line in lines:
                            if "bitstream_tar:" in line:
                                file_path = Path(line.strip().split(' ')[1].replace('file://', '')) # 2nd element (i.e. the path) (no URI)
                                file_name = f"{platform}/{hwdb_entry_name}.tar.gz"
                                run(f"shasum -a 256 {file_path}")
                                sha = move_and_commit_gh_file(file_path, file_name, f"{ci_env['GITHUB_WORKSPACE']}/{ci_env['GH_REPO']}", f"Committing files from {ci_env['GITHUB_REPOSITORY']}:{ci_env['GITHUB_SHA']}")
                                link = f"{URL_PREFIX}/{sha}/{file_name}"
                                print(f"Uploaded bitstream_tar for {hwdb_entry_name} to {link}")
                                links.append(link)
                                break

                return links


            def do_builds(batch_hwdbs, hwdb_file_to_replace):
                assert len(build_hosts) >= len(batch_hwdbs), f"Need at least {len(batch_hwdbs)} build_hosts to run builds"

                # map hwdb tuple to build_hosts
                hwdb_2_host = {}
                for hwdb, platform, buildtool_version in batch_hwdbs:
                    for host_name, host_buildtool_version, host_use_unique, host_unique_build_dir, host_prio in build_hosts:
                        if host_buildtool_version == buildtool_version:
                            if not host_name in [h[0] for h in hwdb_2_host.values()]:
                                hwdb_2_host[hwdb] = (host_name, host_use_unique, host_unique_build_dir)
                                break

                assert len(hwdb_2_host) == len(batch_hwdbs), "Unable to map build_hosts to hwdb build"

                hwdbs_ordered = [hwdb[0] for hwdb in batch_hwdbs]
                platforms_ordered = [hwdb[1] for hwdb in batch_hwdbs]
                hosts_ordered = hwdb_2_host.values()

                print("Mappings")
                print(f"HWDBS: {hwdbs_ordered}")
                print(f"Platforms: {platforms_ordered}")
                print(f"build_hosts: {hosts_ordered}")

                og_build_yaml = f"{remote_fsim_dir}/deploy/config_build.yaml"
                intermediate_build_yaml = f"{remote_fsim_dir}/deploy/config_build_{positive_hash(tuple(hwdbs_ordered))}.yaml"
                final_build_yaml = f"{remote_fsim_dir}/deploy/config_build_{positive_hash(tuple(hosts_ordered))}.yaml"

                modify_config_build(og_build_yaml, intermediate_build_yaml, hwdbs_ordered)
                add_host_list(intermediate_build_yaml, final_build_yaml, hosts_ordered)
                links = build_upload(final_build_yaml, hwdbs_ordered, platforms_ordered)
                for hwdb, link in zip(hwdbs_ordered, links):
                    replace_in_hwdb(hwdb_file_to_replace, hwdb, link)

                print(f"Printing {hwdb_file_to_replace}...")
                run(f"cat {hwdb_file_to_replace}")

                # wipe old data
                print("Cleaning old build directories")
                for host_name, host_use_unique, host_unique_build_dir in hosts_ordered:
                    if host_use_unique:
                        run(f"ssh {host_name} rm -rf {host_unique_build_dir}")
                    else:
                        run(f"ssh {host_name} rm -rf {shared_build_dir}")

            # note: next two statements can be duplicated to run different builds in phases
            # i.e. run 4 agfis in 1st phase, then 6 in next

            # order of following list roughly corresponds to build host to use.
            # i.e. if 1st hwdb in list wants a host with V0 of tools, it will get the 1st host with V0 of tools
            # in the build_hosts list

            # hwdb_entry_name, platform_name, buildtool:version
            batch_hwdbs_in = [
                # hwdb's to verify FPGA builds

                # TODO: disabled due to not having xrt installed on localhost
                #("vitis_firesim_rocket_singlecore_no_nic", "vitis", "vitis:2022.1"),

                ("nitefury_firesim_rocket_singlecore_no_nic", "rhsresearch_nitefury_ii", "vitis:2022.1"),
                ("alveo_u200_firesim_rocket_singlecore_no_nic", "xilinx_alveo_u200", "vitis:2021.1"),
                ("alveo_u250_firesim_rocket_singlecore_no_nic", "xilinx_alveo_u250", "vitis:2021.1"),
                ("alveo_u280_firesim_rocket_singlecore_no_nic", "xilinx_alveo_u280", "vitis:2021.1"),

                # TODO: disabled due to not having a license
                #("xilinx_vcu118_firesim_rocket_singlecore_4GB_no_nic", "xilinx_vcu118", "vivado:2023.1"),

                # extra hwdb's to run CI with
                ("alveo_u250_firesim_rocket_quadcore_no_nic", "xilinx_alveo_u250", "vivado:2022.1"),
                ("alveo_u250_firesim_boom_singlecore_no_nic", "xilinx_alveo_u250", "vivado:2022.1"),
                ("alveo_u250_firesim_rocket_singlecore_nic", "xilinx_alveo_u250", "vivado:2022.1"),

                # extra hwdb's
                ("alveo_u250_firesim_gemmini_rocket_singlecore_no_nic", "xilinx_alveo_u250", "vitis:2021.1"),
            ]

            # replace hwdb entries in workspace area
            do_builds(batch_hwdbs_in, workspace_firesim_hwdb)

if __name__ == "__main__":
    execute(run_local_buildbitstreams, hosts=["localhost"])
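To make the YAML edits above concrete, here is a small self-contained sketch (not from the commit) that applies the same indentation arithmetic as add_host_list() to a toy config_build.yaml fragment. The sample layout is an assumption about FireSim's usual externally-provisioned build-farm section and is illustrative only.

# Illustrative sketch only: mirrors add_host_list()'s indentation logic on a toy
# input so the shape of the generated build_farm_hosts section is visible.
sample_yaml = (
    "build_farm:\n"
    "  recipe_arg_overrides:\n"
    "    build_farm_hosts:\n"
)
hostlist = [
    ("buildbot1@as4", True, "/scratch/buildbot1/FIRESIM_BUILD_DIR"),
    ("jktgz", False, ""),
]

out_lines = []
for line in sample_yaml.rstrip("\n").split("\n"):
    out_lines.append(line)
    if "build_farm_hosts:" in line and "#" not in line:
        start_space_idx = line.index('b')
        for host, use_unique, unique_build_dir in hostlist:
            if use_unique:
                out_lines.append((' ' * (start_space_idx + 4)) + f"- {host}:")
                out_lines.append((' ' * (start_space_idx + 8)) + f"override_build_dir: {unique_build_dir}")
            else:
                out_lines.append((' ' * (start_space_idx + 4)) + f"- {host}")

print("\n".join(out_lines))
# Host entries land four spaces past "build_farm_hosts:":
#     build_farm_hosts:
#         - buildbot1@as4:
#             override_build_dir: /scratch/buildbot1/FIRESIM_BUILD_DIR
#         - jktgz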

.github/scripts/build-extra-tests.sh

Lines changed: 3 additions & 2 deletions
@@ -7,5 +7,6 @@ set -ex
 SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
 source $SCRIPT_DIR/defaults.sh

-make -C $LOCAL_CHIPYARD_DIR/tests clean
-make -C $LOCAL_CHIPYARD_DIR/tests
+cmake $LOCAL_CHIPYARD_DIR/tests/ -S $LOCAL_CHIPYARD_DIR/tests/ -B $LOCAL_CHIPYARD_DIR/tests/build/ -D CMAKE_BUILD_TYPE=Debug
+cmake --build $LOCAL_CHIPYARD_DIR/tests/build/ --target clean
+cmake --build $LOCAL_CHIPYARD_DIR/tests/build/ --target all

.github/scripts/check-commit.sh

Lines changed: 2 additions & 2 deletions
@@ -46,7 +46,7 @@ search () {
 }


-submodules=("cva6" "boom" "ibex" "gemmini" "icenet" "nvdla" "rocket-chip" "rocket-chip-blocks" "rocket-chip-inclusive-cache" "testchipip" "riscv-sodor" "mempress" "bar-fetchers" "shuttle" "constellation" "fft-generator" "hardfloat" "caliptra-aes-acc" "rocc-acc-utils" "diplomacy" "rerocc" "compress-acc")
+submodules=("cva6" "boom" "ibex" "gemmini" "icenet" "nvdla" "rocket-chip" "rocket-chip-blocks" "rocket-chip-inclusive-cache" "testchipip" "riscv-sodor" "mempress" "bar-fetchers" "shuttle" "constellation" "fft-generator" "hardfloat" "caliptra-aes-acc" "rocc-acc-utils" "diplomacy" "rerocc" "compress-acc" "saturn" "ara" "vexiiriscv")
 dir="generators"
 branches=("master" "main" "dev")
 search
@@ -83,7 +83,7 @@ dir="software"
 branches=("master" "dev")
 search

-submodules=("DRAMSim2" "axe" "dsptools" "dsptools-chisel3" "rocket-dsp-utils" "torture" "fixedpoint" "fixedpoint-chisel3" "cde" "midas-targetutils")
+submodules=("DRAMSim2" "axe" "dsptools" "rocket-dsp-utils" "torture" "fixedpoint" "cde" "firrtl2")
 dir="tools"
 branches=("master" "dev" "main")
 search

.github/scripts/ci_variables.py

Lines changed: 61 additions & 0 deletions
@@ -0,0 +1,61 @@
import os

# This package contains utilities that rely on environment variable
# definitions present only on the CI container instance.

GITHUB_ACTIONS_ENV_VAR_NAME = 'GITHUB_ACTIONS'
RUN_LOCAL = os.environ.get(GITHUB_ACTIONS_ENV_VAR_NAME, 'false') == 'false'

# When running locally (not in a CI pipeline) run commands out of the clone hosting this file.
# Should be equivalent to GITHUB_WORKSPACE if running locally
local_cy_dir = os.path.normpath((os.path.realpath(__file__)) + "/../../..")

def get_ci_value(env_var: str, default_value: str = "") -> str:
    if RUN_LOCAL:
        return default_value
    else:
        return os.environ.get(env_var, default_value)

# Create an env. dict that is populated from the environment or from defaults.
ci_env = {
    # If not running under a CI pipeline, defaults are provided that
    # will suffice to run scripts that do not use GHA API calls.
    # To manually provide environment variable settings, export GITHUB_ACTIONS=true, and provide
    # values for all of the environment variables listed.
    GITHUB_ACTIONS_ENV_VAR_NAME: 'false' if RUN_LOCAL else 'true', # type: ignore
    # This is used as a unique tag for all instances launched in a workflow
    'GITHUB_RUN_ID': get_ci_value('GITHUB_RUN_ID'),
    # Self explanatory
    'GITHUB_SHA': get_ci_value('GITHUB_SHA'),
    # Multiple clones of the Chipyard repository exist on a CI machine. We expect state
    # to persist between jobs in a workflow and facilitate that by having jobs run
    # out of a centralized clone (REMOTE_WORK_DIR) -- not the default clones set up by
    # the GHA runners (GITHUB_WORKSPACE).
    # This is the location of the clone set up by the GHA runner infrastructure by default.
    # expanduser to replace the ~ present in the default, for portability
    'GITHUB_WORKSPACE': os.path.expanduser(os.environ['GITHUB_WORKSPACE']) if not RUN_LOCAL else local_cy_dir,
    # Self explanatory
    'GITHUB_API_URL': get_ci_value('GITHUB_API_URL'),
    # We look this up, instead of hardcoding "ucb-bar/chipyard", to support running
    # this CI pipeline under forks.
    'GITHUB_REPOSITORY': get_ci_value('GITHUB_REPOSITORY'),
    # Path to webhook payload on the runner machine
    'GITHUB_EVENT_PATH': get_ci_value('GITHUB_EVENT_PATH'),
    # Chipyard repo used on local CI machine to run tests from (cached across all workflow CI jobs).
    # CI scripts should refer to variables
    # derived from this path so that they may be reused across workflows that may
    # initialize the Chipyard repository differently (e.g., as a submodule of a
    # larger project).
    'REMOTE_WORK_DIR': get_ci_value('REMOTE_WORK_DIR', local_cy_dir if RUN_LOCAL else ""),
    # GitHub token with more permissions to access repositories across the FireSim org.
    'PERSONAL_ACCESS_TOKEN': get_ci_value('PERSONAL_ACCESS_TOKEN'),
    # Path to Chipyard's HWDB file (if it exists)
    'CHIPYARD_HWDB_PATH': get_ci_value('CHIPYARD_HWDB_PATH'),
    # Org/repo name of repository to store build bitstreams
    'GH_ORG': get_ci_value('GH_ORG'),
    'GH_REPO': get_ci_value('GH_REPO'),
}

# for most uses these should be used (over using GITHUB_WORKSPACE)
remote_cy_dir = ci_env['REMOTE_WORK_DIR']
remote_fsim_dir = ci_env['REMOTE_WORK_DIR'] + "/sims/firesim"
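As a usage illustration (not part of the commit), the local-buildbitstream script earlier in this commit imports these same names. A minimal consumer placed alongside ci_variables.py could look like the sketch below; summarize_ci_env is a hypothetical name introduced only for this example.

# Hypothetical usage sketch for a script living next to ci_variables.py.
# When GITHUB_ACTIONS is unset, the values fall back to the clone containing
# ci_variables.py; under GHA they come from the exported environment variables.
from ci_variables import ci_env, remote_cy_dir, remote_fsim_dir

def summarize_ci_env() -> None:
    print(f"workspace clone:  {ci_env['GITHUB_WORKSPACE']}")
    print(f"shared work dir:  {remote_cy_dir}")
    print(f"firesim checkout: {remote_fsim_dir}")

if __name__ == "__main__":
    summarize_ci_env()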
