Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: [ "3.12" ]
python-version: [ "3.13" ]
division: ["closed", "open", "closed-open"]
category: ["datacenter", "edge"]
case: ["closed", "closed-no-compliance", "closed-power", "closed-failed-power-logs", "case-1", "case-2", "case-3", "case-5", "case-6", "case-7", "case-8"]
Expand Down
12 changes: 2 additions & 10 deletions .github/workflows/test-mlperf-inference-abtf-poc.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,25 +16,17 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-24.04, ubuntu-latest, macos-latest, macos-15, windows-latest]
python-version: [ "3.8", "3.12" ]
python-version: [ "3.9", "3.14" ]
backend: [ "pytorch" ]
implementation: [ "python" ]
docker: [ "", " --docker --docker_mlc_repo=${{ github.event.pull_request.head.repo.html_url }} --docker_mlc_repo_branch=${{ github.event.pull_request.head.ref }} --docker_dt" ]
extra-args: [ "--adr.compiler.tags=gcc", "--env.MLC_MLPERF_LOADGEN_BUILD_FROM_SRC=off" ]
exclude:
- os: ubuntu-latest
python-version: "3.8"
- os: windows-latest
python-version: "3.8"
- os: windows-latest
extra-args: "--adr.compiler.tags=gcc"
- os: windows-latest
docker: " --docker --docker_mlc_repo=${{ github.event.pull_request.head.repo.html_url }} --docker_mlc_repo_branch=${{ github.event.pull_request.head.ref }} --docker_dt"
# windows docker image is not supported in MLC yet
- os: macos-latest
python-version: "3.8"
- os: macos-15
python-version: "3.8"
- os: macos-latest
docker: " --docker --docker_mlc_repo=${{ github.event.pull_request.head.repo.html_url }} --docker_mlc_repo_branch=${{ github.event.pull_request.head.ref }} --docker_dt"
- os: macos-15
Expand Down Expand Up @@ -114,4 +106,4 @@ jobs:

- name: Test MLPerf Inference ABTF POC using ${{ matrix.backend }} on ${{ matrix.os }}
run: |
mlcr run-abtf,inference,_poc-demo --test_query_count=2 --adr.cocoeval.version_max=1.5.7 --adr.cocoeval.version_max_usable=1.5.7 --quiet ${{ matrix.extra-args }} ${{ matrix.docker }} -v
mlcr run-abtf,inference,_poc-demo --test_query_count=2 --quiet ${{ matrix.extra-args }} ${{ matrix.docker }} -v
2 changes: 1 addition & 1 deletion .github/workflows/test-mlperf-inference-resnet50.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: [ "3.12" ]
python-version: [ "3.13" ]
backend: [ "onnxruntime", "tf" ]
implementation: [ "python", "cpp" ]
exclude:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/test-mlperf-inference-retinanet.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: [ "3.12" ]
python-version: [ "3.13" ]
backend: [ "onnxruntime", "pytorch" ]
implementation: [ "python", "cpp" ]
compiler-string: [ "", "--adr.compiler.tags=aocc --env.MLC_AOCC_ACCEPT_EULA=yes" ]
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/test-mlperf-inference-rgat.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: [ "3.12" ]
python-version: [ "3.14" ]
backend: [ "pytorch" ]
implementation: [ "python" ]

Expand Down
90 changes: 48 additions & 42 deletions automation/script/module.py
Original file line number Diff line number Diff line change
Expand Up @@ -3621,13 +3621,17 @@ def _run_deps(self, deps, clean_env_keys_deps, env, state, const, const_state, a
if d.get(key):
d[key] = {}

# print(f"ii = {ii}, d = {d}")
utils.merge_dicts(
{'dict1': ii, 'dict2': d, 'append_lists': True, 'append_unique': True})

r = self.action_object.access(ii)
if r['return'] > 0:
return r
if is_true(d.get('continue_on_error')):
# Warning printed by mlcflow
# logger.warning(f"Dependency with tags: {d['tags']} failed. Ignoring the failure as 'continue_on_error' is set for the dependency call")
pass
else:
return r

run_state['version_info'] = run_state_copy.get(
'version_info')
Expand Down Expand Up @@ -5466,53 +5470,55 @@ def prepare_and_run_script_with_postprocessing(i, postprocess="postprocess"):

rc = os.system(cmd)

if rc > 0 and not i.get('ignore_script_error', False):
# Check if print files when error
print_files = meta.get('print_files_if_script_error', [])
if len(print_files) > 0:
for pr in print_files:
if os.path.isfile(pr):
r = utils.load_txt(file_name=pr)
if r['return'] == 0:
logger.info(
"========================================================")
logger.info("Print file {}:".format(pr))
logger.info("")
logger.info(r['string'])
logger.info("")

# Check where to report errors and failures
repo_to_report = run_state.get(
'script_entry_repo_to_report_errors', '')

if repo_to_report == '':
script_repo_alias = run_state.get('script_repo_alias', '')
script_repo_git = run_state.get('script_repo_git', False)

if script_repo_git and script_repo_alias != '':
repo_to_report = 'https://github.com/' + \
script_repo_alias.replace('@', '/') + '/issues'

if repo_to_report == '':
repo_to_report = 'https://github.com/mlcommons/mlperf-automations/issues'

note = '''
if rc > 0:
if not is_true(i.get('ignore_script_error', False)):
# Check if print files when error
print_files = meta.get('print_files_if_script_error', [])
if len(print_files) > 0:
for pr in print_files:
if os.path.isfile(pr):
r = utils.load_txt(file_name=pr)
if r['return'] == 0:
logger.info(
"========================================================")
logger.info("Print file {}:".format(pr))
logger.info("")
logger.info(r['string'])
logger.info("")

# Check where to report errors and failures
repo_to_report = run_state.get(
'script_entry_repo_to_report_errors', '')

if repo_to_report == '':
script_repo_alias = run_state.get('script_repo_alias', '')
script_repo_git = run_state.get('script_repo_git', False)

if script_repo_git and script_repo_alias != '':
repo_to_report = 'https://github.com/' + \
script_repo_alias.replace('@', '/') + '/issues'

if repo_to_report == '':
repo_to_report = 'https://github.com/mlcommons/mlperf-automations/issues'

note = '''
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Please file an issue at {} along with the full MLC command being run and the relevant
or full console log.
'''.format(repo_to_report)

rr = {
'return': 2,
'error': 'MLC script failed (name = {}, return code = {})\n\n{}'.format(
meta['alias'],
rc,
note)}
rr = {
'return': 2,
'error': f"""Native run script failed inside MLC script (name = {meta['alias']}, return code = {rc})\n\n{note}"""
}

if repro_prefix != '':
dump_repro(repro_prefix, rr, run_state)
if repro_prefix != '':
dump_repro(repro_prefix, rr, run_state)

return rr
return rr
else:
logger.warn(
f"""Native run script failed inside MLC script (name = {meta['alias']}, return code = {rc}. Ignoring as ignore_script_error is set.)\n""")

# Load updated state if exists
if tmp_file_run_state != '' and os.path.isfile(tmp_file_run_state):
Expand Down
130 changes: 130 additions & 0 deletions script/app-mlperf-automotive-mlcommons-python/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
# README for app-mlperf-automotive-mlcommons-python
This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution.

`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do
```
mkdir /mnt/$USER/MLC
ln -s /mnt/$USER/MLC $HOME/MLC
```
You can also use the `ENV` variable `MLC_REPOS` to control this location, but it will need to be set again after every system reboot.

## Setup

If you are not in a Python development environment, please refer to the [official docs](https://docs.mlcommons.org/mlcflow/install/) for the installation.

```bash
python3 -m venv mlcflow
. mlcflow/bin/activate
pip install mlcflow
```

- Using a virtual environment is recommended (per `pip` best practices), but you may skip it or use `--break-system-packages` if needed.

### Pull mlperf-automations

Once `mlcflow` is installed:

```bash
mlc pull repo mlcommons@mlperf-automations --pat=<Your Private Access Token>
```
- `--pat` or `--ssh` is only needed if the repo is PRIVATE
- If `--pat` is omitted, you will be prompted for a password, where you can enter your Private Access Token
- `--ssh` option can be used instead of `--pat=<>` option if you prefer to use SSH for accessing the github repository.
## Run Commands

```bash
mlcr automotive,mlcommons,reference,run-mlperf-inference,object-detection,abtf-model,demo
```

### Script Inputs

| Name | Description | Choices | Default |
|------|-------------|---------|------|
| `--device` | | | `` |
| `--count` | | | `` |
| `--docker` | | | `` |
| `--hw_name` | | | `` |
| `--imagenet_path` | | | `` |
| `--max_batchsize` | | | `` |
| `--mode` | | | `accuracy` |
| `--num_threads` | | | `` |
| `--threads` | Alias for num_threads | | `` |
| `--dataset` | | | `` |
| `--model` | | | `` |
| `--output_dir` | | | `` |
| `--power` | | | `` |
| `--power_server` | | | `` |
| `--ntp_server` | | | `` |
| `--max_amps` | | | `` |
| `--max_volts` | | | `` |
| `--regenerate_files` | | | `` |
| `--rerun` | | | `` |
| `--scenario` | | | `Offline` |
| `--test_query_count` | | | `10` |
| `--clean` | | | `` |
| `--dataset_args` | | | `` |
| `--target_qps` | | | `` |
| `--target_latency` | | | `` |
| `--offline_target_qps` | | | `` |
| `--server_target_qps` | | | `` |
| `--constantstream_target_qps` | | | `` |
| `--singlestream_target_latency` | | | `` |
| `--multistream_target_latency` | | | `` |
| `--output` | | | `` |
### Generic Script Inputs

| Name | Description | Choices | Default |
|------|-------------|---------|------|
| `--input` | Input to the script passed using the env key `MLC_INPUT` | | `` |
| `--output` | Output from the script passed using the env key `MLC_OUTPUT` | | `` |
| `--outdirname` | The directory to store the script output | | `cache directory ($HOME/MLC/repos/local/cache/<>) if the script is cacheable or else the current directory` |
| `--outbasename` | The output file/folder name | | `` |
| `--name` | | | `` |
| `--extra_cache_tags` | Extra cache tags to be added to the cached entry when the script results are saved | | `` |
| `--skip_compile` | Skip compilation | | `False` |
| `--skip_run` | Skip run | | `False` |
| `--accept_license` | Accept the required license requirement to run the script | | `False` |
| `--skip_system_deps` | Skip installing any system dependencies | | `False` |
| `--git_ssh` | Use SSH for git repos | | `False` |
| `--gh_token` | Github Token | | `` |
| `--hf_token` | Huggingface Token | | `` |
| `--verify_ssl` | Verify SSL | | `False` |
## Variations

### Batch-size

- `batch_size.#` _(# can be substituted dynamically)_

### Device

- `cpu` (default)
- `cuda`

### Framework

- `onnxruntime`
- `pytorch` (default)

### Implementation

- `python` (default)

### Loadgen-scenario

- `constantstream`
- `singlestream`

### Models

- `abtf-demo-model`
- `abtf-poc-model` (default)
- `bevformer`
- `deeplabv3plus`
- `ssd`

### Ungrouped

- `multistream`
- `mvp_demo`
- `offline`
- `server`
4 changes: 0 additions & 4 deletions script/app-mlperf-automotive-mlcommons-python/meta.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -414,8 +414,6 @@ variations:
- tags: get,generic-python-lib,_pycocotools
- tags: get,generic-python-lib,_package.torchmetrics
- tags: get,generic-python-lib,_package.faster-coco-eval
version_max: "1.5.7"
version_max_usable: "1.5.7"
names:
- cocoeval
- tags: get,dataset,raw,mlcommons-cognata
Expand All @@ -441,8 +439,6 @@ variations:
- tags: get,generic-python-lib,_pycocotools
- tags: get,generic-python-lib,_package.torchmetrics
- tags: get,generic-python-lib,_package.faster-coco-eval
version_max: "1.5.7"
version_max_usable: "1.5.7"
names:
- cocoeval
- tags: get,dataset,raw,mlcommons-cognata,_abtf-poc
Expand Down
2 changes: 1 addition & 1 deletion script/get-aocc/customize.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def preprocess(i):
def detect_version(i):
logger = i['automation'].logger

r = i['automation'].parse_version({'match_text': r'CLANG:\s(?:AOCC_)?([\d.]+(?:-[\w#]+)*(?:-Build#\d+)?|Unknown-Revision)(?=[ )])',
r = i['automation'].parse_version({'match_text': r'CLANG:\s(?:AOCC_)?([\d.]+(?:pre)?(?:-[\w#]+)*(?:-Build#\d+)?|Unknown-Revision)(?=[ )])',
'group_number': 1,
'env_key': 'MLC_AOCC_VERSION',
'which_env': i['env']})
Expand Down
Loading