MLPerf Inference Intel implementations #41
name: MLPerf Inference Intel implementations

on:
  schedule:
    - cron: "29 1 * * 4"  # Thursdays at 01:29 UTC; to be adjusted

jobs:
  run_intel:
    # Only run for the gateoverflow repository owner, on the self-hosted GO-spr runner
    if: github.repository_owner == 'gateoverflow'
    runs-on: [ self-hosted, linux, x64, GO-spr ]
    strategy:
      fail-fast: false
      matrix:
        python-version: [ "3.12" ]
        model: [ "resnet50", "bert-99" ]
    steps:
      - name: Test MLPerf Inference Intel ${{ matrix.model }}
        run: |
          # Recreate a clean Python virtual environment and install the MLC automation tooling
          if [ -f "gh_action_conda/bin/deactivate" ]; then source gh_action_conda/bin/deactivate; fi
          python3 -m venv gh_action_conda
          source gh_action_conda/bin/activate
          export MLC_REPOS=$HOME/GH_MLC
          pip install --upgrade mlc-scripts
          pip install tabulate
          # Run the Intel (PyTorch, CPU) implementation in Docker and generate a submission tree
          mlcr run-mlperf,inference,_all-scenarios,_submission,_full,_r4.1-dev --preprocess_submission=yes --execution_mode=valid --pull_changes=yes --pull_inference_changes=yes --model=${{ matrix.model }} --submitter="MLCommons" --hw_name=IntelSPR.24c --implementation=intel --backend=pytorch --category=datacenter --division=open --scenario=Offline --docker_dt=yes --docker_it=no --docker_mlc_repo=mlcommons@mlperf-automations --docker_mlc_repo_branch=dev --adr.compiler.tags=gcc --device=cpu --use_dataset_from_host=yes --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --clean --docker --quiet
          # Push the generated submission to the unofficial v5.0 submissions repository
          mlcr push,github,mlperf,inference,submission --repo_url=https://github.com/mlcommons/mlperf_inference_unofficial_submissions_v5.0 --repo_branch=auto-update --commit_message="Results from GH action on SPR.24c" --quiet --submission_dir=$HOME/gh_action_submissions --hw_name=IntelSPR.24c
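For debugging on the self-hosted runner outside of GitHub Actions, the run step above can be replayed by hand. The sketch below is illustrative only: it copies the commands from the workflow verbatim and substitutes a concrete model for ${{ matrix.model }}; the script name and the hard-coded model choice are assumptions, not part of the workflow, and the push step is deliberately left out.

#!/bin/bash
# local_intel_run.sh -- illustrative replay of the workflow's run step (script name is hypothetical)
set -e

MODEL=resnet50  # stands in for ${{ matrix.model }}; bert-99 is the other matrix entry

# Environment setup, copied from the workflow
if [ -f "gh_action_conda/bin/deactivate" ]; then source gh_action_conda/bin/deactivate; fi
python3 -m venv gh_action_conda
source gh_action_conda/bin/activate
export MLC_REPOS=$HOME/GH_MLC
pip install --upgrade mlc-scripts
pip install tabulate

# Benchmark run and submission generation, flags copied verbatim from the workflow
mlcr run-mlperf,inference,_all-scenarios,_submission,_full,_r4.1-dev \
  --preprocess_submission=yes --execution_mode=valid --pull_changes=yes --pull_inference_changes=yes \
  --model=$MODEL --submitter="MLCommons" --hw_name=IntelSPR.24c --implementation=intel \
  --backend=pytorch --category=datacenter --division=open --scenario=Offline \
  --docker_dt=yes --docker_it=no --docker_mlc_repo=mlcommons@mlperf-automations \
  --docker_mlc_repo_branch=dev --adr.compiler.tags=gcc --device=cpu --use_dataset_from_host=yes \
  --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions \
  --clean --docker --quiet

# The push step (mlcr push,github,mlperf,inference,submission ...) is omitted here so that
# ad-hoc local runs do not upload results to the unofficial submissions repository.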