Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
179 changes: 178 additions & 1 deletion .github/workflows/docs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -162,9 +162,177 @@ jobs:
ci_stats.html
*results.csv

power-stats:
needs: [gen-config]
strategy:
matrix:
board: ${{ fromJson(needs.gen-config.outputs.ci-boards) }}
runs-on: ubuntu-22.04
permissions:
actions: read
steps:
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'

- name: Install dependencies
run: |
npm install adm-zip csv-parse csv-stringify
pip install plotly pandas

- name: Checkout
uses: actions/checkout@v4
with:
path: tt-zephyr-platforms

- name: Download Past Power Recordings
uses: actions/github-script@v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const AdmZip = require('adm-zip');
const fs = require('fs');
const { parse } = require('csv-parse/sync');
const { stringify } = require('csv-stringify/sync');

// Aggregated power statistics, keyed by workflow run id.
const powerResults = {};

// Use the paginator: a bare listArtifactsForRepo call returns only the
// first page (30 items by default), silently dropping older recordings.
const artifacts = await github.paginate(github.rest.actions.listArtifactsForRepo, {
  owner: context.repo.owner,
  repo: context.repo.repo,
  name: 'Power recording (${{ matrix.board }})',
  per_page: 100,
});
console.log(`Total artifacts found: ${artifacts.length}`);

for (const artifact of artifacts) {
  // Filter BEFORE downloading: skipping here avoids one download API
  // call per artifact we would discard anyway.
  const workflowRunId = artifact.workflow_run?.id;
  if (!workflowRunId) {
    console.log(`Skipping artifact ${artifact.id}, no workflow_run`);
    continue;
  }
  if (artifact.workflow_run.head_branch !== 'main') {
    console.log(`Skipping artifact ${artifact.id}, branch ${artifact.workflow_run.head_branch}`);
    continue;
  }

  const download = await github.rest.actions.downloadArtifact({
    owner: context.repo.owner,
    repo: context.repo.repo,
    artifact_id: artifact.id,
    archive_format: 'zip'
  });

  const fname = `${artifact.name}-${artifact.id}-${artifact.workflow_run?.head_sha || 'unknown'}.zip`;
  fs.writeFileSync(fname, Buffer.from(download.data));

  try {
    const zip = new AdmZip(fname);

    for (const zipEntry of zip.getEntries()) {
      if (zipEntry.name !== 'power_recording.csv') {
        continue;
      }
      console.log(`Processing ${zipEntry.entryName} from artifact ${artifact.name} (${artifact.id})`);
      const content = zipEntry.getData().toString('utf8');

      // Parse CSV content into one object per row, keyed by header.
      const records = parse(content, {
        columns: true,
        trim: true
      });

      // Keep only rows whose power_watts parses to a number — the
      // recorder emits 'N/A' when a sensor read fails, and
      // parseFloat('N/A') / parseFloat('') are NaN and filtered out.
      const powerValues = records
        .map((record) => Number.parseFloat(record.power_watts))
        .filter((val) => !Number.isNaN(val));

      if (powerValues.length === 0) {
        console.log(`No valid power values found in ${zipEntry.entryName}`);
        continue;
      }

      // Per-recording statistics.
      const avgPower = powerValues.reduce((a, b) => a + b, 0) / powerValues.length;
      const maxPower = Math.max(...powerValues);
      const minPower = Math.min(...powerValues);

      const existing = powerResults[workflowRunId];
      if (!existing) {
        powerResults[workflowRunId] = {
          commit: artifact.workflow_run.head_sha,
          branch: artifact.workflow_run.head_branch,
          timestamp: artifact.created_at,
          workflow_run_url: `https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${workflowRunId}`,
          avg_power: avgPower,
          max_power: maxPower,
          min_power: minPower,
          sample_count: powerValues.length
        };
      } else {
        // Multiple recordings for the same run: merge them, weighting
        // the average by each recording's sample count.
        const totalSamples = existing.sample_count + powerValues.length;
        existing.avg_power = (existing.avg_power * existing.sample_count + avgPower * powerValues.length) / totalSamples;
        existing.max_power = Math.max(existing.max_power, maxPower);
        existing.min_power = Math.min(existing.min_power, minPower);
        existing.sample_count = totalSamples;
      }
    }
  } finally {
    // Always remove the temporary zip, even if extraction/parsing threw.
    fs.unlinkSync(fname);
  }
}

// Build the output CSV: a header row followed by one row per workflow run.
const csvData = [
  ['Average Power (W)', 'Max Power (W)', 'Min Power (W)', 'Sample Count', 'Commit', 'Branch', 'Workflow Run URL', 'Timestamp']
];

// Flatten the results into rows, sorted chronologically.
Object.values(powerResults)
  .sort((a, b) => new Date(a.timestamp) - new Date(b.timestamp))
  .forEach((result) => {
    csvData.push([
      result.avg_power.toFixed(2),
      result.max_power.toFixed(2),
      result.min_power.toFixed(2),
      result.sample_count,
      result.commit,
      result.branch,
      result.workflow_run_url,
      result.timestamp
    ]);
  });

// Write CSV file (the space in the name is intentional: it matches the
// 'power results.csv' path listed in the upload-artifact step).
const csvContent = stringify(csvData);
fs.writeFileSync('power results.csv', csvContent);
console.log(`Created power results.csv with ${csvData.length - 1} workflow runs`);

- name: Generate Power Graph
run: |
python3 tt-zephyr-platforms/scripts/ci/render_power_graph.py . power_stats.html ${{ matrix.board }}

- name: Upload Power Graph
uses: actions/upload-artifact@v4
with:
name: power-graph-summaries ${{ matrix.board }}
path: |
power_stats.html
power results.csv

build:
runs-on: ubuntu-22.04
needs: [ci-stats, gen-config]
needs: [ci-stats, power-stats, gen-config]
steps:
- name: Checkout
uses: actions/checkout@v4
Expand Down Expand Up @@ -224,6 +392,12 @@ jobs:
path: ci-results
pattern: test-result-summaries*

- name: Download Power Results
uses: actions/download-artifact@v4
with:
path: power-results
pattern: power-graph-summaries*

- name: Copy CI Results
run: |
CI_BOARDS="$(jq -r -c ".[]" <<< '${{ needs.gen-config.outputs.ci-boards }}')"
Expand All @@ -234,6 +408,9 @@ jobs:
for board in ${CI_BOARD_REVS[@]}; do
cp "ci-results/test-result-summaries ${board}/ci_stats.html" \
tt-zephyr-platforms/doc/deploy/${board}_ci_stats.html

cp "power-results/power-graph-summaries ${board}/power_stats.html" \
tt-zephyr-platforms/doc/deploy/${board}_power_stats.html
done

- name: Setup pages
Expand Down
62 changes: 61 additions & 1 deletion .github/workflows/metal.yml
Original file line number Diff line number Diff line change
Expand Up @@ -93,9 +93,69 @@ jobs:
# To verify patch was applied
git diff

- name: Run Container Test
- name: Install power monitoring dependencies
run: |
sudo apt-get update && sudo apt-get install -y lm-sensors
python -m venv .env
source .env/bin/activate
pip install pandas plotly

- name: Download power monitoring script
  run: |
    mkdir -p $HOME/scripts/ci
    # -f makes curl fail on HTTP errors; without it a 404 error page is
    # silently saved as the "script" and the failure only surfaces much
    # later when Python tries to run it. -sS is quiet but still shows
    # errors, -L follows redirects.
    curl -fsSL -o $HOME/scripts/ci/monitor_power.py https://raw.githubusercontent.com/tenstorrent/tt-zephyr-platforms/${{ github.sha }}/scripts/ci/monitor_power.py
    chmod +x $HOME/scripts/ci/monitor_power.py

- name: Re-probe driver for sensors
run: |
# Re-probe the driver to ensure sensors command works correctly
# This is needed due to driver implementation requirements
sudo modprobe -r tenstorrent 2>/dev/null || true
sudo modprobe tenstorrent 2>/dev/null || true

- name: Run Container Test with Power Monitoring
  run: |
    # Start power monitoring in the background with full path
    source .env/bin/activate
    python $HOME/scripts/ci/monitor_power.py -o $HOME/power_recording.csv &
    MONITOR_PID=$!
    echo "Started power monitoring with PID: $MONITOR_PID"
    echo "Power recording will be saved to: $HOME/power_recording.csv"

    # Run the actual test. GitHub executes `run:` scripts with `bash -e`,
    # so a plain failing command would abort the script right here and
    # the monitor would never be stopped (nor the recording uploaded
    # cleanly). `|| TEST_EXIT_CODE=$?` captures the status without
    # tripping errexit.
    TEST_EXIT_CODE=0
    sh -c "dockerfile/upstream_test_images/run_upstream_tests_vanilla.sh ${{ matrix.config.metal-target }}" || TEST_EXIT_CODE=$?

    # Stop power monitoring gracefully (SIGINT first so it can flush the
    # CSV; fall back to SIGTERM if that fails).
    if kill -0 $MONITOR_PID 2>/dev/null; then
      echo "Stopping power monitoring (PID: $MONITOR_PID)..."
      kill -SIGINT $MONITOR_PID 2>/dev/null || kill $MONITOR_PID 2>/dev/null || true
      wait $MONITOR_PID 2>/dev/null || true
      echo "Power monitoring stopped"
    else
      echo "Power monitoring already stopped"
    fi

    # Check if the power recording file was created
    if [ -f "$HOME/power_recording.csv" ]; then
      echo "Power recording file created successfully"
      ls -la $HOME/power_recording.csv
    else
      echo "Warning: Power recording file not found at $HOME/power_recording.csv"
    fi

    # Propagate the test result as the step's outcome
    exit $TEST_EXIT_CODE

- name: Upload Power Recording
  if: ${{ always() }}
  uses: actions/upload-artifact@v4
  with:
    name: Power recording (${{ matrix.config.board }})
    # The `env` context only contains variables defined in the workflow
    # itself, so `${{ env.HOME }}` expands to an empty string and the
    # path becomes `/power_recording.csv`. Tilde expansion is supported
    # by upload-artifact and resolves to the runner user's home.
    path: ~/power_recording.csv
    retention-days: 30
    if-no-files-found: warn

- name: cleanup
if: ${{ always() }}
run: |
Expand Down
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@
[P150A CI History](https://docs.tenstorrent.com/tt-zephyr-platforms/p150a_ci_stats.html)
[P300A CI History](https://docs.tenstorrent.com/tt-zephyr-platforms/p300a_ci_stats.html)

[P100A Power History](https://docs.tenstorrent.com/tt-zephyr-platforms/p100a_power_stats.html)
[P150A Power History](https://docs.tenstorrent.com/tt-zephyr-platforms/p150a_power_stats.html)
[P300A Power History](https://docs.tenstorrent.com/tt-zephyr-platforms/p300a_power_stats.html)

Welcome to TT-Zephyr-Platforms!

This is the Zephyr firmware repository for [Tenstorrent](https://tenstorrent.com) AI ULC.
Expand Down
Loading
Loading