Skip to content
Merged
Show file tree
Hide file tree
Changes from 20 commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
20eb9b4
initial file
jeremy-jung1 Jul 2, 2025
7b349b6
Uncomment global.performance
jeremy-jung1 Jul 2, 2025
07b1206
Merge branch 'W-18759774-opentelemetry-util' into W-18760780-performa…
jeremy-jung1 Jul 2, 2025
3326746
Some comments and namespacing
jeremy-jung1 Jul 7, 2025
051ea73
linting
jeremy-jung1 Jul 7, 2025
f801b5b
Some dev only flags and mods
jeremy-jung1 Jul 8, 2025
ce666ec
more info
jeremy-jung1 Jul 8, 2025
ef27e9c
More dev
jeremy-jung1 Jul 8, 2025
02cbb52
Update performance.test.js
jeremy-jung1 Jul 8, 2025
bb36629
remove some console logs
jeremy-jung1 Jul 8, 2025
1e905cf
Updates
jeremy-jung1 Jul 8, 2025
0dd809d
Update performance.test.js
jeremy-jung1 Jul 8, 2025
1e586d9
Dev mod
jeremy-jung1 Jul 8, 2025
48ff1b9
Add tests and more functionality
jeremy-jung1 Jul 9, 2025
299039c
Merge branch 'W-18759774-opentelemetry-util' into W-18760780-performa…
jeremy-jung1 Jul 9, 2025
dfb8812
Fix tests and add cleanup
jeremy-jung1 Jul 10, 2025
20b8c96
linting
jeremy-jung1 Jul 10, 2025
1d9a905
fix tests
jeremy-jung1 Jul 10, 2025
fc1f8f5
Merge branch 'W-18759774-opentelemetry-util' into W-18760780-performa…
jeremy-jung1 Jul 10, 2025
7171b8f
remove dev only changes and add more tests
jeremy-jung1 Jul 10, 2025
943a40c
Remove handling of detail being an object
jeremy-jung1 Jul 10, 2025
4072966
Some temporary? changes to get it to work
jeremy-jung1 Jul 10, 2025
6c5430d
Update config
jeremy-jung1 Jul 10, 2025
24b37e0
bundlesize increase
jeremy-jung1 Jul 11, 2025
6c301c5
Update CHANGELOG.md
jeremy-jung1 Jul 11, 2025
5a0f754
Update react-rendering.js
jeremy-jung1 Jul 11, 2025
e8dc896
Apply feedback
jeremy-jung1 Jul 14, 2025
894549b
Apply feedback
jeremy-jung1 Jul 16, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions packages/pwa-kit-react-sdk/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
- Fix the performance logging so that it'll capture all SSR queries, even those that result in errors [#2486](https://github.com/SalesforceCommerceCloud/pwa-kit/pull/2486)
- Created an opentelemetry server for SSR tracer initialization [#2617](https://github.com/SalesforceCommerceCloud/pwa-kit/pull/2617)
- Created opentelemetry.js file with utility functions to log OTel spans and metrics [#2705](https://github.com/SalesforceCommerceCloud/pwa-kit/pull/2705)
- Update performance.js with usage of opentelemetry spans and add cleanups [#2722](https://github.com/SalesforceCommerceCloud/pwa-kit/pull/2722)

## v3.10.0 (May 22, 2025)
- Fix the performance logging util to use the correct delimiter for the server-timing header. [#2295](https://github.com/SalesforceCommerceCloud/pwa-kit/pull/2295)
Expand Down
23 changes: 22 additions & 1 deletion packages/pwa-kit-react-sdk/setup-jest.js
Original file line number Diff line number Diff line change
Expand Up @@ -40,4 +40,25 @@ jest.mock('@salesforce/pwa-kit-runtime/utils/ssr-config', () => {
// The global performance object is available in production
// environments for both the server and the client.
// It's just the jest environment that this is not available
global.performance = performance
// In the Jest environment, global.performance may exist (e.g. from jsdom) but
// lack the perf_hooks methods this SDK relies on. Patch in only the methods
// that are missing so an existing implementation is never clobbered; when no
// global.performance exists at all, fall back to perf_hooks' object wholesale.
if (global.performance) {
    // global.performance exists but is missing methods from perf_hooks in jest.
    // Data-driven loop instead of one hand-written guard per method.
    for (const method of ['mark', 'measure', 'clearMarks', 'clearMeasures']) {
        if (typeof performance[method] === 'function' && !global.performance[method]) {
            // Bind to perf_hooks' performance so its internal mark/measure
            // buffers stay consistent when called through the global.
            global.performance[method] = performance[method].bind(performance)
        }
    }
} else {
    global.performance = performance
}

// This is needed for spans created in the jest environment to not be no-op
// spans; otherwise the spans will lack attributes.
import {BasicTracerProvider} from '@opentelemetry/sdk-trace-base'
const provider = new BasicTracerProvider()
provider.register()
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Following the steps to test the changes I see in the logs we have a few "No parent span found" warnings and it looks like each span is using a different traceId instead of all spans sharing the same traceId.

...
pwa-kit-react-sdk.opentelemetry.logSpanData INFO OpenTelemetry span data {"traceId":"d8acdf23f3344d04d9da11e4dc6a6fed","name":"ssr.render-to-string","id":"e0a43b64a9e330bd","kind":0,"timestamp":"2025-07-16T02:09:20.417000000Z","duration":0,"attributes":{"service.name":"pwa-kit-react-sdk","performance_type":"start","performance.mark":"ssr.render-to-string","performance.type":"start","performance.detail":"","event":"start"},"status":{"code":0},"events":[],"links":[],"start_time":[1752631760,417000000],"end_time":[1752631760,417000000],"forwardTrace":false}
pwa-kit-react-sdk.opentelemetry.logSpanData INFO OpenTelemetry span data {"traceId":"d8acdf23f3344d04d9da11e4dc6a6fed","name":"ssr.render-to-string","id":"e0a43b64a9e330bd","kind":0,"timestamp":"2025-07-16T02:09:20.417000000Z","duration":68.30075,"attributes":{"service.name":"pwa-kit-react-sdk","performance_type":"start","performance.mark":"ssr.render-to-string","performance.type":"start","performance.detail":"","event":"end"},"status":{"code":1},"events":[],"links":[],"start_time":[1752631760,417000000],"end_time":[1752631760,485300750],"forwardTrace":false}
pwa-kit-react-sdk.opentelemetry.logSpanData INFO OpenTelemetry span data {"traceId":"714bd3edf17e826c55ed0dabb8f2cc38","name":"ssr.total","id":"31e2c7b3b61ff8f1","kind":0,"timestamp":"2025-07-16T02:09:19.220000000Z","duration":1266.033834,"attributes":{"service.name":"pwa-kit-react-sdk","performance_type":"start","performance.mark":"ssr.total","performance.type":"start","performance.detail":"","event":"end"},"status":{"code":1},"events":[],"links":[],"start_time":[1752631759,220000000],"end_time":[1752631760,486033834],"forwardTrace":false}
pwa-kit-react-sdk.opentelemetry WARN No parent span found in context {"metricName":"ssr.route-matching"}
pwa-kit-react-sdk.opentelemetry WARN No parent span found in context {"metricName":"ssr.load-component"}
pwa-kit-react-sdk.opentelemetry WARN No parent span found in context {"metricName":"ssr.fetch-strategies.react-query.pre-render"}
pwa-kit-react-sdk.opentelemetry WARN No parent span found in context {"metricName":"ssr.fetch-strategies.react-query.use-query.1"}
pwa-kit-react-sdk.opentelemetry WARN No parent span found in context {"metricName":"ssr.fetch-strategies.react-query.use-query.useCategory-0"}
pwa-kit-react-sdk.opentelemetry WARN No parent span found in context {"metricName":"ssr.fetch-strategies.react-query.use-query.useProductSearch-2"}
pwa-kit-react-sdk.opentelemetry WARN No parent span found in context {"metricName":"ssr.fetch-strategies"}
pwa-kit-react-sdk.opentelemetry WARN No parent span found in context {"metricName":"ssr.render-to-string"}
pwa-kit-react-sdk.opentelemetry WARN No parent span found in context {"metricName":"ssr.total"}

Are we missing wrapping the render function with a root span? Is this something we plan to do in a separate ticket?
https://github.com/SalesforceCommerceCloud/pwa-kit/blob/W-18129479-spike-o11y/packages/pwa-kit-react-sdk/src/ssr/server/react-rendering.js#L136

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Original file line number Diff line number Diff line change
Expand Up @@ -222,6 +222,11 @@ export const render = async (req, res, next) => {
// Here, we use Express's convention to invoke error middleware.
// Note, we don't have an error handling middleware yet! This is calling the
// default error handling middleware provided by Express

if (res.__performanceTimer) {
res.__performanceTimer.cleanup()
}

return next(e)
}

Expand All @@ -244,6 +249,8 @@ export const render = async (req, res, next) => {
res.set('Cache-Control', NO_CACHE)
}

res.__performanceTimer.cleanup()

if (redirectUrl) {
res.redirect(routerContext.status || 302, redirectUrl)
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -739,6 +739,50 @@ describe('The Node SSR Environment', () => {
'max-age=0, nocache, nostore, must-revalidate'
)
}
},
{
description: `Performance timer cleanup is called during rendering`,
req: {url: '/pwa/', query: {__server_timing: '1'}},
mocks: () => {
// Mock PerformanceTimer to spy on cleanup
const PerformanceTimer = jest.requireActual('../../utils/performance').default
const originalCleanup = PerformanceTimer.prototype.cleanup
const cleanupSpy = jest.fn(originalCleanup)
PerformanceTimer.prototype.cleanup = cleanupSpy

// Store the spy for assertions
global.performanceCleanupSpy = cleanupSpy
},
assertions: (res) => {
expect(res.statusCode).toBe(200)
expect(global.performanceCleanupSpy).toHaveBeenCalled()
expect(global.performanceCleanupSpy).toHaveBeenCalledTimes(1)

// Clean up global
delete global.performanceCleanupSpy
}
},
{
description: `Performance timer cleanup is called even when rendering throws an error`,
req: {url: '/unknown-error/'}, // This URL causes an error
mocks: () => {
// Mock PerformanceTimer to spy on cleanup
const PerformanceTimer = jest.requireActual('../../utils/performance').default
const originalCleanup = PerformanceTimer.prototype.cleanup
const cleanupSpy = jest.fn(originalCleanup)
PerformanceTimer.prototype.cleanup = cleanupSpy

// Store the spy for assertions
global.performanceCleanupSpyError = cleanupSpy
},
assertions: (res) => {
expect(res.statusCode).toBe(500)
expect(global.performanceCleanupSpyError).toHaveBeenCalled()
expect(global.performanceCleanupSpyError).toHaveBeenCalledTimes(1)

// Clean up global
delete global.performanceCleanupSpyError
}
}
]

Expand Down
1 change: 0 additions & 1 deletion packages/pwa-kit-react-sdk/src/utils/opentelemetry.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ const logSpanData = (span, event = 'start', res = null) => {
const startTime = span.startTime
const endTime = event === 'start' ? startTime : span.endTime
const duration = event === 'start' ? 0 : hrTimeToMilliseconds(span.duration)

// Create the span data object that matches the expected format
const spanData = {
traceId: spanContext.traceId,
Expand Down
182 changes: 141 additions & 41 deletions packages/pwa-kit-react-sdk/src/utils/performance.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
import logger from './logger-instance'
import {createChildSpan, endSpan, logPerformanceMetric} from './opentelemetry'

export const PERFORMANCE_MARKS = {
total: 'ssr.total',
renderToString: 'ssr.render-to-string',
Expand Down Expand Up @@ -32,11 +34,10 @@ export default class PerformanceTimer {
}
constructor(options = {}) {
this.enabled = options.enabled || false
this.marks = {
start: new Map(),
end: new Map()
}
this.metrics = []
this.spans = new Map()
this.spanTimeouts = new Map()
this.maxSpanDuration = options.maxSpanDuration || 30000 // 30 seconds default
}

/**
Expand All @@ -61,15 +62,13 @@ export default class PerformanceTimer {
}

/**
* A utility function to format and log the performance metrics.
*
* @function
* @private
* Logs all performance metrics
*/
log() {
// Log each metric once with the standardized format
this.metrics.forEach((metric) => {
logger.info(`${metric.name} - ${metric.duration}ms ${metric.detail || ''}`, {
namespace: 'performance'
logPerformanceMetric(metric.name, metric.duration, {
'performance.detail': metric.detail || ''
})
})
}
Expand All @@ -78,50 +77,151 @@ export default class PerformanceTimer {
* This is a utility function to create performance marks.
* The data will be used in console logs and the http response header `server-timing`.
*
* @function
* @private
* @param {string} name - Unique identifier for the performance measurement.
* Must be the same for both start and end marks of a pair. E.g. 'ssr.render-to-string'
*
* @param {string} type - Mark type, either 'start' or 'end'. 'start' creates spans and browser marks,
* 'end' completes measurement and cleanup.
*
* @param {Object} [options={}] - Optional configuration object
* @param {string|Object} [options.detail=''] - Additional metadata for the mark
* included in logs and tracing attributes.
*/
mark(name, type, options = {}) {
if (!this.enabled) {
const {detail = ''} = options
if (!name || !type || !this.enabled) {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we keep checking if enabled first, then validate inputs?

Suggested change
if (!name || !type || !this.enabled) {
if (!this.enabled || !name || !type) {

return
}

if (!name) {
logger.warn('Performance mark cannot be created because the name is undefined.', {
namespace: 'performance'
})
if (type !== this.MARKER_TYPES.START && type !== this.MARKER_TYPES.END) {
logger.warn('Invalid mark type', {type, name, namespace: 'PerformanceTimer.mark'})
return
}

if (type !== this.MARKER_TYPES.START && type !== this.MARKER_TYPES.END) {
logger.warn(
'Performance mark cannot be created because the type must be either "start" or "end".',
{
namespace: 'performance'
try {
// Format detail as a string if it's an object
const formattedDetail = typeof detail === 'object' ? JSON.stringify(detail) : detail

performance.mark(`${name}.${type}`, {
detail: formattedDetail
})

// Only create spans for 'start' events and store them for later use
if (type === this.MARKER_TYPES.START) {
if (!this.spans.has(name)) {
const span = createChildSpan(name, {
performance_mark: name,
performance_type: type,
performance_detail: formattedDetail
})
if (span) {
this.spans.set(name, span)

// Set up automatic cleanup for orphaned spans
const timeoutId = setTimeout(() => {
this._cleanupOrphanedSpan(name, 'timeout')
}, this.maxSpanDuration)
this.spanTimeouts.set(name, timeoutId)
}
}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we log a warning in case we attempt to create a duplicate span name?

)
return
}
} else if (type === this.MARKER_TYPES.END) {
const startMark = `${name}.${this.MARKER_TYPES.START}`
const endMark = `${name}.${this.MARKER_TYPES.END}`

const timestamp = performance.now()
const isEnd = type === this.MARKER_TYPES.END
const storage = isEnd ? this.marks.end : this.marks.start
storage.set(name, {
name,
timestamp,
detail: options.detail
})
try {
const measure = performance.measure(name, startMark, endMark)

if (isEnd) {
const startMark = this.marks.start.get(name)
if (startMark) {
const measurement = {
name,
duration: timestamp - startMark.timestamp,
detail: options.detail
// Add the metric to the metrics array for Server-Timing header
this.metrics.push({
name,
duration: measure.duration,
detail: formattedDetail
})

// End the corresponding span if it exists and clear timeout
const span = this.spans.get(name)
if (span) {
endSpan(span)
this.spans.delete(name)

// Clear the timeout since span completed normally
const timeoutId = this.spanTimeouts.get(name)
if (timeoutId) {
clearTimeout(timeoutId)
this.spanTimeouts.delete(name)
}
}

// Clear the marks
performance.clearMarks(startMark)
performance.clearMarks(endMark)
performance.clearMeasures(name)
} catch (error) {
logger.warn('Failed to measure performance mark', {
name,
error: error.message,
startMark,
endMark,
namespace: 'PerformanceTimer.mark'
})
}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we also call cleanup in case of catching an error?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The timed out automatic cleanup made for each span on creation would clean them up eventually. Even when endSpan logs an error within itself.

this.metrics.push(measurement)
}
} catch (error) {
if (error.name === 'SyntaxError') {
logger.warn('Invalid performance mark name', {name, error: error.message})
} else {
logger.error('Error creating performance mark', {
name,
type,
error: error.message,
stack: error.stack,
namespace: 'PerformanceTimer.mark'
})
}
}
}

/**
* Helper method to clean up a specific orphaned span
* @private
*/
_cleanupOrphanedSpan(name, reason = 'manual') {
const span = this.spans.get(name)
if (span) {
logger.warn('Cleaning up orphaned span', {
name,
error: 'Deleting orphaned span (reason: ' + reason + ' cleanup)',
namespace: 'PerformanceTimer._cleanupOrphanedSpan'
})
endSpan(span)
this.spans.delete(name)
}

// Clear the timeout
const timeoutId = this.spanTimeouts.get(name)
if (timeoutId) {
clearTimeout(timeoutId)
this.spanTimeouts.delete(name)
}
}

/**
* Clean up all orphaned spans and clear all timeouts
* Call this when the timer is no longer needed or when you want to force cleanup
*/
cleanup() {
// Clean up any orphaned spans
this.spans.forEach((span, name) => {
this._cleanupOrphanedSpan(name, 'manual_cleanup')
})

// Clear any remaining timeouts
this.spanTimeouts.forEach((timeoutId) => {
clearTimeout(timeoutId)
})
this.spanTimeouts.clear()

// Clear metrics as well
this.metrics = []
}
}
Loading
Loading