diff --git a/include/perfetto/trace_processor/trace_processor.h b/include/perfetto/trace_processor/trace_processor.h
index f5d973430d..00b1ec2da6 100644
--- a/include/perfetto/trace_processor/trace_processor.h
+++ b/include/perfetto/trace_processor/trace_processor.h
@@ -250,6 +250,16 @@ class PERFETTO_EXPORT_COMPONENT TraceProcessor : public TraceProcessorStorage {
   // materialization of structured queries. On success, |out| is populated with
   // the new instance and ownership is transferred to the caller.
   virtual base::Status CreateSummarizer(std::unique_ptr* out) = 0;
+
+  // EXPERIMENTAL: Converts a proto from binary format to textproto format.
+  // `proto_type` is the fully qualified proto type name (e.g.,
+  // ".perfetto.protos.TraceSummarySpec").
+  // `proto_bytes` contains the binary proto data.
+  // The result is written to `output`.
+  virtual base::Status ProtoToText(const std::string& proto_type,
+                                   const uint8_t* proto_bytes,
+                                   size_t proto_size,
+                                   std::string* output) = 0;
 };

 }  // namespace perfetto::trace_processor
diff --git a/protos/perfetto/trace_processor/trace_processor.proto b/protos/perfetto/trace_processor/trace_processor.proto
index fe41e900e8..12b3915166 100644
--- a/protos/perfetto/trace_processor/trace_processor.proto
+++ b/protos/perfetto/trace_processor/trace_processor.proto
@@ -19,8 +19,10 @@ syntax = "proto2";
 package perfetto.protos;

 import "protos/perfetto/common/descriptor.proto";
+import "protos/perfetto/perfetto_sql/structured_query.proto";
 import "protos/perfetto/trace_processor/metatrace_categories.proto";
 import "protos/perfetto/trace_summary/file.proto";
+import "protos/perfetto/trace_summary/v2_metric.proto";

 // This file defines the schema for {,un}marshalling arguments and return values
 // when interfacing to the trace processor binary interface.
@@ -114,6 +116,7 @@ message TraceProcessorRpc {
     TPM_UPDATE_SUMMARIZER_SPEC = 17;
     TPM_QUERY_SUMMARIZER = 18;
     TPM_DESTROY_SUMMARIZER = 19;
+    TPM_PROTO_CONTENT = 20;
   }

   oneof type {
@@ -157,6 +160,8 @@ message TraceProcessorRpc {
     QuerySummarizerArgs query_summarizer_args = 113;
     // For TPM_DESTROY_SUMMARIZER.
     DestroySummarizerArgs destroy_summarizer_args = 114;
+    // For TPM_PROTO_CONTENT.
+    ProtoContentArgs proto_content_args = 115;

     // TraceProcessorMethod response args.
     // For TPM_APPEND_TRACE_DATA.
@@ -185,6 +190,8 @@ message TraceProcessorRpc {
     QuerySummarizerResult query_summarizer_result = 217;
     // For TPM_DESTROY_SUMMARIZER.
     DestroySummarizerResult destroy_summarizer_result = 218;
+    // For TPM_PROTO_CONTENT.
+    ProtoContentResult proto_content_result = 219;
   }

   // Previously: RawQueryArgs for TPM_QUERY_RAW_DEPRECATED
@@ -495,3 +502,16 @@ message DestroySummarizerArgs {
 message DestroySummarizerResult {
   optional string error = 1;
 }
+
+message ProtoContentArgs {
+  oneof proto {
+    TraceSummarySpec trace_summary_spec = 1;
+    PerfettoSqlStructuredQuery structured_query = 2;
+    TraceMetricV2Spec metric_spec = 3;
+  }
+}
+
+message ProtoContentResult {
+  optional string textproto = 1;
+  optional string error = 2;
+}
diff --git a/python/perfetto/trace_processor/trace_processor.descriptor b/python/perfetto/trace_processor/trace_processor.descriptor
index cbaac82cfd..4a5eb84b36 100644
Binary files a/python/perfetto/trace_processor/trace_processor.descriptor and b/python/perfetto/trace_processor/trace_processor.descriptor differ
diff --git a/src/trace_processor/rpc/rpc.cc b/src/trace_processor/rpc/rpc.cc
index f68af5fb59..25966cc4de 100644
--- a/src/trace_processor/rpc/rpc.cc
+++ b/src/trace_processor/rpc/rpc.cc
@@ -478,6 +478,43 @@ void Rpc::ParseRpcRequest(const uint8_t* data, size_t len) {
       resp.Send(rpc_response_fn_);
       break;
     }
+    case RpcProto::TPM_PROTO_CONTENT: {
+      Response resp(tx_seq_id_++, req_type);
+      protozero::ConstBytes args = req.proto_content_args();
+      protos::pbzero::ProtoContentArgs::Decoder decoder(args.data, args.size);
+
+      // Determine which proto type was provided and get its bytes
+      std::string proto_type;
+      protozero::ConstBytes proto_bytes;
+
+      if (decoder.has_trace_summary_spec()) {
+        proto_type = ".perfetto.protos.TraceSummarySpec";
+        proto_bytes = decoder.trace_summary_spec();
+      } else if (decoder.has_structured_query()) {
+        proto_type = ".perfetto.protos.PerfettoSqlStructuredQuery";
+        proto_bytes = decoder.structured_query();
+      } else if (decoder.has_metric_spec()) {
+        proto_type = ".perfetto.protos.TraceMetricV2Spec";
+        proto_bytes = decoder.metric_spec();
+      } else {
+        auto* result = resp->set_proto_content_result();
+        result->set_error("No proto provided in ProtoContentArgs");
+        resp.Send(rpc_response_fn_);
+        break;
+      }
+
+      auto* result = resp->set_proto_content_result();
+      std::string textproto;
+      base::Status status = trace_processor_->ProtoToText(
+          proto_type, proto_bytes.data, proto_bytes.size, &textproto);
+      if (!status.ok()) {
+        result->set_error(status.message());
+      } else {
+        result->set_textproto(textproto);
+      }
+      resp.Send(rpc_response_fn_);
+      break;
+    }
     default: {
       // This can legitimately happen if the client is newer. We reply with a
       // generic "unknown request" response, so the client can do feature
diff --git a/src/trace_processor/trace_processor_impl.cc b/src/trace_processor/trace_processor_impl.cc
index 1ae09784be..bf2132ddf3 100644
--- a/src/trace_processor/trace_processor_impl.cc
+++ b/src/trace_processor/trace_processor_impl.cc
@@ -795,6 +795,30 @@ void TraceProcessorImpl::EnableMetatrace(MetatraceConfig config) {
 // |                          Experimental                          |
 // =================================================================
+base::Status TraceProcessorImpl::ProtoToText(const std::string& proto_type,
+                                             const uint8_t* proto_bytes,
+                                             size_t proto_size,
+                                             std::string* output) {
+  // Ensure the descriptor is loaded for this proto type
+  auto opt_idx = metrics_descriptor_pool_.FindDescriptorIdx(proto_type);
+  if (!opt_idx) {
+    // Try loading the trace summary descriptor which contains most types we
+    // need
+    metrics_descriptor_pool_.AddFromFileDescriptorSet(
+        kTraceSummaryDescriptor.data(), kTraceSummaryDescriptor.size());
+    opt_idx = metrics_descriptor_pool_.FindDescriptorIdx(proto_type);
+    if (!opt_idx) {
+      return base::ErrStatus("Unknown proto type: %s", proto_type.c_str());
+    }
+  }
+
+  *output = protozero_to_text::ProtozeroToText(
+      metrics_descriptor_pool_, proto_type,
+      protozero::ConstBytes{proto_bytes, proto_size},
+      protozero_to_text::kIncludeNewLines);
+  return base::OkStatus();
+}
+
 namespace {

 class StringInterner {
diff --git a/src/trace_processor/trace_processor_impl.h b/src/trace_processor/trace_processor_impl.h
index c0ede78c2a..ed16a32450 100644
--- a/src/trace_processor/trace_processor_impl.h
+++ b/src/trace_processor/trace_processor_impl.h
@@ -131,12 +131,17 @@ class TraceProcessorImpl : public TraceProcessor,
   std::vector GetMetricDescriptors() override;

-  // ===================
-  // |    Summarizer   |
-  // ===================
+  // ============================
+  // |   Experimental methods   |
+  // ============================

   base::Status CreateSummarizer(std::unique_ptr* out) override;

+  base::Status ProtoToText(const std::string& proto_type,
+                           const uint8_t* proto_bytes,
+                           size_t proto_size,
+                           std::string* output) override;
+
  private:
   // Needed for iterators to be able to access the context.
   friend class IteratorImpl;
diff --git a/ui/src/assets/explore_page/node_info/metrics.md b/ui/src/assets/explore_page/node_info/metrics.md
new file mode 100644
index 0000000000..dbb0fcbe06
--- /dev/null
+++ b/ui/src/assets/explore_page/node_info/metrics.md
@@ -0,0 +1,38 @@
+# Metrics
+
+**Purpose:** Define trace-based metrics from your query results. This node packages your data into a `TraceMetricV2TemplateSpec` proto (metric bundle) that can be exported and used in trace analysis pipelines. The selected value column becomes the metric value, and all other columns become dimensions.
+
+**How to use:**
+
+1. **Connect an input:** This node requires a source of data (e.g., from a Table Source or after filtering/aggregating data).
+
+2. **Set Metric ID Prefix:** Give your metric a unique identifier prefix (e.g., `cpu_metrics`, `memory_usage`). The metric will be named `<prefix>_<value column>`.
+
+3. **Select a Value Column:** Choose a numeric column (int, double, etc.) that contains the metric value you want to track. Then configure:
+   - **Unit:** Select the appropriate unit for the metric values (Count, Time, Bytes, Percentage, etc.). Use "Custom" for units not in the predefined list.
+   - **Polarity:** Indicate whether higher or lower values are "better"
+     - Higher is Better: e.g., throughput, cache hit rate
+     - Lower is Better: e.g., latency, error count
+     - Not Applicable: for metrics where direction doesn't apply
+
+4. **Configure Dimension Uniqueness:** Specify whether dimension combinations are unique
+   - Unique: Each combination of dimension values appears at most once
+   - Not Unique: The same dimension combination may appear multiple times
+
+**Dimensions:**
+All columns in your input **except** the value column automatically become dimensions. Use a Modify Columns node before this one to control which columns are included as dimensions.
+
+**Export:**
+Click the "Export" button to generate a textproto representation of your metric template specification. This can be saved and used in trace analysis pipelines.
+
+**Example workflow:**
+1. Start with a Table Source (e.g., `slice` table)
+2. Add Aggregation to compute `SUM(dur)` grouped by `process_name`
+3. Add Metrics node:
+   - Metric ID Prefix: `slice_stats`
+   - Value column: `sum_dur` with Unit: Time (nanoseconds), Polarity: Not Applicable
+4. Export the metric spec
+
+This creates a metric `slice_stats_sum_dur` with `process_name` as a dimension.
+
+**Output:** The node passes through input columns unchanged. The metric template specification is generated separately via the Export button.
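For concreteness, the example workflow in the doc above maps onto the template-spec construction that `MetricsNode.getMetricTemplateSpec()` performs later in this change. A minimal TypeScript sketch using the same `protos` bindings; the structured query that would normally be attached to `templateSpec.query` is omitted:

```typescript
import protos from '../../../../protos'; // same bindings metrics_node.ts imports

// Sketch of the spec the "slice_stats" example would produce. The structured
// query built from the node's input is left out; the Export flow fills it in.
const templateSpec = new protos.TraceMetricV2TemplateSpec();
templateSpec.idPrefix = 'slice_stats';
templateSpec.dimensions = ['process_name'];
templateSpec.dimensionUniqueness =
  protos.TraceMetricV2Spec.DimensionUniqueness.NOT_UNIQUE;

const valueSpec = new protos.TraceMetricV2TemplateSpec.ValueColumnSpec();
valueSpec.name = 'sum_dur';
valueSpec.unit = protos.TraceMetricV2Spec.MetricUnit.TIME_NANOS;
valueSpec.polarity = protos.TraceMetricV2Spec.MetricPolarity.NOT_APPLICABLE;
templateSpec.valueColumnSpecs = [valueSpec];

// Wrapping the template in a TraceSummarySpec, as showExportModal() does, is
// what gets rendered to textproto via engine.getProtoContent().
const summarySpec = new protos.TraceSummarySpec();
summarySpec.metricTemplateSpec = [templateSpec];
```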
diff --git a/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/core_nodes.ts b/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/core_nodes.ts
index cfe4755b17..9cfd2b0374 100644
--- a/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/core_nodes.ts
+++ b/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/core_nodes.ts
@@ -72,6 +72,11 @@ import {
   CounterToIntervalsNode,
   CounterToIntervalsNodeState,
 } from './nodes/counter_to_intervals_node';
+import {
+  MetricsNode,
+  MetricsNodeState,
+  MetricsSerializedState,
+} from './nodes/metrics_node';
 import {Icons} from '../../../base/semantic_icons';
 import {NodeType} from '../query_node';
@@ -556,4 +561,20 @@ export function registerCoreNodes() {
       sqlModules,
     }),
   });
+
+  nodeRegistry.register('metrics', {
+    name: 'Metrics',
+    description:
+      'Define a trace-based metric with value column and dimensions.',
+    icon: 'analytics',
+    type: 'modification',
+    nodeType: NodeType.kMetrics,
+    factory: (state) => new MetricsNode(state as MetricsNodeState),
+    deserialize: (state, _trace, sqlModules) =>
+      new MetricsNode({
+        ...MetricsNode.deserializeState(state as MetricsSerializedState),
+        sqlModules,
+      }),
+    postDeserializeLate: (node) => (node as MetricsNode).onPrevNodesUpdated(),
+  });
 }
diff --git a/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/nodes/metrics_node.ts b/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/nodes/metrics_node.ts
new file mode 100644
index 0000000000..3ee68cea04
--- /dev/null
+++ b/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/nodes/metrics_node.ts
@@ -0,0 +1,983 @@
+// Copyright (C) 2026 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and +// limitations under the License. + +import m from 'mithril'; +import { + QueryNode, + QueryNodeState, + nextNodeId, + NodeType, +} from '../../query_node'; +import protos from '../../../../protos'; +import {ColumnInfo, newColumnInfoList} from '../column_info'; +import {NodeIssues} from '../node_issues'; +import {LabeledControl, OutlinedField} from '../widgets'; +import {NodeModifyAttrs, NodeDetailsAttrs} from '../node_explorer_types'; +import {Button, ButtonVariant} from '../../../../widgets/button'; +import {loadNodeDoc} from '../node_doc_loader'; +import { + ColumnName, + NodeDetailsMessage, + NodeDetailsSpacer, + NodeTitle, +} from '../node_styling_widgets'; +import {isNumericType} from '../utils'; +import {showModal} from '../../../../widgets/modal'; +import {CodeSnippet} from '../../../../widgets/code_snippet'; +import { + getStructuredQueries, + buildEmbeddedQueryTree, +} from '../query_builder_utils'; +import {Tabs} from '../../../../widgets/tabs'; +import {SegmentedButtons} from '../../../../widgets/segmented_buttons'; +import {Spinner} from '../../../../widgets/spinner'; +import {Row} from '../../../../trace_processor/query_result'; +import {DataGrid} from '../../../../components/widgets/datagrid/datagrid'; +import {SchemaRegistry} from '../../../../components/widgets/datagrid/datagrid_schema'; + +interface EnumOption { + value: string; + label: string; +} + +// Parsed metric bundle for table display +interface MetricBundle { + metricId: string; + schema: SchemaRegistry; + rows: Row[]; +} + +// Helper to extract dimension value as string +function getDimensionValue( + dim: protos.TraceMetricV2Bundle.Row.IDimension, +): string { + if (dim.stringValue !== undefined && dim.stringValue !== null) { + return dim.stringValue; + } + if (dim.int64Value !== undefined && dim.int64Value !== null) { + return String(dim.int64Value); + } + if (dim.doubleValue !== undefined && dim.doubleValue !== null) { + return String(dim.doubleValue); + } + if (dim.boolValue !== undefined && dim.boolValue !== null) { + return String(dim.boolValue); + } + return 'NULL'; +} + +// Helper to extract value as number or null +function getMetricValue( + val: protos.TraceMetricV2Bundle.Row.IValue, +): number | null { + if (val.doubleValue !== undefined && val.doubleValue !== null) { + return val.doubleValue; + } + return null; +} + +// Extract dimension names from spec (handles both dimensions and dimensionsSpecs) +function getDimensionNamesFromSpec( + spec: protos.ITraceMetricV2Spec | null | undefined, +): string[] { + if (!spec) return []; + + // First check dimensionsSpecs (detailed specs with name field) + if (spec.dimensionsSpecs && spec.dimensionsSpecs.length > 0) { + return spec.dimensionsSpecs + .map((ds) => ds.name) + .filter((name): name is string => name !== null && name !== undefined); + } + + // Fall back to simple dimensions array + return spec.dimensions ?? []; +} + +// Parse TraceSummary proto into MetricBundle array +function parseTraceSummary(data: Uint8Array): MetricBundle[] { + const summary = protos.TraceSummary.decode(data); + const bundles: MetricBundle[] = []; + + for (const bundle of summary.metricBundles) { + // Get all specs to find dimension names and value names + const specs = bundle.specs ?? []; + const firstSpec = specs[0]; + const metricId = firstSpec?.id ?? bundle.bundleId ?? 
'unknown'; + const dimensionNames = getDimensionNamesFromSpec(firstSpec); + + // Get value names from all specs (templates can have multiple value columns) + const valueNames = specs + .map((s) => s.value) + .filter((v): v is string => v !== null && v !== undefined); + + // If no value names found, use default + if (valueNames.length === 0) { + valueNames.push('value'); + } + + // Build schema for this metric + const schemaColumns: Record< + string, + {title: string; columnType: 'text' | 'quantitative'} + > = {}; + for (const dimName of dimensionNames) { + schemaColumns[dimName] = {title: dimName, columnType: 'text'}; + } + for (const valueName of valueNames) { + schemaColumns[valueName] = {title: valueName, columnType: 'quantitative'}; + } + + const schema: SchemaRegistry = { + [metricId]: schemaColumns, + }; + + // Convert rows to DataGrid format + const rows: Row[] = []; + for (const row of bundle.row ?? []) { + const rowData: Row = {}; + + // Add dimensions + for (let i = 0; i < dimensionNames.length; i++) { + const dimName = dimensionNames[i]; + const dimValue = row.dimension?.[i]; + rowData[dimName] = dimValue ? getDimensionValue(dimValue) : null; + } + + // Add all values (templates can have multiple value columns) + for (let i = 0; i < valueNames.length; i++) { + const valueName = valueNames[i]; + const val = row.values?.[i]; + rowData[valueName] = val ? getMetricValue(val) : null; + } + + rows.push(rowData); + } + + bundles.push({metricId, schema, rows}); + } + + return bundles; +} + +/** + * Converts an UPPER_SNAKE_CASE enum key to a human-readable label. + * E.g., "TIME_NANOS" -> "Time nanos", "HIGHER_IS_BETTER" -> "Higher is better" + */ +function enumKeyToLabel(key: string): string { + return key + .toLowerCase() + .split('_') + .map((word, i) => + i === 0 ? word.charAt(0).toUpperCase() + word.slice(1) : word, + ) + .join(' '); +} + +/** + * Extracts enum options from a protobuf enum object. + * Filters out UNSPECIFIED values and converts keys to human-readable labels. + */ +function getEnumOptions( + enumObj: Record, + excludePatterns: string[] = ['UNSPECIFIED'], +): EnumOption[] { + const options: EnumOption[] = []; + for (const key of Object.keys(enumObj)) { + // Skip numeric reverse mappings and excluded patterns + if (typeof enumObj[key] !== 'number') continue; + if (excludePatterns.some((pattern) => key.includes(pattern))) continue; + // Skip legacy values + if (key.includes('LEGACY')) continue; + + options.push({ + value: key, + label: enumKeyToLabel(key), + }); + } + return options; +} + +/** + * Returns metric unit options from the proto enum, plus a CUSTOM option. + */ +function getMetricUnitOptions(): EnumOption[] { + const options = getEnumOptions(protos.TraceMetricV2Spec.MetricUnit); + // Add custom unit option at the end + options.push({value: 'CUSTOM', label: 'Custom unit...'}); + return options; +} + +/** + * Returns polarity options from the proto enum. + */ +function getPolarityOptions(): EnumOption[] { + return getEnumOptions(protos.TraceMetricV2Spec.MetricPolarity); +} + +/** + * Returns dimension uniqueness options from the proto enum. 
+ */ +function getDimensionUniquenessOptions(): EnumOption[] { + return getEnumOptions(protos.TraceMetricV2Spec.DimensionUniqueness); +} + +export interface MetricsSerializedState { + metricIdPrefix: string; + valueColumn?: string; + unit: string; + customUnit?: string; + polarity: string; + dimensionUniqueness: string; +} + +export interface MetricsNodeState extends QueryNodeState { + metricIdPrefix: string; + valueColumn?: string; + unit: string; + customUnit?: string; + polarity: string; + dimensionUniqueness: string; + // Available columns from the input (for UI selection) + availableColumns: ColumnInfo[]; +} + +// Modal content for export with results +interface ExportModalContentAttrs { + textproto: string; + metricIdPrefix: string; + bundles: MetricBundle[] | undefined; + isLoading: boolean; + error: string | undefined; +} + +class ExportModalContent implements m.ClassComponent { + private activeTab: string = 'spec'; + private resultViewMode: 'table' | 'json' = 'table'; + private jsonText: string = ''; + + view({attrs}: m.CVnode): m.Children { + const {textproto, metricIdPrefix, bundles, isLoading, error} = attrs; + + // Build tabs + const tabs = [ + { + key: 'spec', + title: 'Spec', + content: m(CodeSnippet, { + text: textproto, + language: 'textproto', + downloadFileName: `${metricIdPrefix || 'metric'}_spec.pbtxt`, + }), + }, + { + key: 'result', + title: 'Result', + content: this.renderResultContent(bundles, isLoading, error), + }, + ]; + + return m( + '.pf-metrics-export-modal', + m(Tabs, { + tabs, + activeTabKey: this.activeTab, + onTabChange: (key) => { + this.activeTab = key; + }, + }), + ); + } + + private renderResultContent( + bundles: MetricBundle[] | undefined, + isLoading: boolean, + error: string | undefined, + ): m.Children { + if (isLoading) { + return m('.pf-metrics-export-result__loading', m(Spinner)); + } + + if (error !== undefined) { + return m('.pf-metrics-export-result__error', `Error: ${error}`); + } + + if (bundles === undefined || bundles.length === 0) { + return m( + '.pf-metrics-export-result__empty', + 'No results. The metric may not have matched any data.', + ); + } + + return m( + '.pf-metrics-export-result', + m( + '.pf-metrics-export-result__header', + m(SegmentedButtons, { + options: [{label: 'Table'}, {label: 'JSON'}], + selectedOption: this.resultViewMode === 'table' ? 0 : 1, + onOptionSelected: (num) => { + this.resultViewMode = num === 0 ? 'table' : 'json'; + }, + }), + ), + this.resultViewMode === 'json' + ? m(CodeSnippet, {language: 'json', text: this.jsonText}) + : m( + '.pf-metrics-export-result__tables', + bundles.map((bundle) => + m( + '.pf-metrics-export-result__bundle', + bundles.length > 1 && + m('.pf-metrics-export-result__bundle-title', bundle.metricId), + m(DataGrid, { + data: bundle.rows, + schema: bundle.schema, + rootSchema: bundle.metricId, + }), + ), + ), + ), + ); + } + + setJsonText(text: string): void { + this.jsonText = text; + } +} + +export class MetricsNode implements QueryNode { + readonly nodeId: string; + readonly type = NodeType.kMetrics; + primaryInput?: QueryNode; + nextNodes: QueryNode[]; + readonly state: MetricsNodeState; + + get finalCols(): ColumnInfo[] { + // Metrics node outputs a TraceMetricV2TemplateSpec, not SQL columns + // Pass through the input columns since the node doesn't modify them + // The actual output is the metric template spec proto + return this.primaryInput?.finalCols ?? 
[]; + } + + constructor(state: MetricsNodeState) { + this.nodeId = nextNodeId(); + this.state = { + ...state, + metricIdPrefix: state.metricIdPrefix ?? '', + valueColumn: state.valueColumn, + unit: state.unit ?? 'COUNT', + customUnit: state.customUnit, + polarity: state.polarity ?? 'NOT_APPLICABLE', + dimensionUniqueness: state.dimensionUniqueness ?? 'NOT_UNIQUE', + availableColumns: state.availableColumns ?? [], + }; + this.nextNodes = []; + } + + /** + * Returns the dimensions for this metric. + * Dimensions are all columns except the value column. + */ + getDimensions(): string[] { + if (this.state.valueColumn === undefined) { + return this.state.availableColumns.map((c) => c.name); + } + return this.state.availableColumns + .filter((c) => c.name !== this.state.valueColumn) + .map((c) => c.name); + } + + onPrevNodesUpdated() { + this.updateAvailableColumns(); + } + + updateAvailableColumns() { + if (this.primaryInput === undefined) { + return; + } + this.state.availableColumns = newColumnInfoList( + this.primaryInput.finalCols ?? [], + false, + ); + + // Validate that selected value column still exists and is numeric + if (this.state.valueColumn !== undefined) { + const valueCol = this.state.availableColumns.find( + (c) => c.name === this.state.valueColumn, + ); + if (valueCol === undefined || !isNumericType(valueCol.type)) { + this.state.valueColumn = undefined; + } + } + } + + validate(): boolean { + // Clear any previous errors at the start of validation + if (this.state.issues) { + this.state.issues.clear(); + } + + if (this.primaryInput === undefined) { + this.setValidationError('No input node connected'); + return false; + } + if (!this.primaryInput.validate()) { + this.setValidationError('Previous node is invalid'); + return false; + } + + // Validate metric ID prefix + if (!this.state.metricIdPrefix || this.state.metricIdPrefix.trim() === '') { + this.setValidationError('Metric ID prefix is required'); + return false; + } + + // Check for custom unit issue + if ( + this.state.valueColumn !== undefined && + this.state.unit === 'CUSTOM' && + (!this.state.customUnit || this.state.customUnit.trim() === '') + ) { + this.setValidationError( + `Custom unit is required for value column '${this.state.valueColumn}'`, + ); + return false; + } + + // Must have a value column + if ( + this.state.valueColumn === undefined || + this.state.valueColumn.trim() === '' + ) { + this.setValidationError('A value column is required'); + return false; + } + + // Check that the value column exists and is numeric + const inputCols = this.primaryInput.finalCols ?? 
[]; + const valueCol = inputCols.find((c) => c.name === this.state.valueColumn); + if (valueCol === undefined) { + this.setValidationError( + `Value column '${this.state.valueColumn}' not found in input`, + ); + return false; + } + if (!isNumericType(valueCol.type)) { + this.setValidationError( + `Value column '${this.state.valueColumn}' must be numeric (got ${valueCol.type})`, + ); + return false; + } + + return true; + } + + private setValidationError(message: string): void { + if (!this.state.issues) { + this.state.issues = new NodeIssues(); + } + this.state.issues.queryError = new Error(message); + } + + getTitle(): string { + return 'Metrics'; + } + + nodeDetails(): NodeDetailsAttrs { + const details: m.Child[] = [NodeTitle(this.getTitle())]; + + // Show invalid state when metric ID prefix is empty + if (!this.state.metricIdPrefix || this.state.metricIdPrefix.trim() === '') { + details.push(NodeDetailsMessage('Metric ID prefix required')); + return { + content: m('.pf-metrics-v2-node-details', details), + }; + } + + details.push( + m('div', 'ID prefix: ', ColumnName(this.state.metricIdPrefix)), + ); + + if (this.state.valueColumn !== undefined) { + details.push(NodeDetailsSpacer()); + details.push(m('div', 'Value: ', ColumnName(this.state.valueColumn))); + } + + const dimensions = this.getDimensions(); + if (dimensions.length > 0) { + details.push( + m( + 'div', + 'Dimensions: ', + dimensions.map((d, i) => [ + ColumnName(d), + i < dimensions.length - 1 ? ', ' : '', + ]), + ), + ); + } + + return { + content: m('.pf-metrics-v2-node-details', details), + }; + } + + nodeSpecificModify(): NodeModifyAttrs { + const sections: NodeModifyAttrs['sections'] = []; + + // Metric ID prefix and Export button row + const templateSpec = this.getMetricTemplateSpec(); + sections.push({ + content: m( + '.pf-metrics-header-row', + m(OutlinedField, { + label: 'Metric ID prefix', + value: this.state.metricIdPrefix, + placeholder: 'e.g., memory_per_process', + oninput: (e: Event) => { + this.state.metricIdPrefix = (e.target as HTMLInputElement).value; + this.state.onchange?.(); + }, + }), + m(Button, { + label: 'Export', + icon: 'download', + onclick: () => this.showExportModal(), + disabled: templateSpec === undefined, + variant: ButtonVariant.Outlined, + className: 'pf-metrics-v2-export-button', + }), + ), + }); + + // Value column controls + sections.push({ + content: this.renderValueControls(), + }); + + // Show computed dimensions (read-only info) + const dimensions = this.getDimensions(); + if (dimensions.length > 0) { + sections.push({ + content: m( + LabeledControl, + {label: 'Dimensions:'}, + m( + '.pf-metrics-v2-dimensions-info', + dimensions.map((d, i) => [ + ColumnName(d), + i < dimensions.length - 1 ? ', ' : '', + ]), + ), + ), + }); + } + + // Dimension uniqueness section + sections.push({ + content: m( + OutlinedField, + { + label: 'Dimension uniqueness', + value: this.state.dimensionUniqueness, + onchange: (e: Event) => { + this.state.dimensionUniqueness = ( + e.target as HTMLSelectElement + ).value; + this.state.onchange?.(); + }, + }, + getDimensionUniquenessOptions().map((d) => + m( + 'option', + { + value: d.value, + selected: this.state.dimensionUniqueness === d.value, + }, + d.label, + ), + ), + ), + }); + + return { + info: 'Configure a trace-based metric. Select a numeric value column - all other columns become dimensions automatically. 
Use a Modify Columns node before this to control which columns are included.', + sections, + }; + } + + private renderValueControls(): m.Children { + // Only show numeric columns + const numericColumns = this.state.availableColumns.filter((c) => + isNumericType(c.type), + ); + + const columnOptions = numericColumns.map((col) => + m( + 'option', + { + value: col.name, + selected: this.state.valueColumn === col.name, + }, + `${col.name} (${col.type})`, + ), + ); + + const needsCustomUnit = this.state.unit === 'CUSTOM'; + + return [ + // Column selector + m( + OutlinedField, + { + label: 'Value column', + value: this.state.valueColumn ?? '', + onchange: (e: Event) => { + const selectedValue = (e.target as HTMLSelectElement).value; + this.state.valueColumn = selectedValue || undefined; + this.state.onchange?.(); + }, + }, + [ + m('option', {value: '', disabled: true}, 'Select numeric column...'), + ...columnOptions, + ], + ), + // Unit selector + m( + OutlinedField, + { + label: 'Unit', + value: this.state.unit, + onchange: (e: Event) => { + const newUnit = (e.target as HTMLSelectElement).value; + this.state.unit = newUnit; + // Clear custom unit if not using custom + if (newUnit !== 'CUSTOM') { + this.state.customUnit = undefined; + } + this.state.onchange?.(); + }, + }, + getMetricUnitOptions().map((u) => + m( + 'option', + {value: u.value, selected: this.state.unit === u.value}, + u.label, + ), + ), + ), + // Custom unit input (conditionally shown) + needsCustomUnit + ? m(OutlinedField, { + label: 'Custom unit', + value: this.state.customUnit ?? '', + placeholder: 'Enter custom unit...', + oninput: (e: Event) => { + this.state.customUnit = (e.target as HTMLInputElement).value; + this.state.onchange?.(); + }, + }) + : undefined, + // Polarity selector + m( + OutlinedField, + { + label: 'Polarity', + value: this.state.polarity, + onchange: (e: Event) => { + this.state.polarity = (e.target as HTMLSelectElement).value; + this.state.onchange?.(); + }, + }, + getPolarityOptions().map((p) => + m( + 'option', + {value: p.value, selected: this.state.polarity === p.value}, + p.label, + ), + ), + ), + ]; + } + + private async showExportModal(): Promise { + const templateSpec = this.getMetricTemplateSpec(); + if (templateSpec === undefined) { + return; + } + + const engine = this.state.trace?.engine; + if (engine === undefined) { + showModal({ + title: 'Export Error', + content: 'No trace loaded', + buttons: [{text: 'Close'}], + }); + return; + } + + // Build a self-contained query tree for the template. The + // summarizeTrace API materializes shared queries as standalone tables, + // which breaks when those queries contain nested innerQueryId + // references. Embedding resolves this by inlining everything. 
+ if (this.primaryInput !== undefined) { + const allQueries = getStructuredQueries(this.primaryInput); + if (!(allQueries instanceof Error)) { + const embedded = buildEmbeddedQueryTree(allQueries); + if (embedded !== undefined) { + templateSpec.query = embedded; + } + } + } + + const summarySpec = new protos.TraceSummarySpec(); + summarySpec.metricTemplateSpec = [templateSpec]; + + // State for the modal content + let textproto = ''; + let bundles: MetricBundle[] | undefined; + let isLoading = true; + let error: string | undefined; + let modalContent: ExportModalContent | undefined; + + try { + // Get the textproto spec + textproto = await engine.getProtoContent({ + traceSummarySpec: summarySpec, + }); + + // Show the modal immediately with loading state for results + showModal({ + title: 'Export Metric', + content: () => { + // Create modal content component if not exists + if (modalContent === undefined) { + modalContent = new ExportModalContent(); + } + return m(ExportModalContent, { + textproto, + metricIdPrefix: this.state.metricIdPrefix, + bundles, + isLoading, + error, + }); + }, + buttons: [{text: 'Close'}], + }); + + // Now run the metric to get results + const result = await engine.summarizeTrace( + [summarySpec], + undefined, + undefined, + 'proto', + ); + + if (result.error) { + error = result.error; + isLoading = false; + m.redraw(); + return; + } + + if (!result.protoSummary) { + error = 'No results returned'; + isLoading = false; + m.redraw(); + return; + } + + // Parse the results + bundles = parseTraceSummary(result.protoSummary); + + // Generate JSON text for the JSON view + const summary = protos.TraceSummary.decode(result.protoSummary); + const jsonText = JSON.stringify( + protos.TraceSummary.toObject(summary), + null, + 2, + ); + if (modalContent !== undefined) { + modalContent.setJsonText(jsonText); + } + + isLoading = false; + m.redraw(); + } catch (e) { + if (textproto === '') { + // Failed to get textproto, show error modal + showModal({ + title: 'Export Error', + content: `Failed to export: ${e}`, + buttons: [{text: 'Close'}], + }); + } else { + // Modal is already showing, just update the error state + error = `${e}`; + isLoading = false; + m.redraw(); + } + } + } + + nodeInfo(): m.Children { + return loadNodeDoc('metrics'); + } + + clone(): QueryNode { + const stateCopy: MetricsNodeState = { + metricIdPrefix: this.state.metricIdPrefix, + valueColumn: this.state.valueColumn, + unit: this.state.unit, + customUnit: this.state.customUnit, + polarity: this.state.polarity, + dimensionUniqueness: this.state.dimensionUniqueness, + availableColumns: newColumnInfoList(this.state.availableColumns), + onchange: this.state.onchange, + issues: this.state.issues, + }; + return new MetricsNode(stateCopy); + } + + getStructuredQuery(): protos.PerfettoSqlStructuredQuery | undefined { + if (!this.validate()) return undefined; + if (this.primaryInput === undefined) return undefined; + + const inputQuery = this.primaryInput.getStructuredQuery(); + if (inputQuery === undefined) return undefined; + + // Wrap the input query to give this node its own ID in the query tree. + // The explore page uses node IDs to map query results back to nodes, + // so each node needs a unique ID even if it doesn't transform the SQL. + const sq = new protos.PerfettoSqlStructuredQuery(); + sq.id = this.nodeId; + sq.innerQueryId = this.primaryInput.nodeId; + return sq; + } + + /** + * Returns the TraceMetricV2TemplateSpec proto for this metric configuration. 
+ */ + getMetricTemplateSpec(): protos.TraceMetricV2TemplateSpec | undefined { + if (!this.validate()) return undefined; + if (this.primaryInput === undefined) return undefined; + + const valueColumn = this.state.valueColumn; + if (valueColumn === undefined) return undefined; + + const inputQuery = this.primaryInput.getStructuredQuery(); + if (inputQuery === undefined) return undefined; + + const templateSpec = new protos.TraceMetricV2TemplateSpec(); + templateSpec.idPrefix = this.state.metricIdPrefix; + templateSpec.dimensions = this.getDimensions(); + templateSpec.query = inputQuery; + + // Set dimension uniqueness + if (this.state.dimensionUniqueness === 'UNIQUE') { + templateSpec.dimensionUniqueness = + protos.TraceMetricV2Spec.DimensionUniqueness.UNIQUE; + } else { + templateSpec.dimensionUniqueness = + protos.TraceMetricV2Spec.DimensionUniqueness.NOT_UNIQUE; + } + + // Build value column spec + const valueSpec = new protos.TraceMetricV2TemplateSpec.ValueColumnSpec(); + valueSpec.name = valueColumn; + + // Set unit + if (this.state.unit === 'CUSTOM' && this.state.customUnit) { + valueSpec.customUnit = this.state.customUnit; + } else if (this.state.unit !== 'CUSTOM') { + const unitEnum = + protos.TraceMetricV2Spec.MetricUnit[ + this.state.unit as keyof typeof protos.TraceMetricV2Spec.MetricUnit + ]; + if (unitEnum !== undefined) { + valueSpec.unit = unitEnum; + } + } + + // Set polarity + const polarityEnum = + protos.TraceMetricV2Spec.MetricPolarity[ + this.state + .polarity as keyof typeof protos.TraceMetricV2Spec.MetricPolarity + ]; + if (polarityEnum !== undefined) { + valueSpec.polarity = polarityEnum; + } + + templateSpec.valueColumnSpecs = [valueSpec]; + + return templateSpec; + } + + serializeState(): MetricsSerializedState & {primaryInputId?: string} { + return { + primaryInputId: this.primaryInput?.nodeId, + metricIdPrefix: this.state.metricIdPrefix, + valueColumn: this.state.valueColumn, + unit: this.state.unit, + customUnit: this.state.customUnit, + polarity: this.state.polarity, + dimensionUniqueness: this.state.dimensionUniqueness, + }; + } + + static deserializeState(state: MetricsSerializedState): MetricsNodeState { + let valueColumn = state.valueColumn; + let unit = state.unit ?? 'COUNT'; + let customUnit = state.customUnit; + let polarity = state.polarity ?? 'NOT_APPLICABLE'; + + // Handle migration from old multi-value format + if (valueColumn === undefined && 'values' in state) { + const values = ( + state as unknown as { + values?: Array<{ + column?: string; + unit: string; + customUnit?: string; + polarity: string; + }>; + } + ).values; + if (values !== undefined && values.length > 0) { + valueColumn = values[0].column; + unit = values[0].unit; + customUnit = values[0].customUnit; + polarity = values[0].polarity; + } + } + + // Handle migration from old metricId to metricIdPrefix + const metricIdPrefix = + state.metricIdPrefix ?? + (state as unknown as {metricId?: string}).metricId ?? + ''; + + return { + metricIdPrefix, + valueColumn, + unit, + customUnit, + polarity, + dimensionUniqueness: state.dimensionUniqueness ?? 
'NOT_UNIQUE', + availableColumns: [], + }; + } +} diff --git a/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/nodes/metrics_node_unittest.ts b/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/nodes/metrics_node_unittest.ts new file mode 100644 index 0000000000..0cf0839816 --- /dev/null +++ b/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/nodes/metrics_node_unittest.ts @@ -0,0 +1,1128 @@ +// Copyright (C) 2026 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import { + MetricsNode, + MetricsNodeState, + MetricsSerializedState, +} from './metrics_node'; +import {NodeType} from '../../query_node'; +import { + createMockNode, + createColumnInfo, + connectNodes, + expectValidationError, + createMockNodeWithStructuredQuery, +} from '../testing/test_utils'; +import protos from '../../../../protos'; + +describe('MetricsNode', () => { + describe('constructor', () => { + it('should initialize with default state', () => { + const node = new MetricsNode({} as MetricsNodeState); + + expect(node.state.metricIdPrefix).toBe(''); + expect(node.state.valueColumn).toBeUndefined(); + expect(node.state.unit).toBe('COUNT'); + expect(node.state.polarity).toBe('NOT_APPLICABLE'); + expect(node.state.dimensionUniqueness).toBe('NOT_UNIQUE'); + expect(node.state.availableColumns).toEqual([]); + }); + + it('should have correct node type', () => { + const node = new MetricsNode({} as MetricsNodeState); + + expect(node.type).toBe(NodeType.kMetrics); + }); + + it('should initialize with no primary input', () => { + const node = new MetricsNode({} as MetricsNodeState); + + expect(node.primaryInput).toBeUndefined(); + }); + + it('should initialize with empty nextNodes array', () => { + const node = new MetricsNode({} as MetricsNodeState); + + expect(node.nextNodes).toEqual([]); + }); + + it('should preserve provided state values', () => { + const node = new MetricsNode({ + metricIdPrefix: 'my_metric', + valueColumn: 'value1', + unit: 'BYTES', + polarity: 'HIGHER_IS_BETTER', + dimensionUniqueness: 'UNIQUE', + availableColumns: [], + }); + + expect(node.state.metricIdPrefix).toBe('my_metric'); + expect(node.state.valueColumn).toBe('value1'); + expect(node.state.unit).toBe('BYTES'); + expect(node.state.polarity).toBe('HIGHER_IS_BETTER'); + expect(node.state.dimensionUniqueness).toBe('UNIQUE'); + }); + }); + + describe('finalCols', () => { + it('should return empty array when no primary input', () => { + const node = new MetricsNode({} as MetricsNodeState); + + expect(node.finalCols).toEqual([]); + }); + + it('should pass through input columns unchanged', () => { + const inputCols = [ + createColumnInfo('id', 'int'), + createColumnInfo('name', 'string'), + createColumnInfo('value', 'double'), + ]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({} as MetricsNodeState); + node.primaryInput = inputNode; + + expect(node.finalCols).toEqual(inputCols); + }); + }); + + describe('getDimensions', () => { + it('should return all 
columns when no value column is set', () => { + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: undefined, + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [ + createColumnInfo('id', 'int'), + createColumnInfo('name', 'string'), + ], + }); + + expect(node.getDimensions()).toEqual(['id', 'name']); + }); + + it('should return all columns except value column as dimensions', () => { + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value1', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [ + createColumnInfo('id', 'int'), + createColumnInfo('name', 'string'), + createColumnInfo('value1', 'double'), + createColumnInfo('category', 'string'), + ], + }); + + expect(node.getDimensions()).toEqual(['id', 'name', 'category']); + }); + + it('should return empty array when the only column is the value column', () => { + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [createColumnInfo('value', 'double')], + }); + + expect(node.getDimensions()).toEqual([]); + }); + }); + + describe('validate', () => { + it('should fail validation when no primary input', () => { + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + + expectValidationError(node, 'No input node connected'); + }); + + it('should fail validation when primary input is invalid', () => { + const inputNode = createMockNode({validate: () => false}); + + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + expectValidationError(node, 'Previous node is invalid'); + }); + + it('should fail validation when metric ID prefix is empty', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: '', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + expectValidationError(node, 'Metric ID prefix is required'); + }); + + it('should fail validation when metric ID prefix is only whitespace', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: ' ', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + expectValidationError(node, 'Metric ID prefix is required'); + }); + + it('should fail validation when no value column set', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: undefined, + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + expectValidationError(node, 'A value column is required'); + }); + + it('should fail 
validation when value column not found in input', () => { + const inputCols = [createColumnInfo('other', 'int')]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'missing_column', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + expectValidationError(node, "Value column 'missing_column' not found"); + }); + + it('should fail validation when value column is not numeric', () => { + const inputCols = [createColumnInfo('name', 'string')]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'name', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + expectValidationError(node, 'must be numeric'); + }); + + it('should fail validation when custom unit not provided with CUSTOM unit', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'CUSTOM', + customUnit: '', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + expectValidationError(node, 'Custom unit is required'); + }); + + it('should pass validation with valid configuration', () => { + const inputCols = [ + createColumnInfo('id', 'int'), + createColumnInfo('value', 'double'), + createColumnInfo('name', 'string'), + ]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + expect(node.validate()).toBe(true); + }); + + it('should pass validation with custom unit', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'CUSTOM', + customUnit: 'widgets', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + expect(node.validate()).toBe(true); + }); + + it('should clear previous validation errors on success', () => { + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + + // First validation should fail (no input) + expect(node.validate()).toBe(false); + expect(node.state.issues?.queryError).toBeDefined(); + + // Add valid input and validate again + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNode({columns: inputCols}); + connectNodes(inputNode, node); + + expect(node.validate()).toBe(true); + expect(node.state.issues?.queryError).toBeUndefined(); + }); + }); + + describe('onPrevNodesUpdated', () => { + it('should update available columns from primary input', () => { + const inputCols = [ + createColumnInfo('id', 'int'), + createColumnInfo('name', 'string'), + createColumnInfo('value', 'double'), + ]; + const inputNode = createMockNode({columns: 
inputCols}); + + const node = new MetricsNode({} as MetricsNodeState); + connectNodes(inputNode, node); + + node.onPrevNodesUpdated(); + + expect(node.state.availableColumns.length).toBe(3); + expect(node.state.availableColumns.map((c) => c.name)).toEqual([ + 'id', + 'name', + 'value', + ]); + }); + + it('should clear value column if it no longer exists', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'old_value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + node.onPrevNodesUpdated(); + + expect(node.state.valueColumn).toBeUndefined(); + }); + + it('should clear value column if it becomes non-numeric', () => { + const inputCols = [createColumnInfo('value', 'string')]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + node.onPrevNodesUpdated(); + + expect(node.state.valueColumn).toBeUndefined(); + }); + + it('should preserve value column if it still exists and is numeric', () => { + const inputCols = [createColumnInfo('value', 'double')]; + const inputNode = createMockNode({columns: inputCols}); + + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + node.onPrevNodesUpdated(); + + expect(node.state.valueColumn).toBe('value'); + }); + + it('should do nothing when no primary input', () => { + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + + // Should not throw + node.onPrevNodesUpdated(); + + // State should remain unchanged + expect(node.state.valueColumn).toBe('value'); + }); + }); + + describe('getStructuredQuery', () => { + it('should return undefined when validation fails', () => { + const node = new MetricsNode({} as MetricsNodeState); + + expect(node.getStructuredQuery()).toBeUndefined(); + }); + + it('should return undefined when primary input has no structured query', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNode({ + columns: inputCols, + getStructuredQuery: () => undefined, + }); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + expect(node.getStructuredQuery()).toBeUndefined(); + }); + + it('should return structured query when valid', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNodeWithStructuredQuery('input', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + const sq = node.getStructuredQuery(); + + expect(sq).toBeDefined(); + expect(sq?.id).toBe(node.nodeId); + 
expect(sq?.innerQueryId).toBe(inputNode.nodeId); + }); + }); + + describe('getMetricTemplateSpec', () => { + it('should return undefined when validation fails', () => { + const node = new MetricsNode({} as MetricsNodeState); + + expect(node.getMetricTemplateSpec()).toBeUndefined(); + }); + + it('should return template spec with correct id prefix', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNodeWithStructuredQuery('input', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'my_metric_prefix', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: inputCols, + }); + connectNodes(inputNode, node); + + const spec = node.getMetricTemplateSpec(); + + expect(spec).toBeDefined(); + expect(spec?.idPrefix).toBe('my_metric_prefix'); + }); + + it('should compute dimensions as all columns except value column', () => { + const inputCols = [ + createColumnInfo('value1', 'double'), + createColumnInfo('dim1', 'string'), + createColumnInfo('dim2', 'int'), + ]; + const inputNode = createMockNodeWithStructuredQuery('input', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value1', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: inputCols, + }); + connectNodes(inputNode, node); + + const spec = node.getMetricTemplateSpec(); + + expect(spec?.dimensions).toEqual(['dim1', 'dim2']); + }); + + it('should include value column spec', () => { + const inputCols = [createColumnInfo('value', 'double')]; + const inputNode = createMockNodeWithStructuredQuery('input', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: inputCols, + }); + connectNodes(inputNode, node); + + const spec = node.getMetricTemplateSpec(); + + expect(spec?.valueColumnSpecs?.length).toBe(1); + expect(spec?.valueColumnSpecs?.[0].name).toBe('value'); + expect(spec?.valueColumnSpecs?.[0].unit).toBe( + protos.TraceMetricV2Spec.MetricUnit.COUNT, + ); + }); + + it('should set dimension uniqueness to UNIQUE', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNodeWithStructuredQuery('input', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'UNIQUE', + availableColumns: inputCols, + }); + connectNodes(inputNode, node); + + const spec = node.getMetricTemplateSpec(); + + expect(spec?.dimensionUniqueness).toBe( + protos.TraceMetricV2Spec.DimensionUniqueness.UNIQUE, + ); + }); + + it('should set dimension uniqueness to NOT_UNIQUE', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNodeWithStructuredQuery('input', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: inputCols, + }); + connectNodes(inputNode, node); + + const spec = node.getMetricTemplateSpec(); + + expect(spec?.dimensionUniqueness).toBe( + protos.TraceMetricV2Spec.DimensionUniqueness.NOT_UNIQUE, + ); + }); + + it('should set custom unit in value spec', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = 
createMockNodeWithStructuredQuery('input', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'CUSTOM', + customUnit: 'my_custom_unit', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: inputCols, + }); + connectNodes(inputNode, node); + + const spec = node.getMetricTemplateSpec(); + + expect(spec?.valueColumnSpecs?.[0].customUnit).toBe('my_custom_unit'); + }); + + it('should set polarity in value spec', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNodeWithStructuredQuery('input', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'LOWER_IS_BETTER', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: inputCols, + }); + connectNodes(inputNode, node); + + const spec = node.getMetricTemplateSpec(); + + expect(spec?.valueColumnSpecs?.[0].polarity).toBe( + protos.TraceMetricV2Spec.MetricPolarity.LOWER_IS_BETTER, + ); + }); + + it('should include query from primary input', () => { + const inputCols = [createColumnInfo('value', 'int')]; + const inputNode = createMockNodeWithStructuredQuery('input', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'test_metric', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: inputCols, + }); + connectNodes(inputNode, node); + + const spec = node.getMetricTemplateSpec(); + + expect(spec?.query).toBeDefined(); + expect(spec?.query?.id).toBe('input'); + }); + }); + + describe('serializeState', () => { + it('should serialize all state properties', () => { + const inputNode = createMockNode({columns: []}); + + const node = new MetricsNode({ + metricIdPrefix: 'my_metric', + valueColumn: 'value1', + unit: 'BYTES', + polarity: 'HIGHER_IS_BETTER', + dimensionUniqueness: 'UNIQUE', + availableColumns: [], + }); + connectNodes(inputNode, node); + + const serialized = node.serializeState(); + + expect(serialized.metricIdPrefix).toBe('my_metric'); + expect(serialized.valueColumn).toBe('value1'); + expect(serialized.unit).toBe('BYTES'); + expect(serialized.polarity).toBe('HIGHER_IS_BETTER'); + expect(serialized.dimensionUniqueness).toBe('UNIQUE'); + expect(serialized.primaryInputId).toBe(inputNode.nodeId); + }); + + it('should handle missing primary input', () => { + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + + const serialized = node.serializeState(); + + expect(serialized.primaryInputId).toBeUndefined(); + }); + }); + + describe('deserializeState', () => { + it('should deserialize all state properties', () => { + const serialized: MetricsSerializedState = { + metricIdPrefix: 'restored_metric', + valueColumn: 'value1', + unit: 'MEGABYTES', + polarity: 'LOWER_IS_BETTER', + dimensionUniqueness: 'UNIQUE', + }; + + const state = MetricsNode.deserializeState(serialized); + + expect(state.metricIdPrefix).toBe('restored_metric'); + expect(state.valueColumn).toBe('value1'); + expect(state.unit).toBe('MEGABYTES'); + expect(state.polarity).toBe('LOWER_IS_BETTER'); + expect(state.dimensionUniqueness).toBe('UNIQUE'); + expect(state.availableColumns).toEqual([]); + }); + + it('should provide defaults for missing properties', () => { + const state = MetricsNode.deserializeState({} as MetricsSerializedState); + + 
expect(state.metricIdPrefix).toBe(''); + expect(state.valueColumn).toBeUndefined(); + expect(state.unit).toBe('COUNT'); + expect(state.polarity).toBe('NOT_APPLICABLE'); + expect(state.dimensionUniqueness).toBe('NOT_UNIQUE'); + }); + + it('should migrate from old multi-value format', () => { + // Old format had values array + const oldFormat = { + metricIdPrefix: 'old_metric', + values: [ + { + column: 'old_value', + unit: 'BYTES', + customUnit: 'old_custom', + polarity: 'HIGHER_IS_BETTER', + }, + ], + dimensionUniqueness: 'UNIQUE', + } as unknown as MetricsSerializedState; + + const state = MetricsNode.deserializeState(oldFormat); + + expect(state.metricIdPrefix).toBe('old_metric'); + expect(state.valueColumn).toBe('old_value'); + expect(state.unit).toBe('BYTES'); + expect(state.customUnit).toBe('old_custom'); + expect(state.polarity).toBe('HIGHER_IS_BETTER'); + }); + + it('should migrate from old metricId field', () => { + const oldFormat = { + metricId: 'legacy_metric', + valueColumn: 'val', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + } as unknown as MetricsSerializedState; + + const state = MetricsNode.deserializeState(oldFormat); + + expect(state.metricIdPrefix).toBe('legacy_metric'); + }); + }); + + describe('clone', () => { + it('should create a new node with same state', () => { + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value1', + unit: 'BYTES', + polarity: 'HIGHER_IS_BETTER', + dimensionUniqueness: 'UNIQUE', + availableColumns: [createColumnInfo('value1', 'int')], + }); + + const cloned = node.clone() as MetricsNode; + + expect(cloned).toBeInstanceOf(MetricsNode); + expect(cloned.nodeId).not.toBe(node.nodeId); + expect(cloned.state.metricIdPrefix).toBe('test'); + expect(cloned.state.valueColumn).toBe('value1'); + expect(cloned.state.unit).toBe('BYTES'); + expect(cloned.state.dimensionUniqueness).toBe('UNIQUE'); + }); + + it('should preserve onchange callback', () => { + const onchange = jest.fn(); + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + onchange, + }); + + const cloned = node.clone() as MetricsNode; + + expect(cloned.state.onchange).toBe(onchange); + }); + }); + + describe('getTitle', () => { + it('should return correct title', () => { + const node = new MetricsNode({} as MetricsNodeState); + + expect(node.getTitle()).toBe('Metrics'); + }); + }); + + describe('nodeDetails', () => { + it('should always include title', () => { + const node = new MetricsNode({} as MetricsNodeState); + + const details = node.nodeDetails(); + + expect(details.content).toBeDefined(); + }); + + it('should show invalid state when metric ID prefix is empty', () => { + const node = new MetricsNode({ + metricIdPrefix: '', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + + const details = node.nodeDetails(); + + // Content should be defined and show invalid state + expect(details.content).toBeDefined(); + }); + + it('should show metric ID prefix when configured', () => { + const node = new MetricsNode({ + metricIdPrefix: 'my_metric', + valueColumn: undefined, + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + + const details = node.nodeDetails(); + + expect(details.content).toBeDefined(); + }); + + it('should show value column', () 
=> { + const node = new MetricsNode({ + metricIdPrefix: 'my_metric', + valueColumn: 'value1', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [], + }); + + const details = node.nodeDetails(); + + expect(details.content).toBeDefined(); + }); + + it('should show computed dimensions', () => { + const node = new MetricsNode({ + metricIdPrefix: 'my_metric', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [ + createColumnInfo('value', 'double'), + createColumnInfo('dim1', 'string'), + createColumnInfo('dim2', 'int'), + ], + }); + + const details = node.nodeDetails(); + + expect(details.content).toBeDefined(); + // Dimensions should be computed as ['dim1', 'dim2'] + expect(node.getDimensions()).toEqual(['dim1', 'dim2']); + }); + }); + + describe('nodeSpecificModify', () => { + it('should return sections for configuration', () => { + const node = new MetricsNode({ + metricIdPrefix: 'test', + valueColumn: 'value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: [ + createColumnInfo('value', 'int'), + createColumnInfo('name', 'string'), + ], + }); + + const modify = node.nodeSpecificModify(); + + expect(modify).toBeDefined(); + expect(modify.sections).toBeDefined(); + if (modify.sections !== undefined) { + expect(modify.sections.length).toBeGreaterThan(0); + } + expect(modify.info).toContain('metric'); + }); + }); + + describe('integration tests', () => { + it('should work end-to-end with complete configuration', () => { + const inputCols = [ + createColumnInfo('id', 'int'), + createColumnInfo('ts', 'timestamp'), + createColumnInfo('cpu_time', 'double'), + createColumnInfo('process_name', 'string'), + createColumnInfo('thread_name', 'string'), + ]; + const inputNode = createMockNodeWithStructuredQuery('source', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'process_metrics', + valueColumn: 'cpu_time', + unit: 'TIME_NANOS', + polarity: 'LOWER_IS_BETTER', + dimensionUniqueness: 'NOT_UNIQUE', + availableColumns: inputCols, + }); + connectNodes(inputNode, node); + + // Should validate + expect(node.validate()).toBe(true); + + // Should generate structured query + const sq = node.getStructuredQuery(); + expect(sq).toBeDefined(); + + // Should generate metric template spec + const spec = node.getMetricTemplateSpec(); + expect(spec).toBeDefined(); + expect(spec?.idPrefix).toBe('process_metrics'); + expect(spec?.valueColumnSpecs?.length).toBe(1); + expect(spec?.valueColumnSpecs?.[0].name).toBe('cpu_time'); + expect(spec?.valueColumnSpecs?.[0].unit).toBe( + protos.TraceMetricV2Spec.MetricUnit.TIME_NANOS, + ); + // Dimensions should be all columns except value + expect(spec?.dimensions).toEqual([ + 'id', + 'ts', + 'process_name', + 'thread_name', + ]); + }); + + it('should handle serialization round-trip', () => { + const inputCols = [ + createColumnInfo('value1', 'double'), + createColumnInfo('dim', 'string'), + ]; + const inputNode = createMockNodeWithStructuredQuery('source', inputCols); + + const node = new MetricsNode({ + metricIdPrefix: 'original_metric', + valueColumn: 'value1', + unit: 'PERCENTAGE', + polarity: 'HIGHER_IS_BETTER', + dimensionUniqueness: 'UNIQUE', + availableColumns: inputCols, + }); + connectNodes(inputNode, node); + + // Serialize + const serialized = node.serializeState(); + + // Deserialize + const restoredState = MetricsNode.deserializeState( + serialized as 
MetricsSerializedState, + ); + + // Create new node + const restoredNode = new MetricsNode(restoredState); + + // Should have same configuration + expect(restoredNode.state.metricIdPrefix).toBe('original_metric'); + expect(restoredNode.state.valueColumn).toBe('value1'); + expect(restoredNode.state.unit).toBe('PERCENTAGE'); + expect(restoredNode.state.polarity).toBe('HIGHER_IS_BETTER'); + expect(restoredNode.state.dimensionUniqueness).toBe('UNIQUE'); + }); + + it('should preserve value column after deserialization and reconnection', () => { + const inputCols = [ + createColumnInfo('id', 'int'), + createColumnInfo('metric_value', 'double'), + createColumnInfo('category', 'string'), + ]; + const inputNode = createMockNodeWithStructuredQuery('source', inputCols); + + // Simulate deserialization: create node with value but empty availableColumns + const deserializedState = MetricsNode.deserializeState({ + metricIdPrefix: 'my_metric', + valueColumn: 'metric_value', + unit: 'BYTES', + polarity: 'LOWER_IS_BETTER', + dimensionUniqueness: 'NOT_UNIQUE', + }); + + // availableColumns should be empty after deserialization + expect(deserializedState.availableColumns).toEqual([]); + + const restoredNode = new MetricsNode(deserializedState); + + // Value should be preserved even with empty availableColumns + expect(restoredNode.state.valueColumn).toBe('metric_value'); + + // Connect to input (simulates third pass of deserialization) + connectNodes(inputNode, restoredNode); + + // Call onPrevNodesUpdated (simulates fourth pass of deserialization) + restoredNode.onPrevNodesUpdated(); + + // availableColumns should now be populated + expect(restoredNode.state.availableColumns.length).toBe(3); + expect(restoredNode.state.availableColumns.map((c) => c.name)).toEqual([ + 'id', + 'metric_value', + 'category', + ]); + + // Value should STILL be preserved (not cleared) + expect(restoredNode.state.valueColumn).toBe('metric_value'); + + // Dimensions should be computed as all columns except value + expect(restoredNode.getDimensions()).toEqual(['id', 'category']); + + // Node should validate successfully + expect(restoredNode.validate()).toBe(true); + }); + + it('should clear value column if it no longer exists after reconnection', () => { + const inputCols = [ + createColumnInfo('id', 'int'), + createColumnInfo('different_value', 'double'), + ]; + const inputNode = createMockNodeWithStructuredQuery('source', inputCols); + + const deserializedState = MetricsNode.deserializeState({ + metricIdPrefix: 'my_metric', + valueColumn: 'old_value_column', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + }); + + const restoredNode = new MetricsNode(deserializedState); + connectNodes(inputNode, restoredNode); + restoredNode.onPrevNodesUpdated(); + + // Value column should be cleared because 'old_value_column' doesn't exist + expect(restoredNode.state.valueColumn).toBeUndefined(); + }); + + it('should clear value column if it becomes non-numeric after reconnection', () => { + const inputCols = [ + createColumnInfo('id', 'int'), + createColumnInfo('metric_value', 'string'), // Same name but now string type + ]; + const inputNode = createMockNodeWithStructuredQuery('source', inputCols); + + const deserializedState = MetricsNode.deserializeState({ + metricIdPrefix: 'my_metric', + valueColumn: 'metric_value', + unit: 'COUNT', + polarity: 'NOT_APPLICABLE', + dimensionUniqueness: 'NOT_UNIQUE', + }); + + const restoredNode = new MetricsNode(deserializedState); + connectNodes(inputNode, restoredNode); 
+      restoredNode.onPrevNodesUpdated();
+
+      // Value column should be cleared because 'metric_value' is not numeric
+      expect(restoredNode.state.valueColumn).toBeUndefined();
+    });
+
+    it('should include primaryInputId in serialized state', () => {
+      const inputCols = [createColumnInfo('value', 'int')];
+      const inputNode = createMockNodeWithStructuredQuery(
+        'input-123',
+        inputCols,
+      );
+      // Override the nodeId for testing
+      (inputNode as {nodeId: string}).nodeId = 'input-123';
+
+      const node = new MetricsNode({
+        metricIdPrefix: 'test',
+        valueColumn: 'value',
+        unit: 'COUNT',
+        polarity: 'NOT_APPLICABLE',
+        dimensionUniqueness: 'NOT_UNIQUE',
+        availableColumns: [],
+      });
+      connectNodes(inputNode, node);
+
+      const serialized = node.serializeState();
+
+      expect(serialized.primaryInputId).toBe('input-123');
+    });
+  });
+});
diff --git a/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/query_builder_utils.ts b/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/query_builder_utils.ts
index ef57e712ed..990f29e9de 100644
--- a/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/query_builder_utils.ts
+++ b/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/query_builder_utils.ts
@@ -257,3 +257,207 @@ export function isAQuery(
     maybeQuery.sql !== undefined
   );
 }
+
+/**
+ * Builds a fully self-contained structured query tree from a flat list of
+ * queries. All `innerQueryId` references are resolved to `innerQuery`
+ * embeddings.
+ *
+ * Needed for the `summarizeTrace` API which materializes shared queries as
+ * standalone tables. Shared queries with nested `innerQueryId` sub-queries
+ * fail because the referenced queries aren't available as CTEs in the
+ * standalone context.
+ *
+ * @param queries Flat list in dependency order (from `getStructuredQueries`).
+ *   The last entry is the root.
+ * @returns A single self-contained query, or undefined if input is empty.
+ */
+export function buildEmbeddedQueryTree(
+  queries: protos.PerfettoSqlStructuredQuery[],
+): protos.PerfettoSqlStructuredQuery | undefined {
+  if (queries.length === 0) return undefined;
+
+  const queryMap = new Map<string, protos.PerfettoSqlStructuredQuery>();
+  for (const q of queries) {
+    if (q.id) {
+      queryMap.set(q.id, q);
+    }
+  }
+
+  const root = queries[queries.length - 1];
+  const rootObj = protos.PerfettoSqlStructuredQuery.toObject(root);
+  const resolved = inlineQueryRefs(rootObj, queryMap, new Set<string>());
+  return protos.PerfettoSqlStructuredQuery.fromObject(resolved);
+}
+
+// Plain-object type returned by protobufjs toObject().
+// Field access returns `any` which is inherent to the toObject() API.
+type QueryPlainObj = ReturnType<
+  typeof protos.PerfettoSqlStructuredQuery.toObject
+>;
+
+/**
+ * Recursively resolves all `innerQueryId` references in a plain-object
+ * query to `innerQuery` embeddings. Uses plain objects (from toObject) to
+ * avoid protobufjs oneof field management issues — setting innerQuery on a
+ * Message instance may not reliably clear innerQueryId, causing the encoder
+ * to write both fields and the decoder to pick the wrong one.
+ */
+function inlineQueryRefs(
+  obj: QueryPlainObj,
+  queryMap: Map<string, protos.PerfettoSqlStructuredQuery>,
+  visited: Set<string>,
+): QueryPlainObj {
+  // Cycle guard: prevent infinite recursion on malformed graphs.
+  const id = obj.id as string | undefined;
+  if (id !== undefined) {
+    if (visited.has(id)) return obj;
+    visited.add(id);
+  }
+
+  // Resolve innerQueryId → embedded innerQuery.
+  const refId = obj.innerQueryId as string | undefined;
+  if (refId !== undefined) {
+    const referenced = queryMap.get(refId);
+    if (referenced !== undefined) {
+      const refObj = protos.PerfettoSqlStructuredQuery.toObject(referenced);
+      obj.innerQuery = inlineQueryRefs(refObj, queryMap, visited);
+      delete obj.innerQueryId;
+    }
+  } else if (obj.innerQuery !== undefined) {
+    obj.innerQuery = inlineQueryRefs(
+      obj.innerQuery as QueryPlainObj,
+      queryMap,
+      visited,
+    );
+  }
+
+  // Resolve sub-queries in multi-input operations.
+  inlineMultiInputRefs(obj, queryMap, visited);
+
+  // Note: Do NOT remove from visited set. Once a node is processed, it should
+  // remain marked as visited to prevent infinite recursion if referenced again
+  // through a different path in the query graph.
+  return obj;
+}
+
+// Helper to safely check if a field from a toObject() plain object is set.
+function has(val: unknown): val is Record<string, unknown> {
+  return val !== undefined && val !== null;
+}
+
+function inlineMultiInputRefs(
+  obj: QueryPlainObj,
+  queryMap: Map<string, protos.PerfettoSqlStructuredQuery>,
+  visited: Set<string>,
+): void {
+  const ii = obj.intervalIntersect as QueryPlainObj | undefined;
+  if (has(ii)) {
+    if (has(ii.base)) {
+      ii.base = inlineQueryRefs(ii.base as QueryPlainObj, queryMap, visited);
+    }
+    if (Array.isArray(ii.intervalIntersect)) {
+      ii.intervalIntersect = ii.intervalIntersect.map((q: QueryPlainObj) =>
+        inlineQueryRefs(q, queryMap, visited),
+      );
+    }
+  }
+
+  const join = obj.experimentalJoin as QueryPlainObj | undefined;
+  if (has(join)) {
+    if (has(join.leftQuery)) {
+      join.leftQuery = inlineQueryRefs(
+        join.leftQuery as QueryPlainObj,
+        queryMap,
+        visited,
+      );
+    }
+    if (has(join.rightQuery)) {
+      join.rightQuery = inlineQueryRefs(
+        join.rightQuery as QueryPlainObj,
+        queryMap,
+        visited,
+      );
+    }
+  }
+
+  const union = obj.experimentalUnion as QueryPlainObj | undefined;
+  if (has(union) && Array.isArray(union.queries)) {
+    union.queries = union.queries.map((q: QueryPlainObj) =>
+      inlineQueryRefs(q, queryMap, visited),
+    );
+  }
+
+  const addCols = obj.experimentalAddColumns as QueryPlainObj | undefined;
+  if (has(addCols)) {
+    if (has(addCols.coreQuery)) {
+      addCols.coreQuery = inlineQueryRefs(
+        addCols.coreQuery as QueryPlainObj,
+        queryMap,
+        visited,
+      );
+    }
+    if (has(addCols.inputQuery)) {
+      addCols.inputQuery = inlineQueryRefs(
+        addCols.inputQuery as QueryPlainObj,
+        queryMap,
+        visited,
+      );
+    }
+  }
+
+  const fti = obj.experimentalFilterToIntervals as QueryPlainObj | undefined;
+  if (has(fti)) {
+    if (has(fti.base)) {
+      fti.base = inlineQueryRefs(fti.base as QueryPlainObj, queryMap, visited);
+    }
+    if (has(fti.intervals)) {
+      fti.intervals = inlineQueryRefs(
+        fti.intervals as QueryPlainObj,
+        queryMap,
+        visited,
+      );
+    }
+  }
+
+  const cs = obj.experimentalCreateSlices as QueryPlainObj | undefined;
+  if (has(cs)) {
+    if (has(cs.startsQuery)) {
+      cs.startsQuery = inlineQueryRefs(
+        cs.startsQuery as QueryPlainObj,
+        queryMap,
+        visited,
+      );
+    }
+    if (has(cs.endsQuery)) {
+      cs.endsQuery = inlineQueryRefs(
+        cs.endsQuery as QueryPlainObj,
+        queryMap,
+        visited,
+      );
+    }
+  }
+
+  const ci = obj.experimentalCounterIntervals as QueryPlainObj | undefined;
+  if (has(ci) && has(ci.inputQuery)) {
+    ci.inputQuery = inlineQueryRefs(
+      ci.inputQuery as QueryPlainObj,
+      queryMap,
+      visited,
+    );
+  }
+
+  // Resolve SQL dependency sub-queries.
+ const sql = obj.sql as QueryPlainObj | undefined; + if (has(sql) && Array.isArray(sql.dependencies)) { + for (const dep of sql.dependencies as QueryPlainObj[]) { + if (has(dep.query)) { + dep.query = inlineQueryRefs( + dep.query as QueryPlainObj, + queryMap, + visited, + ); + } + } + } +} diff --git a/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/widgets.scss b/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/widgets.scss index 249d797f77..8aa1ddb06e 100644 --- a/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/widgets.scss +++ b/ui/src/plugins/dev.perfetto.ExplorePage/query_builder/widgets.scss @@ -701,3 +701,97 @@ .pf-callout-with-spacing { margin-bottom: 16px; } + +// Metrics header row with ID prefix and Export button +.pf-metrics-header-row { + display: flex; + gap: 8px; + align-items: flex-start; + + // OutlinedField should take most of the space + > .pf-outlined-field { + flex: 1; + } +} + +// Metrics V2 Export button - styled to match OutlinedField height +.pf-metrics-v2-export-button.pf-button { + // Match the OutlinedField dimensions + min-height: 44px; // Same as OutlinedField (legend height + input padding) + padding: 8px 16px; + justify-content: center; +} + +// Metrics export modal +.pf-metrics-export-modal { + min-width: 600px; + max-width: 80vw; + max-height: 70vh; + display: flex; + flex-direction: column; + + .pf-tabs { + flex: 1; + display: flex; + flex-direction: column; + min-height: 0; + } + + .pf-tabs__content { + flex: 1; + overflow: auto; + min-height: 300px; + } +} + +// Metrics export result section +.pf-metrics-export-result { + display: flex; + flex-direction: column; + gap: 12px; + + &__header { + display: flex; + justify-content: flex-start; + padding-bottom: 8px; + border-bottom: 1px solid var(--pf-color-border); + } + + &__loading { + display: flex; + justify-content: center; + align-items: center; + padding: 40px; + } + + &__error { + padding: 16px; + color: var(--pf-color-danger); + background: var(--pf-color-background-secondary); + border-radius: 4px; + } + + &__empty { + padding: 16px; + color: var(--pf-color-text-muted); + text-align: center; + } + + &__tables { + display: flex; + flex-direction: column; + gap: 16px; + } + + &__bundle { + display: flex; + flex-direction: column; + gap: 8px; + } + + &__bundle-title { + font-weight: 600; + font-size: var(--pf-font-size-m); + color: var(--pf-color-text); + } +} diff --git a/ui/src/plugins/dev.perfetto.ExplorePage/query_node.ts b/ui/src/plugins/dev.perfetto.ExplorePage/query_node.ts index c9bab4432a..e72807d520 100644 --- a/ui/src/plugins/dev.perfetto.ExplorePage/query_node.ts +++ b/ui/src/plugins/dev.perfetto.ExplorePage/query_node.ts @@ -56,6 +56,7 @@ export enum NodeType { kSort = 'sort', kFilter = 'filter', kCounterToIntervals = 'counter_to_intervals', + kMetrics = 'metrics', // Multi node operations kIntervalIntersect = 'interval_intersect', @@ -78,6 +79,7 @@ export function singleNodeOperation(type: NodeType): boolean { case NodeType.kSort: case NodeType.kFilter: case NodeType.kCounterToIntervals: + case NodeType.kMetrics: return true; default: return false; diff --git a/ui/src/trace_processor/engine.ts b/ui/src/trace_processor/engine.ts index 5b8db2a9e9..a3c2911bbc 100644 --- a/ui/src/trace_processor/engine.ts +++ b/ui/src/trace_processor/engine.ts @@ -134,6 +134,18 @@ export interface Engine { summarizerId: string, ): Promise; + /** + * Converts a proto to textproto format. 
+   * @param proto The proto to convert (TraceSummarySpec, PerfettoSqlStructuredQuery, or TraceMetricV2Spec)
+   * @returns The textproto representation
+   */
+  getProtoContent(
+    proto:
+      | {traceSummarySpec: protos.TraceSummarySpec}
+      | {structuredQuery: protos.PerfettoSqlStructuredQuery}
+      | {metricSpec: protos.TraceMetricV2Spec},
+  ): Promise<string>;
+
   getProxy(tag: string): EngineProxy;
   readonly numRequestsPending: number;
   readonly failed: string | undefined;
@@ -169,6 +181,7 @@ export abstract class EngineBase implements Engine, Disposable {
   private pendingUpdateSummarizerSpec?: Deferred;
   private pendingQuerySummarizer?: Deferred;
   private pendingDestroySummarizer?: Deferred;
+  private pendingProtoContent?: Deferred<string>;
   private _numRequestsPending = 0;
   private _failed: string | undefined = undefined;
   private _queryLog: Array = [];
@@ -373,6 +386,18 @@ export abstract class EngineBase implements Engine, Disposable {
         // We don't have any pending promises for this request so just
         // return.
         break;
+      case TPM.TPM_PROTO_CONTENT:
+        const protoContentRes = assertExists(
+          rpc.protoContentResult,
+        ) as protos.ProtoContentResult;
+        const pendingProto = assertExists(this.pendingProtoContent);
+        if (exists(protoContentRes.error) && protoContentRes.error.length > 0) {
+          pendingProto.reject(new Error(protoContentRes.error));
+        } else {
+          pendingProto.resolve(protoContentRes.textproto ?? '');
+        }
+        this.pendingProtoContent = undefined;
+        break;
       default:
         console.log(
           'Unexpected TraceProcessor response received: ',
@@ -741,6 +766,33 @@ export abstract class EngineBase implements Engine, Disposable {
     return result;
   }
+  getProtoContent(
+    proto:
+      | {traceSummarySpec: protos.TraceSummarySpec}
+      | {structuredQuery: protos.PerfettoSqlStructuredQuery}
+      | {metricSpec: protos.TraceMetricV2Spec},
+  ): Promise<string> {
+    if (this.pendingProtoContent) {
+      return Promise.reject(new Error('Already getting proto content'));
+    }
+    const result = defer<string>();
+    const rpc = protos.TraceProcessorRpc.create();
+    rpc.request = TPM.TPM_PROTO_CONTENT;
+    const args = (rpc.protoContentArgs = new protos.ProtoContentArgs());
+
+    if ('traceSummarySpec' in proto) {
+      args.traceSummarySpec = proto.traceSummarySpec;
+    } else if ('structuredQuery' in proto) {
+      args.structuredQuery = proto.structuredQuery;
+    } else if ('metricSpec' in proto) {
+      args.metricSpec = proto.metricSpec;
+    }
+
+    this.pendingProtoContent = result;
+    this.rpcSendRequest(rpc);
+    return result;
+  }
+
   // Marshals the TraceProcessorRpc request arguments and sends the request
   // to the concrete Engine (Wasm or HTTP).
   private rpcSendRequest(rpc: protos.TraceProcessorRpc) {
@@ -868,6 +920,15 @@ export class EngineProxy implements Engine, Disposable {
     return this.engine.destroySummarizer(summarizerId);
   }
+  getProtoContent(
+    proto:
+      | {traceSummarySpec: protos.TraceSummarySpec}
+      | {structuredQuery: protos.PerfettoSqlStructuredQuery}
+      | {metricSpec: protos.TraceMetricV2Spec},
+  ): Promise<string> {
+    return this.engine.getProtoContent(proto);
+  }
+
   get engineId(): string {
     return this.engine.id;
   }
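Usage sketch (not part of the patch): a minimal illustration of how UI code might drive the new Engine.getProtoContent API to render a TraceMetricV2Spec as textproto, e.g. for an export dialog. Only the getProtoContent call shape and its resolve/reject behaviour come from the changes above; the wrapper function, its name, and the error message are hypothetical.

// Illustrative only: `engine` is any object implementing the Engine interface
// extended above; `spec` is a protos.TraceMetricV2Spec built elsewhere (for
// example by the Explore page metrics node). Helper name is hypothetical.
async function metricSpecToTextproto(
  engine: Engine,
  spec: protos.TraceMetricV2Spec,
): Promise<string> {
  try {
    // The argument mirrors the ProtoContentArgs oneof: exactly one key
    // selects which proto type trace_processor should pretty-print.
    return await engine.getProtoContent({metricSpec: spec});
  } catch (e) {
    // A ProtoContentResult carrying an error string surfaces here as a
    // rejected promise (see the TPM_PROTO_CONTENT handling above).
    throw new Error(`Textproto conversion failed: ${e}`);
  }
}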