@@ -1,14 +1,11 @@
 import json
 from asyncio import gather
-from collections import defaultdict
 from collections.abc import Mapping
-from dataclasses import asdict, dataclass
 from datetime import datetime
 from enum import Enum
 from typing import TYPE_CHECKING, Any, Iterable, Optional, cast

 import numpy as np
-import pandas as pd
 import strawberry
 from openinference.semconv.trace import SpanAttributes
 from strawberry import ID, UNSET
@@ -19,6 +16,7 @@
 import phoenix.trace.schemas as trace_schema
 from phoenix.db import models
 from phoenix.server.api.context import Context
+from phoenix.server.api.helpers.annotations import compute_span_annotation_summaries
 from phoenix.server.api.helpers.dataset_helpers import (
     get_dataset_example_input,
     get_dataset_example_output,
@@ -558,28 +556,7 @@ async def span_annotation_summaries(
                 annotation for annotation in annotations if satisfies_filter(annotation, filter)
             ]

-        @dataclass
-        class Metrics:
-            record_count: int = 0
-            label_count: int = 0
-            score_sum: float = 0
-            score_count: int = 0
-
-        summaries: defaultdict[str, defaultdict[Optional[str], Metrics]] = defaultdict(
-            lambda: defaultdict(Metrics)
-        )
-        for annotation in annotations:
-            metrics = summaries[annotation.name][annotation.label]
-            metrics.record_count += 1
-            metrics.label_count += int(annotation.label is not None)
-            metrics.score_sum += annotation.score or 0
-            metrics.score_count += int(annotation.score is not None)
-
-        result: list[AnnotationSummary] = []
-        for name, label_metrics in summaries.items():
-            rows = [{"label": label, **asdict(metrics)} for label, metrics in label_metrics.items()]
-            result.append(AnnotationSummary(name=name, df=pd.DataFrame(rows), simple_avg=True))
-        return result
+        return compute_span_annotation_summaries(annotations)

     @strawberry.field(
         description="Evaluations of the documents associated with the span, e.g. "
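
For context, the per-name, per-label aggregation deleted above now sits behind compute_span_annotation_summaries, imported from phoenix.server.api.helpers.annotations. A minimal sketch of what that helper presumably looks like, reconstructed by mirroring the removed lines; the function signature, the models.SpanAnnotation element type, and the AnnotationSummary import path are assumptions, not shown in this diff:

# Hypothetical sketch of compute_span_annotation_summaries in
# phoenix/server/api/helpers/annotations.py. The body mirrors the logic
# removed from span_annotation_summaries above; the signature, the
# models.SpanAnnotation element type, and the AnnotationSummary import
# path are assumptions.
from collections import defaultdict
from dataclasses import asdict, dataclass
from typing import Iterable, Optional

import pandas as pd

from phoenix.db import models
from phoenix.server.api.types.AnnotationSummary import AnnotationSummary


def compute_span_annotation_summaries(
    annotations: Iterable[models.SpanAnnotation],
) -> list[AnnotationSummary]:
    @dataclass
    class Metrics:
        record_count: int = 0
        label_count: int = 0
        score_sum: float = 0
        score_count: int = 0

    # Group metrics by annotation name, then by label within each name.
    summaries: defaultdict[str, defaultdict[Optional[str], Metrics]] = defaultdict(
        lambda: defaultdict(Metrics)
    )
    for annotation in annotations:
        metrics = summaries[annotation.name][annotation.label]
        metrics.record_count += 1
        metrics.label_count += int(annotation.label is not None)
        metrics.score_sum += annotation.score or 0
        metrics.score_count += int(annotation.score is not None)

    # One summary per annotation name, with one DataFrame row per label.
    result: list[AnnotationSummary] = []
    for name, label_metrics in summaries.items():
        rows = [{"label": label, **asdict(metrics)} for label, metrics in label_metrics.items()]
        result.append(AnnotationSummary(name=name, df=pd.DataFrame(rows), simple_avg=True))
    return result

Moving the aggregation into a shared helper keeps the GraphQL resolver thin and drops the resolver module's pandas, dataclasses, and defaultdict dependencies, which is consistent with the import removals in the first hunk.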