Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/build-and-push-async-upload.yml
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ jobs:
provenance: mode=max

- name: Install Cosign
uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v3
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v3

- name: Sign image with cosign
run: |
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/build-and-push-controller-image.yml
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ jobs:
cache-to: type=gha,mode=max
provenance: mode=max
- name: Install Cosign
uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v3
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v3
- name: Sign image with cosign
run: |
cosign sign --yes "${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_REPO }}@${{ steps.build-push.outputs.digest }}"
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/build-and-push-csi-image.yml
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ jobs:
cache-to: type=gha,mode=max
provenance: mode=max
- name: Install Cosign
uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v3
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v3
- name: Sign image with cosign
run: |
cosign sign --yes "${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_REPO }}@${{ steps.build-push.outputs.digest }}"
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/build-and-push-image.yml
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ jobs:
cache-to: type=gha,mode=max
provenance: mode=max
- name: Install Cosign
uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v3
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v3
- name: Sign image with cosign
run: |
cosign sign --yes "${{ env.IMG_REGISTRY }}/${{ env.IMG_ORG }}/${{ env.IMG_REPO }}@${{ steps.build-push.outputs.digest }}"
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/build-and-push-ui-images-standalone.yml
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ jobs:
provenance: mode=max

- name: Install Cosign
uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v3
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v3

- name: Sign image with cosign
run: |
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/build-and-push-ui-images.yml
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ jobs:
provenance: mode=max

- name: Install Cosign
uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v3
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v3

- name: Sign image with cosign
run: |
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/scorecard.yml
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,6 @@ jobs:

# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/upload-sarif@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
with:
sarif_file: results.sarif
2 changes: 1 addition & 1 deletion .github/workflows/trivy-image-scanning.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ jobs:
timeout: 30m0s

- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@05b1a5d28f8763fd11e77388fe57846f1ba8e766 # v4
uses: github/codeql-action/upload-sarif@a899987af240c0578ed84ce13c02319a693e168f # v4
if: always()
with:
sarif_file: 'trivy-results-${{ env.SANITIZED_IMAGE_NAME }}.sarif'
8 changes: 6 additions & 2 deletions catalog/internal/catalog/mcpcatalog/db_mcp.go
Original file line number Diff line number Diff line change
Expand Up @@ -290,10 +290,14 @@ func (d *dbMCPCatalogImpl) GetMCPServerTool(ctx context.Context, serverID string
return nil, fmt.Errorf("error loading tools for server %s: %w", serverID, err)
}

// Find tool by name
// Find tool by name. DB stores tools with a qualified prefix (serverName@version:toolName)
// for uniqueness, but the API exposes only the unqualified tool name.
for _, tool := range tools {
attrs := tool.GetAttributes()
if attrs != nil && attrs.Name != nil && *attrs.Name == toolName {
if attrs == nil || attrs.Name == nil {
continue
}
if converter.UnqualifyToolName(*attrs.Name) == toolName {
apiTool := converter.ConvertDbMCPToolToOpenapi(tool)
return apiTool, nil
}
Expand Down
46 changes: 42 additions & 4 deletions catalog/internal/catalog/mcpcatalog/db_mcp_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,12 @@ func listWithServer(id int32, name string) *internalmodels.ListWrapper[models.MC
return &internalmodels.ListWrapper[models.MCPServer]{Items: []models.MCPServer{server}}
}

func listWithServers(servers ...struct{ id int32; name string }) *internalmodels.ListWrapper[models.MCPServer] {
// serverStub is a minimal (id, name) pair used to fabricate MCPServer list
// entries in tests; it exists so call sites avoid repeating an anonymous
// struct literal for every server.
type serverStub struct {
	id   int32 // numeric server ID used to build the entity's ID field
	name string // display name stored in the entity's attributes
}

func listWithServers(servers ...serverStub) *internalmodels.ListWrapper[models.MCPServer] {
items := make([]models.MCPServer, 0, len(servers))
for _, s := range servers {
items = append(items, &models.MCPServerImpl{
Expand Down Expand Up @@ -413,9 +418,9 @@ func TestListMCPServers_ToolCountZero(t *testing.T) {
func TestListMCPServers_MultipleServersWithDifferentToolCounts(t *testing.T) {
repo := &mockMCPServerRepo{
listResult: listWithServers(
struct{ id int32; name string }{1, "server-a"},
struct{ id int32; name string }{2, "server-b"},
struct{ id int32; name string }{3, "server-c"},
serverStub{1, "server-a"},
serverStub{2, "server-b"},
serverStub{3, "server-c"},
),
}
toolRepo := &mockMCPServerToolRepo{
Expand Down Expand Up @@ -487,6 +492,39 @@ func TestListMCPServers_ZeroToolLimitDoesNotSetPageSize(t *testing.T) {
assert.Nil(t, toolRepo.capturedListOptions.PageSize, "PageSize should be nil when ToolLimit is 0")
}

// TestGetMCPServerTool_StripsQualifiedPrefix verifies that a tool stored
// under its qualified DB name ("server@version:toolName") is retrievable
// through the API using only the bare, unqualified tool name.
func TestGetMCPServerTool_StripsQualifiedPrefix(t *testing.T) {
	srv := &models.MCPServerImpl{
		ID:         serverID(88),
		Attributes: &models.MCPServerAttributes{Name: serverName("dynatrace-mcp")},
	}
	toolRepo := &mockMCPServerToolRepo{
		listResult: []models.MCPServerTool{makeTool("dynatrace-mcp@1.6.1:list_problems")},
	}
	cat := newTestCatalogWithToolRepo(&mockMCPServerRepo{getResult: srv}, toolRepo, nil)

	tool, err := cat.GetMCPServerTool(context.Background(), "88", "list_problems")
	require.NoError(t, err)
	require.NotNil(t, tool)
	assert.Equal(t, "list_problems", tool.Name)
}

// TestGetMCPServerTool_NotFound verifies that looking up a tool name absent
// from the server's stored tools yields api.ErrNotFound rather than a match
// against the qualified DB names.
func TestGetMCPServerTool_NotFound(t *testing.T) {
	srv := &models.MCPServerImpl{
		ID:         serverID(88),
		Attributes: &models.MCPServerAttributes{Name: serverName("dynatrace-mcp")},
	}
	toolRepo := &mockMCPServerToolRepo{
		listResult: []models.MCPServerTool{makeTool("dynatrace-mcp@1.6.1:list_problems")},
	}
	cat := newTestCatalogWithToolRepo(&mockMCPServerRepo{getResult: srv}, toolRepo, nil)

	_, err := cat.GetMCPServerTool(context.Background(), "88", "nonexistent_tool")
	require.Error(t, err)
	assert.ErrorIs(t, err, api.ErrNotFound)
}

func TestGetMCPServer_IncludeToolsUsesAccurateCount(t *testing.T) {
serverEntity := &models.MCPServerImpl{
ID: serverID(1),
Expand Down
19 changes: 12 additions & 7 deletions catalog/internal/catalog/modelcatalog/performance_metrics.go
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ type PerformanceMetricsLoader struct {
metricsArtifactRepo dbmodels.CatalogMetricsArtifactRepository
modelTypeID int32
metricsArtifactTypeID int32
// Cache of model ID -> directory path mapping to avoid repeated directory scans
// Cache of lowercase model ID -> directory path for case-insensitive lookups
modelDirCache map[string]string
}

Expand Down Expand Up @@ -179,7 +179,7 @@ func NewPerformanceMetricsLoader(path []string, modelRepo dbmodels.CatalogModelR
return loader, nil
}

// buildModelDirCache scans directories once and builds a cache of model ID -> directory path
// buildModelDirCache scans directories once and builds a cache of lowercase model ID -> directory path
func (pml *PerformanceMetricsLoader) buildModelDirCache() error {
modelCount := 0
for _, rootPath := range pml.path {
Expand Down Expand Up @@ -213,9 +213,14 @@ func (pml *PerformanceMetricsLoader) buildModelDirCache() error {
return nil // Continue with other directories
}

// Add to cache
pml.modelDirCache[metadata.ID] = dirPath
modelCount++
// Add to cache (lowercase key for case-insensitive matching)
cacheKey := strings.ToLower(metadata.ID)
if existing, ok := pml.modelDirCache[cacheKey]; ok {
glog.Warningf("Case-insensitive cache collision: %q (dir: %s) overwrites existing entry (dir: %s)", metadata.ID, dirPath, existing)
} else {
modelCount++
}
pml.modelDirCache[cacheKey] = dirPath
glog.V(3).Infof("Cached model directory: %s -> %s", metadata.ID, dirPath)

return nil
Expand Down Expand Up @@ -243,8 +248,8 @@ func (pml *PerformanceMetricsLoader) Load(ctx context.Context, record ModelProvi

// Namespaced name is source_id:model_name
namespacedName := *attrs.Name
// Resolve directory from cache: cache is keyed by metadata.ID (display name)
displayName := DisplayNameFromStoredName(namespacedName)
// Resolve directory from cache: cache is keyed by lowercase metadata.ID (display name)
displayName := strings.ToLower(DisplayNameFromStoredName(namespacedName))
dirPath, found := pml.modelDirCache[displayName]
if !found {
glog.V(2).Infof("No performance metrics directory found for model %s", namespacedName)
Expand Down
126 changes: 126 additions & 0 deletions catalog/internal/catalog/modelcatalog/performance_metrics_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@ package modelcatalog

import (
"encoding/json"
"os"
"path/filepath"
"strings"
"testing"
)
Expand Down Expand Up @@ -1180,6 +1182,130 @@ func TestMetadataJSONEdgeCases(t *testing.T) {
}
}

// TestBuildModelDirCache_CaseInsensitive verifies that directory-cache keys
// are lowercased at build time so that lookups succeed regardless of the
// casing used in each metadata.json "id" field.
func TestBuildModelDirCache_CaseInsensitive(t *testing.T) {
	root := t.TempDir()

	cases := []struct {
		name      string
		metaID    string // ID written to metadata.json
		lookupKey string // key used to look up in cache (simulates DisplayNameFromStoredName output)
		wantFound bool
	}{
		{"exact case match", "sarvam-30b-FP8-Dynamic", "sarvam-30b-fp8-dynamic", true},
		{"all uppercase metadata ID", "MODEL-ABC-FP16", "model-abc-fp16", true},
		{"already lowercase", "already-lowercase", "already-lowercase", true},
		{"mixed case with org prefix", "RedHatAI/Granite-3B-Code", "redhatai/granite-3b-code", true},
		{"no match returns not found", "some-model", "different-model", false},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			// Each case gets its own directory holding a single metadata.json.
			dir := filepath.Join(root, tc.name)
			if err := os.MkdirAll(dir, 0o755); err != nil {
				t.Fatalf("failed to create model dir: %v", err)
			}
			payload := []byte(`{"id": "` + tc.metaID + `"}`)
			if err := os.WriteFile(filepath.Join(dir, "metadata.json"), payload, 0o644); err != nil {
				t.Fatalf("failed to write metadata.json: %v", err)
			}

			// Build the cache over just this one directory.
			loader := &PerformanceMetricsLoader{
				path:          []string{dir},
				modelDirCache: map[string]string{},
			}
			if err := loader.buildModelDirCache(); err != nil {
				t.Fatalf("buildModelDirCache() error = %v", err)
			}

			// Lookup mirrors Load(): the key is lowercased before indexing.
			_, ok := loader.modelDirCache[strings.ToLower(tc.lookupKey)]
			if ok != tc.wantFound {
				t.Errorf("cache lookup for %q: got found=%v, want found=%v (cache keys: %v)",
					tc.lookupKey, ok, tc.wantFound, cacheKeys(loader.modelDirCache))
			}
		})
	}
}

// TestBuildModelDirCache_CollisionWarning verifies last-write-wins behavior
// when two metadata IDs differ only by case: both normalize to the same
// lowercase key, leaving a single cache entry (a warning is logged, which
// this test does not capture).
func TestBuildModelDirCache_CollisionWarning(t *testing.T) {
	root := t.TempDir()

	first := filepath.Join(root, "dir1")
	second := filepath.Join(root, "dir2")
	for _, dir := range []string{first, second} {
		if err := os.MkdirAll(dir, 0o755); err != nil {
			t.Fatalf("failed to create dir: %v", err)
		}
	}
	if err := os.WriteFile(filepath.Join(first, "metadata.json"), []byte(`{"id": "Model-FP8"}`), 0o644); err != nil {
		t.Fatalf("failed to write metadata.json: %v", err)
	}
	if err := os.WriteFile(filepath.Join(second, "metadata.json"), []byte(`{"id": "model-fp8"}`), 0o644); err != nil {
		t.Fatalf("failed to write metadata.json: %v", err)
	}

	loader := &PerformanceMetricsLoader{
		path:          []string{root},
		modelDirCache: map[string]string{},
	}
	if err := loader.buildModelDirCache(); err != nil {
		t.Fatalf("buildModelDirCache() error = %v", err)
	}

	// Both IDs normalize to "model-fp8", so exactly one entry survives.
	if len(loader.modelDirCache) != 1 {
		t.Errorf("expected 1 cache entry after collision, got %d: %v", len(loader.modelDirCache), cacheKeys(loader.modelDirCache))
	}
	winner, ok := loader.modelDirCache["model-fp8"]
	if !ok {
		t.Fatalf("expected cache key %q, got keys: %v", "model-fp8", cacheKeys(loader.modelDirCache))
	}
	// filepath.Walk visits entries lexicographically, so dir2 is walked last
	// and its path overwrites dir1's.
	if winner != second {
		t.Errorf("expected collision winner to be %s (last walked), got %s", second, winner)
	}
}

// cacheKeys returns all keys from a map for diagnostic output.
// Order is unspecified (map iteration order); callers use it only in
// failure messages.
func cacheKeys(m map[string]string) []string {
	out := make([]string, len(m))
	i := 0
	for key := range m {
		out[i] = key
		i++
	}
	return out
}

func generateLongString(length int) string {
var result strings.Builder
char := "a"
Expand Down
17 changes: 11 additions & 6 deletions catalog/internal/converter/mcp_converter.go
Original file line number Diff line number Diff line change
Expand Up @@ -280,6 +280,16 @@ func ConvertOpenapiMCPToolToDb(openapiTool *openapi.MCPTool) models.MCPServerToo
return dbTool
}

// UnqualifyToolName strips the "serverName@version:" prefix from a qualified
// tool name, returning just the tool name portion. If the name contains no
// colon, it is returned unchanged. The split is on the LAST colon, so any
// colons inside the prefix are ignored.
func UnqualifyToolName(name string) string {
	sep := strings.LastIndexByte(name, ':')
	if sep < 0 {
		return name
	}
	return name[sep+1:]
}

// ConvertDbMCPToolToOpenapi converts a database MCPServerTool to OpenAPI MCPTool.
func ConvertDbMCPToolToOpenapi(dbTool models.MCPServerTool) *openapi.MCPTool {
attr := dbTool.GetAttributes()
Expand All @@ -298,12 +308,7 @@ func ConvertDbMCPToolToOpenapi(dbTool models.MCPServerTool) *openapi.MCPTool {
accessType = "read_only" // default fallback to prevent API contract violation
}

// Strip internal qualified prefix (serverName@version:) from tool name before returning to API.
// The DB stores tools as "server@version:toolName" for uniqueness, but the API should return just "toolName".
toolName := *attr.Name
if idx := strings.LastIndex(toolName, ":"); idx != -1 {
toolName = toolName[idx+1:]
}
toolName := UnqualifyToolName(*attr.Name)

// Create OpenAPI tool with required fields
openapiTool := openapi.NewMCPTool(toolName, accessType)
Expand Down
7 changes: 5 additions & 2 deletions clients/python/src/model_registry/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -393,8 +393,11 @@ def save_to_oci_registry( # noqa: C901 ( complex args >8 )
if auth_file is not None:
params["authfile"] = auth_file.name
backend_def.pull(base_image, local_image_path, **params)
# Extract the absolute path from the files found in the path
files = [file[0] for file in _get_files_from_path(model_files_path)] # type: ignore[arg-type]
# Pass top-level entries (dir/*) instead of individual leaf files (dir/**/*).
# olot's tarball_from_file preserves subdirectory structure when given a
# directory, but flattens everything to basename when given individual files.
model_path = Path(model_files_path)
files = [model_path] if model_path.is_file() else sorted(model_path.iterdir())
oci_layers_on_top(local_image_path, files, modelcard)
backend_def.push(local_image_path, oci_ref, **params)

Expand Down
Loading
Loading