Skip to content

Commit edac301

Browse files
authored
Follow up fixes for GEN AI Playground UI (opendatahub-io#5184)
* Deal better with unavailable models
* Update the View Code modal to show MCP server, vector store, and file information
1 parent 708a1e3 commit edac301

File tree

9 files changed

+131
-26
lines changed

9 files changed

+131
-26
lines changed

packages/gen-ai/frontend/src/__mocks__/mockVectorStores.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ export const mockVectorStores: VectorStore[] = [
2121
metadata: {
2222
description: 'Test vector store for unit tests',
2323
category: 'test',
24+
provider_id: 'test-provider-id',
2425
},
2526
},
2627
{
@@ -40,6 +41,7 @@ export const mockVectorStores: VectorStore[] = [
4041
},
4142
metadata: {
4243
description: 'Another test vector store',
44+
provider_id: 'test-provider-id',
4345
},
4446
},
4547
];

packages/gen-ai/frontend/src/app/AIAssets/components/AIModelTableRow.tsx

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -74,6 +74,7 @@ const AIModelTableRow: React.FC<AIModelTableRowProps> = ({
7474
},
7575
})
7676
}
77+
isDisabled={model.status !== 'Running'}
7778
>
7879
Try in playground
7980
</Button>

packages/gen-ai/frontend/src/app/Chatbot/ChatbotPlayground.tsx

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ import {
1313
import { useLocation } from 'react-router-dom';
1414
import { useUserContext } from '~/app/context/UserContext';
1515
import { ChatbotContext } from '~/app/context/ChatbotContext';
16+
import { getLlamaModelStatus } from '~/app/utilities';
1617
import { DEFAULT_SYSTEM_INSTRUCTIONS, FILE_UPLOAD_CONFIG, ERROR_MESSAGES } from './const';
1718
import { ChatbotSourceSettingsModal } from './sourceUpload/ChatbotSourceSettingsModal';
1819
import useSourceManagement from './hooks/useSourceManagement';
@@ -65,7 +66,12 @@ const ChatbotPlayground: React.FC<ChatbotPlaygroundProps> = ({
6566
// so that when refreshing the page, the selected model is not passed again
6667
window.history.replaceState({}, '');
6768
} else {
68-
setSelectedModel(models[0].id);
69+
const availableModels = models.filter(
70+
(model) => getLlamaModelStatus(model.id, aiModels) === 'Running',
71+
);
72+
if (availableModels.length > 0) {
73+
setSelectedModel(availableModels[0].id);
74+
}
6975
}
7076
}
7177
}, [modelsLoaded, models, selectedModel, setSelectedModel, aiModels, selectedAAModel]);
@@ -161,6 +167,7 @@ const ChatbotPlayground: React.FC<ChatbotPlaygroundProps> = ({
161167
input={lastInput}
162168
model={selectedModel}
163169
systemInstruction={systemInstruction}
170+
files={fileManagement.files}
164171
/>
165172
<Drawer isExpanded isInline position="right">
166173
<Divider />

packages/gen-ai/frontend/src/app/Chatbot/components/ModelDetailsDropdown.tsx

Lines changed: 30 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,11 +3,14 @@ import {
33
Dropdown,
44
DropdownItem,
55
DropdownList,
6+
Icon,
67
MenuToggle,
78
MenuToggleElement,
9+
Tooltip,
810
} from '@patternfly/react-core';
11+
import { ExclamationCircleIcon } from '@patternfly/react-icons';
912
import { ChatbotContext } from '~/app/context/ChatbotContext';
10-
import { getLlamaModelDisplayName } from '~/app/utilities';
13+
import { getLlamaModelDisplayName, getLlamaModelStatus } from '~/app/utilities';
1114

1215
interface ModelDetailsDropdownProps {
1316
selectedModel: string;
@@ -26,6 +29,7 @@ const ModelDetailsDropdown: React.FunctionComponent<ModelDetailsDropdownProps> =
2629
setIsOpen(false);
2730
onModelChange(value);
2831
};
32+
2933
return (
3034
<Dropdown
3135
isOpen={isOpen}
@@ -53,11 +57,31 @@ const ModelDetailsDropdown: React.FunctionComponent<ModelDetailsDropdownProps> =
5357
shouldFocusToggleOnSelect
5458
>
5559
<DropdownList style={{ maxHeight: '300px', overflowY: 'auto' }}>
56-
{models.map((option) => (
57-
<DropdownItem value={option.id} key={option.id}>
58-
{getLlamaModelDisplayName(option.id, aiModels)}
59-
</DropdownItem>
60-
))}
60+
{models.map((option) => {
61+
const isDisabled = getLlamaModelStatus(option.id, aiModels) !== 'Running';
62+
return (
63+
<DropdownItem
64+
value={option.id}
65+
key={option.id}
66+
actions={
67+
isDisabled ? (
68+
<Tooltip content="This model is unavailable. Check the model's deployment status and resolve any issues. Update the playground's configuration to refresh the list.">
69+
<Icon
70+
status="danger"
71+
iconSize="md"
72+
style={{ marginRight: 'var(--pf-t--global--spacer--md)' }}
73+
>
74+
<ExclamationCircleIcon />
75+
</Icon>
76+
</Tooltip>
77+
) : null
78+
}
79+
isAriaDisabled={isDisabled}
80+
>
81+
{getLlamaModelDisplayName(option.id, aiModels)}
82+
</DropdownItem>
83+
);
84+
})}
6185
</DropdownList>
6286
</Dropdown>
6387
);

packages/gen-ai/frontend/src/app/Chatbot/components/ViewCodeModal.tsx

Lines changed: 45 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,15 +11,21 @@ import {
1111
} from '@patternfly/react-core';
1212
import { CodeEditor, Language } from '@patternfly/react-code-editor';
1313
import { exportCode } from '~/app/services/llamaStackService';
14-
import { CodeExportRequest } from '~/app/types';
14+
import { CodeExportRequest, FileModel } from '~/app/types';
1515
import { GenAiContext } from '~/app/context/GenAiContext';
16+
import { useMCPServers } from '~/app/hooks/useMCPServers';
17+
import { useMCPTokenContext } from '~/app/context/MCPTokenContext';
18+
import { generateMCPServerConfig } from '~/app/utilities';
19+
import { useMCPSelectionContext } from '~/app/context/MCPSelectionContext';
20+
import useFetchVectorStores from '~/app/hooks/useFetchVectorStores';
1621

1722
interface ViewCodeModalProps {
1823
isOpen: boolean;
1924
onToggle: () => void;
2025
input: string;
2126
model: string;
2227
systemInstruction?: string;
28+
files: FileModel[];
2329
}
2430

2531
const ViewCodeModal: React.FunctionComponent<ViewCodeModalProps> = ({
@@ -28,11 +34,21 @@ const ViewCodeModal: React.FunctionComponent<ViewCodeModalProps> = ({
2834
input,
2935
model,
3036
systemInstruction,
37+
files,
3138
}) => {
3239
const [code, setCode] = React.useState<string>('');
3340
const [isLoading, setIsLoading] = React.useState<boolean>(false);
3441
const [error, setError] = React.useState<string>('');
3542
const { namespace } = React.useContext(GenAiContext);
43+
const { servers: mcpServers } = useMCPServers(namespace?.name || '');
44+
const { serverTokens } = useMCPTokenContext();
45+
const { playgroundSelectedServerIds } = useMCPSelectionContext();
46+
const [vectorStores, vectorStoresLoaded] = useFetchVectorStores(namespace?.name);
47+
48+
const mcpServersToUse = React.useMemo(
49+
() => mcpServers.filter((server) => playgroundSelectedServerIds.includes(server.url)),
50+
[mcpServers, playgroundSelectedServerIds],
51+
);
3652

3753
const handleExportCode = React.useCallback(async () => {
3854
setIsLoading(true);
@@ -45,13 +61,30 @@ const ViewCodeModal: React.FunctionComponent<ViewCodeModalProps> = ({
4561
return;
4662
}
4763

64+
if (!vectorStoresLoaded || vectorStores.length === 0) {
65+
setError('Vector stores not loaded');
66+
setIsLoading(false);
67+
return;
68+
}
69+
4870
try {
71+
/* eslint-disable camelcase */
4972
const request: CodeExportRequest = {
5073
input,
5174
model,
5275
instructions: systemInstruction,
5376
stream: false,
77+
mcp_servers: mcpServersToUse.map((server) => generateMCPServerConfig(server, serverTokens)),
78+
vector_store: {
79+
name: vectorStores[0].name,
80+
// TODO: Get embedding model and dimension from vector store, it's optional
81+
// embedding_model: 'all-minilm:l6-v2',
82+
// embedding_dimension: 768,
83+
provider_id: vectorStores[0].metadata.provider_id,
84+
},
85+
files: files.map((file) => ({ file: file.filename, purpose: file.purpose })),
5486
};
87+
/* eslint-enable camelcase */
5588

5689
const response = await exportCode(request, namespace.name);
5790
setCode(response.data.code);
@@ -60,7 +93,17 @@ const ViewCodeModal: React.FunctionComponent<ViewCodeModalProps> = ({
6093
} finally {
6194
setIsLoading(false);
6295
}
63-
}, [input, model, namespace?.name, systemInstruction]);
96+
}, [
97+
files,
98+
input,
99+
model,
100+
namespace?.name,
101+
systemInstruction,
102+
mcpServersToUse,
103+
serverTokens,
104+
vectorStores,
105+
vectorStoresLoaded,
106+
]);
64107

65108
React.useEffect(() => {
66109
if (isOpen) {

packages/gen-ai/frontend/src/app/Chatbot/components/chatbotConfiguration/ChatbotConfigurationModal.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -183,7 +183,7 @@ const ChatbotConfigurationModal: React.FC<ChatbotConfigurationModalProps> = ({
183183
{!configuringPlayground && (
184184
<ModalFooter>
185185
<DashboardModalFooter
186-
submitLabel={existingModels ? 'Update configuration' : 'Configure'}
186+
submitLabel={lsdStatus ? 'Update configuration' : 'Configure'}
187187
onSubmit={onSubmit}
188188
onCancel={() => onBeforeClose()}
189189
error={error}

packages/gen-ai/frontend/src/app/types.ts

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@ export type VectorStore = {
4141
/** Key-value metadata (max 16 pairs, keys ≤64 chars, values ≤512 chars) */
4242
metadata: {
4343
description?: string;
44+
provider_id: string;
4445
[key: string]: string | undefined;
4546
};
4647
/** Human-readable name (max 256 characters) */
@@ -226,6 +227,14 @@ export type CodeExportRequest = {
226227
stream?: boolean;
227228
temperature?: number;
228229
tools?: CodeExportTool[];
230+
mcp_servers?: MCPServerConfig[];
231+
vector_store?: {
232+
name: string;
233+
embedding_model?: string;
234+
embedding_dimension?: number;
235+
provider_id: string;
236+
};
237+
files?: { file: string; purpose: string }[];
229238
};
230239

231240
export type CodeExportData = {

packages/gen-ai/frontend/src/app/utilities/mcp.ts

Lines changed: 4 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import {
88
MCPToolCallData,
99
} from '~/app/types';
1010
import { ServerStatusInfo } from '~/app/hooks/useMCPServers';
11+
import { generateMCPServerConfig } from './utils';
1112

1213
/**
1314
* Transform MCP server data from API to table format
@@ -189,21 +190,9 @@ export const getSelectedServersForAPI = (
189190
const isConnected = statusInfo?.status === 'connected' || tokenInfo?.authenticated === true;
190191

191192
if (isConnected && isValidated) {
192-
const serverTokenInfo = serverTokens.get(server.url);
193-
const headers: Record<string, string> = {};
194-
195-
if (serverTokenInfo?.token) {
196-
const raw = serverTokenInfo.token.trim();
197-
headers.Authorization = raw.toLowerCase().startsWith('bearer ') ? raw : `Bearer ${raw}`;
198-
}
199-
200-
validServers.push({
201-
// eslint-disable-next-line camelcase
202-
server_label: server.name,
203-
// eslint-disable-next-line camelcase
204-
server_url: server.url,
205-
headers,
206-
});
193+
const serverConfig = generateMCPServerConfig(server, serverTokens);
194+
195+
validServers.push(serverConfig);
207196
} else {
208197
excludedCount++;
209198
}

packages/gen-ai/frontend/src/app/utilities/utils.ts

Lines changed: 31 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import { K8sResourceCommon } from 'mod-arch-shared';
2-
import { AIModel } from '~/app/types';
2+
import { AIModel, TokenInfo, MCPServerFromAPI, MCPServerConfig } from '~/app/types';
33

44
export const getId = (): `${string}-${string}-${string}-${string}-${string}` => crypto.randomUUID();
55

@@ -30,3 +30,33 @@ export const getLlamaModelDisplayName = (modelId: string, aiModels: AIModel[]):
3030
}
3131
return `${providerId}/${enabledModel.display_name}`;
3232
};
33+
34+
export const getLlamaModelStatus = (
35+
modelId: string,
36+
aiModels: AIModel[],
37+
): AIModel['status'] | undefined => {
38+
const { id } = splitLlamaModelId(modelId);
39+
const enabledModel = aiModels.find((aiModel) => aiModel.model_id === id);
40+
return enabledModel?.status;
41+
};
42+
43+
export const generateMCPServerConfig = (
44+
server: MCPServerFromAPI,
45+
serverTokens: Map<string, TokenInfo>,
46+
): MCPServerConfig => {
47+
const serverTokenInfo = serverTokens.get(server.url);
48+
const headers: Record<string, string> = {};
49+
50+
if (serverTokenInfo?.token) {
51+
const raw = serverTokenInfo.token.trim();
52+
headers.Authorization = raw.toLowerCase().startsWith('bearer ') ? raw : `Bearer ${raw}`;
53+
}
54+
55+
/* eslint-disable camelcase */
56+
return {
57+
server_label: server.name,
58+
server_url: server.url,
59+
headers,
60+
};
61+
/* eslint-enable camelcase */
62+
};

0 commit comments

Comments (0)