Skip to content

Commit fa56926

Browse files
hteeyeoh and Ciprian Goea authored
chatqna-core: Fix UI issues (open-edge-platform#147)
Signed-off-by: Yeoh, Hoong Tee <[email protected]> Co-authored-by: Ciprian Goea <[email protected]>
1 parent 3713036 commit fa56926

File tree

2 files changed

+33
-46
lines changed

2 files changed

+33
-46
lines changed

sample-applications/chat-question-and-answer-core/ui/src/redux/conversation/conversationSlice.ts

Lines changed: 24 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,10 @@ import {
2020
uuidv4,
2121
} from '../../utils/util.ts';
2222
import store, { RootState } from '../store.ts';
23-
import { NotificationSeverity } from '../../components/Notification/notify.ts';
24-
import { notify } from '../../components/Notification/notify.ts';
23+
import {
24+
NotificationSeverity,
25+
notify,
26+
} from '../../components/Notification/notify.ts';
2527

2628
const initialState: ConversationReducer = {
2729
conversations: [],
@@ -140,6 +142,18 @@ export const conversationSlice = createSlice({
140142
},
141143
});
142144

145+
const handleConnectionError = async (message?: string) => {
146+
const healthStatus = await checkHealth();
147+
148+
if (healthStatus.status === 503) {
149+
notify(
150+
message ||
151+
'The backend service is starting up. Please try again in a few moments.',
152+
NotificationSeverity.ERROR,
153+
);
154+
}
155+
};
156+
143157
export const fetchInitialFiles = createAsyncThunk(
144158
'conversation/fetchInitialFiles',
145159
async (_, { rejectWithValue }) => {
@@ -174,6 +188,8 @@ export const uploadFile = createAsyncThunk(
174188
const body = new FormData();
175189
body.append('files', file);
176190

191+
await handleConnectionError();
192+
177193
const response = await client.post(DATA_PREP_URL, body);
178194

179195
if (response.status === 200) {
@@ -211,6 +227,8 @@ export const removeFile = createAsyncThunk(
211227
throw new Error('File not found');
212228
}
213229

230+
await handleConnectionError();
231+
214232
const response = await client.delete(
215233
`${DATA_PREP_URL}?document=${encodeURIComponent(fileName)}&delete_all=${deleteAll}`,
216234
);
@@ -246,6 +264,8 @@ export const removeAllFiles = createAsyncThunk(
246264
throw new Error('No files to delete');
247265
}
248266

267+
await handleConnectionError();
268+
249269
const response = await client.delete(
250270
`${DATA_PREP_URL}?delete_all=${true}`,
251271
);
@@ -297,6 +317,8 @@ export const doConversation = (conversationRequest: ConversationRequest) => {
297317
store.dispatch(addMessageToMessages(userPrompt));
298318
}
299319

320+
handleConnectionError().catch(console.error);
321+
300322
let result: string = '';
301323
try {
302324
fetchEventSource(CHAT_QNA_URL, {
@@ -340,21 +362,6 @@ export const doConversation = (conversationRequest: ConversationRequest) => {
340362
store.dispatch(setIsGenerating(false));
341363
store.dispatch(setOnGoingResult(''));
342364

343-
(async () => {
344-
const healthStatus = await checkHealth();
345-
if (healthStatus.status !== 200) {
346-
notify(
347-
healthStatus.message ||
348-
'LLM model server is not ready to accept connections. Please try after a few minutes.',
349-
NotificationSeverity.ERROR,
350-
);
351-
} else {
352-
notify(
353-
err.message || 'Could not connect to backend at this moment',
354-
NotificationSeverity.ERROR,
355-
);
356-
}
357-
})();
358365
throw err;
359366
},
360367
onclose() {

sample-applications/chat-question-and-answer-core/ui/src/utils/util.ts

Lines changed: 9 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
// Copyright (C) 2025 Intel Corporation
22
// SPDX-License-Identifier: Apache-2.0
33

4-
import { HEALTH_CHECK_URL, MODEL_URL } from '../config';
4+
import { HEALTH_CHECK_URL, MODEL_URL } from '../config.ts';
55
import client from './client.ts';
66

77
export const getCurrentTimeStamp = () => {
@@ -76,39 +76,19 @@ export const capitalize = (input: string): string => {
7676
export const checkHealth = async () => {
7777
try {
7878
const response = await client.get(HEALTH_CHECK_URL);
79-
if (response.status === 200) {
80-
return { status: response.status };
81-
} else {
82-
return {
83-
status: response.status,
84-
message:
85-
'LLM model server is not ready to accept connections. Please try after a few minutes.',
86-
};
87-
}
79+
return { status: response.status };
8880
} catch (error) {
89-
return {
90-
status: 503,
91-
message:
92-
'LLM model server is not ready to accept connections. Please try after a few minutes.',
93-
};
81+
return { status: 503 };
9482
}
9583
};
9684

9785
export const fetchModelName = async () => {
9886
try {
99-
const response = await client.get(MODEL_URL);
100-
if (response.status === 200) {
101-
return { status: 200, llmModel: response.data.llm_model };
102-
} else {
103-
return {
104-
status: response.status,
105-
message: 'LLM Model is not set',
106-
};
107-
}
108-
} catch (error) {
109-
return {
110-
status: 503,
111-
message: 'LLM Model is not set',
112-
};
87+
const { status, data } = await client.get(MODEL_URL);
88+
return status === 200
89+
? { status, llmModel: data.llm_model }
90+
: { status, message: 'LLM Model is not set' };
91+
} catch {
92+
return { status: 503, message: 'LLM Model is not set' };
11393
}
11494
};

0 commit comments

Comments (0)