Commit b8dc3c7

fix(messagesStore): allow to fetch messages in both directions
Signed-off-by: Maksim Sukharev <[email protected]>
1 parent 62ecadd

4 files changed: +96 -75 lines changed


src/constants.ts (+2)

@@ -29,6 +29,8 @@ export const SESSION = {
 export const CHAT = {
     FETCH_LIMIT: 100,
     MINIMUM_VISIBLE: 5,
+    FETCH_OLD: 0,
+    FETCH_NEW: 1,
 } as const
 
 export const CALL = {
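
The two new constants name the values that the chat API's lookIntoFuture parameter already used: FETCH_OLD (0) requests messages older than lastKnownMessageId, FETCH_NEW (1) requests newer ones. A minimal sketch of how a caller might pick a direction; the scrolledToBottom flag is purely illustrative:

```js
import { CHAT } from './constants.ts'

// Hypothetical: choose the fetch direction from the current scroll position.
const scrolledToBottom = false
const lookIntoFuture = scrolledToBottom ? CHAT.FETCH_NEW : CHAT.FETCH_OLD
```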

src/services/messagesService.ts (+16 -4)

@@ -9,6 +9,7 @@ import SHA256 from 'crypto-js/sha256.js'
 import axios from '@nextcloud/axios'
 import { generateOcsUrl } from '@nextcloud/router'
 
+import { CHAT } from '../constants.ts'
 import type {
     ChatMessage,
     clearHistoryResponse,

@@ -45,15 +46,22 @@ type EditMessagePayload = { token: string, messageId: number, updatedMessage: ed
  * @param data.token the conversation token;
  * @param data.lastKnownMessageId last known message id;
  * @param data.includeLastKnown whether to include the last known message in the response;
+ * @param [data.lookIntoFuture=0] direction of message fetch
  * @param [data.limit=100] Number of messages to load
  * @param options options;
  */
-const fetchMessages = async function({ token, lastKnownMessageId, includeLastKnown, limit = 100 }: ReceiveMessagesPayload, options?: object): receiveMessagesResponse {
+const fetchMessages = async function({
+    token,
+    lastKnownMessageId,
+    includeLastKnown,
+    lookIntoFuture = CHAT.FETCH_OLD,
+    limit = 100,
+}: ReceiveMessagesPayload, options?: object): receiveMessagesResponse {
     return axios.get(generateOcsUrl('apps/spreed/api/v1/chat/{token}', { token }, options), {
         ...options,
         params: {
             setReadMarker: 0,
-            lookIntoFuture: 0,
+            lookIntoFuture,
             lastKnownMessageId,
             limit,
             includeLastKnown: includeLastKnown ? 1 : 0,

@@ -71,12 +79,16 @@ const fetchMessages = async function({ token, lastKnownMessageId, includeLastKno
  * @param [data.limit=100] Number of messages to load
  * @param options options
  */
-const pollNewMessages = async ({ token, lastKnownMessageId, limit = 100 }: ReceiveMessagesPayload, options?: object): receiveMessagesResponse => {
+const pollNewMessages = async ({
+    token,
+    lastKnownMessageId,
+    limit = 100,
+}: ReceiveMessagesPayload, options?: object): receiveMessagesResponse => {
     return axios.get(generateOcsUrl('apps/spreed/api/v1/chat/{token}', { token }, options), {
         ...options,
         params: {
             setReadMarker: 0,
-            lookIntoFuture: 1,
+            lookIntoFuture: CHAT.FETCH_NEW,
             lastKnownMessageId,
             limit,
             includeLastKnown: 0,
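
With lookIntoFuture exposed as a parameter, fetchMessages can page in either direction, while pollNewMessages keeps its fixed forward behaviour through CHAT.FETCH_NEW. A rough sketch of calling the service to load the chunk that follows a known message, assuming fetchMessages is exported from the service and the call runs inside an async function; the token value is made up:

```js
import { CHAT } from '../constants.ts'
import { fetchMessages } from './messagesService.ts'

// Fetch up to FETCH_LIMIT messages newer than message id 100 (sketch only; token is made up).
const response = await fetchMessages({
    token: 'abc123',
    lastKnownMessageId: 100,
    includeLastKnown: false,
    lookIntoFuture: CHAT.FETCH_NEW,
    limit: CHAT.FETCH_LIMIT,
})
// 'x-chat-last-given' carries the id of the newest message contained in the response.
console.log(response.headers['x-chat-last-given'])
```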

src/store/messagesStore.js (+22 -6)

@@ -856,8 +856,16 @@ const actions = {
      * @param {string} data.lastKnownMessageId last known message id;
      * @param {number} data.minimumVisible Minimum number of chat messages we want to load
      * @param {boolean} data.includeLastKnown whether to include the last known message in the response;
+     * @param {number} [data.lookIntoFuture=0] direction of message fetch
      */
-    async fetchMessages(context, { token, lastKnownMessageId, includeLastKnown, requestOptions, minimumVisible }) {
+    async fetchMessages(context, {
+        token,
+        lastKnownMessageId,
+        includeLastKnown,
+        requestOptions,
+        minimumVisible,
+        lookIntoFuture = CHAT.FETCH_OLD,
+    }) {
         minimumVisible = (typeof minimumVisible === 'undefined') ? CHAT.MINIMUM_VISIBLE : minimumVisible
 
         context.dispatch('cancelFetchMessages')

@@ -871,6 +879,7 @@ const actions = {
             token,
             lastKnownMessageId,
             includeLastKnown,
+            lookIntoFuture,
             limit: CHAT.FETCH_LIMIT,
         }, requestOptions)
 

@@ -904,10 +913,12 @@ const actions = {
         })
 
         if (response.headers['x-chat-last-given']) {
-            context.dispatch('setFirstKnownMessageId', {
-                token,
-                id: parseInt(response.headers['x-chat-last-given'], 10),
-            })
+            const id = parseInt(response.headers['x-chat-last-given'], 10)
+            if (lookIntoFuture === CHAT.FETCH_NEW) {
+                context.dispatch('setLastKnownMessageId', { token, id })
+            } else {
+                context.dispatch('setFirstKnownMessageId', { token, id })
+            }
         }
 
         // For guests we also need to set the last known message id

@@ -925,12 +936,16 @@ const actions = {
 
         if (minimumVisible > 0) {
             debugTimer.tick(`${token} | fetch history`, 'first chunk')
+            const lastKnownMessageId = lookIntoFuture === CHAT.FETCH_NEW
+                ? context.getters.getLastKnownMessageId(token)
+                : context.getters.getFirstKnownMessageId(token)
             // There are not yet enough visible messages loaded, so fetch another chunk.
             // This can happen when a lot of reactions or poll votings happen
             return await context.dispatch('fetchMessages', {
                 token,
-                lastKnownMessageId: context.getters.getFirstKnownMessageId(token),
+                lastKnownMessageId,
                 includeLastKnown,
+                lookIntoFuture,
                 minimumVisible,
             })
         }

@@ -1022,6 +1037,7 @@ const actions = {
             token,
             lastKnownMessageId: context.getters.getFirstKnownMessageId(token),
             includeLastKnown: false,
+            lookIntoFuture: CHAT.FETCH_OLD,
             minimumVisible: minimumVisible * 2,
         })
     }
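
On the store side, the same fetchMessages action now serves both directions: with CHAT.FETCH_OLD the x-chat-last-given header still advances the first known message id, with CHAT.FETCH_NEW it advances the last known one, and the retry for "not enough visible messages" keeps paging in the requested direction. A sketch of how a caller could dispatch the forward fetch, for example after jumping back to an older message; the store and token variables are assumed to exist in the surrounding context:

```js
// Sketch: load the next chunk of messages below the already loaded history.
await store.dispatch('fetchMessages', {
    token,
    lastKnownMessageId: store.getters.getLastKnownMessageId(token),
    includeLastKnown: false,
    lookIntoFuture: CHAT.FETCH_NEW,
    minimumVisible: CHAT.MINIMUM_VISIBLE,
})
```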

src/store/messagesStore.spec.js (+56 -65)

@@ -833,6 +833,30 @@ describe('messagesStore', () => {
     let addGuestNameAction
     let cancelFunctionMock
 
+    const oldMessagesList = [{
+        id: 98,
+        token: TOKEN,
+        actorType: ATTENDEE.ACTOR_TYPE.USERS,
+    }, {
+        id: 99,
+        token: TOKEN,
+        actorType: ATTENDEE.ACTOR_TYPE.GUESTS,
+    }]
+    const originalMessagesList = [{
+        id: 100,
+        token: TOKEN,
+        actorType: ATTENDEE.ACTOR_TYPE.USERS,
+    }]
+    const newMessagesList = [{
+        id: 101,
+        token: TOKEN,
+        actorType: ATTENDEE.ACTOR_TYPE.USERS,
+    }, {
+        id: 102,
+        token: TOKEN,
+        actorType: ATTENDEE.ACTOR_TYPE.GUESTS,
+    }]
+
     beforeEach(() => {
         testStoreConfig = cloneDeep(messagesStore)
         const guestNameStore = useGuestNameStore()

@@ -851,79 +875,44 @@ describe('messagesStore', () => {
         })
 
         store = new Vuex.Store(testStoreConfig)
-    })
-
-    test('fetches messages from server including last known', async () => {
-        const messages = [{
-            id: 1,
-            token: TOKEN,
-            actorType: ATTENDEE.ACTOR_TYPE.USERS,
-        }, {
-            id: 2,
-            token: TOKEN,
-            actorType: ATTENDEE.ACTOR_TYPE.GUESTS,
-        }]
-        const response = generateOCSResponse({
-            headers: {
-                'x-chat-last-common-read': '123',
-                'x-chat-last-given': '100',
-            },
-            payload: messages,
-        })
-        fetchMessages.mockResolvedValueOnce(response)
-
-        await store.dispatch('fetchMessages', {
-            token: TOKEN,
-            lastKnownMessageId: 100,
-            includeLastKnown: true,
-            requestOptions: {
-                dummyOption: true,
-            },
-            minimumVisible: 0,
-        })
-
-        expect(fetchMessages).toHaveBeenCalledWith({
-            token: TOKEN,
-            lastKnownMessageId: 100,
-            includeLastKnown: true,
-            limit: CHAT.FETCH_LIMIT,
-        }, {
-            dummyOption: true,
-        })
-
-        expect(updateLastCommonReadMessageAction)
-            .toHaveBeenCalledWith(expect.anything(), { token: TOKEN, lastCommonReadMessage: 123 })
-
-        expect(addGuestNameAction).toHaveBeenCalledWith(messages[1], { noUpdate: true })
 
-        expect(store.getters.messagesList(TOKEN)).toStrictEqual(messages)
-        expect(store.getters.getFirstKnownMessageId(TOKEN)).toBe(100)
-        expect(store.getters.getLastKnownMessageId(TOKEN)).toBe(2)
+        for (const index in originalMessagesList) {
+            store.commit('addMessage', { token: TOKEN, message: originalMessagesList[index] })
+            if (index === '0') {
+                store.dispatch('setFirstKnownMessageId', { token: TOKEN, id: originalMessagesList[index].id })
+            }
+            if (index === `${originalMessagesList.length - 1}`) {
+                store.dispatch('setLastKnownMessageId', { token: TOKEN, id: originalMessagesList[index].id })
+            }
+        }
     })
 
-    test('fetches messages from server excluding last known', async () => {
-        const messages = [{
-            id: 1,
-            token: TOKEN,
-            actorType: ATTENDEE.ACTOR_TYPE.USERS,
-        }, {
-            id: 2,
-            token: TOKEN,
-            actorType: ATTENDEE.ACTOR_TYPE.GUESTS,
-        }]
+    const testCasesOld = [
+        [true, CHAT.FETCH_OLD, [...oldMessagesList, originalMessagesList.at(0)].reverse(), 98, 100],
+        [false, CHAT.FETCH_OLD, [...oldMessagesList].reverse(), 98, 100],
+        [true, CHAT.FETCH_NEW, [originalMessagesList.at(-1), ...newMessagesList], 100, 102],
+        [false, CHAT.FETCH_NEW, newMessagesList, 100, 102],
+    ]
+    test.each(testCasesOld)('fetches messages from server: including last known - %s, look into future - %s', async (includeLastKnown, lookIntoFuture, payload, firstKnown, lastKnown) => {
+
         const response = generateOCSResponse({
             headers: {
                 'x-chat-last-common-read': '123',
-                'x-chat-last-given': '100',
+                'x-chat-last-given': payload.at(-1).id.toString(),
             },
-            payload: messages,
+            payload,
         })
         fetchMessages.mockResolvedValueOnce(response)
+        const expectedMessages = lookIntoFuture
+            ? [originalMessagesList[0], ...newMessagesList]
+            : [...oldMessagesList, originalMessagesList[0]]
+        const expectedMessageFromGuest = expectedMessages.find(message => message.actorType === ATTENDEE.ACTOR_TYPE.GUESTS)
 
         await store.dispatch('fetchMessages', {
             token: TOKEN,
             lastKnownMessageId: 100,
-            includeLastKnown: false,
+            includeLastKnown,
+            lookIntoFuture,
             requestOptions: {
                 dummyOption: true,
             },

@@ -933,7 +922,8 @@ describe('messagesStore', () => {
         expect(fetchMessages).toHaveBeenCalledWith({
             token: TOKEN,
             lastKnownMessageId: 100,
-            includeLastKnown: false,
+            includeLastKnown,
+            lookIntoFuture,
             limit: CHAT.FETCH_LIMIT,
         }, {
             dummyOption: true,

@@ -942,11 +932,11 @@ describe('messagesStore', () => {
         expect(updateLastCommonReadMessageAction)
             .toHaveBeenCalledWith(expect.anything(), { token: TOKEN, lastCommonReadMessage: 123 })
 
-        expect(addGuestNameAction).toHaveBeenCalledWith(messages[1], { noUpdate: true })
+        expect(addGuestNameAction).toHaveBeenCalledWith(expectedMessageFromGuest, { noUpdate: true })
 
-        expect(store.getters.messagesList(TOKEN)).toStrictEqual(messages)
-        expect(store.getters.getFirstKnownMessageId(TOKEN)).toBe(100)
-        expect(store.getters.getLastKnownMessageId(TOKEN)).toBe(null)
+        expect(store.getters.messagesList(TOKEN)).toStrictEqual(expectedMessages)
+        expect(store.getters.getFirstKnownMessageId(TOKEN)).toBe(firstKnown)
+        expect(store.getters.getLastKnownMessageId(TOKEN)).toBe(lastKnown)
     })
 
     test('cancels fetching messages', () => {

@@ -1111,6 +1101,7 @@ describe('messagesStore', () => {
             token: TOKEN,
             lastKnownMessageId: 3,
             includeLastKnown: false,
+            lookIntoFuture: CHAT.FETCH_OLD,
             limit: CHAT.FETCH_LIMIT,
         }, undefined)
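
Each row of testCasesOld drives the same parametrized test. Spelling out the third row for orientation (illustration only, values taken from the fixtures above):

```js
// Row 3 of testCasesOld: includeLastKnown = true, direction = FETCH_NEW.
const [includeLastKnown, lookIntoFuture, payload, firstKnown, lastKnown] = [
    true,
    CHAT.FETCH_NEW,
    [originalMessagesList.at(-1), ...newMessagesList], // payload ids 100, 101, 102
    100, // first known id stays on the original message
    102, // last known id advances to the newest fetched message
]
```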
