Commit 36a20e7

update sliding window logic to 3AM daily message reset
1 parent 5fc762c commit 36a20e7

5 files changed: +160 -31 lines


src/bot.ts

Lines changed: 13 additions & 0 deletions
@@ -498,6 +498,19 @@ const logErrorHandler = (ex: any): void => {
   logger.error(ex)
 }

+// bot.command('testcleanup', async (ctx) => {
+//   await openAiBot.testCleanup(ctx as OnMessageContext)
+// })
+
+bot.command('new', async (ctx) => {
+  writeCommandLog(ctx as OnMessageContext).catch(logErrorHandler)
+  await openAiBot.onStop(ctx as OnMessageContext)
+  return await ctx.reply('Chat history reseted', {
+    parse_mode: 'Markdown',
+    message_thread_id: ctx.message?.message_thread_id
+  })
+})
+
 bot.command('more', async (ctx) => {
   writeCommandLog(ctx as OnMessageContext).catch(logErrorHandler)
   return await ctx.reply(commandsHelpText.more, {
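
The /new handler delegates the actual reset to openAiBot.onStop, which is not part of this diff. For orientation only, a minimal sketch of what a reset handler along these lines might do, assuming it clears the per-session conversation and queue and re-seeds the 3 AM cleanup window; the body below is an assumption, not the repository's implementation:

// Hypothetical sketch of a history-reset handler (assumed behaviour, see note above).
async onStop (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
  const session = this.getSession(ctx)
  session.chatConversation = [] // drop the accumulated history
  session.requestQueue = [] // discard any queued prompts
  session.cleanupState = conversationManager.initializeCleanupTimes() // restart the daily window
}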

src/helpers.ts

Lines changed: 5 additions & 2 deletions
@@ -1,4 +1,5 @@
 import config from './config'
+import { conversationManager } from './modules/llms/utils/conversationManager'
 import { LlmModelsEnum } from './modules/llms/utils/llmModelsManager'
 import { type DalleImageSize } from './modules/llms/utils/types'
 import { type BotSessionData } from './modules/types'
@@ -26,7 +27,8 @@ export function createInitialSessionData (): BotSessionData {
       price: 0,
       usage: 0,
       isProcessingQueue: false,
-      requestQueue: []
+      requestQueue: [],
+      cleanupState: conversationManager.initializeCleanupTimes()
     },
     chatGpt: {
       model: config.llms.model,
@@ -36,7 +38,8 @@ export function createInitialSessionData (): BotSessionData {
       price: 0,
       usage: 0,
       isProcessingQueue: false,
-      requestQueue: []
+      requestQueue: [],
+      cleanupState: conversationManager.initializeCleanupTimes()
     },
     dalle: {
       numImages: config.openAi.dalle.sessionDefault.numImages,
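
With this change every LLM session is created with a cleanupState seeded by conversationManager.initializeCleanupTimes(). As a rough illustration (example values, not fixtures from the repo), a session created at 10:00 local time on 2024-05-20 would start with:

// Illustrative values only: epoch milliseconds of the surrounding 3 AM boundaries.
const cleanupState = {
  lastCleanupTime: Date.parse('2024-05-20T03:00:00'), // most recent 3 AM, already passed
  nextCleanupTime: Date.parse('2024-05-21T03:00:00')  // the following day's 3 AM
}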

src/modules/llms/llmsBase.ts

Lines changed: 44 additions & 2 deletions
@@ -233,8 +233,7 @@ export abstract class LlmsBase implements PayableBot {

   async onChatRequestHandler (ctx: OnMessageContext | OnCallBackQueryData, stream: boolean, usesTools: boolean): Promise<void> {
     const session = this.getSession(ctx)
-    session.chatConversation = conversationManager.manageConversationWindow(session.chatConversation)
-
+    session.chatConversation = conversationManager.manageConversationWindow(session.chatConversation, ctx, this.sessionDataKey)
     while (session.requestQueue.length > 0) {
       try {
         const msg = session.requestQueue.shift()
@@ -478,6 +477,49 @@ export abstract class LlmsBase implements PayableBot {
     session.price = 0
   }

+  async testCleanup (ctx: OnMessageContext | OnCallBackQueryData): Promise<void> {
+    const session = this.getSession(ctx)
+    // Force cleanup times for testing
+    const now = new Date()
+    const forcedCleanupTime = new Date(now)
+    forcedCleanupTime.setHours(2, 59, 0, 0) // Set to 2:59 AM
+    session.cleanupState = {
+      nextCleanupTime: forcedCleanupTime.getTime() + (60 * 1000), // 3 AM
+      lastCleanupTime: forcedCleanupTime.getTime() - (24 * 60 * 60 * 1000) // Yesterday 2:59 AM
+    }
+    console.log('Testing cleanup with forced times:', {
+      nextCleanup: new Date(session.cleanupState.nextCleanupTime).toLocaleString(),
+      lastCleanup: new Date(session.cleanupState.lastCleanupTime).toLocaleString(),
+      currentTime: now.toLocaleString()
+    })
+    // Add some test messages with various timestamps
+    if (session.chatConversation.length === 0) {
+      const yesterday = new Date(now)
+      yesterday.setDate(yesterday.getDate() - 1)
+      session.chatConversation = [
+        {
+          role: 'user',
+          content: 'Message from 2 days ago',
+          model: 'test',
+          timestamp: yesterday.getTime() - (24 * 60 * 60 * 1000)
+        },
+        {
+          role: 'assistant',
+          content: 'Message from yesterday',
+          model: 'test',
+          timestamp: yesterday.getTime()
+        },
+        {
+          role: 'user',
+          content: 'Message from today',
+          model: 'test',
+          timestamp: now.getTime()
+        }
+      ]
+    }
+    await this.onChatRequestHandler(ctx, false, false)
+  }
+
   async onError (
     ctx: OnMessageContext | OnCallBackQueryData,
     e: any,
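
testCleanup forces the window so that a single run exercises the reset path. Assuming the command is invoked after 3 AM local time, needsCleanup() sees now >= nextCleanupTime (today 3:00), rolls the window forward by one day, and manageConversationWindow() then keeps only messages stamped at or after the old boundary:

// Expected filtering outcome for the three seeded test messages (assumption based on the code above).
const kept = session.chatConversation.filter(msg => msg.timestamp >= session.cleanupState.lastCleanupTime)
// kept === [{ role: 'user', content: 'Message from today', ... }]
// The '2 days ago' and 'yesterday' messages fall before today's 3 AM boundary and are dropped.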
src/modules/llms/utils/conversationManager.ts

Lines changed: 92 additions & 27 deletions
@@ -1,41 +1,106 @@
-import { type VisionContent, type ChatConversation } from '../../types'
-
-const MINUTE_IN_MS = 60000 // 1 minute in milliseconds
-const INACTIVE_THRESHOLD = 5 * MINUTE_IN_MS // 5 minutes
-const IDLE_THRESHOLD = MINUTE_IN_MS // 1 minute
-const IDLE_MESSAGE_LIMIT = 5
-
-// const HOUR_IN_MS = 3600000 // 1 hour in milliseconds
-// const INACTIVE_THRESHOLD = 12 * HOUR_IN_MS // 12 hours
-// const IDLE_THRESHOLD = HOUR_IN_MS // 1 hour
-// const IDLE_MESSAGE_LIMIT = 5
-
-// Utility functions
-export const conversationManager = {
-  manageConversationWindow (conversation: ChatConversation[]): ChatConversation[] {
-    console.log('fco::::::: here', conversation.length)
-    if (conversation.length === 0) return conversation
+import {
+  type VisionContent,
+  type ChatConversation,
+  type OnMessageContext,
+  type OnCallBackQueryData,
+  type ConversationManagerState,
+  type BotSessionData,
+  type LlmsSessionData,
+  type ImageGenSessionData
+} from '../../types'
+
+// Constants for time calculations
+const MS_PER_DAY = 24 * 60 * 60 * 1000
+const CLEANUP_HOUR = 3 // 3 AM cleanup time
+
+const getSession = (ctx: OnMessageContext | OnCallBackQueryData, sessionDataKey: string):
+LlmsSessionData & ImageGenSessionData => {
+  return ctx.session[sessionDataKey as keyof BotSessionData] as LlmsSessionData & ImageGenSessionData
+}
+
+const conversationManager = {
+  /**
+   * Initialize or update cleanup timestamps
+   */
+  initializeCleanupTimes (): ConversationManagerState {
+    const now = new Date()
+    const today3AM = new Date(now)
+    today3AM.setHours(CLEANUP_HOUR, 0, 0, 0)
+
+    if (now.getTime() >= today3AM.getTime()) {
+      // If current time is past 3 AM, set next cleanup to tomorrow 3 AM
+      return {
+        nextCleanupTime: today3AM.getTime() + MS_PER_DAY,
+        lastCleanupTime: today3AM.getTime()
+      }
+    } else {
+      // If current time is before 3 AM, set next cleanup to today 3 AM
+      return {
+        nextCleanupTime: today3AM.getTime(),
+        lastCleanupTime: today3AM.getTime() - MS_PER_DAY
+      }
+    }
+  },
+
+  /**
+   * Check if cleanup is needed based on context
+   */
+  needsCleanup (ctx: OnMessageContext | OnCallBackQueryData, sessionDataKey: string): boolean {
     const now = Date.now()
-    const lastMessageTime = conversation[conversation.length - 1].timestamp
-    const timeDifference = now - lastMessageTime
-    // Case 1: Inactive conversation (>12 hours) - Reset
-    if (timeDifference > INACTIVE_THRESHOLD) {
-      return []
+    const session = getSession(ctx, sessionDataKey)
+
+    // Initialize times if not set
+    if (!session.cleanupState || session.cleanupState.nextCleanupTime === 0) {
+      session.cleanupState = this.initializeCleanupTimes()
     }

-    // Case 2: Idle conversation (>1 hour) - Keep last 5 messages
-    if (timeDifference > IDLE_THRESHOLD) {
-      return conversation.slice(-IDLE_MESSAGE_LIMIT)
+    // Check if we've passed the next cleanup time
+    if (now >= session.cleanupState.nextCleanupTime) {
+      // Update cleanup times in session
+      session.cleanupState = {
+        lastCleanupTime: session.cleanupState.nextCleanupTime,
+        nextCleanupTime: session.cleanupState.nextCleanupTime + MS_PER_DAY
+      }
+      return true
+    }
+
+    return false
+  },
+
+  /**
+   * Manage conversation window with context-aware cleanup
+   */
+  manageConversationWindow (conversation: ChatConversation[], ctx: OnMessageContext | OnCallBackQueryData, sessionDataKey: string): ChatConversation[] {
+    if (conversation.length === 0) return conversation
+
+    // Only perform cleanup if needed
+    if (this.needsCleanup(ctx, sessionDataKey)) {
+      const session = getSession(ctx, sessionDataKey)
+      return conversation.filter(msg => msg.timestamp >= session.cleanupState.lastCleanupTime)
     }

-    // Case 3: Active conversation (<1 hour) - Keep full history
     return conversation
   },

-  addMessageWithTimestamp (message: Omit<ChatConversation, 'timestamp'> | Partial<Omit<ChatConversation, 'content' | 'timestamp'>> & { content: string | VisionContent[] }): ChatConversation {
+  /**
+   * Add a new message to the conversation with current timestamp
+   */
+  addMessageWithTimestamp (
+    message: Omit<ChatConversation, 'timestamp'> |
+    Partial<Omit<ChatConversation, 'content' | 'timestamp'>> &
+    { content: string | VisionContent[] },
+    ctx: OnMessageContext | OnCallBackQueryData
+  ): ChatConversation {
+    // Initialize times if not set
+    if (!ctx.session.llms.cleanupState || ctx.session.llms.cleanupState.nextCleanupTime === 0) {
+      ctx.session.llms.cleanupState = this.initializeCleanupTimes()
+    }
+
     return {
       ...message,
       timestamp: Date.now()
     }
   }
 }
+
+export { conversationManager }
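
A quick way to sanity-check the boundary math above is to restate it as a pure function of a reference time. This standalone sketch is not part of the commit; it mirrors initializeCleanupTimes so the two branches can be unit-tested without a bot session:

// Standalone re-statement of the 3 AM window computation (assumed helper, not in the repo).
const MS_PER_DAY = 24 * 60 * 60 * 1000

function cleanupWindowAt (reference: Date): { lastCleanupTime: number, nextCleanupTime: number } {
  const threeAM = new Date(reference)
  threeAM.setHours(3, 0, 0, 0) // 3 AM on the reference day, local time
  return reference.getTime() >= threeAM.getTime()
    ? { lastCleanupTime: threeAM.getTime(), nextCleanupTime: threeAM.getTime() + MS_PER_DAY }
    : { lastCleanupTime: threeAM.getTime() - MS_PER_DAY, nextCleanupTime: threeAM.getTime() }
}

// At 02:30 the window is [yesterday 03:00, today 03:00); at 10:00 it is [today 03:00, tomorrow 03:00).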

src/modules/types.ts

Lines changed: 6 additions & 0 deletions
@@ -85,6 +85,11 @@ export interface promptRequest {
   commandPrefix?: string
 }

+export interface ConversationManagerState {
+  lastCleanupTime: number
+  nextCleanupTime: number
+}
+
 export interface LlmsSessionData {
   model: string
   isEnabled: boolean
@@ -94,6 +99,7 @@ export interface LlmsSessionData {
   price: number
   requestQueue: ChatConversation[]
   isProcessingQueue: boolean
+  cleanupState: ConversationManagerState
 }

 export interface OneCountryData {
