Commit 22e574a

Merge pull request #248 from harmony-one/dev
Dev to Master Merge (9/05)
2 parents 0a61b5e + 22d7c0b commit 22e574a

19 files changed: 1002 additions & 689 deletions

src/bot.ts

Lines changed: 18 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import {TranslateBot} from "./modules/translate/TranslateBot";
1+
import { TranslateBot } from "./modules/translate/TranslateBot";
22

33
require("events").EventEmitter.defaultMaxListeners = 30;
44
import express from "express";
@@ -98,8 +98,8 @@ function createInitialSessionData(): BotSessionData {
9898
},
9999
translate: {
100100
languages: [],
101-
enable: false
102-
}
101+
enable: false,
102+
},
103103
};
104104
}
105105

@@ -270,21 +270,23 @@ const onMessage = async (ctx: OnMessageContext) => {
270270
const price = translateBot.getEstimatedPrice(ctx);
271271
const isPaid = await payments.pay(ctx, price);
272272

273-
if(isPaid) {
274-
const response = await translateBot.onEvent(ctx, (reason?: string) => {
275-
payments.refundPayment(reason, ctx, price);
276-
}).catch((e) => {
277-
payments.refundPayment(e.message || "Unknown error", ctx, price);
278-
return {next: false};
279-
});
273+
if (isPaid) {
274+
const response = await translateBot
275+
.onEvent(ctx, (reason?: string) => {
276+
payments.refundPayment(reason, ctx, price);
277+
})
278+
.catch((e) => {
279+
payments.refundPayment(e.message || "Unknown error", ctx, price);
280+
return { next: false };
281+
});
280282

281283
if (!response.next) {
282284
return;
283285
}
284286
}
285287
}
286288

287-
if (openAiBot.isSupportedEvent(ctx)) {
289+
if (await openAiBot.isSupportedEvent(ctx)) {
288290
if (ctx.session.openAi.imageGen.isEnabled) {
289291
const price = openAiBot.getEstimatedPrice(ctx);
290292
const isPaid = await payments.pay(ctx, price!);
@@ -438,15 +440,15 @@ bot.command("love", (ctx) => {
438440
});
439441
});
440442

441-
bot.command('stop', (ctx) => {
443+
bot.command("stop", (ctx) => {
442444
logger.info("/stop command");
443445
ctx.session.openAi.chatGpt.chatConversation = [];
444446
ctx.session.openAi.chatGpt.usage = 0;
445-
ctx.session.openAi.chatGpt.price = 0;
447+
ctx.session.openAi.chatGpt.price = 0;
446448
ctx.session.translate.enable = false;
447-
ctx.session.translate.languages = []
448-
ctx.session.oneCountry.lastDomain = ""
449-
})
449+
ctx.session.translate.languages = [];
450+
ctx.session.oneCountry.lastDomain = "";
451+
});
450452
// bot.command("memo", (ctx) => {
451453
// ctx.reply(MEMO.text, {
452454
// parse_mode: "Markdown",
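Note on the `await openAiBot.isSupportedEvent(ctx)` change: adding `await` only matters if the method returns a Promise, which this hunk suggests it now does. A minimal sketch of why the un-awaited form would be a bug (the `isSupportedEvent` declaration below is a hypothetical stand-in, not the repo's actual class):

// Sketch only: a stand-in for an async support check such as openAiBot.isSupportedEvent.
declare function isSupportedEvent(ctx: unknown): Promise<boolean>;

async function route(ctx: unknown): Promise<void> {
  if (isSupportedEvent(ctx)) {
    // Always entered: the condition is a pending Promise object, which is truthy,
    // regardless of whether it later resolves to true or false.
  }
  if (await isSupportedEvent(ctx)) {
    // Entered only when the resolved boolean is true, as in the diff above.
  }
}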

src/config.ts

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -26,10 +26,6 @@ export default {
2626
? parseInt(process.env.SESSION_TIMEOUT)
2727
: 48, // in hours
2828
openAi: {
29-
maxTokens:
30-
(process.env.OPENAI_MAX_TOKENS &&
31-
parseInt(process.env.OPENAI_MAX_TOKENS)) ||
32-
800, // telegram messages has a char limit
3329
dalle: {
3430
isEnabled: Boolean(parseInt(process.env.IMAGE_GEN_ENABLED || "1")),
3531
telegramFileUrl: "https://api.telegram.org/file/bot",
@@ -48,6 +44,11 @@ export default {
4844
},
4945
},
5046
chatGpt: {
47+
maxTokens:
48+
(process.env.OPENAI_MAX_TOKENS &&
49+
parseInt(process.env.OPENAI_MAX_TOKENS)) ||
50+
800, // telegram messages has a char limit
51+
wordLimit: 50,
5152
wordCountBetween: process.env.WORD_COUNT_BETWEEN
5253
? parseInt(process.env.WORD_COUNT_BETWEEN)
5354
: 100,
@@ -64,13 +65,13 @@ export default {
6465
prefixes: {
6566
chatPrefix: process.env.ASK_PREFIX
6667
? process.env.ASK_PREFIX.split(",")
67-
: ["a.","?",">","."],
68+
: ["a.", "?", ">", "."],
6869
dallePrefix: process.env.DALLE_PREFIX
6970
? process.env.DALLE_PREFIX.split(",")
7071
: ["d."],
7172
newPrefix: process.env.NEW_PREFIX
7273
? process.env.NEW_PREFIX.split(",")
73-
: ["n."],
74+
: ["n.", ".."],
7475
},
7576
minimumBalance: process.env.MIN_BALANCE
7677
? parseInt(process.env.MIN_BALANCE)
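With this change the token cap moves under the chatGpt section, so callers read `config.openAi.chatGpt.maxTokens` instead of `config.openai.maxTokens`; the openAi.ts hunks below make exactly that switch. A small illustrative sketch, assuming the default export shape shown above:

import config from "./config";

// OPENAI_MAX_TOKENS unset, empty, or "0" -> falls back to 800 (the && / || chain short-circuits)
// OPENAI_MAX_TOKENS="1024"               -> 1024
const maxTokens = config.openAi.chatGpt.maxTokens; // previously config.openAi.maxTokens
console.log(`chat completions are capped at ${maxTokens} tokens when limitTokens is set`);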

src/database/stats.service.ts

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,12 @@ export interface BotPaymentLog {
1818
amountCredits: number
1919
}
2020

21+
export interface EngagementByCommand {
22+
command: string,
23+
commandCount: string,
24+
oneAmount: string,
25+
}
26+
2127
export class StatsService {
2228
public writeLog(log: BotPaymentLog) {
2329
let paymentLog = new BotLog()
@@ -87,6 +93,25 @@ export class StatsService {
8793
return rows.length ? +rows[0].count : 0
8894
}
8995

96+
public async getUserEngagementByCommand(daysPeriod = 7): Promise<EngagementByCommand[]> {
97+
const currentTime = moment();
98+
const dateStart = moment()
99+
.tz('America/Los_Angeles')
100+
.set({ hour: 0, minute: 0, second: 0 })
101+
.subtract(daysPeriod,'days')
102+
.unix()
103+
104+
const dateEnd = currentTime.unix();
105+
106+
const rows = await logRepository.createQueryBuilder('logs')
107+
.select('logs.command, count(logs.command) as "commandCount", SUM(logs.amountOne) as "oneAmount"')
108+
.where(`logs.createdAt BETWEEN TO_TIMESTAMP(${dateStart}) and TO_TIMESTAMP(${dateEnd})`)
109+
.groupBy('logs.command')
110+
.orderBy('"commandCount"', 'DESC').execute();
111+
112+
return rows;
113+
}
114+
90115
public addCommandStat({tgUserId, rawMessage, command}: {tgUserId: number, rawMessage: string, command: string}) {
91116
const stat = new StatBotCommand();
92117
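The new `getUserEngagementByCommand` aggregates the bot payment log by command over a rolling window (default 7 days, anchored at midnight America/Los_Angeles). A hypothetical usage sketch follows; the calling code is not part of this commit, and the import path and direct instantiation are assumptions (the repo may expose a shared StatsService instance instead):

import { StatsService, EngagementByCommand } from "./database/stats.service";

const statsService = new StatsService();

async function reportWeeklyEngagement(): Promise<void> {
  // Defaults to the last 7 days; pass e.g. 30 for a monthly view.
  const rows: EngagementByCommand[] = await statsService.getUserEngagementByCommand();
  for (const row of rows) {
    // commandCount and oneAmount come back as strings from the raw query result.
    console.log(`${row.command}: ${row.commandCount} calls, ${row.oneAmount} ONE`);
  }
}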

src/modules/open-ai/api/openAi.ts

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ import {
1717
DalleGPTModel,
1818
DalleGPTModels,
1919
} from "../types";
20+
import { getMessageExtras } from "../helpers";
2021

2122
const openai = new OpenAI({
2223
apiKey: config.openAiKey,
@@ -98,7 +99,7 @@ export async function chatCompletion(
9899
try {
99100
const payload = {
100101
model: model,
101-
max_tokens: limitTokens ? config.openAi.maxTokens : undefined,
102+
max_tokens: limitTokens ? config.openAi.chatGpt.maxTokens : undefined,
102103
temperature: config.openAi.dalle.completions.temperature,
103104
messages: conversation,
104105
};
@@ -134,12 +135,15 @@ export const streamChatCompletion = async (
134135
const wordCountMinimum = config.openAi.chatGpt.wordCountBetween;
135136
return new Promise<string>(async (resolve, reject) => {
136137
try {
138+
// const extras = getMessageExtras({
139+
// topicId: ctx.message?.message_thread_id
140+
// })
137141
const stream = await openai.chat.completions.create({
138142
model: model,
139143
messages:
140144
conversation as OpenAI.Chat.Completions.CreateChatCompletionRequestMessage[],
141145
stream: true,
142-
max_tokens: limitTokens ? config.openAi.maxTokens : undefined,
146+
max_tokens: limitTokens ? config.openAi.chatGpt.maxTokens : undefined,
143147
temperature: config.openAi.dalle.completions.temperature,
144148
});
145149
let wordCount = 0;
@@ -157,6 +161,9 @@ export const streamChatCompletion = async (
157161
completion = completion.replaceAll("..", "");
158162
completion += "..";
159163
wordCount = 0;
164+
// const extras = getMessageExtras({
165+
// topicId: ctx.message?.message_thread_id
166+
// })
160167
await ctx.api
161168
.editMessageText(ctx.chat?.id!, msgId, completion)
162169
.catch(async (e: any) => {

src/modules/open-ai/controller/index.ts

Lines changed: 0 additions & 168 deletions
This file was deleted.
