Skip to content
This repository was archived by the owner on Nov 24, 2025. It is now read-only.

Commit b8e7809

Browse files
rjmacarthy and ton-An authored
development > main (#476)
* fix hung autocomplete (#475) * fixes and improvements for embedding (#467) * add p-queue as dependency * fix embedding network overload + wrong root path when getting file paths * fix: invalid db table name * fix embedding of whole document being canceled when chunk is duplicate * add better progress indicator using a queue + fix embedding cancel button not working * fix file name undefined warning lint * fix duplicate item canceling promise early and causing incorrect progress reporting Note: previously attempted to fix with continue (was undone by the most recent merge), but using break is the correct solution. * remove duplicate embedding queue and increased concurrency * remove duplicate import * remove console log statements * improve embedding progress report * remove console log statement * add unbreakable space and increase amount of files shown in embedding progress report * add space between embedding progress percentage and file paths * decrease amount of files shown in embedding progress slightly decreased * fix array overflow + db being overwritten in workspace with multiple folders * lint * 3.23.11 --------- Co-authored-by: Anton <anton@antons-webfabrik.eu> Co-authored-by: Anton <ton-An@outlook.de>
1 parent 1a64cef commit b8e7809

File tree

9 files changed

+248
-157
lines changed

9 files changed

+248
-157
lines changed

package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
"name": "twinny",
33
"displayName": "twinny - AI Code Completion and Chat",
44
"description": "Locally hosted AI code completion plugin for vscode",
5-
"version": "3.23.10",
5+
"version": "3.23.11",
66
"icon": "assets/icon.png",
77
"keywords": [
88
"code-inference",

src/extension/chat.ts

Lines changed: 74 additions & 64 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,7 @@ import { FileTreeProvider } from "./tree"
6060
import {
6161
getIsOpenAICompatible,
6262
getLanguage,
63+
sanitizeWorkspaceName,
6364
updateLoadingMessage
6465
} from "./utils"
6566

@@ -84,6 +85,7 @@ export class Chat extends Base {
8485
private _webView?: Webview
8586
private _isCancelled = false
8687
private _fileHandler: FileHandler
88+
private _workspaceName = sanitizeWorkspaceName(workspace.name)
8789

8890
constructor(
8991
statusBar: StatusBarItem,
@@ -124,9 +126,9 @@ export class Chat extends Base {
124126
private async getRelevantFiles(
125127
text: string | undefined
126128
): Promise<[string, number][]> {
127-
if (!this._db || !text || !workspace.name) return []
129+
if (!this._db || !text || !this._workspaceName) return []
128130

129-
const table = `${workspace.name}-file-paths`
131+
const table = `${this._workspaceName}-file-paths`
130132
if (await this._db.hasEmbeddingTable(table)) {
131133
const embedding = await this._db.fetchModelEmbedding(text)
132134
if (!embedding) return []
@@ -164,7 +166,8 @@ export class Chat extends Base {
164166
text: string | undefined,
165167
filePaths: string[] | undefined
166168
) {
167-
if (!this._db || !text || !workspace.name || !filePaths?.length) return []
169+
if (!this._db || !text || !this._workspaceName || !filePaths?.length)
170+
return []
168171

169172
const rerankThreshold = this.getRerankThreshold()
170173
logger.log(`Reranking threshold: ${rerankThreshold}`)
@@ -197,9 +200,9 @@ export class Chat extends Base {
197200
text: string | undefined,
198201
relevantFiles: [string, number][]
199202
): Promise<string> {
200-
if (!this._db || !text || !workspace.name) return ""
203+
if (!this._db || !text || !this._workspaceName) return ""
201204

202-
const table = `${workspace.name}-documents`
205+
const table = `${this._workspaceName}-documents`
203206
const rerankThreshold = this.getRerankThreshold()
204207

205208
if (await this._db.hasEmbeddingTable(table)) {
@@ -369,13 +372,15 @@ export class Chat extends Base {
369372
if (!this._tokenJs || this._isCancelled) return
370373

371374
try {
372-
logger.log(`Chat completion request: ${JSON.stringify({
373-
model: requestBody.model,
374-
messages: requestBody.messages,
375-
stream: true,
376-
temperature: requestBody.temperature,
377-
max_tokens: requestBody.max_tokens
378-
})}`)
375+
logger.log(
376+
`Chat completion request: ${JSON.stringify({
377+
model: requestBody.model,
378+
messages: requestBody.messages,
379+
stream: true,
380+
temperature: requestBody.temperature,
381+
max_tokens: requestBody.max_tokens
382+
})}`
383+
)
379384

380385
const result = await this._tokenJs.chat.completions.create(requestBody)
381386

@@ -388,22 +393,28 @@ export class Chat extends Base {
388393
}
389394

390395
const timestamp = Math.floor(Date.now() / 1000)
391-
const responseId = `chatcmpl-${timestamp}-${Math.random().toString(36).substring(2, 10)}`
392-
393-
logger.log(`Chat completion response: ${JSON.stringify({
394-
id: responseId,
395-
object: "chat.completion",
396-
created: timestamp,
397-
model: requestBody.model || "unknown",
398-
choices: [{
399-
index: 0,
400-
message: {
401-
role: "assistant",
402-
content: this._completion.trim()
403-
},
404-
finish_reason: "stop"
405-
}]
406-
})}`)
396+
const responseId = `chatcmpl-${timestamp}-${Math.random()
397+
.toString(36)
398+
.substring(2, 10)}`
399+
400+
logger.log(
401+
`Chat completion response: ${JSON.stringify({
402+
id: responseId,
403+
object: "chat.completion",
404+
created: timestamp,
405+
model: requestBody.model || "unknown",
406+
choices: [
407+
{
408+
index: 0,
409+
message: {
410+
role: "assistant",
411+
content: this._completion.trim()
412+
},
413+
finish_reason: "stop"
414+
}
415+
]
416+
})}`
417+
)
407418

408419
await this._webView?.postMessage({
409420
type: EVENT_NAME.twinnyAddMessage,
@@ -552,7 +563,6 @@ export class Chat extends Base {
552563
messageContent: string,
553564
filePaths?: FileContextItem[]
554565
): Promise<string> {
555-
556566
const editor = window.activeTextEditor
557567
const userSelection = editor?.document.getText(editor.selection)
558568

@@ -606,52 +616,57 @@ export class Chat extends Base {
606616
conversation.push({
607617
role: USER,
608618
content: `${lastMessage.content}\n\n${additionalContext.trim()}`.trim(),
609-
images: lastMessage.images,
619+
images: lastMessage.images
610620
})
611621

612622
return conversation.map((message) => {
613-
const role = message.role;
623+
const role = message.role
614624
const $ = cheerio.load(message.content as string)
615625
$("img").remove()
616626

617-
const text = $.html("body").replace(/&lt;/g, "<")
627+
const text = $.html("body")
628+
.replace(/&lt;/g, "<")
618629
.replace(/<body>|<\/body>/g, "")
619-
.replace(/@problems/g, "").trim()
620-
.replace(/@workspace/g, "").trim()
630+
.replace(/@problems/g, "")
631+
.trim()
632+
.replace(/@workspace/g, "")
633+
.trim()
621634
.replace(/&amp;/g, "&")
622635
.replace(/&gt;/g, ">")
623636
.replace(/<span[^>]*data-type="mention"[^>]*>(.*?)<\/span>/g, "$1")
624637
.trimStart()
625638

626-
const images = message.images?.map((img) => ({
627-
type: "image_url" as const,
628-
image_url: { url: typeof img === "string" ? img : img.data }
629-
})) || [];
639+
const images =
640+
message.images?.map((img) => ({
641+
type: "image_url" as const,
642+
image_url: { url: typeof img === "string" ? img : img.data }
643+
})) || []
630644

631-
const textPart = { type: "text" as const, text: images.length ? text : message.content };
632-
const contentParts = images.length > 0
633-
? [textPart, ...images]
634-
: [textPart];
645+
const textPart = {
646+
type: "text" as const,
647+
text: images.length ? text : message.content
648+
}
649+
const contentParts =
650+
images.length > 0 ? [textPart, ...images] : [textPart]
635651

636652
// eslint-disable-next-line @typescript-eslint/no-explicit-any
637653
const result: any = {
638654
role,
639-
content: contentParts,
640-
};
655+
content: contentParts
656+
}
641657

642658
if (role === "function" && message.name) {
643-
result.name = message.name;
659+
result.name = message.name
644660
}
645661

646662
if (message.id) {
647-
result.id = message.id;
663+
result.id = message.id
648664
}
649665

650-
return result as ChatCompletionMessage;
651-
});
666+
return result as ChatCompletionMessage
667+
})
652668
}
653669

654-
655670
private shouldUseStreaming(provider: TwinnyProvider): boolean {
656671
const supportsStreaming =
657672
models[provider?.provider as keyof typeof models]?.supportsStreaming
@@ -670,7 +685,7 @@ export class Chat extends Base {
670685
stream: true as const,
671686
id: conversationId,
672687
// eslint-disable-next-line @typescript-eslint/no-explicit-any
673-
provider: this.getProviderType(provider) as any,
688+
provider: this.getProviderType(provider) as any
674689
}
675690

676691
if (provider.provider !== API_PROVIDERS.Twinny) {
@@ -687,7 +702,7 @@ export class Chat extends Base {
687702
messages: this._conversation.filter((m) => m.role !== "system"),
688703
model: provider.modelName,
689704
// eslint-disable-next-line @typescript-eslint/no-explicit-any
690-
provider: this.getProviderType(provider) as any,
705+
provider: this.getProviderType(provider) as any
691706
}
692707
}
693708

@@ -703,7 +718,7 @@ export class Chat extends Base {
703718

704719
public async getTemplateMessages(
705720
template: string,
706-
context?: string,
721+
context?: string
707722
): Promise<ChatCompletionMessage[]> {
708723
this._statusBar.text = "$(loading~spin)"
709724
const { language } = getLanguage()
@@ -722,10 +737,12 @@ export class Chat extends Base {
722737
type: EVENT_NAME.twinnyAddMessage,
723738
data: {
724739
role: USER,
725-
content: `${kebabToSentence(template)}\n\n\n<pre><code>${selection}</code></pre>`.trim() || " "
740+
content:
741+
`${kebabToSentence(
742+
template
743+
)}\n\n\n<pre><code>${selection}</code></pre>`.trim() || " "
726744
}
727-
} as ServerMessage<ChatCompletionMessage>);
728-
745+
} as ServerMessage<ChatCompletionMessage>)
729746

730747
let ragContext = undefined
731748
if (["explain"].includes(template)) {
@@ -747,7 +764,6 @@ export class Chat extends Base {
747764
return this._conversation
748765
}
749766

750-
751767
public async completion(
752768
messages: ChatCompletionMessage[],
753769
fileContexts?: FileContextItem[],
@@ -776,15 +792,9 @@ export class Chat extends Base {
776792
: this.llmNoStream(this.getNoStreamOptions(provider))
777793
}
778794

779-
public async templateCompletion(
780-
promptTemplate: string,
781-
context?: string,
782-
) {
795+
public async templateCompletion(promptTemplate: string, context?: string) {
783796
this._isCancelled = false
784-
this._conversation = await this.getTemplateMessages(
785-
promptTemplate,
786-
context,
787-
)
797+
this._conversation = await this.getTemplateMessages(promptTemplate, context)
788798
const provider = this.getProvider()
789799
if (!provider) return []
790800

0 commit comments

Comments (0)