Commit e988b93

llm: updates, fixes, refactor, ...
1 parent 44ad667 commit e988b93

28 files changed, +803 -839 lines

src/.nvmrc

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-lts/hydrogen
+20

src/packages/frontend/account/chatbot.ts

Lines changed: 7 additions & 4 deletions
@@ -8,9 +8,7 @@ When new models are added, e.g., Claude soon (!), they will go here.
 */
 
 import { redux } from "@cocalc/frontend/app-framework";
-import {
-  getUserDefinedLLMByModel
-} from "@cocalc/frontend/frame-editors/llm/use-userdefined-llm";
+import { getUserDefinedLLMByModel } from "@cocalc/frontend/frame-editors/llm/use-userdefined-llm";
 import {
   LANGUAGE_MODELS,
   LANGUAGE_MODEL_PREFIXES,
@@ -19,11 +17,13 @@ import {
   fromCustomOpenAIModel,
   fromMistralService,
   fromOllamaModel,
+  fromXaiService,
   isAnthropicService,
   isCustomOpenAI,
   isMistralService,
   isOllamaLLM,
-  isUserDefinedModel
+  isUserDefinedModel,
+  isXaiService,
 } from "@cocalc/util/db-schema/llm-utils";
 
 // we either check if the prefix is one of the known ones (used in some circumstances)
@@ -53,6 +53,9 @@ export function chatBotName(account_id?: string): string {
   if (isMistralService(account_id)) {
     return LLM_USERNAMES[fromMistralService(account_id)] ?? "Mistral";
   }
+  if (isXaiService(account_id)) {
+    return LLM_USERNAMES[fromXaiService(account_id)] ?? "xAI";
+  }
   if (isAnthropicService(account_id)) {
     return LLM_USERNAMES[fromAnthropicService(account_id)] ?? "Anthropic";
   }
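
For orientation: isXaiService and fromXaiService are imported from @cocalc/util/db-schema/llm-utils, whose changes are not shown in this excerpt. A minimal sketch of the prefix-based pattern the existing per-vendor helpers follow; the prefix string and signatures here are assumptions, not the actual implementation:

// Hypothetical sketch only; the real definitions live in
// @cocalc/util/db-schema/llm-utils and may differ in detail.
const XAI_SERVICE_PREFIX = "xai-"; // assumed vendor prefix

export function isXaiService(service: string): boolean {
  // an account_id such as "xai-grok-..." is routed to the xAI chat bot
  return service.startsWith(XAI_SERVICE_PREFIX);
}

export function fromXaiService(service: string): string {
  // strip the vendor prefix to recover the bare model name
  return service.slice(XAI_SERVICE_PREFIX.length);
}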

src/packages/frontend/account/user-defined-llm.tsx

Lines changed: 3 additions & 1 deletion
@@ -41,7 +41,7 @@ import {
   toUserLLMModelName,
 } from "@cocalc/util/db-schema/llm-utils";
 import { trunc, unreachable } from "@cocalc/util/misc";
-import { Panel } from "../antd-bootstrap";
+import { Panel } from "@cocalc/frontend/antd-bootstrap";
 
 // @cspell:ignore mixtral userdefined
 
@@ -265,6 +265,8 @@ export function UserDefinedLLMComponent({ style, on_change }: Props) {
       return "'open-mixtral-8x22b'";
     case "google":
       return "'gemini-2.0-flash'";
+    case "xai":
+      return "'grok-4-1-fast-non-reasoning-16k'";
     default:
       unreachable(service);
       return "'llama3:latest'";
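
The default: unreachable(service) arm is an exhaustiveness check, so adding the "xai" case keeps this switch in sync with the service union. A generic sketch of how such a helper is typically typed; the actual unreachable in @cocalc/util/misc may differ, and ExampleService/examplePlaceholder are illustrative names only:

// exhaustiveness helper: only callable once TypeScript has narrowed
// its argument to `never`, i.e. every union member was handled above
function unreachable(x: never): void {
  console.warn(`unexpected value: ${JSON.stringify(x)}`);
}

type ExampleService = "google" | "xai"; // illustrative union only

function examplePlaceholder(service: ExampleService): string {
  switch (service) {
    case "google":
      return "'gemini-2.0-flash'";
    case "xai":
      return "'grok-4-1-fast-non-reasoning-16k'";
    default:
      // removing a case above turns this call into a compile-time error
      unreachable(service);
      return "'llama3:latest'";
  }
}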

src/packages/frontend/components/language-model-icon.tsx

Lines changed: 4 additions & 0 deletions
@@ -19,6 +19,7 @@ import GooglePalmLogo from "./google-palm-avatar";
 import MistralAvatar from "./mistral-avatar";
 import OllamaAvatar from "./ollama-avatar";
 import OpenAIAvatar from "./openai-avatar";
+import XAIAvatar from "./xai-avatar";
 
 export function LanguageModelVendorAvatar(
   props: Readonly<{
@@ -101,6 +102,9 @@ export function LanguageModelVendorAvatar(
     case "anthropic":
       return <AnthropicAvatar size={size} style={style} />;
 
+    case "xai":
+      return <XAIAvatar size={size} style={style} />;
+
     case "user":
       // should never happen, because it is unpacked below
       return fallback();

src/packages/frontend/components/xai-avatar.tsx

Lines changed: 63 additions & 0 deletions
@@ -0,0 +1,63 @@
+import { CSSProperties } from "react";
+
+import { COLORS } from "@cocalc/util/theme";
+
+interface Props {
+  size: number;
+  backgroundColor?: string;
+  iconColor?: string;
+  style?: CSSProperties;
+  innerStyle?: CSSProperties;
+}
+
+export default function XAIAvatar({
+  size,
+  innerStyle,
+  backgroundColor = "transparent",
+  iconColor = COLORS.GRAY_DD,
+  style,
+}: Props) {
+  const topOffset = size / 4;
+  return (
+    <div
+      style={{
+        width: size,
+        height: size,
+        display: "inline-flex",
+        alignItems: "center",
+        justifyContent: "center",
+        overflow: "hidden",
+        ...style,
+      }}
+    >
+      <div
+        style={{
+          backgroundColor,
+          color: iconColor,
+          height: size,
+          width: size,
+          position: "relative",
+          top: topOffset,
+          display: "flex",
+          alignItems: "center",
+          justifyContent: "center",
+          ...innerStyle,
+        }}
+      >
+        <svg
+          width={size}
+          height={size}
+          viewBox="150 100 550 500"
+          xmlns="http://www.w3.org/2000/svg"
+        >
+          <g fill="currentColor">
+            <polygon points="557.09,211.99 565.4,538.36 631.96,538.36 640.28,93.18" />
+            <polygon points="640.28,56.91 538.72,56.91 379.35,284.53 430.13,357.05" />
+            <polygon points="201.61,538.36 303.17,538.36 353.96,465.84 303.17,393.31" />
+            <polygon points="201.61,211.99 430.13,538.36 531.69,538.36 303.17,211.99" />
+          </g>
+        </svg>
+      </div>
+    </div>
+  );
+}
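
The new avatar only requires a size; background, icon color, and wrapper styles are optional, and the size / 4 top offset appears to compensate for the wordmark's viewBox so the glyph lines up with adjacent text. A minimal usage sketch mirroring the call sites added in language-model-icon.tsx and mentionable-users.tsx (XaiLabel is an illustrative component, not part of this commit):

import XAIAvatar from "@cocalc/frontend/components/xai-avatar";

// 24px glyph on a transparent background with the default gray icon color
export function XaiLabel() {
  return (
    <span>
      <XAIAvatar size={24} /> Grok (xAI)
    </span>
  );
}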

src/packages/frontend/customize.tsx

Lines changed: 5 additions & 1 deletion
@@ -6,6 +6,8 @@
 // Site Customize -- dynamically customize the look and configuration
 // of CoCalc for the client.
 
+// cSpell:ignore TOSurl PAYGO nonfree tagmanager
+
 import { fromJS, List, Map } from "immutable";
 import { join } from "path";
 import { useIntl } from "react-intl";
@@ -107,6 +109,7 @@ export interface CustomizeState {
   google_vertexai_enabled: boolean;
   mistral_enabled: boolean;
   anthropic_enabled: boolean;
+  xai_enabled: boolean;
   ollama_enabled: boolean;
   custom_openai_enabled: boolean;
   neural_search_enabled: boolean;
@@ -215,6 +218,7 @@ export class CustomizeStore extends Store<CustomizeState> {
       custom_openai: this.get("custom_openai_enabled"),
       mistralai: this.get("mistral_enabled"),
       anthropic: this.get("anthropic_enabled"),
+      xai: this.get("xai_enabled"),
       user: this.get("user_defined_llm"),
     };
   }
@@ -261,7 +265,7 @@ export class CustomizeActions extends Actions<CustomizeState> {
       unlicensed_project_timetravel_limit: undefined,
     });
   };
-
+
   reload = async () => {
     await loadCustomizeState();
   };
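
The new xai_enabled flag travels the same route as the other vendor flags: it is stored in CustomizeState and added to the per-vendor object the store assembles above, which consumers such as mentionable-users.tsx (below) read as enabledLLMs, a record of booleans. A rough sketch of that consumer-side shape, assuming one boolean per vendor key; keys not visible in these hunks are omitted:

// assumed consumer-side view of the object assembled in CustomizeStore
interface EnabledLLMs {
  custom_openai: boolean;
  mistralai: boolean;
  anthropic: boolean;
  xai: boolean;
  user: boolean;
  // other vendor flags handled the same way are omitted here
}

// UI code can then gate vendor-specific menus on a single flag
function shouldOfferXai(enabledLLMs: EnabledLLMs): boolean {
  return enabledLLMs.xai;
}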

src/packages/frontend/editors/markdown-input/mentionable-users.tsx

Lines changed: 28 additions & 0 deletions
@@ -14,6 +14,7 @@ import GoogleGeminiLogo from "@cocalc/frontend/components/google-gemini-avatar";
 import { LanguageModelVendorAvatar } from "@cocalc/frontend/components/language-model-icon";
 import MistralAvatar from "@cocalc/frontend/components/mistral-avatar";
 import OpenAIAvatar from "@cocalc/frontend/components/openai-avatar";
+import XAIAvatar from "@cocalc/frontend/components/xai-avatar";
 import { LLMModelPrice } from "@cocalc/frontend/frame-editors/llm/llm-selector";
 import { useUserDefinedLLM } from "@cocalc/frontend/frame-editors/llm/use-userdefined-llm";
 import { useProjectContext } from "@cocalc/frontend/project/context";
@@ -37,6 +38,7 @@ import {
   toCustomOpenAIModel,
   toOllamaModel,
   toUserLLMModelName,
+  XAI_MODELS,
 } from "@cocalc/util/db-schema/llm-utils";
 import { cmp, timestamp_cmp, trunc_middle } from "@cocalc/util/misc";
 import { CustomLLMPublic } from "@cocalc/util/types/llm";
@@ -204,6 +206,32 @@ function mentionableUsers({
     }
   }
 
+  if (enabledLLMs.xai) {
+    for (const m of XAI_MODELS) {
+      if (!selectableLLMs.includes(m)) continue;
+      const show_llm_main_menu = m === model;
+      const size = show_llm_main_menu ? avatarUserSize : avatarLLMSize;
+      const name = LLM_USERNAMES[m] ?? m;
+      const vendor = model2vendor(m);
+      const search_term =
+        `${vendor.name}${m.replace(/-/g, "")}${name.replace(/ /g, "")}`.toLowerCase();
+      if (!search || search_term.includes(search)) {
+        mentions.push({
+          value: model2service(m),
+          label: (
+            <LLMTooltip model={m}>
+              <XAIAvatar size={size} /> {name}{" "}
+              <LLMModelPrice model={m} floatRight />
+            </LLMTooltip>
+          ),
+          search: search_term,
+          is_llm: true,
+          show_llm_main_menu,
+        });
+      }
+    }
+  }
+
   if (enabledLLMs.mistralai) {
     for (const m of MISTRAL_MODELS) {
       if (!selectableLLMs.includes(m)) continue;
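
The search string for each mention entry concatenates the vendor name, the model id with dashes stripped, and the display name with spaces stripped, all lower-cased, so typing e.g. "@grok" or "@xai" in the markdown input matches the new entries. A standalone sketch of that construction (makeSearchTerm is an illustrative name, not a function in this file):

// mirrors the inline search_term expression in the xAI block above
function makeSearchTerm(
  vendorName: string,
  model: string,
  displayName: string,
): string {
  return `${vendorName}${model.replace(/-/g, "")}${displayName.replace(/ /g, "")}`.toLowerCase();
}

// e.g. makeSearchTerm("xai", "grok-4", "Grok 4") === "xaigrok4grok4"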

src/packages/frontend/project/context.tsx

Lines changed: 1 addition & 0 deletions
@@ -72,6 +72,7 @@ export const emptyProjectContext = {
     mistralai: false,
     anthropic: false,
     custom_openai: false,
+    xai: false,
     user: false,
   },
   flipTabs: [0, () => {}],

src/packages/next/components/openai/vendor-status-check.tsx

Lines changed: 7 additions & 0 deletions
@@ -65,6 +65,13 @@ export function LLMServiceStatusCheck({
         </>
       );
 
+    case "xai":
+      return (
+        <>
+          xAI <A href="https://status.x.ai/">status</A>.
+        </>
+      );
+
     case "user":
       return <>{getLLMServiceStatusCheckMD("user")}</>;

src/packages/package.json

Lines changed: 2 additions & 2 deletions
@@ -23,8 +23,8 @@
     "undici@<5.28.3": "^5.28.4",
     "postcss@<8.4.31": "^8.4.31",
     "retry-request@<7.0.1": "^7.0.2",
-    "@langchain/core": "^0.3.77",
-    "langchain": "^0.3.34",
+    "@langchain/core": "^1.1.6",
+    "langchain": "^1.2.1",
     "katex@<0.16.9": "^0.16.10",
     "nanoid@<3.3.8": "^3.3.8",
     "node-forge@<=1.3.1": "^1.3.2",
