Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/.nvmrc
Original file line number Diff line number Diff line change
@@ -1 +1 @@
lts/hydrogen
20
11 changes: 7 additions & 4 deletions src/packages/frontend/account/chatbot.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,7 @@ When new models are added, e.g., Claude soon (!), they will go here.
*/

import { redux } from "@cocalc/frontend/app-framework";
import {
getUserDefinedLLMByModel
} from "@cocalc/frontend/frame-editors/llm/use-userdefined-llm";
import { getUserDefinedLLMByModel } from "@cocalc/frontend/frame-editors/llm/use-userdefined-llm";
import {
LANGUAGE_MODELS,
LANGUAGE_MODEL_PREFIXES,
Expand All @@ -19,11 +17,13 @@ import {
fromCustomOpenAIModel,
fromMistralService,
fromOllamaModel,
fromXaiService,
isAnthropicService,
isCustomOpenAI,
isMistralService,
isOllamaLLM,
isUserDefinedModel
isUserDefinedModel,
isXaiService,
} from "@cocalc/util/db-schema/llm-utils";

// we either check if the prefix is one of the known ones (used in some circumstances)
Expand Down Expand Up @@ -53,6 +53,9 @@ export function chatBotName(account_id?: string): string {
if (isMistralService(account_id)) {
return LLM_USERNAMES[fromMistralService(account_id)] ?? "Mistral";
}
if (isXaiService(account_id)) {
return LLM_USERNAMES[fromXaiService(account_id)] ?? "xAI";
}
if (isAnthropicService(account_id)) {
return LLM_USERNAMES[fromAnthropicService(account_id)] ?? "Anthropic";
}
Expand Down
4 changes: 3 additions & 1 deletion src/packages/frontend/account/user-defined-llm.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ import {
toUserLLMModelName,
} from "@cocalc/util/db-schema/llm-utils";
import { trunc, unreachable } from "@cocalc/util/misc";
import { Panel } from "../antd-bootstrap";
import { Panel } from "@cocalc/frontend/antd-bootstrap";

// @cspell:ignore mixtral userdefined

Expand Down Expand Up @@ -265,6 +265,8 @@ export function UserDefinedLLMComponent({ style, on_change }: Props) {
return "'open-mixtral-8x22b'";
case "google":
return "'gemini-2.0-flash'";
case "xai":
return "'grok-4-1-fast-non-reasoning-16k'";
default:
unreachable(service);
return "'llama3:latest'";
Expand Down
4 changes: 4 additions & 0 deletions src/packages/frontend/components/language-model-icon.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import GooglePalmLogo from "./google-palm-avatar";
import MistralAvatar from "./mistral-avatar";
import OllamaAvatar from "./ollama-avatar";
import OpenAIAvatar from "./openai-avatar";
import XAIAvatar from "./xai-avatar";

export function LanguageModelVendorAvatar(
props: Readonly<{
Expand Down Expand Up @@ -101,6 +102,9 @@ export function LanguageModelVendorAvatar(
case "anthropic":
return <AnthropicAvatar size={size} style={style} />;

case "xai":
return <XAIAvatar size={size} style={style} />;

case "user":
// should never happen, because it is unpacked below
return fallback();
Expand Down
63 changes: 63 additions & 0 deletions src/packages/frontend/components/xai-avatar.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
import { CSSProperties } from "react";

import { COLORS } from "@cocalc/util/theme";

interface Props {
  // Edge length of the (square) avatar, in pixels.
  size: number;
  // Fill behind the glyph; defaults to "transparent".
  backgroundColor?: string;
  // Color of the "X" mark; defaults to COLORS.GRAY_DD.
  iconColor?: string;
  // Extra CSS applied to the outer container div.
  style?: CSSProperties;
  // Extra CSS applied to the inner wrapper div (spread last, so it can
  // override the built-in positioning, e.g. the vertical nudge).
  innerStyle?: CSSProperties;
}

/**
 * Avatar icon for xAI: renders the "X" logo as an inline SVG sized to a
 * square of `size` pixels.
 *
 * The inner wrapper is shifted down by a quarter of the size (and the outer
 * container clips overflow), which visually centers the glyph within the
 * square — presumably compensating for the logo's asymmetric bounding box;
 * NOTE(review): confirm this offset is intentional alignment, not a leftover.
 */
export default function XAIAvatar({
  size,
  innerStyle,
  backgroundColor = "transparent",
  iconColor = COLORS.GRAY_DD,
  style,
}: Props) {
  // Vertical nudge applied to the inner wrapper (see note above).
  const verticalShift = size / 4;

  const containerStyle: CSSProperties = {
    width: size,
    height: size,
    display: "inline-flex",
    alignItems: "center",
    justifyContent: "center",
    overflow: "hidden",
    ...style,
  };

  const wrapperStyle: CSSProperties = {
    backgroundColor,
    color: iconColor,
    height: size,
    width: size,
    position: "relative",
    top: verticalShift,
    display: "flex",
    alignItems: "center",
    justifyContent: "center",
    ...innerStyle,
  };

  return (
    <div style={containerStyle}>
      <div style={wrapperStyle}>
        {/* The glyph inherits `color` via fill="currentColor". */}
        <svg
          width={size}
          height={size}
          viewBox="150 100 550 500"
          xmlns="http://www.w3.org/2000/svg"
        >
          <g fill="currentColor">
            <polygon points="557.09,211.99 565.4,538.36 631.96,538.36 640.28,93.18" />
            <polygon points="640.28,56.91 538.72,56.91 379.35,284.53 430.13,357.05" />
            <polygon points="201.61,538.36 303.17,538.36 353.96,465.84 303.17,393.31" />
            <polygon points="201.61,211.99 430.13,538.36 531.69,538.36 303.17,211.99" />
          </g>
        </svg>
      </div>
    </div>
  );
}
6 changes: 5 additions & 1 deletion src/packages/frontend/customize.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
// Site Customize -- dynamically customize the look and configuration
// of CoCalc for the client.

// cSpell:ignore TOSurl PAYGO nonfree tagmanager

import { fromJS, List, Map } from "immutable";
import { join } from "path";
import { useIntl } from "react-intl";
Expand Down Expand Up @@ -107,6 +109,7 @@ export interface CustomizeState {
google_vertexai_enabled: boolean;
mistral_enabled: boolean;
anthropic_enabled: boolean;
xai_enabled: boolean;
ollama_enabled: boolean;
custom_openai_enabled: boolean;
neural_search_enabled: boolean;
Expand Down Expand Up @@ -215,6 +218,7 @@ export class CustomizeStore extends Store<CustomizeState> {
custom_openai: this.get("custom_openai_enabled"),
mistralai: this.get("mistral_enabled"),
anthropic: this.get("anthropic_enabled"),
xai: this.get("xai_enabled"),
user: this.get("user_defined_llm"),
};
}
Expand Down Expand Up @@ -261,7 +265,7 @@ export class CustomizeActions extends Actions<CustomizeState> {
unlicensed_project_timetravel_limit: undefined,
});
};

reload = async () => {
await loadCustomizeState();
};
Expand Down
28 changes: 28 additions & 0 deletions src/packages/frontend/editors/markdown-input/mentionable-users.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import GoogleGeminiLogo from "@cocalc/frontend/components/google-gemini-avatar";
import { LanguageModelVendorAvatar } from "@cocalc/frontend/components/language-model-icon";
import MistralAvatar from "@cocalc/frontend/components/mistral-avatar";
import OpenAIAvatar from "@cocalc/frontend/components/openai-avatar";
import XAIAvatar from "@cocalc/frontend/components/xai-avatar";
import { LLMModelPrice } from "@cocalc/frontend/frame-editors/llm/llm-selector";
import { useUserDefinedLLM } from "@cocalc/frontend/frame-editors/llm/use-userdefined-llm";
import { useProjectContext } from "@cocalc/frontend/project/context";
Expand All @@ -37,6 +38,7 @@ import {
toCustomOpenAIModel,
toOllamaModel,
toUserLLMModelName,
XAI_MODELS,
} from "@cocalc/util/db-schema/llm-utils";
import { cmp, timestamp_cmp, trunc_middle } from "@cocalc/util/misc";
import { CustomLLMPublic } from "@cocalc/util/types/llm";
Expand Down Expand Up @@ -204,6 +206,32 @@ function mentionableUsers({
}
}

if (enabledLLMs.xai) {
for (const m of XAI_MODELS) {
if (!selectableLLMs.includes(m)) continue;
const show_llm_main_menu = m === model;
const size = show_llm_main_menu ? avatarUserSize : avatarLLMSize;
const name = LLM_USERNAMES[m] ?? m;
const vendor = model2vendor(m);
const search_term =
`${vendor.name}${m.replace(/-/g, "")}${name.replace(/ /g, "")}`.toLowerCase();
if (!search || search_term.includes(search)) {
mentions.push({
value: model2service(m),
label: (
<LLMTooltip model={m}>
<XAIAvatar size={size} /> {name}{" "}
<LLMModelPrice model={m} floatRight />
</LLMTooltip>
),
search: search_term,
is_llm: true,
show_llm_main_menu,
});
}
}
}

if (enabledLLMs.mistralai) {
for (const m of MISTRAL_MODELS) {
if (!selectableLLMs.includes(m)) continue;
Expand Down
1 change: 1 addition & 0 deletions src/packages/frontend/project/context.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ export const emptyProjectContext = {
mistralai: false,
anthropic: false,
custom_openai: false,
xai: false,
user: false,
},
flipTabs: [0, () => {}],
Expand Down
7 changes: 7 additions & 0 deletions src/packages/next/components/openai/vendor-status-check.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,13 @@ export function LLMServiceStatusCheck({
</>
);

case "xai":
return (
<>
xAI <A href="https://status.x.ai/">status</A>.
</>
);

case "user":
return <>{getLLMServiceStatusCheckMD("user")}</>;

Expand Down
4 changes: 2 additions & 2 deletions src/packages/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@
"undici@<5.28.3": "^5.28.4",
"postcss@<8.4.31": "^8.4.31",
"retry-request@<7.0.1": "^7.0.2",
"@langchain/core": "^0.3.77",
"langchain": "^0.3.34",
"@langchain/core": "^1.1.6",
"langchain": "^1.2.1",
"katex@<0.16.9": "^0.16.10",
"nanoid@<3.3.8": "^3.3.8",
"node-forge@<=1.3.1": "^1.3.2",
Expand Down
Loading