Skip to content

Add option to use a local LLM #19

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
-- AlterTable
-- Both columns are added as nullable TEXT (no NOT NULL constraint, no
-- DEFAULT), so existing "User" rows will hold NULL until populated.
ALTER TABLE "User" ADD COLUMN "subscriptionStatus" TEXT;
-- Mirrors the optional `localModel String?` field added to schema.prisma
-- in this same change set.
ALTER TABLE "User" ADD COLUMN "localModel" TEXT;
1 change: 1 addition & 0 deletions schema.prisma
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ model User {
// Fields of `model User` (model header/closing brace are outside this hunk).
hasPaid Boolean @default(false)
isUsingLn Boolean @default(false)
gptModel String @default("gpt-4o-mini")
// Optional. The server-side diff in this PR consumes this value directly as
// the fetch() request URL for the "local model" branch — i.e. an HTTP
// endpoint, not a filesystem path, despite the comment and the UI
// placeholder saying "path". NOTE(review): confirm intended semantics, and
// note the value is user-controlled when used as a server-side fetch target.
localModel String? // New field to store the local model path
datePaid DateTime?
stripeId String?
checkoutSessionId String?
Expand Down
10 changes: 9 additions & 1 deletion src/client/MainPage.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ import {
Radio,
Tooltip,
useDisclosure,
Select,
} from '@chakra-ui/react';
import BorderBox from './components/BorderBox';
import { LeaveATip, LoginToBegin } from './components/AlertDialog';
Expand Down Expand Up @@ -525,6 +526,13 @@ function MainPage() {
</VStack>
</FormControl>
)}
<FormControl>
<FormLabel htmlFor='modelType'>Select Model Type</FormLabel>
<Select id='modelType' {...register('modelType')}>
<option value='openai'>OpenAI</option>
<option value='local'>Local</option>
</Select>
</FormControl>
<VStack
border={'sm'}
bg='bg-contrast-xs'
Expand Down Expand Up @@ -645,4 +653,4 @@ function MainPage() {
);
}

export default MainPage;
export default MainPage;
18 changes: 17 additions & 1 deletion src/client/ProfilePage.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,14 @@ import { logout } from 'wasp/client/auth';
import { stripePayment, stripeGpt4Payment, useQuery, getUserInfo } from 'wasp/client/operations';

import BorderBox from './components/BorderBox';
import { Box, Heading, Text, Button, Code, Spinner, VStack, HStack, Link } from '@chakra-ui/react';
import { Box, Heading, Text, Button, Code, Spinner, VStack, HStack, Link, FormControl, FormLabel, Input } from '@chakra-ui/react';
import { useState } from 'react';
import { IoWarningOutline } from 'react-icons/io5';

export default function ProfilePage({ user }: { user: User }) {
const [isLoading, setIsLoading] = useState(false);
const [isGpt4loading, setIsGpt4Loading] = useState(false);
const [localModel, setLocalModel] = useState(user.localModel || '');

const { data: userInfo } = useQuery(getUserInfo, { id: user.id });

Expand Down Expand Up @@ -41,6 +42,11 @@ export default function ProfilePage({ user }: { user: User }) {
setIsGpt4Loading(false);
}

// Controlled-input handler for the "Local Model Path" field: mirrors the
// input's current value into local component state via setLocalModel.
// Dropped the original `async` keyword — the body contains no `await`, so the
// implicit Promise<void> return was misleading; the onChange caller ignores
// the return value either way, so this is signature-compatible.
// NOTE(review): the value is only kept in client state here; persisting it to
// the backend (User.localModel) is still TODO — no operation is invoked yet.
function handleLocalModelChange(event: React.ChangeEvent<HTMLInputElement>) {
  setLocalModel(event.target.value);
}

return (
<BorderBox>
{!!userInfo ? (
Expand Down Expand Up @@ -160,6 +166,16 @@ export default function ProfilePage({ user }: { user: User }) {
</VStack>
</VStack>
)}
<FormControl>
<FormLabel htmlFor='localModel'>Local Model Path</FormLabel>
<Input
id='localModel'
type='text'
value={localModel}
onChange={handleLocalModelChange}
placeholder='Enter local model path'
/>
</FormControl>
<Button alignSelf='flex-end' size='sm' onClick={() => logout()}>
Logout
</Button>
Expand Down
32 changes: 23 additions & 9 deletions src/server/actions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -146,16 +146,30 @@ export const generateCoverLetter: GenerateCoverLetter<CoverLetterPayload, CoverL
});
}

const response = await fetch('https://api.openai.com/v1/chat/completions', {
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${process.env.OPENAI_API_KEY!}`,
},
method: 'POST',
body: JSON.stringify(payload),
});
if (context.user.localModel) {
// Use local model
const response = await fetch(context.user.localModel, {
headers: {
'Content-Type': 'application/json',
},
method: 'POST',
body: JSON.stringify(payload),
});

json = (await response.json()) as OpenAIResponse;
json = (await response.json()) as OpenAIResponse;
} else {
// Use OpenAI model
const response = await fetch('https://api.openai.com/v1/chat/completions', {
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${process.env.OPENAI_API_KEY!}`,
},
method: 'POST',
body: JSON.stringify(payload),
});

json = (await response.json()) as OpenAIResponse;
}

if (json?.error) throw new HttpError(500, json?.error?.message || 'Something went wrong');

Expand Down