@@ -33,7 +33,7 @@ export default async function chatResponse(req: NextApiRequest, res: NextApiResp
   const messages = req.body;
 
   const ldClient = await getServerClient(process.env.LD_SDK_KEY || "");
-  console.log("ldClient", ldClient)
+  // console.log("ldClient", ldClient)
   const context: any = getCookie("ld-context") || { "kind": "user", "name": "anonymous", "key": "abc-123" };
 
   const model = await ldClient.variation("ai-chatbot", context, {
@@ -53,23 +53,25 @@ export default async function chatResponse(req: NextApiRequest, res: NextApiResp
   // ];
 
   const chatBotModelInput = new InvokeModelCommand({
-    modelId: model.modelId,
+    modelId: "cohere.command-text-v14",
     contentType: "application/json",
     accept: "application/json",
     body: JSON.stringify({
       prompt: `\n\nHuman:${messages}\n\nAssistant:`,
       temperature: model.temperature,
       max_tokens_to_sample: model?.max_tokens_to_sample,
-      max_gen_len: model?.max_gen_len,
-      top_p: model.top_p,
+      max_tokens: 500,
+      p: 1,
+      // max_gen_len: model?.max_gen_len,
+      // top_p: model?.top_p,
     }),
   });
 
   try {
     const bedrockResponse = await bedrockClient.send(chatBotModelInput);
     const decoder = new TextDecoder();
     const jsontext = JSON.parse(decoder.decode(bedrockResponse.body));
-
+    console.log(jsontext)
     res.status(200).json(jsontext);
   } catch (error: any) {
     throw new Error(error.message);