Skip to content

Commit 2cf2ef1

Browse files
authored
Merge pull request #269 from launchdarkly-labs/cko_2025
Cko 2025
2 parents 221166b + ab88630 commit 2cf2ef1

File tree

4 files changed

+11
-40
lines changed

4 files changed

+11
-40
lines changed

Diff for: .github/workflows/demo_provisioning_scripts/DemoBuilder.py

+4-2
Original file line number | Diff line number | Diff line change
@@ -539,6 +539,8 @@ def add_userid_to_flags(self):
539539
res = self.ldproject.add_maintainer_to_flag("limitedTimeOfferBanner")
540540
res = self.ldproject.add_maintainer_to_flag("earlyAccessFeatureToggle")
541541
res = self.ldproject.add_maintainer_to_flag("debuggingModeForDevelopers")
542+
res = self.ldproject.add_maintainer_to_flag("release-new-search-engine")
543+
res = self.ldproject.add_maintainer_to_flag("release-new-shorten-collections-page")
542544

543545
# ############################################################################################################
544546

@@ -1133,7 +1135,8 @@ def flag_database_migration(self):
11331135
"This feature flag will trigger the database migration tool in LaunchAirways",
11341136
purpose="migration",
11351137
migration_stages=6,
1136-
tags=["release", "migration-assistant", "optional"]
1138+
tags=["release", "migration-assistant", "optional"],
1139+
temporary=True
11371140
)
11381141

11391142
def flag_database_guarded_release(self):
@@ -2263,7 +2266,6 @@ def setup_template_environment(self):
22632266
self.ldproject.copy_flag_settings("financialDBMigration", "production", "template-env")
22642267
self.ldproject.copy_flag_settings("investment-recent-trade-db", "production", "template-env")
22652268
self.ldproject.copy_flag_settings("release-new-investment-stock-api", "production", "template-env")
2266-
self.ldproject.copy_flag_settings("ai-chatbot", "production", "template-env")
22672269
self.ldproject.copy_flag_settings("storeAttentionCallout", "production", "template-env")
22682270
self.ldproject.copy_flag_settings("cartSuggestedItems", "production", "template-env")
22692271
print("Done")

Diff for: .github/workflows/demo_provisioning_scripts/LDPlatform.py

+2-2
Original file line number | Diff line number | Diff line change
@@ -726,7 +726,7 @@ def create_release_pipeline(self, pipeline_key, pipeline_name):
726726
"releaseStrategy": "monitored-release",
727727
"requireApproval": False,
728728
"releaseGuardianConfiguration": {
729-
"monitoringWindowMilliseconds": 3000,
729+
"monitoringWindowMilliseconds": 1000,
730730
"rolloutWeight": 50000,
731731
"rollbackOnRegression": True,
732732
},
@@ -1578,6 +1578,6 @@ def advance_flag_phase(self, flag_key, status, pipeline_phase_id):
15781578
if status_code != 200:
15791579
data = json.loads(response.text)
15801580
print("Error advancing flag phase: " + data["message"])
1581-
time.sleep(2)
1581+
time.sleep(3)
15821582

15831583
return response

Diff for: components/chatbot/ChatBot.tsx

+2-2
Original file line number | Diff line number | Diff line change
@@ -17,7 +17,7 @@ import { BatteryCharging } from "lucide-react";
1717
import {
1818
PERSONA_ROLE_DEVELOPER,
1919
COHERE,
20-
CLAUDE,
20+
ANTHROPIC,
2121
META,
2222
DEFAULT_AI_MODEL,
2323
} from "@/utils/constants";
@@ -237,7 +237,7 @@ export default function Chatbot({ vertical }: { vertical: string }) {
237237
: ""
238238
}
239239
${
240-
aiNewModelChatbotFlag?.model?.name?.includes(CLAUDE)
240+
aiNewModelChatbotFlag?.model?.name?.includes(ANTHROPIC)
241241
? "!text-anthropicColor"
242242
: ""
243243
}

Diff for: pages/api/chat.ts

+3-34
Original file line number | Diff line number | Diff line change
@@ -9,7 +9,6 @@ import { getCookie } from "cookies-next";
99
import { initAi } from "@launchdarkly/server-sdk-ai";
1010
import { LD_CONTEXT_COOKIE_KEY } from "@/utils/constants";
1111
import { v4 as uuidv4 } from "uuid";
12-
import { DEFAULT_AI_MODEL } from "@/utils/constants";
1312

1413
export default async function chatResponse(
1514
req: NextApiRequest,
@@ -53,44 +52,14 @@ export default async function chatResponse(
5352
const ldClient = await getServerClient(process.env.LD_SDK_KEY || "");
5453
const aiClient = initAi(ldClient);
5554
const context: any = clientSideAudienceContext || {
56-
kind: "audience",
57-
key: uuidv4().slice(0, 6),
55+
kind: "user",
56+
key: uuidv4(),
5857
};
5958

60-
const aiConfig = await aiClient.config(aiConfigKey!, context, {}, {});
59+
const aiConfig = await aiClient.config(aiConfigKey, context, {}, {});
6160
if (!aiConfig.enabled) {
6261
throw new Error("AI config is disabled");
6362
} else {
64-
const parameters = aiConfig.model?.parameters || {};
65-
const objWithoutModelId2 = Object.keys(parameters).reduce(
66-
(newObj: any, key) => {
67-
if (
68-
key === "maxTokens" &&
69-
aiConfig.model &&
70-
aiConfig.model?.parameters &&
71-
aiConfig?.model.name.includes("cohere")
72-
) {
73-
newObj["max_tokens"] = aiConfig.model.parameters[key];
74-
}
75-
if (
76-
key === "maxTokens" &&
77-
aiConfig.model &&
78-
aiConfig.model?.parameters &&
79-
aiConfig?.model.name.includes("anthropic")
80-
) {
81-
newObj["max_tokens_to_sample"] =
82-
aiConfig.model.parameters[key];
83-
}
84-
if (key !== "maxTokens" &&
85-
aiConfig.model
86-
) {
87-
newObj[key] = (aiConfig.model as any)[key];
88-
}
89-
return newObj;
90-
},
91-
{}
92-
);
93-
9463
if (!aiConfig.model) {
9564
throw new Error("AI model configuration is undefined");
9665
}

0 commit comments

Comments (0)