Skip to content

Commit 755142c

Browse files
committed
Merge branch 'feat/add-bedrockmantle-provider' of https://github.com/sankeyraut/k8sgpt into feat/add-bedrockmantle-provider
2 parents 505e5a4 + 257c73a commit 755142c

26 files changed

Lines changed: 1305 additions & 33 deletions

.release-please-manifest.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
{".":"0.4.31"}
1+
{".":"0.4.32"}

CHANGELOG.md

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,21 @@
11
# Changelog
22

3+
## [0.4.32](https://github.com/k8sgpt-ai/k8sgpt/compare/v0.4.31...v0.4.32) (2026-04-21)
4+
5+
6+
### Features
7+
8+
* add Azure API Type Support and add Custom HTTP Header ([#1638](https://github.com/k8sgpt-ai/k8sgpt/issues/1638)) ([28fe196](https://github.com/k8sgpt-ai/k8sgpt/commit/28fe196d47ca43cf984f6b07a78cc3f877dc3cc2))
9+
* add daemonset analyzer and special cases for pod and job ([#1636](https://github.com/k8sgpt-ai/k8sgpt/issues/1636)) ([ac329d1](https://github.com/k8sgpt-ai/k8sgpt/commit/ac329d18909d61e67d16fe07e6fda22b84a7e689))
10+
11+
12+
### Bug Fixes
13+
14+
* amazonbedrockconverse claude models temp and topp ([#1629](https://github.com/k8sgpt-ai/k8sgpt/issues/1629)) ([c87a31a](https://github.com/k8sgpt-ai/k8sgpt/commit/c87a31aee13a60b343dae4abef6e1ee6eed148c9))
15+
* **deps:** update module google.golang.org/grpc to v1.79.3 [security] ([#1626](https://github.com/k8sgpt-ai/k8sgpt/issues/1626)) ([97fbf04](https://github.com/k8sgpt-ai/k8sgpt/commit/97fbf04e331b4a6f37c494b8becd8c6f0687af8b))
16+
* improve ConfigMap usage detection for sidecar patterns ([#1602](https://github.com/k8sgpt-ai/k8sgpt/issues/1602)) ([ca0d3eb](https://github.com/k8sgpt-ai/k8sgpt/commit/ca0d3eba3faaf1b786e62b1a5cabad02ae799d6d))
17+
* recognize GKE built-in ingress classes 'gce' and 'gce-internal' ([#1599](https://github.com/k8sgpt-ai/k8sgpt/issues/1599)) ([6ba8fb2](https://github.com/k8sgpt-ai/k8sgpt/commit/6ba8fb217d874e41d5737161a1f8fb1fd1acf4d4))
18+
319
## [0.4.31](https://github.com/k8sgpt-ai/k8sgpt/compare/v0.4.30...v0.4.31) (2026-03-24)
420

521

README.md

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -67,15 +67,15 @@ brew install k8sgpt
6767
<!---x-release-please-start-version-->
6868

6969
```
70-
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_386.rpm
70+
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.32/k8sgpt_386.rpm
7171
```
7272
<!---x-release-please-end-->
7373

7474
**64 bit:**
7575

7676
<!---x-release-please-start-version-->
7777
```
78-
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_amd64.rpm
78+
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.32/k8sgpt_amd64.rpm
7979
```
8080
<!---x-release-please-end-->
8181
</details>
@@ -88,7 +88,7 @@ brew install k8sgpt
8888
<!---x-release-please-start-version-->
8989

9090
```
91-
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_386.deb
91+
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.32/k8sgpt_386.deb
9292
sudo dpkg -i k8sgpt_386.deb
9393
```
9494

@@ -99,7 +99,7 @@ sudo dpkg -i k8sgpt_386.deb
9999
<!---x-release-please-start-version-->
100100

101101
```
102-
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_amd64.deb
102+
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.32/k8sgpt_amd64.deb
103103
sudo dpkg -i k8sgpt_amd64.deb
104104
```
105105

@@ -114,7 +114,7 @@ sudo dpkg -i k8sgpt_amd64.deb
114114

115115
<!---x-release-please-start-version-->
116116
```
117-
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_386.apk
117+
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.32/k8sgpt_386.apk
118118
apk add --allow-untrusted k8sgpt_386.apk
119119
```
120120
<!---x-release-please-end-->
@@ -123,7 +123,7 @@ sudo dpkg -i k8sgpt_amd64.deb
123123

124124
<!---x-release-please-start-version-->
125125
```
126-
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_amd64.apk
126+
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.32/k8sgpt_amd64.apk
127127
apk add --allow-untrusted k8sgpt_amd64.apk
128128
```
129129
<!---x-release-please-end-->

cmd/auth/add.go

Lines changed: 17 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ import (
2121

2222
"github.com/fatih/color"
2323
"github.com/k8sgpt-ai/k8sgpt/pkg/ai"
24+
"github.com/sashabaranov/go-openai"
2425
"github.com/spf13/cobra"
2526
"github.com/spf13/viper"
2627
"golang.org/x/term"
@@ -69,11 +70,21 @@ var addCmd = &cobra.Command{
6970
return false
7071
}
7172

72-
// check if backend is not empty and a valid value
73-
if backend == "" {
73+
switch backend {
74+
case "": // check if backend is not empty and a valid value
7475
color.Yellow(fmt.Sprintf("Warning: backend input is empty, will use the default value: %s", defaultBackend))
7576
backend = defaultBackend
76-
} else {
77+
case "azureopenai":
78+
azureAPIType, _ := cmd.Flags().GetString("azureAPIType")
79+
80+
switch openai.APIType(azureAPIType) {
81+
case "", openai.APITypeAzure, openai.APITypeAzureAD, openai.APITypeCloudflareAzure:
82+
// valid types
83+
default:
84+
color.Red("Error: Valid values of azureAPIType for azureopenai backends are AZURE, AZURE_AD or CLOUDFLARE_AZURE")
85+
os.Exit(1)
86+
}
87+
default:
7788
if !validBackend(ai.Backends, backend) {
7889
color.Red("Error: Backend AI accepted values are '%v'", strings.Join(ai.Backends, ", "))
7990
os.Exit(1)
@@ -148,6 +159,7 @@ var addCmd = &cobra.Command{
148159
MaxTokens: maxTokens,
149160
StopSequences: stopSequences,
150161
OrganizationId: organizationId,
162+
AzureAPIType: azureAPIType,
151163
}
152164

153165
if providerIndex == -1 {
@@ -194,4 +206,6 @@ func init() {
194206
addCmd.Flags().StringVarP(&compartmentId, "compartmentId", "k", "", "Compartment ID for generative AI model (only for oci backend)")
195207
// add flag for openai organization
196208
addCmd.Flags().StringVarP(&organizationId, "organizationId", "o", "", "OpenAI or AzureOpenAI Organization ID (only for openai and azureopenai backend)")
209+
// add flag for azure open ai APIType name
210+
addCmd.Flags().StringVarP(&azureAPIType, "azureAPIType", "a", "", fmt.Sprintf("AzureOpenAI API Type name. Valid values: %s, %s or %s (only for azureopenai backend)", openai.APITypeAzure, openai.APITypeAzureAD, openai.APITypeCloudflareAzure))
197211
}

cmd/auth/auth.go

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@ var (
3434
maxTokens int
3535
stopSequences []string
3636
organizationId string
37+
azureAPIType string
3738
)
3839

3940
var configAI ai.AIConfiguration

cmd/auth/update.go

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,10 +14,12 @@ limitations under the License.
1414
package auth
1515

1616
import (
17+
"fmt"
1718
"os"
1819
"strings"
1920

2021
"github.com/fatih/color"
22+
"github.com/sashabaranov/go-openai"
2123
"github.com/spf13/cobra"
2224
"github.com/spf13/viper"
2325
)
@@ -33,6 +35,16 @@ var updateCmd = &cobra.Command{
3335
if strings.ToLower(backend) == "azureopenai" {
3436
_ = cmd.MarkFlagRequired("engine")
3537
_ = cmd.MarkFlagRequired("baseurl")
38+
39+
azureAPIType, _ := cmd.Flags().GetString("azureAPIType")
40+
41+
switch openai.APIType(azureAPIType) {
42+
case "", openai.APITypeAzure, openai.APITypeAzureAD, openai.APITypeCloudflareAzure:
43+
// valid types
44+
default:
45+
color.Red("Error: Valid values of azureAPIType for azureopenai backends are AZURE, AZURE_AD or CLOUDFLARE_AZURE")
46+
os.Exit(1)
47+
}
3648
}
3749
organizationId, _ := cmd.Flags().GetString("organizationId")
3850
if strings.ToLower(backend) != "azureopenai" && strings.ToLower(backend) != "openai" {
@@ -85,6 +97,10 @@ var updateCmd = &cobra.Command{
8597
configAI.Providers[i].OrganizationId = organizationId
8698
color.Blue("Organization Id updated successfully")
8799
}
100+
if azureAPIType != "" {
101+
configAI.Providers[i].AzureAPIType = azureAPIType
102+
color.Blue("AzureAPIType updated successfully")
103+
}
88104
configAI.Providers[i].Temperature = temperature
89105
color.Green("%s updated in the AI backend provider list", backend)
90106
}
@@ -117,4 +133,6 @@ func init() {
117133
updateCmd.Flags().StringVarP(&engine, "engine", "e", "", "Update Azure AI deployment name")
118134
// update flag for organizationId
119135
updateCmd.Flags().StringVarP(&organizationId, "organizationId", "o", "", "Update OpenAI or Azure organization Id")
136+
// add flag for azure open ai APIType name
137+
updateCmd.Flags().StringVarP(&azureAPIType, "azureAPIType", "a", "", fmt.Sprintf("AzureOpenAI API Type name. Valid values: %s, %s or %s (only for azureopenai backend)", openai.APITypeAzure, openai.APITypeAzureAD, openai.APITypeCloudflareAzure))
120138
}

cmd/serve/serve.go

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,10 @@ limitations under the License.
1414
package serve
1515

1616
import (
17+
"net/http"
1718
"os"
1819
"strconv"
20+
"strings"
1921

2022
k8sgptserver "github.com/k8sgpt-ai/k8sgpt/pkg/server"
2123

@@ -120,12 +122,31 @@ var ServeCmd = &cobra.Command{
120122
}
121123
return int(maxTokens)
122124
}
125+
126+
// Parse custom headers from environment variable
127+
parseCustomHeaders := func() []http.Header {
128+
headersEnv := os.Getenv("K8SGPT_CUSTOM_HEADERS")
129+
if headersEnv == "" {
130+
return nil
131+
}
132+
133+
header := make(http.Header)
134+
headerPairs := strings.Split(headersEnv, ",")
135+
for _, pair := range headerPairs {
136+
kv := strings.SplitN(pair, ":", 2)
137+
if len(kv) == 2 {
138+
header.Add(strings.TrimSpace(kv[0]), strings.TrimSpace(kv[1]))
139+
}
140+
}
141+
return []http.Header{header}
142+
}
123143
// Check for env injection
124144
backend = os.Getenv("K8SGPT_BACKEND")
125145
password := os.Getenv("K8SGPT_PASSWORD")
126146
model := os.Getenv("K8SGPT_MODEL")
127147
baseURL := os.Getenv("K8SGPT_BASEURL")
128148
engine := os.Getenv("K8SGPT_ENGINE")
149+
azureAPIType := os.Getenv("K8SGPT_AZURE_API_TYPE")
129150
proxyEndpoint := os.Getenv("K8SGPT_PROXY_ENDPOINT")
130151
providerId := os.Getenv("K8SGPT_PROVIDER_ID")
131152
// If the envs are set, allocate in place to the aiProvider
@@ -138,6 +159,8 @@ var ServeCmd = &cobra.Command{
138159
Model: model,
139160
BaseURL: baseURL,
140161
Engine: engine,
162+
AzureAPIType: azureAPIType,
163+
CustomHeaders: parseCustomHeaders(),
141164
ProxyEndpoint: proxyEndpoint,
142165
ProviderId: providerId,
143166
Temperature: temperature(),

pkg/ai/amazonbedrockconverse.go

Lines changed: 18 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,11 @@ func processError(err error, modelId string) error {
6161
}
6262
}
6363

64+
func isClaudeModel(modelId string) bool {
65+
m := strings.ToLower(modelId)
66+
return strings.Contains(m, "claude")
67+
}
68+
6469
func (a *AmazonBedrockConverseClient) Configure(config IAIConfig) error {
6570
modelInput := config.GetModel()
6671

@@ -133,15 +138,22 @@ func (a *AmazonBedrockConverseClient) GetCompletion(ctx context.Context, prompt
133138
Content: []types.ContentBlock{&content},
134139
Role: "user",
135140
}
141+
142+
var infConfig = &types.InferenceConfiguration{
143+
MaxTokens: aws.Int32(int32(a.maxTokens)),
144+
StopSequences: a.stopSequences,
145+
}
146+
147+
// Claude models only support temperature OR topP, while others support both temperature and topP. Prefer temperature for now
148+
if !isClaudeModel(a.model) {
149+
infConfig.TopP = aws.Float32(a.topP)
150+
}
151+
infConfig.Temperature = aws.Float32(a.temperature)
152+
136153
var converseInput = bedrockruntime.ConverseInput{
137154
ModelId: aws.String(a.model),
138155
Messages: []types.Message{message},
139-
InferenceConfig: &types.InferenceConfiguration{
140-
Temperature: aws.Float32(a.temperature),
141-
TopP: aws.Float32(a.topP),
142-
MaxTokens: aws.Int32(int32(a.maxTokens)),
143-
StopSequences: a.stopSequences,
144-
},
156+
InferenceConfig: infConfig,
145157
}
146158
response, err := a.client.Converse(ctx, &converseInput)
147159
if err != nil {

pkg/ai/amazonbedrockconverse_mock_test.go

Lines changed: 87 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,9 +12,11 @@ import (
1212
// ---- Mock Wrapper ----
1313
type mockConverseClient struct {
1414
converseFunc func(ctx context.Context, input *bedrockruntime.ConverseInput) (*bedrockruntime.ConverseOutput, error)
15+
lastInput *bedrockruntime.ConverseInput
1516
}
1617

1718
func (m *mockConverseClient) Converse(ctx context.Context, input *bedrockruntime.ConverseInput, _ ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseOutput, error) {
19+
m.lastInput = input
1820
return m.converseFunc(ctx, input)
1921
}
2022

@@ -244,6 +246,91 @@ func TestExtractTextFromConverseOutput(t *testing.T) {
244246
}
245247
}
246248

249+
func TestGetCompletion_ClaudeModel_UsesTemperatureOnly(t *testing.T) {
250+
mock := &mockConverseClient{
251+
converseFunc: func(ctx context.Context, input *bedrockruntime.ConverseInput) (*bedrockruntime.ConverseOutput, error) {
252+
return &bedrockruntime.ConverseOutput{
253+
Output: &types.ConverseOutputMemberMessage{
254+
Value: types.Message{
255+
Content: []types.ContentBlock{
256+
&types.ContentBlockMemberText{Value: "ok"},
257+
},
258+
},
259+
},
260+
}, nil
261+
},
262+
}
263+
264+
client := &AmazonBedrockConverseClient{
265+
client: mock,
266+
model: "anthropic.claude-v2",
267+
temperature: 0.5,
268+
topP: 0.9,
269+
maxTokens: 100,
270+
}
271+
272+
_, err := client.GetCompletion(context.Background(), "hello")
273+
274+
assert.NoError(t, err)
275+
276+
inf := mock.lastInput.InferenceConfig
277+
278+
assert.NotNil(t, inf.Temperature)
279+
assert.Nil(t, inf.TopP)
280+
}
281+
282+
func TestGetCompletion_NonClaudeModel_UsesTemperatureAndTopP(t *testing.T) {
283+
mock := &mockConverseClient{
284+
converseFunc: func(ctx context.Context, input *bedrockruntime.ConverseInput) (*bedrockruntime.ConverseOutput, error) {
285+
return &bedrockruntime.ConverseOutput{
286+
Output: &types.ConverseOutputMemberMessage{
287+
Value: types.Message{
288+
Content: []types.ContentBlock{
289+
&types.ContentBlockMemberText{Value: "ok"},
290+
},
291+
},
292+
},
293+
}, nil
294+
},
295+
}
296+
297+
client := &AmazonBedrockConverseClient{
298+
client: mock,
299+
model: "amazon.titan-text",
300+
temperature: 0.5,
301+
topP: 0.9,
302+
maxTokens: 100,
303+
}
304+
305+
_, err := client.GetCompletion(context.Background(), "hello")
306+
307+
assert.NoError(t, err)
308+
309+
inf := mock.lastInput.InferenceConfig
310+
311+
assert.NotNil(t, inf.Temperature)
312+
assert.NotNil(t, inf.TopP)
313+
assert.Equal(t, float32(0.9), *inf.TopP)
314+
}
315+
316+
func TestIsClaudeModel(t *testing.T) {
317+
tests := []struct {
318+
model string
319+
expected bool
320+
}{
321+
{"anthropic.claude-opus-4-6-v1", true},
322+
{"CLAUDE-3", true},
323+
{"amazon.titan", false},
324+
{"gpt-4", false},
325+
}
326+
327+
for _, tt := range tests {
328+
t.Run(tt.model, func(t *testing.T) {
329+
assert.Equal(t, tt.expected, isClaudeModel(tt.model))
330+
})
331+
}
332+
}
333+
247334
func TestGetName(t *testing.T) {
248335
client := &AmazonBedrockConverseClient{}
249336
assert.Equal(t, "amazonbedrockconverse", client.GetName())

0 commit comments

Comments (0)