Compare commits

..

1 Commit

Author SHA1 Message Date
Alex Jones
1312c00547 Fix formatting in Makefile
Signed-off-by: Alex Jones <1235925+AlexsJones@users.noreply.github.com>
2026-02-28 18:32:12 +00:00
11 changed files with 12 additions and 477 deletions

View File

@@ -1 +1 @@
{".":"0.4.31"}
{".":"0.4.30"}

View File

@@ -1,22 +1,5 @@
# Changelog
## [0.4.31](https://github.com/k8sgpt-ai/k8sgpt/compare/v0.4.30...v0.4.31) (2026-03-24)
### Features
* support amazonbedrock converse api ([#1627](https://github.com/k8sgpt-ai/k8sgpt/issues/1627)) ([fc6a83d](https://github.com/k8sgpt-ai/k8sgpt/commit/fc6a83d063e69293f4e3aa18bd887740401c8fe0))
### Other
* updated readme ([#1620](https://github.com/k8sgpt-ai/k8sgpt/issues/1620)) ([fd5bba6](https://github.com/k8sgpt-ai/k8sgpt/commit/fd5bba6ab3ad7a81ef982f1980ac9c9de23bc46c))
### Docs
* align Go version with go.mod toolchain ([#1609](https://github.com/k8sgpt-ai/k8sgpt/issues/1609)) ([19a172e](https://github.com/k8sgpt-ai/k8sgpt/commit/19a172e575ffba6cd89330479033731426358342))
## [0.4.30](https://github.com/k8sgpt-ai/k8sgpt/compare/v0.4.29...v0.4.30) (2026-02-20)

View File

@@ -6,7 +6,7 @@
# define the default goal
#
ROOT_PACKAGE=github.com/k8sgpt-ai/k8sgpt
SHELL := /bin/bash
DIRS=$(shell ls)
GO=go
@@ -160,4 +160,4 @@ helm:
chmod +x $(OUTPUT_DIR)/helm-$(GOOS)-$(GOARCH); \
rm -rf ./$(GOOS)-$(GOARCH)/; \
fi
HELM=$(OUTPUT_DIR)/helm-$(GOOS)-$(GOARCH)
HELM=$(OUTPUT_DIR)/helm-$(GOOS)-$(GOARCH)

View File

@@ -67,7 +67,7 @@ brew install k8sgpt
<!---x-release-please-start-version-->
```
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_386.rpm
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_386.rpm
```
<!---x-release-please-end-->
@@ -75,7 +75,7 @@ brew install k8sgpt
<!---x-release-please-start-version-->
```
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_amd64.rpm
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_amd64.rpm
```
<!---x-release-please-end-->
</details>
@@ -88,7 +88,7 @@ brew install k8sgpt
<!---x-release-please-start-version-->
```
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_386.deb
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_386.deb
sudo dpkg -i k8sgpt_386.deb
```
@@ -99,7 +99,7 @@ sudo dpkg -i k8sgpt_386.deb
<!---x-release-please-start-version-->
```
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_amd64.deb
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_amd64.deb
sudo dpkg -i k8sgpt_amd64.deb
```
@@ -114,7 +114,7 @@ sudo dpkg -i k8sgpt_amd64.deb
<!---x-release-please-start-version-->
```
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_386.apk
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_386.apk
apk add --allow-untrusted k8sgpt_386.apk
```
<!---x-release-please-end-->
@@ -123,7 +123,7 @@ sudo dpkg -i k8sgpt_amd64.deb
<!---x-release-please-start-version-->
```
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_amd64.apk
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_amd64.apk
apk add --allow-untrusted k8sgpt_amd64.apk
```
<!---x-release-please-end-->
@@ -500,21 +500,6 @@ k8sgpt auth default -p azureopenai
Default provider set to azureopenai
```
_Using Amazon Bedrock Converse with inference profiles_
_System Inference Profile_
```
k8sgpt auth add --backend amazonbedrockconverse --providerRegion us-east-1 --model arn:aws:bedrock:us-east-1:123456789012:inference-profile/my-inference-profile
```
_Application Inference Profile_
```
k8sgpt auth add --backend amazonbedrockconverse --providerRegion us-east-1 --model arn:aws:bedrock:us-east-1:123456789012:application-inference-profile/2uzp4s0w39t6
```
_Using Amazon Bedrock with inference profiles_
_System Inference Profile_

View File

@@ -24,9 +24,6 @@ K8sGPT supports a variety of AI/LLM providers (backends). Some providers have a
### Cohere
- **Model:** User-configurable (any model supported by Cohere)
### Amazon Bedrock Converse
- **Model:** User-configurable (any model supported by [Amazon Bedrock Converse](https://docs.aws.amazon.com/bedrock/latest/userguide/models-api-compatibility.html))
### Amazon Bedrock
- **Supported Models:**
- anthropic.claude-sonnet-4-20250514-v1:0
@@ -83,4 +80,4 @@ K8sGPT supports a variety of AI/LLM providers (backends). Some providers have a
---
For more details on configuring each provider and model, refer to the official K8sGPT documentation and the provider's own documentation.
For more details on configuring each provider and model, refer to the official K8sGPT documentation and the provider's own documentation.

View File

@@ -48,9 +48,6 @@ var addCmd = &cobra.Command{
if strings.ToLower(backend) == "amazonbedrock" {
_ = cmd.MarkFlagRequired("providerRegion")
}
if strings.ToLower(backend) == "amazonbedrockconverse" {
_ = cmd.MarkFlagRequired("providerRegion")
}
if strings.ToLower(backend) == "ibmwatsonxai" {
_ = cmd.MarkFlagRequired("providerId")
}
@@ -143,7 +140,6 @@ var addCmd = &cobra.Command{
TopP: topP,
TopK: topK,
MaxTokens: maxTokens,
StopSequences: stopSequences,
OrganizationId: organizationId,
}
@@ -177,14 +173,12 @@ func init() {
addCmd.Flags().Int32VarP(&topK, "topk", "c", 50, "Sampling Cutoff: Set a threshold (1-100) to restrict the sampling process to the top K most probable words at each step. Higher values lead to greater variability, lower values increases predictability.")
// max tokens
addCmd.Flags().IntVarP(&maxTokens, "maxtokens", "l", 2048, "Specify a maximum output length. Adjust (1-...) to control text length. Higher values produce longer output, lower values limit length")
// stop sequences
addCmd.Flags().StringSliceVarP(&stopSequences, "stopsequences", "s", []string{}, "Stop Sequences: Define specific tokens or phrases that signal the model to stop generating text.")
// add flag for temperature
addCmd.Flags().Float32VarP(&temperature, "temperature", "t", 0.7, "The sampling temperature, value ranges between 0 ( output be more deterministic) and 1 (more random)")
// add flag for azure open ai engine/deployment name
addCmd.Flags().StringVarP(&engine, "engine", "e", "", "Azure AI deployment name (only for azureopenai backend)")
//add flag for amazonbedrock region name
addCmd.Flags().StringVarP(&providerRegion, "providerRegion", "r", "", "Provider Region name (only for amazonbedrock, amazonbedrockconverse, googlevertexai backend)")
addCmd.Flags().StringVarP(&providerRegion, "providerRegion", "r", "", "Provider Region name (only for amazonbedrock, googlevertexai backend)")
//add flag for vertexAI/WatsonxAI Project ID
addCmd.Flags().StringVarP(&providerId, "providerId", "i", "", "Provider specific ID for e.g. project (only for googlevertexai/ibmwatsonxai backend)")
//add flag for OCI Compartment ID

View File

@@ -32,7 +32,6 @@ var (
topP float32
topK int32
maxTokens int
stopSequences []string
organizationId string
)

View File

@@ -1,161 +0,0 @@
package ai
import (
"context"
"errors"
"fmt"
"github.com/aws/aws-sdk-go-v2/aws"
awsconfig "github.com/aws/aws-sdk-go-v2/config"
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types"
"os"
"strings"
)
// amazonBedrockConverseClientName is the backend identifier this client
// reports from GetName.
const amazonBedrockConverseClientName = "amazonbedrockconverse"

// bedrockConverseAPI abstracts the single Bedrock runtime call this client
// makes, so tests can inject a mock instead of a real AWS client.
type bedrockConverseAPI interface {
	Converse(ctx context.Context, input *bedrockruntime.ConverseInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseOutput, error)
}

// AmazonBedrockConverseClient talks to Amazon Bedrock through the Converse
// API. Fields are populated by Configure; client may be pre-set for testing.
type AmazonBedrockConverseClient struct {
	nopCloser

	client        bedrockConverseAPI // Bedrock runtime client, or an injected mock
	model         string             // validated model identifier (may be an inference-profile ARN)
	temperature   float32            // sampling parameters forwarded to InferenceConfiguration
	topP          float32
	maxTokens     int
	stopSequences []string
}
// getRegion resolves the AWS region to use. A non-empty AWS_DEFAULT_REGION
// environment variable takes precedence; otherwise the supplied provider
// region is returned unchanged.
func getRegion(region string) string {
	if env := os.Getenv("AWS_DEFAULT_REGION"); env != "" {
		return env
	}
	return region
}
// getModelFromString normalizes and validates a user-supplied model name.
// Whitespace is trimmed before validation, so a whitespace-only input is
// rejected rather than silently accepted. Returns the trimmed model name,
// or an error when the result is empty.
func (a *AmazonBedrockConverseClient) getModelFromString(model string) (string, error) {
	// Trim first: the previous order (check, then trim) let "   " slip
	// through as a non-empty model and come back as "" with no error.
	model = strings.TrimSpace(model)
	if model == "" {
		return "", errors.New("model name cannot be empty")
	}
	return model, nil
}
// processError maps low-level AWS SDK errors onto actionable messages.
// Matching is on error text because the SDK does not expose typed errors
// for these conditions; the generic case wraps the original error with %w
// so callers can still unwrap it.
func processError(err error, modelId string) error {
	msg := err.Error()
	switch {
	case strings.Contains(msg, "no such host"):
		// DNS failure: the Bedrock endpoint does not exist in this region.
		// Constant message, so errors.New instead of a verb-less Errorf.
		return errors.New(`the bedrock service is not available in the selected region.
please double-check the service availability for your region at
https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services/`)
	case strings.Contains(msg, "Could not resolve the foundation model"):
		// %q quotes the identifier; the old raw (backquoted) literal used \"
		// which printed literal backslashes, since raw strings do not process
		// escape sequences.
		return fmt.Errorf(`could not resolve the foundation model from model identifier: %q.
please verify that the requested model exists and is accessible
within the specified region`, modelId)
	default:
		return fmt.Errorf("could not invoke model: %q. here is why: %w", modelId, err)
	}
}
// Configure initializes the client from the provider configuration: it
// resolves the target region, lazily builds the AWS Bedrock runtime client
// (unless one was injected, e.g. by tests), validates the model name, and
// copies the sampling parameters onto the client.
func (a *AmazonBedrockConverseClient) Configure(config IAIConfig) error {
	modelInput := config.GetModel()
	var region = getRegion(config.GetProviderRegion())
	// Only create AWS clients if they haven't been injected (for testing)
	if a.client == nil {
		cfg, err := awsconfig.LoadDefaultConfig(context.Background(),
			awsconfig.WithRegion(region),
		)
		if err != nil {
			// Credential problems get a friendlier hint; any other load
			// failure is wrapped with the region for context.
			if strings.Contains(err.Error(), "InvalidAccessKeyId") || strings.Contains(err.Error(), "SignatureDoesNotMatch") || strings.Contains(err.Error(), "NoCredentialProviders") {
				return fmt.Errorf("aws credentials are invalid or missing. Please check your environment variables or aws config. details: %v", err)
			}
			return fmt.Errorf("failed to load aws config for region %s: %w", region, err)
		}
		a.client = bedrockruntime.NewFromConfig(cfg)
	}
	// Validate the model name before storing it.
	foundModel, err := a.getModelFromString(modelInput)
	if err != nil {
		return fmt.Errorf("failed to find model configuration for %s: %w", modelInput, err)
	}
	a.model = foundModel
	// Set common configuration parameters
	a.temperature = config.GetTemperature()
	a.topP = config.GetTopP()
	a.maxTokens = config.GetMaxTokens()
	a.stopSequences = config.GetStopSequences()
	return nil
}
// extractTextFromConverseOutput pulls every text content block out of a
// Converse response and concatenates them. It returns an error when the
// output is nil, is not a message variant, has no content blocks, or
// yields no text at all.
func extractTextFromConverseOutput(output types.ConverseOutput, modelId string) (string, error) {
	if output == nil {
		return "", fmt.Errorf("empty response from model: %s", modelId)
	}
	message, ok := output.(*types.ConverseOutputMemberMessage)
	if !ok {
		return "", fmt.Errorf("unexpected response type from model: %s", modelId)
	}
	blocks := message.Value.Content
	if len(blocks) == 0 {
		return "", fmt.Errorf("no content returned from model: %s", modelId)
	}
	// Collect the text pieces, skipping any non-text block (e.g. images).
	var pieces []string
	for _, blk := range blocks {
		if txt, isText := blk.(*types.ContentBlockMemberText); isText && txt != nil {
			pieces = append(pieces, txt.Value)
		}
	}
	// Checking the joined result (not just len(pieces)) keeps the
	// all-empty-text case an error, matching the original length check.
	joined := strings.Join(pieces, "")
	if joined == "" {
		return "", fmt.Errorf("no text content returned from model: %s", modelId)
	}
	return joined, nil
}
// GetCompletion sends the prompt as a single user message through the
// Bedrock Converse API and returns the concatenated text of the response.
// Sampling parameters (temperature, topP, maxTokens, stopSequences) come
// from the values stored by Configure.
func (a *AmazonBedrockConverseClient) GetCompletion(ctx context.Context, prompt string) (string, error) {
	// The whole prompt becomes one text content block...
	var content = types.ContentBlockMemberText{
		Value: prompt,
	}
	// ...wrapped in a single message with the "user" role.
	var message = types.Message{
		Content: []types.ContentBlock{&content},
		Role:    "user",
	}
	var converseInput = bedrockruntime.ConverseInput{
		ModelId:  aws.String(a.model),
		Messages: []types.Message{message},
		InferenceConfig: &types.InferenceConfiguration{
			Temperature:   aws.Float32(a.temperature),
			TopP:          aws.Float32(a.topP),
			MaxTokens:     aws.Int32(int32(a.maxTokens)),
			StopSequences: a.stopSequences,
		},
	}
	response, err := a.client.Converse(ctx, &converseInput)
	if err != nil {
		// Translate SDK errors into actionable messages.
		return "", processError(err, a.model)
	}
	text, err := extractTextFromConverseOutput(response.Output, a.model)
	if err != nil {
		return "", err
	}
	return text, nil
}
// GetName returns the backend identifier ("amazonbedrockconverse") used to
// select this client.
func (a *AmazonBedrockConverseClient) GetName() string {
	return amazonBedrockConverseClientName
}

View File

@@ -1,250 +0,0 @@
package ai
import (
"context"
"errors"
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types"
"github.com/stretchr/testify/assert"
"testing"
)
// ---- Mock Wrapper ----

// mockConverseClient implements bedrockConverseAPI by delegating to a
// caller-supplied function, letting each test script the Converse response.
type mockConverseClient struct {
	converseFunc func(ctx context.Context, input *bedrockruntime.ConverseInput) (*bedrockruntime.ConverseOutput, error)
}

// Converse satisfies bedrockConverseAPI; option functions are ignored.
func (m *mockConverseClient) Converse(ctx context.Context, input *bedrockruntime.ConverseInput, _ ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseOutput, error) {
	return m.converseFunc(ctx, input)
}
// ---- Tests ----
// TestGetCompletion_Success verifies that GetCompletion returns the text of
// a well-formed Converse response produced by the mocked client.
func TestGetCompletion_Success(t *testing.T) {
	mock := &mockConverseClient{
		converseFunc: func(ctx context.Context, input *bedrockruntime.ConverseInput) (*bedrockruntime.ConverseOutput, error) {
			// Single text content block containing the canned reply.
			return &bedrockruntime.ConverseOutput{
				Output: &types.ConverseOutputMemberMessage{
					Value: types.Message{
						Content: []types.ContentBlock{
							&types.ContentBlockMemberText{
								Value: "mock response",
							},
						},
					},
				},
			}, nil
		},
	}
	client := &AmazonBedrockConverseClient{
		client: mock,
		model:  "test-model",
	}
	result, err := client.GetCompletion(context.Background(), "hello")
	assert.NoError(t, err)
	assert.Equal(t, "mock response", result)
}
// TestGetCompletion_Error verifies that a Converse call failure is surfaced
// as an error from GetCompletion.
func TestGetCompletion_Error(t *testing.T) {
	mock := &mockConverseClient{
		converseFunc: func(ctx context.Context, input *bedrockruntime.ConverseInput) (*bedrockruntime.ConverseOutput, error) {
			return nil, errors.New("some error")
		},
	}
	client := &AmazonBedrockConverseClient{
		client: mock,
		model:  "test-model",
	}
	_, err := client.GetCompletion(context.Background(), "hello")
	assert.Error(t, err)
}
// TestConfigure_WithInjectedClient verifies that Configure skips AWS client
// creation when a client is already injected, and copies every sampling
// parameter from the provider config onto the client.
func TestConfigure_WithInjectedClient(t *testing.T) {
	mock := &mockConverseClient{}
	cfg := &AIProvider{
		Model:          "test-model",
		ProviderRegion: "us-west-2",
		Temperature:    0.5,
		TopP:           0.9,
		MaxTokens:      100,
		StopSequences:  []string{"stop"},
	}
	client := &AmazonBedrockConverseClient{
		client: mock,
	}
	err := client.Configure(cfg)
	assert.NoError(t, err)
	assert.Equal(t, "test-model", client.model)
	assert.Equal(t, float32(0.5), client.temperature)
	assert.Equal(t, float32(0.9), client.topP)
	assert.Equal(t, 100, client.maxTokens)
	assert.Equal(t, []string{"stop"}, client.stopSequences)
}
// TestConfigure_InvalidModel verifies that Configure rejects an empty model
// name with a descriptive error.
func TestConfigure_InvalidModel(t *testing.T) {
	mock := &mockConverseClient{}
	cfg := &AIProvider{
		Model: "",
	}
	client := &AmazonBedrockConverseClient{
		client: mock,
	}
	err := client.Configure(cfg)
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "model name cannot be empty")
}
// TestGetRegion verifies region resolution: AWS_DEFAULT_REGION overrides the
// provided region only when it is non-empty.
func TestGetRegion(t *testing.T) {
	t.Run("uses provided region when env not set", func(t *testing.T) {
		t.Setenv("AWS_DEFAULT_REGION", "")
		result := getRegion("us-west-2")
		assert.Equal(t, "us-west-2", result)
	})
	t.Run("env overrides provided region", func(t *testing.T) {
		t.Setenv("AWS_DEFAULT_REGION", "us-east-1")
		result := getRegion("us-west-2")
		assert.Equal(t, "us-east-1", result)
	})
}
// TestProcessError table-tests the error-message mapping in processError:
// DNS failures, unresolvable models, and the generic fallback.
func TestProcessError(t *testing.T) {
	tests := []struct {
		name     string // subtest label
		err      error  // raw error fed to processError
		modelId  string
		contains string // substring expected in the mapped message
	}{
		{
			name:     "no such host",
			err:      errors.New("dial tcp: no such host"),
			modelId:  "test-model",
			contains: "bedrock service is not available",
		},
		{
			name:     "model not found",
			err:      errors.New("Could not resolve the foundation model"),
			modelId:  "test-model",
			contains: "could not resolve the foundation model",
		},
		{
			name:     "generic error",
			err:      errors.New("something else"),
			modelId:  "test-model",
			contains: "could not invoke model",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := processError(tt.err, tt.modelId)
			assert.Contains(t, result.Error(), tt.contains)
		})
	}
}
// TestExtractTextFromConverseOutput table-tests response extraction: nil or
// empty responses error, text blocks are concatenated in order, and
// non-text blocks (e.g. images) are skipped.
func TestExtractTextFromConverseOutput(t *testing.T) {
	tests := []struct {
		name        string
		output      types.ConverseOutput // simulated Converse response variant
		expectError bool                 // whether extraction should fail
		expected    string               // concatenated text when no error
	}{
		{
			name:        "nil output",
			output:      nil,
			expectError: true,
		},
		{
			name: "empty content",
			output: &types.ConverseOutputMemberMessage{
				Value: types.Message{
					Content: []types.ContentBlock{},
				},
			},
			expectError: true,
		},
		{
			name: "single text block",
			output: &types.ConverseOutputMemberMessage{
				Value: types.Message{
					Content: []types.ContentBlock{
						&types.ContentBlockMemberText{Value: "hello"},
					},
				},
			},
			expected: "hello",
		},
		{
			name: "multiple text blocks",
			output: &types.ConverseOutputMemberMessage{
				Value: types.Message{
					Content: []types.ContentBlock{
						&types.ContentBlockMemberText{Value: "hello "},
						&types.ContentBlockMemberText{Value: "world"},
					},
				},
			},
			expected: "hello world",
		},
		{
			name: "mixed content blocks",
			output: &types.ConverseOutputMemberMessage{
				Value: types.Message{
					Content: []types.ContentBlock{
						&types.ContentBlockMemberText{Value: "hello"},
						// simulate non-text block
						&types.ContentBlockMemberImage{},
						&types.ContentBlockMemberText{Value: " world"},
					},
				},
			},
			expected: "hello world",
		},
		{
			name: "no text blocks",
			output: &types.ConverseOutputMemberMessage{
				Value: types.Message{
					Content: []types.ContentBlock{
						&types.ContentBlockMemberImage{},
					},
				},
			},
			expectError: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result, err := extractTextFromConverseOutput(tt.output, "test-model")
			if tt.expectError {
				assert.Error(t, err)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.expected, result)
		})
	}
}
// TestGetName verifies the backend identifier reported by the client.
func TestGetName(t *testing.T) {
	client := &AmazonBedrockConverseClient{}
	assert.Equal(t, "amazonbedrockconverse", client.GetName())
}

View File

@@ -27,7 +27,6 @@ var (
&NoOpAIClient{},
&CohereClient{},
&AmazonBedRockClient{},
&AmazonBedrockConverseClient{},
&SageMakerAIClient{},
&GoogleGenAIClient{},
&HuggingfaceClient{},
@@ -44,7 +43,6 @@ var (
azureAIClientName,
cohereAIClientName,
amazonbedrockAIClientName,
amazonBedrockConverseClientName,
amazonsagemakerAIClientName,
googleAIClientName,
noopAIClientName,
@@ -87,7 +85,6 @@ type IAIConfig interface {
GetTopP() float32
GetTopK() int32
GetMaxTokens() int
GetStopSequences() []string
GetProviderId() string
GetCompartmentId() string
GetOrganizationId() string
@@ -125,7 +122,6 @@ type AIProvider struct {
TopP float32 `mapstructure:"topp" yaml:"topp,omitempty"`
TopK int32 `mapstructure:"topk" yaml:"topk,omitempty"`
MaxTokens int `mapstructure:"maxtokens" yaml:"maxtokens,omitempty"`
StopSequences []string `mapstructure:"stopsequences" yaml:"stopsequences,omitempty"`
OrganizationId string `mapstructure:"organizationid" yaml:"organizationid,omitempty"`
CustomHeaders []http.Header `mapstructure:"customHeaders"`
}
@@ -154,10 +150,6 @@ func (p *AIProvider) GetMaxTokens() int {
return p.MaxTokens
}
func (p *AIProvider) GetStopSequences() []string {
return p.StopSequences
}
func (p *AIProvider) GetPassword() string {
return p.Password
}
@@ -193,7 +185,7 @@ func (p *AIProvider) GetCustomHeaders() []http.Header {
return p.CustomHeaders
}
var passwordlessProviders = []string{"localai", "ollama", "amazonsagemaker", "amazonbedrock", "amazonbedrockconverse", "googlevertexai", "oci", "customrest"}
var passwordlessProviders = []string{"localai", "ollama", "amazonsagemaker", "amazonbedrock", "googlevertexai", "oci", "customrest"}
func NeedPassword(backend string) bool {
for _, b := range passwordlessProviders {

View File

@@ -61,10 +61,6 @@ func (m *mockConfig) GetMaxTokens() int {
return 0
}
// GetStopSequences returns fixed placeholder stop sequences for the mock
// configuration used in tests.
func (m *mockConfig) GetStopSequences() []string {
	return []string{"", "", "", ""}
}
// GetEndpointName returns an empty endpoint name for the mock configuration
// used in tests.
func (m *mockConfig) GetEndpointName() string {
	return ""
}