mirror of
https://github.com/k8sgpt-ai/k8sgpt.git
synced 2026-03-29 16:32:52 +00:00
Compare commits
5 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
74b1ee1c16 | ||
|
|
fc6a83d063 | ||
|
|
2276b12b0f | ||
|
|
fd5bba6ab3 | ||
|
|
19a172e575 |
@@ -1 +1 @@
|
||||
{".":"0.4.30"}
|
||||
{".":"0.4.31"}
|
||||
17
CHANGELOG.md
17
CHANGELOG.md
@@ -1,5 +1,22 @@
|
||||
# Changelog
|
||||
|
||||
## [0.4.31](https://github.com/k8sgpt-ai/k8sgpt/compare/v0.4.30...v0.4.31) (2026-03-24)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* support amazonbedrock converse api ([#1627](https://github.com/k8sgpt-ai/k8sgpt/issues/1627)) ([fc6a83d](https://github.com/k8sgpt-ai/k8sgpt/commit/fc6a83d063e69293f4e3aa18bd887740401c8fe0))
|
||||
|
||||
|
||||
### Other
|
||||
|
||||
* updated readme ([#1620](https://github.com/k8sgpt-ai/k8sgpt/issues/1620)) ([fd5bba6](https://github.com/k8sgpt-ai/k8sgpt/commit/fd5bba6ab3ad7a81ef982f1980ac9c9de23bc46c))
|
||||
|
||||
|
||||
### Docs
|
||||
|
||||
* align Go version with go.mod toolchain ([#1609](https://github.com/k8sgpt-ai/k8sgpt/issues/1609)) ([19a172e](https://github.com/k8sgpt-ai/k8sgpt/commit/19a172e575ffba6cd89330479033731426358342))
|
||||
|
||||
## [0.4.30](https://github.com/k8sgpt-ai/k8sgpt/compare/v0.4.29...v0.4.30) (2026-02-20)
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
We're happy that you want to contribute to this project. Please read the sections to make the process as smooth as possible.
|
||||
|
||||
## Requirements
|
||||
- Golang `1.23`
|
||||
- Golang `1.24+`
|
||||
- An OpenAI API key
|
||||
* OpenAI API keys can be obtained from [OpenAI](https://platform.openai.com/account/api-keys)
|
||||
* You can set the API key for k8sgpt using `./k8sgpt auth key`
|
||||
|
||||
31
README.md
31
README.md
@@ -21,6 +21,10 @@ It has SRE experience codified into its analyzers and helps to pull out the most
|
||||
|
||||
_Out of the box integration with OpenAI, Azure, Cohere, Amazon Bedrock, Google Gemini and local models._
|
||||
|
||||
|
||||
> **Sister project:** Check out [sympozium](https://github.com/AlexsJones/sympozium/) for managing agents in Kubernetes.
|
||||
|
||||
|
||||
<a href="https://www.producthunt.com/posts/k8sgpt?utm_source=badge-featured&utm_medium=badge&utm_souce=badge-k8sgpt" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=389489&theme=light" alt="K8sGPT - K8sGPT gives Kubernetes Superpowers to everyone | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a> <a href="https://hellogithub.com/repository/9dfe44c18dfb4d6fa0181baf8b2cf2e1" target="_blank"><img src="https://abroad.hellogithub.com/v1/widgets/recommend.svg?rid=9dfe44c18dfb4d6fa0181baf8b2cf2e1&claim_uid=gqG4wmzkMrP0eFy" alt="Featured|HelloGitHub" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
|
||||
|
||||
@@ -63,7 +67,7 @@ brew install k8sgpt
|
||||
<!---x-release-please-start-version-->
|
||||
|
||||
```
|
||||
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_386.rpm
|
||||
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_386.rpm
|
||||
```
|
||||
<!---x-release-please-end-->
|
||||
|
||||
@@ -71,7 +75,7 @@ brew install k8sgpt
|
||||
|
||||
<!---x-release-please-start-version-->
|
||||
```
|
||||
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_amd64.rpm
|
||||
sudo rpm -ivh https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_amd64.rpm
|
||||
```
|
||||
<!---x-release-please-end-->
|
||||
</details>
|
||||
@@ -84,7 +88,7 @@ brew install k8sgpt
|
||||
<!---x-release-please-start-version-->
|
||||
|
||||
```
|
||||
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_386.deb
|
||||
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_386.deb
|
||||
sudo dpkg -i k8sgpt_386.deb
|
||||
```
|
||||
|
||||
@@ -95,7 +99,7 @@ sudo dpkg -i k8sgpt_386.deb
|
||||
<!---x-release-please-start-version-->
|
||||
|
||||
```
|
||||
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_amd64.deb
|
||||
curl -LO https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_amd64.deb
|
||||
sudo dpkg -i k8sgpt_amd64.deb
|
||||
```
|
||||
|
||||
@@ -110,7 +114,7 @@ sudo dpkg -i k8sgpt_amd64.deb
|
||||
|
||||
<!---x-release-please-start-version-->
|
||||
```
|
||||
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_386.apk
|
||||
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_386.apk
|
||||
apk add --allow-untrusted k8sgpt_386.apk
|
||||
```
|
||||
<!---x-release-please-end-->
|
||||
@@ -119,7 +123,7 @@ sudo dpkg -i k8sgpt_amd64.deb
|
||||
|
||||
<!---x-release-please-start-version-->
|
||||
```
|
||||
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.30/k8sgpt_amd64.apk
|
||||
wget https://github.com/k8sgpt-ai/k8sgpt/releases/download/v0.4.31/k8sgpt_amd64.apk
|
||||
apk add --allow-untrusted k8sgpt_amd64.apk
|
||||
```
|
||||
<!---x-release-please-end-->
|
||||
@@ -496,6 +500,21 @@ k8sgpt auth default -p azureopenai
|
||||
Default provider set to azureopenai
|
||||
```
|
||||
|
||||
_Using Amazon Bedrock Converse with inference profiles_
|
||||
|
||||
_System Inference Profile_
|
||||
|
||||
```
|
||||
k8sgpt auth add --backend amazonbedrockconverse --providerRegion us-east-1 --model arn:aws:bedrock:us-east-1:123456789012:inference-profile/my-inference-profile
|
||||
|
||||
```
|
||||
|
||||
_Application Inference Profile_
|
||||
|
||||
```
|
||||
k8sgpt auth add --backend amazonbedrockconverse --providerRegion us-east-1 --model arn:aws:bedrock:us-east-1:123456789012:application-inference-profile/2uzp4s0w39t6
|
||||
|
||||
```
|
||||
_Using Amazon Bedrock with inference profiles_
|
||||
|
||||
_System Inference Profile_
|
||||
|
||||
@@ -24,6 +24,9 @@ K8sGPT supports a variety of AI/LLM providers (backends). Some providers have a
|
||||
### Cohere
|
||||
- **Model:** User-configurable (any model supported by Cohere)
|
||||
|
||||
### Amazon Bedrock Converse
|
||||
- **Model:** User-configurable (any model supported by [Amazon Bedrock Converse](https://docs.aws.amazon.com/bedrock/latest/userguide/models-api-compatibility.html))
|
||||
|
||||
### Amazon Bedrock
|
||||
- **Supported Models:**
|
||||
- anthropic.claude-sonnet-4-20250514-v1:0
|
||||
@@ -80,4 +83,4 @@ K8sGPT supports a variety of AI/LLM providers (backends). Some providers have a
|
||||
|
||||
---
|
||||
|
||||
For more details on configuring each provider and model, refer to the official K8sGPT documentation and the provider's own documentation.
|
||||
For more details on configuring each provider and model, refer to the official K8sGPT documentation and the provider's own documentation.
|
||||
|
||||
@@ -48,6 +48,9 @@ var addCmd = &cobra.Command{
|
||||
if strings.ToLower(backend) == "amazonbedrock" {
|
||||
_ = cmd.MarkFlagRequired("providerRegion")
|
||||
}
|
||||
if strings.ToLower(backend) == "amazonbedrockconverse" {
|
||||
_ = cmd.MarkFlagRequired("providerRegion")
|
||||
}
|
||||
if strings.ToLower(backend) == "ibmwatsonxai" {
|
||||
_ = cmd.MarkFlagRequired("providerId")
|
||||
}
|
||||
@@ -140,6 +143,7 @@ var addCmd = &cobra.Command{
|
||||
TopP: topP,
|
||||
TopK: topK,
|
||||
MaxTokens: maxTokens,
|
||||
StopSequences: stopSequences,
|
||||
OrganizationId: organizationId,
|
||||
}
|
||||
|
||||
@@ -173,12 +177,14 @@ func init() {
|
||||
addCmd.Flags().Int32VarP(&topK, "topk", "c", 50, "Sampling Cutoff: Set a threshold (1-100) to restrict the sampling process to the top K most probable words at each step. Higher values lead to greater variability, lower values increases predictability.")
|
||||
// max tokens
|
||||
addCmd.Flags().IntVarP(&maxTokens, "maxtokens", "l", 2048, "Specify a maximum output length. Adjust (1-...) to control text length. Higher values produce longer output, lower values limit length")
|
||||
// stop sequences
|
||||
addCmd.Flags().StringSliceVarP(&stopSequences, "stopsequences", "s", []string{}, "Stop Sequences: Define specific tokens or phrases that signal the model to stop generating text.")
|
||||
// add flag for temperature
|
||||
addCmd.Flags().Float32VarP(&temperature, "temperature", "t", 0.7, "The sampling temperature, value ranges between 0 ( output be more deterministic) and 1 (more random)")
|
||||
// add flag for azure open ai engine/deployment name
|
||||
addCmd.Flags().StringVarP(&engine, "engine", "e", "", "Azure AI deployment name (only for azureopenai backend)")
|
||||
//add flag for amazonbedrock region name
|
||||
addCmd.Flags().StringVarP(&providerRegion, "providerRegion", "r", "", "Provider Region name (only for amazonbedrock, googlevertexai backend)")
|
||||
addCmd.Flags().StringVarP(&providerRegion, "providerRegion", "r", "", "Provider Region name (only for amazonbedrock, amazonbedrockconverse, googlevertexai backend)")
|
||||
//add flag for vertexAI/WatsonxAI Project ID
|
||||
addCmd.Flags().StringVarP(&providerId, "providerId", "i", "", "Provider specific ID for e.g. project (only for googlevertexai/ibmwatsonxai backend)")
|
||||
//add flag for OCI Compartment ID
|
||||
|
||||
@@ -32,6 +32,7 @@ var (
|
||||
topP float32
|
||||
topK int32
|
||||
maxTokens int
|
||||
stopSequences []string
|
||||
organizationId string
|
||||
)
|
||||
|
||||
|
||||
161
pkg/ai/amazonbedrockconverse.go
Normal file
161
pkg/ai/amazonbedrockconverse.go
Normal file
@@ -0,0 +1,161 @@
|
||||
package ai
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/aws/aws-sdk-go-v2/aws"
|
||||
awsconfig "github.com/aws/aws-sdk-go-v2/config"
|
||||
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
|
||||
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// amazonBedrockConverseClientName is the backend identifier registered for
// this client (selected via `--backend amazonbedrockconverse`).
const amazonBedrockConverseClientName = "amazonbedrockconverse"

// bedrockConverseAPI abstracts the single Bedrock Runtime operation this
// backend uses, so tests can inject a mock instead of a real AWS client.
type bedrockConverseAPI interface {
	Converse(ctx context.Context, input *bedrockruntime.ConverseInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseOutput, error)
}

// AmazonBedrockConverseClient is an AI backend that talks to Amazon Bedrock
// through the Converse API. The embedded nopCloser supplies a no-op Close.
type AmazonBedrockConverseClient struct {
	nopCloser

	// client is the Bedrock Runtime client; left injectable for testing
	// (Configure only constructs one when this is nil).
	client bedrockConverseAPI
	// model is the model ID or inference-profile ARN to invoke.
	model string
	temperature float32
	topP float32
	maxTokens int
	stopSequences []string
}
|
||||
|
||||
// getRegion resolves the AWS region for the Bedrock client. The
// AWS_DEFAULT_REGION environment variable, when non-empty, overrides the
// region configured on the provider; otherwise the supplied region is used.
func getRegion(region string) string {
	// Single environment lookup with a tightly-scoped variable (the
	// original read the variable twice).
	if env := os.Getenv("AWS_DEFAULT_REGION"); env != "" {
		return env
	}
	return region
}
|
||||
|
||||
func (a *AmazonBedrockConverseClient) getModelFromString(model string) (string, error) {
|
||||
if model == "" {
|
||||
return "", errors.New("model name cannot be empty")
|
||||
}
|
||||
model = strings.TrimSpace(model)
|
||||
|
||||
return model, nil
|
||||
}
|
||||
|
||||
// processError translates low-level AWS SDK errors into actionable,
// user-facing messages:
//   - DNS "no such host" -> Bedrock is not offered in the selected region.
//   - unresolved foundation model -> the model ID/region pairing is wrong.
//   - anything else -> generic invocation failure with the SDK error attached.
//
// Note: the original used `\"` inside back-quoted raw string literals;
// escape sequences are NOT interpreted in raw strings, so the messages
// contained literal backslash-quote characters. %q quoting fixes that,
// and the no-argument fmt.Errorf is replaced by errors.New (go vet clean).
func processError(err error, modelId string) error {
	errMsg := err.Error()
	switch {
	case strings.Contains(errMsg, "no such host"):
		return errors.New(`the bedrock service is not available in the selected region.
please double-check the service availability for your region at
https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services/`)
	case strings.Contains(errMsg, "Could not resolve the foundation model"):
		return fmt.Errorf(`could not resolve the foundation model from model identifier: %q.
please verify that the requested model exists and is accessible
within the specified region`, modelId)
	default:
		return fmt.Errorf("could not invoke model: %q. here is why: %s", modelId, err)
	}
}
|
||||
|
||||
func (a *AmazonBedrockConverseClient) Configure(config IAIConfig) error {
|
||||
modelInput := config.GetModel()
|
||||
|
||||
var region = getRegion(config.GetProviderRegion())
|
||||
|
||||
// Only create AWS clients if they haven't been injected (for testing)
|
||||
if a.client == nil {
|
||||
cfg, err := awsconfig.LoadDefaultConfig(context.Background(),
|
||||
awsconfig.WithRegion(region),
|
||||
)
|
||||
if err != nil {
|
||||
if strings.Contains(err.Error(), "InvalidAccessKeyId") || strings.Contains(err.Error(), "SignatureDoesNotMatch") || strings.Contains(err.Error(), "NoCredentialProviders") {
|
||||
return fmt.Errorf("aws credentials are invalid or missing. Please check your environment variables or aws config. details: %v", err)
|
||||
}
|
||||
return fmt.Errorf("failed to load aws config for region %s: %w", region, err)
|
||||
}
|
||||
|
||||
a.client = bedrockruntime.NewFromConfig(cfg)
|
||||
}
|
||||
|
||||
foundModel, err := a.getModelFromString(modelInput)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to find model configuration for %s: %w", modelInput, err)
|
||||
}
|
||||
a.model = foundModel
|
||||
|
||||
// Set common configuration parameters
|
||||
a.temperature = config.GetTemperature()
|
||||
a.topP = config.GetTopP()
|
||||
a.maxTokens = config.GetMaxTokens()
|
||||
a.stopSequences = config.GetStopSequences()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func extractTextFromConverseOutput(output types.ConverseOutput, modelId string) (string, error) {
|
||||
if output == nil {
|
||||
return "", fmt.Errorf("empty response from model: %s", modelId)
|
||||
}
|
||||
|
||||
msg, ok := output.(*types.ConverseOutputMemberMessage)
|
||||
if !ok {
|
||||
return "", fmt.Errorf("unexpected response type from model: %s", modelId)
|
||||
}
|
||||
|
||||
if len(msg.Value.Content) == 0 {
|
||||
return "", fmt.Errorf("no content returned from model: %s", modelId)
|
||||
}
|
||||
|
||||
var builder strings.Builder
|
||||
|
||||
for _, block := range msg.Value.Content {
|
||||
if textBlock, ok := block.(*types.ContentBlockMemberText); ok && textBlock != nil {
|
||||
builder.WriteString(textBlock.Value)
|
||||
}
|
||||
}
|
||||
|
||||
if builder.Len() == 0 {
|
||||
return "", fmt.Errorf("no text content returned from model: %s", modelId)
|
||||
}
|
||||
|
||||
return builder.String(), nil
|
||||
}
|
||||
|
||||
func (a *AmazonBedrockConverseClient) GetCompletion(ctx context.Context, prompt string) (string, error) {
|
||||
var content = types.ContentBlockMemberText{
|
||||
Value: prompt,
|
||||
}
|
||||
var message = types.Message{
|
||||
Content: []types.ContentBlock{&content},
|
||||
Role: "user",
|
||||
}
|
||||
var converseInput = bedrockruntime.ConverseInput{
|
||||
ModelId: aws.String(a.model),
|
||||
Messages: []types.Message{message},
|
||||
InferenceConfig: &types.InferenceConfiguration{
|
||||
Temperature: aws.Float32(a.temperature),
|
||||
TopP: aws.Float32(a.topP),
|
||||
MaxTokens: aws.Int32(int32(a.maxTokens)),
|
||||
StopSequences: a.stopSequences,
|
||||
},
|
||||
}
|
||||
response, err := a.client.Converse(ctx, &converseInput)
|
||||
if err != nil {
|
||||
return "", processError(err, a.model)
|
||||
}
|
||||
|
||||
text, err := extractTextFromConverseOutput(response.Output, a.model)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return text, nil
|
||||
}
|
||||
|
||||
// GetName reports the backend identifier for this client
// ("amazonbedrockconverse").
func (a *AmazonBedrockConverseClient) GetName() string {
	return amazonBedrockConverseClientName
}
|
||||
250
pkg/ai/amazonbedrockconverse_mock_test.go
Normal file
250
pkg/ai/amazonbedrockconverse_mock_test.go
Normal file
@@ -0,0 +1,250 @@
|
||||
package ai
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
|
||||
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// ---- Mock Wrapper ----

// mockConverseClient implements bedrockConverseAPI for tests; each test
// supplies its own converseFunc to script the response.
type mockConverseClient struct {
	converseFunc func(ctx context.Context, input *bedrockruntime.ConverseInput) (*bedrockruntime.ConverseOutput, error)
}

// Converse delegates to the injected converseFunc; option functions are
// discarded because no test needs them.
func (m *mockConverseClient) Converse(ctx context.Context, input *bedrockruntime.ConverseInput, _ ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseOutput, error) {
	return m.converseFunc(ctx, input)
}
|
||||
|
||||
// ---- Tests ----

// TestGetCompletion_Success verifies that a text block returned by the
// mocked Converse call is surfaced unchanged as the completion string.
func TestGetCompletion_Success(t *testing.T) {
	mock := &mockConverseClient{
		converseFunc: func(ctx context.Context, input *bedrockruntime.ConverseInput) (*bedrockruntime.ConverseOutput, error) {
			return &bedrockruntime.ConverseOutput{
				Output: &types.ConverseOutputMemberMessage{
					Value: types.Message{
						Content: []types.ContentBlock{
							&types.ContentBlockMemberText{
								Value: "mock response",
							},
						},
					},
				},
			}, nil
		},
	}

	client := &AmazonBedrockConverseClient{
		client: mock,
		model:  "test-model",
	}

	result, err := client.GetCompletion(context.Background(), "hello")

	assert.NoError(t, err)
	assert.Equal(t, "mock response", result)
}

// TestGetCompletion_Error verifies that an error from the Converse call is
// propagated to the caller as a non-nil error.
func TestGetCompletion_Error(t *testing.T) {
	mock := &mockConverseClient{
		converseFunc: func(ctx context.Context, input *bedrockruntime.ConverseInput) (*bedrockruntime.ConverseOutput, error) {
			return nil, errors.New("some error")
		},
	}

	client := &AmazonBedrockConverseClient{
		client: mock,
		model:  "test-model",
	}

	_, err := client.GetCompletion(context.Background(), "hello")

	assert.Error(t, err)
}
|
||||
|
||||
// TestConfigure_WithInjectedClient verifies that Configure keeps a
// pre-injected client (no real AWS config load) and copies every generation
// parameter from the provider configuration onto the client.
func TestConfigure_WithInjectedClient(t *testing.T) {
	mock := &mockConverseClient{}

	cfg := &AIProvider{
		Model:          "test-model",
		ProviderRegion: "us-west-2",
		Temperature:    0.5,
		TopP:           0.9,
		MaxTokens:      100,
		StopSequences:  []string{"stop"},
	}

	client := &AmazonBedrockConverseClient{
		client: mock,
	}

	err := client.Configure(cfg)

	assert.NoError(t, err)
	assert.Equal(t, "test-model", client.model)
	assert.Equal(t, float32(0.5), client.temperature)
	assert.Equal(t, float32(0.9), client.topP)
	assert.Equal(t, 100, client.maxTokens)
	assert.Equal(t, []string{"stop"}, client.stopSequences)
}

// TestConfigure_InvalidModel verifies that an empty model name is rejected
// with an explanatory error.
func TestConfigure_InvalidModel(t *testing.T) {
	mock := &mockConverseClient{}

	cfg := &AIProvider{
		Model: "",
	}

	client := &AmazonBedrockConverseClient{
		client: mock,
	}

	err := client.Configure(cfg)

	assert.Error(t, err)
	assert.Contains(t, err.Error(), "model name cannot be empty")
}
|
||||
|
||||
// TestGetRegion verifies region resolution precedence: AWS_DEFAULT_REGION,
// when set, overrides the provider-configured region.
func TestGetRegion(t *testing.T) {
	t.Run("uses provided region when env not set", func(t *testing.T) {
		t.Setenv("AWS_DEFAULT_REGION", "")

		result := getRegion("us-west-2")
		assert.Equal(t, "us-west-2", result)
	})

	t.Run("env overrides provided region", func(t *testing.T) {
		t.Setenv("AWS_DEFAULT_REGION", "us-east-1")

		result := getRegion("us-west-2")
		assert.Equal(t, "us-east-1", result)
	})
}
|
||||
|
||||
// TestProcessError is a table test checking that each recognized SDK error
// pattern is mapped to its user-facing message, with a generic fallback.
func TestProcessError(t *testing.T) {
	tests := []struct {
		name     string
		err      error
		modelId  string
		contains string // substring expected in the translated error
	}{
		{
			name:     "no such host",
			err:      errors.New("dial tcp: no such host"),
			modelId:  "test-model",
			contains: "bedrock service is not available",
		},
		{
			name:     "model not found",
			err:      errors.New("Could not resolve the foundation model"),
			modelId:  "test-model",
			contains: "could not resolve the foundation model",
		},
		{
			name:     "generic error",
			err:      errors.New("something else"),
			modelId:  "test-model",
			contains: "could not invoke model",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := processError(tt.err, tt.modelId)
			assert.Contains(t, result.Error(), tt.contains)
		})
	}
}
|
||||
|
||||
// TestExtractTextFromConverseOutput is a table test covering nil output,
// empty content, single/multiple text blocks, mixed (text + non-text)
// content, and content with no text blocks at all.
func TestExtractTextFromConverseOutput(t *testing.T) {
	tests := []struct {
		name        string
		output      types.ConverseOutput
		expectError bool
		expected    string
	}{
		{
			name:        "nil output",
			output:      nil,
			expectError: true,
		},
		{
			name: "empty content",
			output: &types.ConverseOutputMemberMessage{
				Value: types.Message{
					Content: []types.ContentBlock{},
				},
			},
			expectError: true,
		},
		{
			name: "single text block",
			output: &types.ConverseOutputMemberMessage{
				Value: types.Message{
					Content: []types.ContentBlock{
						&types.ContentBlockMemberText{Value: "hello"},
					},
				},
			},
			expected: "hello",
		},
		{
			name: "multiple text blocks",
			output: &types.ConverseOutputMemberMessage{
				Value: types.Message{
					Content: []types.ContentBlock{
						&types.ContentBlockMemberText{Value: "hello "},
						&types.ContentBlockMemberText{Value: "world"},
					},
				},
			},
			expected: "hello world",
		},
		{
			name: "mixed content blocks",
			output: &types.ConverseOutputMemberMessage{
				Value: types.Message{
					Content: []types.ContentBlock{
						&types.ContentBlockMemberText{Value: "hello"},
						// simulate non-text block
						&types.ContentBlockMemberImage{},
						&types.ContentBlockMemberText{Value: " world"},
					},
				},
			},
			expected: "hello world",
		},
		{
			name: "no text blocks",
			output: &types.ConverseOutputMemberMessage{
				Value: types.Message{
					Content: []types.ContentBlock{
						&types.ContentBlockMemberImage{},
					},
				},
			},
			expectError: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result, err := extractTextFromConverseOutput(tt.output, "test-model")

			if tt.expectError {
				assert.Error(t, err)
				return
			}

			assert.NoError(t, err)
			assert.Equal(t, tt.expected, result)
		})
	}
}
|
||||
|
||||
// TestGetName verifies the backend identifier reported by the client.
func TestGetName(t *testing.T) {
	client := &AmazonBedrockConverseClient{}
	assert.Equal(t, "amazonbedrockconverse", client.GetName())
}
|
||||
@@ -27,6 +27,7 @@ var (
|
||||
&NoOpAIClient{},
|
||||
&CohereClient{},
|
||||
&AmazonBedRockClient{},
|
||||
&AmazonBedrockConverseClient{},
|
||||
&SageMakerAIClient{},
|
||||
&GoogleGenAIClient{},
|
||||
&HuggingfaceClient{},
|
||||
@@ -43,6 +44,7 @@ var (
|
||||
azureAIClientName,
|
||||
cohereAIClientName,
|
||||
amazonbedrockAIClientName,
|
||||
amazonBedrockConverseClientName,
|
||||
amazonsagemakerAIClientName,
|
||||
googleAIClientName,
|
||||
noopAIClientName,
|
||||
@@ -85,6 +87,7 @@ type IAIConfig interface {
|
||||
GetTopP() float32
|
||||
GetTopK() int32
|
||||
GetMaxTokens() int
|
||||
GetStopSequences() []string
|
||||
GetProviderId() string
|
||||
GetCompartmentId() string
|
||||
GetOrganizationId() string
|
||||
@@ -122,6 +125,7 @@ type AIProvider struct {
|
||||
TopP float32 `mapstructure:"topp" yaml:"topp,omitempty"`
|
||||
TopK int32 `mapstructure:"topk" yaml:"topk,omitempty"`
|
||||
MaxTokens int `mapstructure:"maxtokens" yaml:"maxtokens,omitempty"`
|
||||
StopSequences []string `mapstructure:"stopsequences" yaml:"stopsequences,omitempty"`
|
||||
OrganizationId string `mapstructure:"organizationid" yaml:"organizationid,omitempty"`
|
||||
CustomHeaders []http.Header `mapstructure:"customHeaders"`
|
||||
}
|
||||
@@ -150,6 +154,10 @@ func (p *AIProvider) GetMaxTokens() int {
|
||||
return p.MaxTokens
|
||||
}
|
||||
|
||||
func (p *AIProvider) GetStopSequences() []string {
|
||||
return p.StopSequences
|
||||
}
|
||||
|
||||
func (p *AIProvider) GetPassword() string {
|
||||
return p.Password
|
||||
}
|
||||
@@ -185,7 +193,7 @@ func (p *AIProvider) GetCustomHeaders() []http.Header {
|
||||
return p.CustomHeaders
|
||||
}
|
||||
|
||||
var passwordlessProviders = []string{"localai", "ollama", "amazonsagemaker", "amazonbedrock", "googlevertexai", "oci", "customrest"}
|
||||
var passwordlessProviders = []string{"localai", "ollama", "amazonsagemaker", "amazonbedrock", "amazonbedrockconverse", "googlevertexai", "oci", "customrest"}
|
||||
|
||||
func NeedPassword(backend string) bool {
|
||||
for _, b := range passwordlessProviders {
|
||||
|
||||
@@ -61,6 +61,10 @@ func (m *mockConfig) GetMaxTokens() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *mockConfig) GetStopSequences() []string {
|
||||
return []string{"", "", "", ""}
|
||||
}
|
||||
|
||||
func (m *mockConfig) GetEndpointName() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user