feat: add amazonbedrock (#718)

* add amazonbedrock AI provider

Signed-off-by: Su Wei <suwei007@gmail.com>

* add amazonbedrock, change model list to const var

Signed-off-by: Su Wei <suwei007@gmail.com>

* update iai config and auth cmd, add providerRegion

Signed-off-by: Wei Su <wsuam@amazon.com>

* fix wrong filename

Signed-off-by: Wei Su <wsuam@amazon.com>

* chore: added some doc info

Signed-off-by: Alex Jones <alexsimonjones@gmail.com>

---------

Signed-off-by: Su Wei <suwei007@gmail.com>
Signed-off-by: Wei Su <wsuam@amazon.com>
Signed-off-by: Alex Jones <alexsimonjones@gmail.com>
Co-authored-by: Wei Su <wsuam@amazon.com>
Co-authored-by: Aris Boutselis <aris.boutselis@senseon.io>
Co-authored-by: Alex Jones <alexsimonjones@gmail.com>
StevenSu 2023-10-28 18:49:09 +08:00 committed by GitHub
parent 4af0ad0303
commit f1a7801e9e
6 changed files with 270 additions and 18 deletions


@ -14,6 +14,8 @@
It has SRE experience codified into its analyzers and helps to pull out the most relevant information to enrich it with AI.
_Out of the box integration with OpenAI, Azure, Cohere, Amazon Bedrock and local models._
<a href="https://www.producthunt.com/posts/k8sgpt?utm_source=badge-featured&utm_medium=badge&utm_souce=badge-k8sgpt" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=389489&theme=light" alt="K8sGPT - K8sGPT&#0032;gives&#0032;Kubernetes&#0032;Superpowers&#0032;to&#0032;everyone | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
<img src="images/demo4.gif" width=650px; />
@ -359,6 +361,45 @@ k8sgpt analyze --explain --backend cohere
</details>
<details>
<summary>Amazon Bedrock provider</summary>
<em>Prerequisites</em>
Bedrock API access is needed.
<img src="images/bedrock.png" width="500px;" />
As illustrated above, you will need to enable model access in the [AWS Console](https://eu-central-1.console.aws.amazon.com/bedrock/home?region=eu-central-1#/modelaccess).
In addition, you will need to set the following local environment variables:
```
- AWS_ACCESS_KEY
- AWS_SECRET_ACCESS_KEY
- AWS_DEFAULT_REGION
```
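For example (the values below are placeholders; use credentials for an account that has been granted Bedrock model access):
```
export AWS_ACCESS_KEY="<your-access-key-id>"
export AWS_SECRET_ACCESS_KEY="<your-secret-access-key>"
export AWS_DEFAULT_REGION="us-east-1"
```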
```
k8sgpt auth add --backend amazonbedrock --model anthropic.claude-v2
```
TODO: the CLI currently prompts for an access key; any value can be entered here, since the AWS credentials are picked up from the environment variables above.
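This change also adds a `--providerRegion` flag (shorthand `-r`) to `k8sgpt auth add`, so the Bedrock region can be set explicitly when registering the backend, for example:
```
k8sgpt auth add --backend amazonbedrock --model anthropic.claude-v2 --providerRegion us-east-1
```
If the region is omitted or unsupported, the client falls back to `us-east-1` (see `GetRegionOrDefault` in `pkg/ai/amazonbedrock.go`).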
#### Usage
```
k8sgpt analyze -e -b amazonbedrock
0 argocd/argocd-application-controller(argocd-application-controller)
- Error: StatefulSet uses the service argocd/argocd-application-controller which does not exist.
You're right, I don't have enough context to determine if a StatefulSet is correctly configured to use a non-existent service. A StatefulSet manages Pods with persistent storage, and the Pods are created from the same spec. The service name referenced in the StatefulSet configuration would need to match an existing Kubernetes service for the Pods to connect to. Without more details on the specific StatefulSet and environment, I can't confirm whether the configuration is valid or not.
```
</details>
<details>
<summary>Setting a new default AI provider</summary>
@ -376,6 +417,8 @@ Active:
Unused:
> localai
> noopai
> amazonbedrock
> cohere
```
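The new backend can then be promoted to the default provider, e.g. (a sketch; this assumes the `auth default` subcommand and its `-p`/`--provider` flag from the upstream k8sgpt docs):
```
k8sgpt auth default -p amazonbedrock
```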


@ -41,6 +41,7 @@ var addCmd = &cobra.Command{
_ = cmd.MarkFlagRequired("engine")
_ = cmd.MarkFlagRequired("baseurl")
}
},
Run: func(cmd *cobra.Command, args []string) {
@ -103,12 +104,13 @@ var addCmd = &cobra.Command{
// create new provider object
newProvider := ai.AIProvider{
Name: backend,
Model: model,
Password: password,
BaseURL: baseURL,
Engine: engine,
Temperature: temperature,
Name: backend,
Model: model,
Password: password,
BaseURL: baseURL,
Engine: engine,
Temperature: temperature,
ProviderRegion: providerRegion,
}
if providerIndex == -1 {
@ -140,4 +142,6 @@ func init() {
addCmd.Flags().Float32VarP(&temperature, "temperature", "t", 0.7, "The sampling temperature, value ranges between 0 (more deterministic output) and 1 (more random)")
// add flag for azure open ai engine/deployment name
addCmd.Flags().StringVarP(&engine, "engine", "e", "", "Azure AI deployment name")
// add flag for amazonbedrock region name
addCmd.Flags().StringVarP(&providerRegion, "providerRegion", "r", "", "Provider Region name")
}


@ -19,12 +19,13 @@ import (
)
var (
backend string
password string
baseURL string
model string
engine string
temperature float32
backend string
password string
baseURL string
model string
engine string
temperature float32
providerRegion string
)
var configAI ai.AIConfiguration

images/bedrock.png Normal file (binary image, 79 KiB; not shown)
pkg/ai/amazonbedrock.go Normal file

@ -0,0 +1,196 @@
package ai
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"strings"
"github.com/fatih/color"
"github.com/k8sgpt-ai/k8sgpt/pkg/cache"
"github.com/k8sgpt-ai/k8sgpt/pkg/util"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/bedrockruntime"
)
// AmazonBedRockClient represents the client for interacting with the Amazon Bedrock service.
type AmazonBedRockClient struct {
client *bedrockruntime.BedrockRuntime
language string
model string
temperature float32
}
// InvokeModelResponseBody represents the response body structure from the model invocation.
type InvokeModelResponseBody struct {
Completion string `json:"completion"`
Stop_reason string `json:"stop_reason"`
}
// Amazon Bedrock supported regions: US East (N. Virginia), US West (Oregon), Asia Pacific (Singapore), Asia Pacific (Tokyo), Europe (Frankfurt)
// https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html#bedrock-regions
const BEDROCK_DEFAULT_REGION = "us-east-1" // default to the us-east-1 region
const (
US_East_1 = "us-east-1"
US_West_2 = "us-west-2"
AP_Southeast_1 = "ap-southeast-1"
AP_Northeast_1 = "ap-northeast-1"
EU_Central_1 = "eu-central-1"
)
var BEDROCKER_SUPPORTED_REGION = []string{
US_East_1,
US_West_2,
AP_Southeast_1,
AP_Northeast_1,
EU_Central_1,
}
const (
ModelAnthropicClaudeV2 = "anthropic.claude-v2"
ModelAnthropicClaudeV1 = "anthropic.claude-v1"
ModelAnthropicClaudeInstantV1 = "anthropic.claude-instant-v1"
)
var BEDROCK_MODELS = []string{
ModelAnthropicClaudeV2,
ModelAnthropicClaudeV1,
ModelAnthropicClaudeInstantV1,
}
// GetModelOrDefault returns the provided model if it is supported, otherwise the default Bedrock model.
func GetModelOrDefault(model string) string {
// Check if the provided model is in the list
for _, m := range BEDROCK_MODELS {
if m == model {
return model // Return the provided model
}
}
// Return the default model if the provided model is not in the list
return BEDROCK_MODELS[0]
}
// GetRegionOrDefault returns the provided region if it is supported, otherwise the default Bedrock region.
func GetRegionOrDefault(region string) string {
// Check if the provided region is in the list
for _, m := range BEDROCKER_SUPPORTED_REGION {
if m == region {
return region // Return the provided region
}
}
// Return the default region if the provided region is not in the list
return BEDROCK_DEFAULT_REGION
}
// Configure configures the AmazonBedRockClient with the provided configuration and language.
func (a *AmazonBedRockClient) Configure(config IAIConfig, language string) error {
// Create a new AWS session
providerRegion := GetRegionOrDefault(config.GetProviderRegion())
sess, err := session.NewSession(&aws.Config{
Region: aws.String(providerRegion),
})
if err != nil {
return err
}
// Create a new BedrockRuntime client
a.client = bedrockruntime.New(sess)
a.language = language
a.model = GetModelOrDefault(config.GetModel())
a.temperature = config.GetTemperature()
return nil
}
// GetCompletion sends a request to the model for generating completion based on the provided prompt.
func (a *AmazonBedRockClient) GetCompletion(ctx context.Context, prompt string, promptTmpl string) (string, error) {
// Prepare the input data for the model invocation
request := map[string]interface{}{
"prompt": fmt.Sprintf("\n\nHuman: %s \n\nAssistant:", prompt),
"max_tokens_to_sample": 1024,
"temperature": a.temperature,
"top_p": 0.9,
}
body, err := json.Marshal(request)
if err != nil {
return "", err
}
// Build the parameters for the model invocation
params := &bedrockruntime.InvokeModelInput{
Body: body,
ModelId: aws.String(a.model),
ContentType: aws.String("application/json"),
Accept: aws.String("application/json"),
}
// Invoke the model
resp, err := a.client.InvokeModelWithContext(ctx, params)
if err != nil {
return "", err
}
// Parse the response body
output := &InvokeModelResponseBody{}
err = json.Unmarshal(resp.Body, output)
if err != nil {
return "", err
}
return output.Completion, nil
}
// Parse generates a completion for the provided prompt using the Amazon Bedrock model.
func (a *AmazonBedRockClient) Parse(ctx context.Context, prompt []string, cache cache.ICache, promptTmpl string) (string, error) {
inputKey := strings.Join(prompt, " ")
// Check for cached data
cacheKey := util.GetCacheKey(a.GetName(), a.language, inputKey)
if !cache.IsCacheDisabled() && cache.Exists(cacheKey) {
response, err := cache.Load(cacheKey)
if err != nil {
return "", err
}
if response != "" {
output, err := base64.StdEncoding.DecodeString(response)
if err != nil {
color.Red("error decoding cached data: %v", err)
return "", nil
}
return string(output), nil
}
}
response, err := a.GetCompletion(ctx, inputKey, promptTmpl)
if err != nil {
return "", err
}
err = cache.Store(cacheKey, base64.StdEncoding.EncodeToString([]byte(response)))
if err != nil {
color.Red("error storing value to cache: %v", err)
return "", nil
}
return response, nil
}
// GetName returns the name of the AmazonBedRockClient.
func (a *AmazonBedRockClient) GetName() string {
return "amazonbedrock"
}
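For reference, the JSON body assembled in `GetCompletion` above follows the Anthropic Claude request format on Bedrock, so the same payload can be exercised outside of k8sgpt with the AWS CLI. A minimal sketch, assuming AWS CLI v2 and the same credentials/region:
```
aws bedrock-runtime invoke-model \
  --model-id anthropic.claude-v2 \
  --content-type application/json \
  --accept application/json \
  --cli-binary-format raw-in-base64-out \
  --body '{"prompt": "\n\nHuman: Why is my pod pending?\n\nAssistant:", "max_tokens_to_sample": 1024, "temperature": 0.7, "top_p": 0.9}' \
  output.json
```
The `completion` field in `output.json` corresponds to the `Completion` field parsed via `InvokeModelResponseBody` above.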


@ -26,6 +26,7 @@ var (
&LocalAIClient{},
&NoOpAIClient{},
&CohereClient{},
&AmazonBedRockClient{},
}
Backends = []string{
"openai",
@ -33,6 +34,7 @@ var (
"azureopenai",
"noopai",
"cohere",
"amazonbedrock",
}
)
@ -49,6 +51,7 @@ type IAIConfig interface {
GetBaseURL() string
GetEngine() string
GetTemperature() float32
GetProviderRegion() string
}
func NewClient(provider string) IAI {
@ -67,12 +70,13 @@ type AIConfiguration struct {
}
type AIProvider struct {
Name string `mapstructure:"name"`
Model string `mapstructure:"model"`
Password string `mapstructure:"password" yaml:"password,omitempty"`
BaseURL string `mapstructure:"baseurl" yaml:"baseurl,omitempty"`
Engine string `mapstructure:"engine" yaml:"engine,omitempty"`
Temperature float32 `mapstructure:"temperature" yaml:"temperature,omitempty"`
Name string `mapstructure:"name"`
Model string `mapstructure:"model"`
Password string `mapstructure:"password" yaml:"password,omitempty"`
BaseURL string `mapstructure:"baseurl" yaml:"baseurl,omitempty"`
Engine string `mapstructure:"engine" yaml:"engine,omitempty"`
Temperature float32 `mapstructure:"temperature" yaml:"temperature,omitempty"`
ProviderRegion string `mapstructure:"providerregion" yaml:"providerregion,omitempty"`
}
func (p *AIProvider) GetBaseURL() string {
@ -94,6 +98,10 @@ func (p *AIProvider) GetTemperature() float32 {
return p.Temperature
}
func (p *AIProvider) GetProviderRegion() string {
return p.ProviderRegion
}
func NeedPassword(backend string) bool {
return backend != "localai"
}