Mirror of https://github.com/k8sgpt-ai/k8sgpt.git, synced 2025-09-27 16:04:17 +00:00
@@ -1,12 +1,10 @@
package analyze

import (
    "context"
    "fmt"
    "github.com/fatih/color"
    "github.com/k8sgpt-ai/k8sgpt/pkg/ai"
    "github.com/k8sgpt-ai/k8sgpt/pkg/analysis"
    "github.com/k8sgpt-ai/k8sgpt/pkg/kubernetes"
    "github.com/spf13/cobra"
    "github.com/spf13/viper"
    "os"
@@ -49,38 +47,24 @@ var AnalyzeCmd = &cobra.Command{
    os.Exit(1)
}

var aiClient ai.IAI
switch backendType {
case "openai":
    aiClient = &ai.OpenAIClient{}
    if err := aiClient.Configure(token, language); err != nil {
        color.Red("Error: %v", err)
        os.Exit(1)
    }
default:
    color.Red("Backend not supported")
// AnalysisResult configuration

aiClient, err := ai.NewAIClient("openai")
if err != nil {
    color.Red("Error: %v", err)
    os.Exit(1)
}
if err := aiClient.Configure(token, language); err != nil {
    color.Red("Error: %v", err)
    os.Exit(1)
}

ctx := context.Background()
// Get kubernetes client from viper
client := viper.Get("kubernetesClient").(*kubernetes.Client)
// AnalysisResult configuration

analysis := &analysis.Analysis{
    Context:   ctx,
    Namespace: namespace,
    NoCache:   nocache,
    Explain:   explain,
    AIClient:  aiClient,
    Filters:   filters,
    Client:    client,
}
analysis := analysis.NewAnalysis(namespace, nocache, explain, filters, backend)

// Run analysis
_ = analysis.RunAnalysis()

analysis.PrintAnalysisResult()
analysis.PrintJsonResult()
},
}
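The hunk above references package-level variables (backendType, token, namespace, explain, filters, nocache, backend) whose flag definitions are not part of this diff. A rough, self-contained sketch of how such a cobra command is typically wired follows; the flag names, shorthands, and defaults are assumptions for illustration, not taken from the k8sgpt source.

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

var (
	backend   string
	namespace string
	explain   bool
	nocache   bool
	filters   []string
)

// analyzeCmd sketches the shape of AnalyzeCmd: cobra parses the flags and the
// Run function hands them on to the analysis package.
var analyzeCmd = &cobra.Command{
	Use:   "analyze",
	Short: "Analyze cluster resources",
	Run: func(cmd *cobra.Command, args []string) {
		fmt.Printf("backend=%s namespace=%s explain=%v filters=%v nocache=%v\n",
			backend, namespace, explain, filters, nocache)
		// In the real command this is where analysis.NewAnalysis(...) and
		// RunAnalysis() are invoked, as shown in the hunk above.
	},
}

func main() {
	analyzeCmd.Flags().StringVarP(&backend, "backend", "b", "openai", "AI backend to use")
	analyzeCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "namespace to analyze")
	analyzeCmd.Flags().BoolVarP(&explain, "explain", "e", false, "explain results with the AI backend")
	analyzeCmd.Flags().BoolVar(&nocache, "no-cache", false, "skip the response cache")
	analyzeCmd.Flags().StringSliceVarP(&filters, "filter", "f", nil, "analyzers to run")
	if err := analyzeCmd.Execute(); err != nil {
		fmt.Println(err)
	}
}
```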
@@ -1,8 +1,25 @@
package ai

import "context"
import (
    "context"
    "errors"
    "github.com/k8sgpt-ai/k8sgpt/pkg/ai/openai"
)

var AIProviderMap = map[string]IAI{
    "openai": &openai.OpenAIClient{},
}

type IAI interface {
    Configure(token string, language string) error
    GetCompletion(ctx context.Context, prompt string) (string, error)
    Parse(text string, prompt []string, nocache bool) (string, error)
}

func NewAIClient(provider string) (IAI, error) {
    ai, ok := AIProviderMap[provider]
    if !ok {
        return nil, errors.New("AI provider not found")
    }
    return ai, nil
}
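A minimal, self-contained sketch of the provider-registry pattern introduced above: a map from provider name to an IAI implementation, resolved by a NewAIClient-style lookup instead of a hard-coded switch. The fakeAI type and the main function are illustrative stand-ins, not part of the repository.

```go
package main

import (
	"context"
	"errors"
	"fmt"
)

// IAI mirrors the interface from the hunk above.
type IAI interface {
	Configure(token string, language string) error
	GetCompletion(ctx context.Context, prompt string) (string, error)
	Parse(text string, prompt []string, nocache bool) (string, error)
}

// fakeAI is a stand-in provider used only for this sketch.
type fakeAI struct{ token, language string }

func (f *fakeAI) Configure(token, language string) error {
	f.token, f.language = token, language
	return nil
}

func (f *fakeAI) GetCompletion(ctx context.Context, prompt string) (string, error) {
	return "completion for: " + prompt, nil
}

func (f *fakeAI) Parse(text string, prompt []string, nocache bool) (string, error) {
	return text, nil
}

// aiProviderMap plays the role of AIProviderMap: provider name -> implementation.
var aiProviderMap = map[string]IAI{"fake": &fakeAI{}}

func newAIClient(provider string) (IAI, error) {
	ai, ok := aiProviderMap[provider]
	if !ok {
		return nil, errors.New("AI provider not found")
	}
	return ai, nil
}

func main() {
	client, err := newAIClient("fake")
	if err != nil {
		panic(err)
	}
	_ = client.Configure("token", "english")
	out, _ := client.GetCompletion(context.Background(), "why is my pod pending?")
	fmt.Println(out)
}
```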
@@ -1,9 +1,13 @@
package ai
package openai

import (
    "context"
    "encoding/base64"
    "errors"
    "fmt"
    "github.com/fatih/color"
    "github.com/spf13/viper"
    "strings"

    "github.com/sashabaranov/go-openai"
)
@@ -16,8 +20,10 @@ const (
)

type OpenAIClient struct {
    context  context.Context
    client   *openai.Client
    language string
    nocache  bool
}

func (c *OpenAIClient) Configure(token string, language string) error {
@@ -46,3 +52,41 @@ func (c *OpenAIClient) GetCompletion(ctx context.Context, prompt string) (string
    }
    return resp.Choices[0].Message.Content, nil
}

func (c OpenAIClient) Parse(text string, prompt []string, nocache bool) (string, error) {

    // parse the text with the AI backend
    inputKey := strings.Join(prompt, " ")
    // Check for cached data
    sEnc := base64.StdEncoding.EncodeToString([]byte(inputKey))
    // find in viper cache
    if viper.IsSet(sEnc) && !c.nocache {
        // retrieve data from cache
        response := viper.GetString(sEnc)
        if response == "" {
            color.Red("error retrieving cached data")
            return "", nil
        }
        output, err := base64.StdEncoding.DecodeString(response)
        if err != nil {
            color.Red("error decoding cached data: %v", err)
            return "", nil
        }
        return string(output), nil
    }

    response, err := c.GetCompletion(c.context, inputKey)
    if err != nil {
        color.Red("error getting completion: %v", err)
        return "", err
    }

    if !viper.IsSet(sEnc) {
        viper.Set(sEnc, base64.StdEncoding.EncodeToString([]byte(response)))
        if err := viper.WriteConfig(); err != nil {
            color.Red("error writing config: %v", err)
            return "", nil
        }
    }
    return response, nil
}
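Parse caches completions in the viper config, keyed by the base64-encoded prompt and storing the response base64-encoded as well. Below is a standalone sketch of that round trip, with a plain map standing in for viper so it runs without a config file.

```go
package main

import (
	"encoding/base64"
	"fmt"
	"strings"
)

// cache stands in for the viper-backed config store used in Parse.
var cache = map[string]string{}

func cachedOrCompute(prompt []string, complete func(string) string) (string, error) {
	// Key: base64 of the joined prompt, mirroring sEnc in Parse.
	inputKey := strings.Join(prompt, " ")
	key := base64.StdEncoding.EncodeToString([]byte(inputKey))

	if enc, ok := cache[key]; ok {
		out, err := base64.StdEncoding.DecodeString(enc)
		if err != nil {
			return "", err
		}
		return string(out), nil
	}

	// Cache miss: compute and store the base64-encoded response.
	response := complete(inputKey)
	cache[key] = base64.StdEncoding.EncodeToString([]byte(response))
	return response, nil
}

func main() {
	fake := func(prompt string) string { return "explanation for: " + prompt }
	first, _ := cachedOrCompute([]string{"Pod", "CrashLoopBackOff"}, fake)
	second, _ := cachedOrCompute([]string{"Pod", "CrashLoopBackOff"}, fake) // served from the cache
	fmt.Println(first == second)                                            // true
}
```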
@@ -2,6 +2,7 @@ package analysis

import (
    "context"
    "encoding/json"
    "fmt"
    "github.com/k8sgpt-ai/k8sgpt/pkg/ai"
    "github.com/k8sgpt-ai/k8sgpt/pkg/analyzer"
@@ -21,15 +22,39 @@ type Analysis struct {
    analysisResults []common.Result
}

func (a *Analysis) RunAnalysis() error {
func NewAnalysis(namespace string, noCache bool, explain bool, filters []string, aiProvider string) *Analysis {
    var aiClient ai.IAI
    var err error

    ctx := context.Background()
    client := viper.Get("kubernetesClient").(*kubernetes.Client)

    if explain {
        aiClient, err = ai.NewAIClient(aiProvider)
        if err != nil {
            fmt.Println("Error creating AI client: ", err)
        }
    }

    return &Analysis{
        Context:   ctx,
        Namespace: namespace,
        NoCache:   noCache,
        Explain:   explain,
        Filters:   filters,
        Client:    client,
        AIClient:  aiClient,
    }
}

func (a *Analysis) RunAnalysis() error {
    activeFilters := viper.GetStringSlice("active_filters")
    analyzerList := analyzer.GetAnalyzerList()

    // if there are no filters selected and no active_filters then run all of them
    if len(a.Filters) == 0 && len(activeFilters) == 0 {
        for _, al := range analyzerList {
            thisanalysis, _ := analyzer.NewAnalyzer(al, a.Client, a.Context, a.Namespace)
            thisanalysis, _ := analyzer.NewAnalyzer(al, a.Client, a.Context, a.Namespace, a.AIClient, a.Explain)
            err := thisanalysis.Analyze()
            if err != nil {
                fmt.Println("Error running analysis: ", err)
@@ -44,7 +69,7 @@ func (a *Analysis) RunAnalysis() error {
    for _, filter := range a.Filters {
        for _, ali := range analyzerList {
            if filter == ali {
                thisanalysis, _ := analyzer.NewAnalyzer(ali, a.Client, a.Context, a.Namespace)
                thisanalysis, _ := analyzer.NewAnalyzer(ali, a.Client, a.Context, a.Namespace, a.AIClient, a.Explain)
                err := thisanalysis.Analyze()
                if err != nil {
                    fmt.Println("Error running analysis: ", err)
@@ -63,3 +88,11 @@ func (a *Analysis) PrintAnalysisResult() {
        fmt.Println(result)
    }
}

func (a *Analysis) PrintJsonResult() {
    output, err := json.MarshalIndent(a.analysisResults, "", " ")
    if err != nil {
        fmt.Println("Error marshalling json: ", err)
    }
    fmt.Println(string(output))
}
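A small sketch of the filter-selection behaviour in RunAnalysis: with no explicit filters and no active_filters configuration every analyzer runs, otherwise only analyzers whose names match. The branch that falls back to active_filters is an assumption here, since the hunk is truncated before showing how that slice is consumed.

```go
package main

import "fmt"

// selectAnalyzers mirrors the filter logic in RunAnalysis: no filters and no
// active_filters means run everything; otherwise run only matching names.
func selectAnalyzers(all, filters, activeFilters []string) []string {
	if len(filters) == 0 && len(activeFilters) == 0 {
		return all
	}
	if len(filters) == 0 {
		// Assumed fallback: treat the configured active_filters as the filter set.
		filters = activeFilters
	}
	selected := []string{}
	for _, f := range filters {
		for _, name := range all {
			if f == name {
				selected = append(selected, name)
			}
		}
	}
	return selected
}

func main() {
	all := []string{"Pod", "ReplicaSet", "Service", "Ingress"}
	fmt.Println(selectAnalyzers(all, nil, nil))             // every analyzer
	fmt.Println(selectAnalyzers(all, []string{"Pod"}, nil)) // [Pod]
}
```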
@@ -2,7 +2,7 @@ package analyzer

import (
    "context"
    "fmt"
    "github.com/k8sgpt-ai/k8sgpt/pkg/ai"
    "github.com/k8sgpt-ai/k8sgpt/pkg/analyzer/common"
    "github.com/k8sgpt-ai/k8sgpt/pkg/analyzer/hpa"
    "github.com/k8sgpt-ai/k8sgpt/pkg/analyzer/ingress"
@@ -18,109 +18,31 @@ type IAnalyzer interface {
    GetResult() []common.Result
}

const (
    PodAnalyzerName                   = "Pod"
    ReplicaSetAnalyzerName            = "ReplicaSet"
    PersistentVolumeClaimAnalyzerName = "PersistentVolumeClaim"
    ServiceAnalyzerName               = "Service"
    IngressAnalyzerName               = "Ingress"
    HPAAnalyzerName                   = "HorizontalPodAutoScaler"
)
var AnalyzerMap = map[string]IAnalyzer{
    "Pod":                   &pod.PodAnalyzer{},
    "ReplicaSet":            &rs.ReplicaSetAnalyzer{},
    "PersistentVolumeClaim": &pvc.PvcAnalyzer{},
    "Service":               &service.ServiceAnalyzer{},
    "Ingress":               &ingress.IngressAnalyzer{},
    "HPA":                   &hpa.HPAAnalyzer{},
}

var (
    coreAnalyzerList = []string{
        PodAnalyzerName,
        ReplicaSetAnalyzerName,
        PersistentVolumeClaimAnalyzerName,
        ServiceAnalyzerName,
        IngressAnalyzerName,
        HPAAnalyzerName,
    }
var coreAnalyzerList = []string{"Pod", "ReplicaSet", "PersistentVolumeClaim", "Service", "Ingress"}
var additionalAnalyzerList = []string{"HPA"}

    additionalAnalyzers = []string{
        HPAAnalyzerName,
    }
)

func NewAnalyzer(analyzer string, client *kubernetes.Client, context context.Context, namespace string) (IAnalyzer, error) {
func NewAnalyzer(analyzer string, client *kubernetes.Client, context context.Context, namespace string, aiClient ai.IAI, explain bool) (IAnalyzer, error) {
    analyzerConfig := common.Analyzer{
        AIClient:  aiClient,
        Namespace: namespace,
        Context:   context,
        Client:    client,
        Explain:   explain,
    }

    analyzerConfig.PreAnalysis = make(map[string]common.PreAnalysis)

    switch analyzer {
    case PodAnalyzerName:
        return &pod.PodAnalyzer{
            Analyzer: analyzerConfig,
        }, nil
    case ReplicaSetAnalyzerName:
        return &rs.ReplicaSetAnalyzer{
            Analyzer: analyzerConfig,
        }, nil
    case IngressAnalyzerName:
        return &ingress.IngressAnalyzer{
            Analyzer: analyzerConfig,
        }, nil
    case HPAAnalyzerName:
        return &hpa.HPAAnalyzer{
            Analyzer: analyzerConfig,
        }, nil
    case PersistentVolumeClaimAnalyzerName:
        return &pvc.PvcAnalyzer{
            Analyzer: analyzerConfig,
        }, nil
    case ServiceAnalyzerName:
        return &service.ServiceAnalyzer{
            Analyzer: analyzerConfig,
        }, nil
    default:
        return nil, fmt.Errorf("Analyzer %s not supported", analyzer)
    }
    return AnalyzerMap[analyzer], nil
}

/*
func ParseViaAI(ctx context.Context, config *analysis.Analysis,

    aiClient ai.IAI, prompt []string) (string, error) {
    // parse the text with the AI backend
    inputKey := strings.Join(prompt, " ")
    // Check for cached data
    sEnc := base64.StdEncoding.EncodeToString([]byte(inputKey))
    // find in viper cache
    if viper.IsSet(sEnc) && !config.NoCache {
        // retrieve data from cache
        response := viper.GetString(sEnc)
        if response == "" {
            color.Red("error retrieving cached data")
            return "", nil
        }
        output, err := base64.StdEncoding.DecodeString(response)
        if err != nil {
            color.Red("error decoding cached data: %v", err)
            return "", nil
        }
        return string(output), nil
    }

    response, err := aiClient.GetCompletion(ctx, inputKey)
    if err != nil {
        color.Red("error getting completion: %v", err)
        return "", err
    }

    if !viper.IsSet(sEnc) {
        viper.Set(sEnc, base64.StdEncoding.EncodeToString([]byte(response)))
        if err := viper.WriteConfig(); err != nil {
            color.Red("error writing config: %v", err)
            return "", nil
        }
    }
    return response, nil
}
*/
func ListFilters() ([]string, []string) {
    coreKeys := []string{}
    for _, filter := range coreAnalyzerList {
@@ -128,7 +50,7 @@ func ListFilters() ([]string, []string) {
    }

    additionalKeys := []string{}
    for _, filter := range additionalAnalyzers {
    for _, filter := range coreAnalyzerList {
        coreKeys = append(additionalKeys, filter)
    }
    return coreKeys, additionalKeys
@@ -138,13 +60,12 @@ func GetAnalyzerList() []string {
    list := []string{}

    list = append(list, coreAnalyzerList...)
    list = append(list, additionalAnalyzers...)
    list = append(list, additionalAnalyzerList...)

    list = removeDuplicateStr(list)

    return list
}

func removeDuplicateStr(strSlice []string) []string {
    allKeys := make(map[string]bool)
    list := []string{}
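The switch in NewAnalyzer gives way to a name-keyed registry (AnalyzerMap), and GetAnalyzerList concatenates the core and additional name lists before deduplicating. A compact sketch of both ideas; unlike the new code above, which returns AnalyzerMap[analyzer] directly, this sketch adds an explicit not-found error, and the removeDuplicateStr body (cut off in the hunk) is assumed to be the usual seen-map filter.

```go
package main

import "fmt"

// Analyzer is a trimmed-down stand-in for the IAnalyzer interface above.
type Analyzer interface {
	Analyze() error
}

type podAnalyzer struct{}

func (podAnalyzer) Analyze() error { return nil }

type hpaAnalyzer struct{}

func (hpaAnalyzer) Analyze() error { return nil }

// analyzerMap plays the role of AnalyzerMap: a registry keyed by name,
// replacing the per-analyzer switch statement.
var analyzerMap = map[string]Analyzer{
	"Pod": podAnalyzer{},
	"HPA": hpaAnalyzer{},
}

func newAnalyzer(name string) (Analyzer, error) {
	a, ok := analyzerMap[name]
	if !ok {
		// Added safeguard for the sketch; the diffed code indexes the map directly.
		return nil, fmt.Errorf("analyzer %s not supported", name)
	}
	return a, nil
}

// analyzerList mirrors GetAnalyzerList: concatenate the core and additional
// name lists, then drop duplicates while preserving order.
func analyzerList(core, additional []string) []string {
	seen := map[string]bool{}
	out := []string{}
	for _, name := range append(append([]string{}, core...), additional...) {
		if !seen[name] {
			seen[name] = true
			out = append(out, name)
		}
	}
	return out
}

func main() {
	fmt.Println(analyzerList([]string{"Pod"}, []string{"HPA", "Pod"})) // [Pod HPA]
	if _, err := newAnalyzer("Service"); err != nil {
		fmt.Println(err) // analyzer Service not supported
	}
}
```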
@@ -2,6 +2,7 @@ package common

import (
    "context"
    "github.com/k8sgpt-ai/k8sgpt/pkg/ai"
    "github.com/k8sgpt-ai/k8sgpt/pkg/kubernetes"
    appsv1 "k8s.io/api/apps/v1"
    autov1 "k8s.io/api/autoscaling/v1"
@@ -11,9 +12,12 @@ import (

type Analyzer struct {
    Client      *kubernetes.Client
    AIClient    ai.IAI
    Context     context.Context
    Namespace   string
    PreAnalysis map[string]PreAnalysis
    Explain     bool
    NoCache     bool
    Result      []Result
}
@@ -59,7 +59,6 @@ func (a *HPAAnalyzer) Analyze() error {
            FailureDetails: failures,
        }
    }

}

for key, value := range a.PreAnalysis {
@@ -68,7 +67,6 @@ func (a *HPAAnalyzer) Analyze() error {
        Name:  key,
        Error: value.FailureDetails,
    }

    parent, _ := util.GetParent(a.Client, value.Ingress.ObjectMeta)
    currentAnalysis.ParentObject = parent
    a.Result = append(a.Result, currentAnalysis)