Mirror of https://github.com/k8sgpt-ai/k8sgpt.git
Merge pull request #89 from k8sgpt-ai/feat/language-support
feat: addition of simple language support & version
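In practice, the change adds a `--language`/`-l` flag to `k8sgpt analyze` (default `english`), so an invocation along the lines of `k8sgpt analyze --explain --language spanish` should produce the AI explanations in the requested language, and it introduces a `k8sgpt version` subcommand that prints the build version. The diff below wires this through the CLI flags, the OpenAI client, and the GoReleaser build.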
@@ -13,6 +13,8 @@ builds:
      - linux
      - windows
      - darwin
+    ldflags:
+      - -s -w -X main.version={{.Version}}

archives:
  - format: tar.gz
@@ -20,7 +20,7 @@ brew install k8sgpt
* Run `k8sgpt analyze` to run a scan.
* And use `k8sgpt analyze --explain` to get a more detailed explanation of the issues.

-<img src="images/landing.png" width=650px; />
+<img src="images/demo4.gif" width=650px; />

## Analyzers

@@ -37,8 +37,6 @@ K8sGPT uses analyzers to triage and diagnose issues in your cluster. It has a se
## Usage

```
Kubernetes debugging powered by AI

Usage:
  k8sgpt [command]

@@ -48,6 +46,7 @@ Available Commands:
  completion  Generate the autocompletion script for the specified shell
  generate    Generate Key for your chosen backend (opens browser)
  help        Help about any command
+  version     Print the version number of k8sgpt

Flags:
      --config string   config file (default is $HOME/.k8sgpt.git.yaml)
@@ -17,10 +17,11 @@ import (
)

var (
-    explain bool
-    backend string
-    output  string
-    filters []string
+    explain  bool
+    backend  string
+    output   string
+    filters  []string
+    language string
)

// AnalyzeCmd represents the problems command

@@ -53,7 +54,7 @@ var AnalyzeCmd = &cobra.Command{
        switch backendType {
        case "openai":
            aiClient = &ai.OpenAIClient{}
-            if err := aiClient.Configure(token); err != nil {
+            if err := aiClient.Configure(token, language); err != nil {
                color.Red("Error: %v", err)
                os.Exit(1)
            }

@@ -141,5 +142,6 @@ func init() {
    AnalyzeCmd.Flags().StringVarP(&backend, "backend", "b", "openai", "Backend AI provider")
    // output as json
    AnalyzeCmd.Flags().StringVarP(&output, "output", "o", "text", "Output format (text, json)")

+    // add language options for output
+    AnalyzeCmd.Flags().StringVarP(&language, "language", "l", "english", "Languages to use for AI (e.g. 'English', 'Spanish', 'French', 'German', 'Italian', 'Portuguese', 'Dutch', 'Russian', 'Chinese', 'Japanese', 'Korean')")
}
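Taken together, the new flag and the extra `Configure` argument amount to the following flow. This is an illustrative standalone sketch rather than code from the repository: the token is read from an environment variable purely for brevity, and the sample error text is made up.

```go
package main

import (
    "context"
    "fmt"
    "os"

    "github.com/k8sgpt-ai/k8sgpt/pkg/ai"
)

func main() {
    // In the CLI this value comes from the --language flag (default "english").
    language := "spanish"

    client := &ai.OpenAIClient{}
    if err := client.Configure(os.Getenv("OPENAI_API_KEY"), language); err != nil {
        fmt.Println("error:", err)
        os.Exit(1)
    }

    // The client now remembers the language and formats every prompt so the
    // model is asked to answer in it.
    out, err := client.GetCompletion(context.Background(), "Back-off restarting failed container")
    if err != nil {
        fmt.Println("error:", err)
        os.Exit(1)
    }
    fmt.Println(out)
}
```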
@@ -16,6 +16,7 @@ var (
    cfgFile    string
    masterURL  string
    kubeconfig string
+    version    string
)

// rootCmd represents the base command when called without any subcommands

@@ -30,7 +31,8 @@ var rootCmd = &cobra.Command{

// Execute adds all child commands to the root command and sets flags appropriately.
// This is called by main.main(). It only needs to happen once to the rootCmd.
-func Execute() {
+func Execute(v string) {
+    version = v
    err := rootCmd.Execute()
    if err != nil {
        os.Exit(1)
cmd/version.go (new file, 19 lines)
@@ -0,0 +1,19 @@
package cmd

import (
    "github.com/spf13/cobra"
)

// versionCmd represents the version command
var versionCmd = &cobra.Command{
    Use:   "version",
    Short: "Print the version number of k8sgpt",
    Long:  `All software has versions. This is k8sgpt's`,
    Run: func(cmd *cobra.Command, args []string) {
        cmd.Printf("k8sgpt version %s", version)
    },
}

func init() {
    rootCmd.AddCommand(versionCmd)
}
go.mod (3 changed lines)
@@ -3,9 +3,9 @@ module github.com/k8sgpt-ai/k8sgpt
go 1.20

require (
    github.com/briandowns/spinner v1.23.0
    github.com/fatih/color v1.15.0
    github.com/sashabaranov/go-openai v1.5.7
+    github.com/schollz/progressbar/v3 v3.13.1
    github.com/spf13/cobra v1.6.1
    github.com/spf13/viper v1.15.0
    golang.org/x/term v0.6.0

@@ -44,7 +44,6 @@ require (
    github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
    github.com/pelletier/go-toml/v2 v2.0.6 // indirect
    github.com/rivo/uniseg v0.4.4 // indirect
-    github.com/schollz/progressbar/v3 v3.13.1 // indirect
    github.com/spf13/afero v1.9.3 // indirect
    github.com/spf13/cast v1.5.0 // indirect
    github.com/spf13/jwalterweatherman v1.1.0 // indirect
go.sum (2 changed lines)
@@ -38,8 +38,6 @@ cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3f
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/briandowns/spinner v1.23.0 h1:alDF2guRWqa/FOZZYWjlMIx2L6H0wyewPxo/CH4Pt2A=
github.com/briandowns/spinner v1.23.0/go.mod h1:rPG4gmXeN3wQV/TsAY4w8lPdIM6RX3yqeBQJSrbXjuE=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
images/demo4.gif (new binary file, 215 KiB; binary content not shown)
main.go (4 changed lines)
@@ -5,6 +5,8 @@ package main

import "github.com/k8sgpt-ai/k8sgpt/cmd"

+var version = "dev"
+
func main() {
-    cmd.Execute()
+    cmd.Execute(version)
}
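Worth noting: a plain `go build` leaves `version` at its `dev` default, so a locally built binary prints `k8sgpt version dev`, while release binaries pick up the real number because the `-X main.version={{.Version}}` ldflag added to the GoReleaser config above overwrites the variable at link time.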
pkg/ai/ai.go (16 changed lines)
@@ -3,19 +3,29 @@ package ai
import (
    "context"
    "errors"
+    "fmt"

    "github.com/sashabaranov/go-openai"
)

+const (
+    default_prompt = "Simplify the following Kubernetes error message and provide a solution in %s: %s"
+    prompt_a       = "Read the following input %s and provide possible scenarios for remediation in %s"
+    prompt_b       = "Considering the following input from the Kubernetes resource %s and the error message %s, provide possible scenarios for remediation in %s"
+    prompt_c       = "Reading the following %s error message and it's accompanying log message %s, how would you simplify this message?"
+)
+
type OpenAIClient struct {
-    client *openai.Client
+    client   *openai.Client
+    language string
}

-func (c *OpenAIClient) Configure(token string) error {
+func (c *OpenAIClient) Configure(token string, language string) error {
    client := openai.NewClient(token)
    if client == nil {
        return errors.New("error creating OpenAI client")
    }
+    c.language = language
    c.client = client
    return nil
}

@@ -27,7 +37,7 @@ func (c *OpenAIClient) GetCompletion(ctx context.Context, prompt string) (string
        Messages: []openai.ChatCompletionMessage{
            {
                Role: "user",
-                Content: "Simplify the following Kubernetes error message and provide a solution: " + prompt,
+                Content: fmt.Sprintf(default_prompt, c.language, prompt),
            },
        },
    })
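To make the effect of `default_prompt` concrete, here is a small self-contained sketch of the string that `GetCompletion` now sends as the chat message content. The constant mirrors the diff above; the language and error text are example values only.

```go
package main

import "fmt"

// Copy of default_prompt from pkg/ai/ai.go in this change.
const defaultPrompt = "Simplify the following Kubernetes error message and provide a solution in %s: %s"

func main() {
    language := "spanish"                                  // value of the --language flag
    errorMessage := "Back-off restarting failed container" // example input only

    // Same fmt.Sprintf call as in GetCompletion; this becomes the chat message Content.
    content := fmt.Sprintf(defaultPrompt, language, errorMessage)
    fmt.Println(content)
    // Output:
    // Simplify the following Kubernetes error message and provide a solution in spanish: Back-off restarting failed container
}
```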
@@ -3,6 +3,6 @@ package ai
import "context"

type IAI interface {
-    Configure(token string) error
+    Configure(token string, language string) error
    GetCompletion(ctx context.Context, prompt string) (string, error)
}
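Because the `IAI` interface itself gains the `language` parameter, any other backend has to adopt the new `Configure` signature as well, even if it ignores the value. Below is a hypothetical stub implementation for illustration only; `NoopAI` is not part of this change.

```go
package ai

import "context"

// NoopAI is a hypothetical backend used only to illustrate the widened interface.
type NoopAI struct {
    language string
}

// Compile-time check that the stub still satisfies IAI after the signature change.
var _ IAI = (*NoopAI)(nil)

// Configure now has to accept the language, matching the updated interface.
func (n *NoopAI) Configure(token string, language string) error {
    n.language = language
    return nil
}

// GetCompletion would normally call a real backend and ask for an answer in n.language.
func (n *NoopAI) GetCompletion(ctx context.Context, prompt string) (string, error) {
    return "noop: " + prompt, nil
}
```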