Merge pull request #242 from matthisholleville/feature/add-anonymize-option

feat: add anonymize option
Alex Jones
2023-04-12 10:50:38 +01:00
committed by GitHub
19 changed files with 306 additions and 64 deletions

View File

@@ -227,6 +227,40 @@ _Output to JSON_
 k8sgpt analyze --explain --filter=Service --output=json
 ```
+_Anonymize during explain_
+```
+k8sgpt analyze --explain --filter=Service --output=json --anonymize
+```
+### How does anonymization work?
+With this option, the data is anonymized before being sent to the AI backend. During analysis, `k8sgpt` collects sensitive data (Kubernetes object names, labels, etc.). This data is masked before it is sent to the AI backend and replaced by a key that is used to de-anonymize the response before it is returned to the user.
+<details>
+1. Error reported during analysis:
+```bash
+Error: HorizontalPodAutoscaler uses StatefulSet/fake-deployment as ScaleTargetRef which does not exist.
+```
+2. Payload sent to the AI backend:
+```bash
+Error: HorizontalPodAutoscaler uses StatefulSet/tGLcCRcHa1Ce5Rs as ScaleTargetRef which does not exist.
+```
+3. Payload returned by the AI:
+```bash
+The Kubernetes system is trying to scale a StatefulSet named tGLcCRcHa1Ce5Rs using the HorizontalPodAutoscaler, but it cannot find the StatefulSet. The solution is to verify that the StatefulSet name is spelled correctly and exists in the same namespace as the HorizontalPodAutoscaler.
+```
+4. Payload returned to the user:
+```bash
+The Kubernetes system is trying to scale a StatefulSet named fake-deployment using the HorizontalPodAutoscaler, but it cannot find the StatefulSet. The solution is to verify that the StatefulSet name is spelled correctly and exists in the same namespace as the HorizontalPodAutoscaler.
+```
+**Anonymization does not currently apply to events.**
 ## Upcoming major milestones
 - [ ] Multiple AI backend support
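The round trip described above comes down to the two `util` helpers added later in this diff (`MaskString` and `ReplaceIfMatch`) plus a `strings.ReplaceAll` on the way back. A minimal, self-contained sketch; the error text is the README example above, and the AI response is invented for illustration:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/k8sgpt-ai/k8sgpt/pkg/util"
)

func main() {
	name := "fake-deployment"
	failure := fmt.Sprintf("HorizontalPodAutoscaler uses StatefulSet/%s as ScaleTargetRef which does not exist.", name)

	// Mask the sensitive object name before the failure text leaves the cluster.
	masked := util.MaskString(name)
	prompt := util.ReplaceIfMatch(failure, name, masked)
	fmt.Println(prompt) // ... StatefulSet/<masked> as ScaleTargetRef which does not exist.

	// Illustrative AI backend response that echoes the masked token.
	aiResponse := "Verify that the StatefulSet named " + masked + " exists in the same namespace."

	// De-anonymize before returning the solution to the user,
	// mirroring the strings.ReplaceAll step in GetAIResults.
	fmt.Println(strings.ReplaceAll(aiResponse, masked, name))
}
```

Because `MaskString` is random on every call, the unmasked/masked pair has to be kept (as `common.Sensitive` does) so the response can be restored.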

View File

@@ -21,6 +21,7 @@ var (
 language string
 nocache bool
 namespace string
+anonymize bool
 )
 // AnalyzeCmd represents the problems command
@@ -93,7 +94,7 @@ var AnalyzeCmd = &cobra.Command{
 }
 if explain {
-err := config.GetAIResults(output)
+err := config.GetAIResults(output, anonymize)
 if err != nil {
 color.Red("Error: %v", err)
 os.Exit(1)
@@ -121,6 +122,8 @@ func init() {
 AnalyzeCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "Namespace to analyze")
 // no cache flag
 AnalyzeCmd.Flags().BoolVarP(&nocache, "no-cache", "c", false, "Do not use cached data")
+// anonymize flag
+AnalyzeCmd.Flags().BoolVarP(&anonymize, "anonymize", "a", false, "Anonymize data before sending it to the AI backend. This flag masks sensitive data, such as Kubernetes object names and labels, by replacing it with a key. However, please note that this flag does not currently apply to events.")
 // array of strings flag
 AnalyzeCmd.Flags().StringSliceVarP(&filters, "filter", "f", []string{}, "Filter for these analyzers (e.g. Pod, PersistentVolumeClaim, Service, ReplicaSet)")
 // explain flag
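With the flag registered above, anonymization can be enabled on any `--explain` run; the `-a` shorthand comes from the `BoolVarP` call:

```bash
# long form, as documented in the README
k8sgpt analyze --explain --filter=Service --output=json --anonymize

# shorthand registered above
k8sgpt analyze --explain --filter=Service -a
```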

View File

@@ -55,7 +55,6 @@ func (c *OpenAIClient) GetCompletion(ctx context.Context, prompt string) (string
 }
 func (a *OpenAIClient) Parse(ctx context.Context, prompt []string, nocache bool) (string, error) {
-// parse the text with the AI backend
 inputKey := strings.Join(prompt, " ")
 // Check for cached data
 sEnc := base64.StdEncoding.EncodeToString([]byte(inputKey))

View File

@@ -12,6 +12,7 @@ import (
 "github.com/k8sgpt-ai/k8sgpt/pkg/analyzer"
 "github.com/k8sgpt-ai/k8sgpt/pkg/common"
 "github.com/k8sgpt-ai/k8sgpt/pkg/kubernetes"
+"github.com/k8sgpt-ai/k8sgpt/pkg/util"
 "github.com/schollz/progressbar/v3"
 "github.com/spf13/viper"
 )
@@ -127,13 +128,13 @@ func (a *Analysis) PrintOutput() {
 fmt.Printf("%s %s(%s)\n", color.CyanString("%d", n),
 color.YellowString(result.Name), color.CyanString(result.ParentObject))
 for _, err := range result.Error {
-fmt.Printf("- %s %s\n", color.RedString("Error:"), color.RedString(err))
+fmt.Printf("- %s %s\n", color.RedString("Error:"), color.RedString(err.Text))
 }
 fmt.Println(color.GreenString(result.Details + "\n"))
 }
 }
-func (a *Analysis) GetAIResults(output string) error {
+func (a *Analysis) GetAIResults(output string, anonymize bool) error {
 if len(a.Results) == 0 {
 return nil
 }
@@ -144,7 +145,17 @@ func (a *Analysis) GetAIResults(output string) error {
 }
 for index, analysis := range a.Results {
-parsedText, err := a.AIClient.Parse(a.Context, analysis.Error, a.NoCache)
+var texts []string
+for _, failure := range analysis.Error {
+if anonymize {
+for _, s := range failure.Sensitive {
+failure.Text = util.ReplaceIfMatch(failure.Text, s.Unmasked, s.Masked)
+}
+}
+texts = append(texts, failure.Text)
+}
+parsedText, err := a.AIClient.Parse(a.Context, texts, a.NoCache)
 if err != nil {
 // FIXME: can we avoid checking if output is json multiple times?
 // maybe implement the progress bar better?
@@ -159,6 +170,15 @@ func (a *Analysis) GetAIResults(output string) error {
 return fmt.Errorf("failed while calling AI provider %s: %v", a.AIClient.GetName(), err)
 }
 }
+if anonymize {
+for _, failure := range analysis.Error {
+for _, s := range failure.Sensitive {
+parsedText = strings.ReplaceAll(parsedText, s.Masked, s.Unmasked)
+}
+}
+}
 analysis.Details = parsedText
 if output != "json" {
 bar.Add(1)

View File

@@ -43,12 +43,16 @@ func TestAnalysis_ProblemJsonOutput(t *testing.T) {
 analysis := Analysis{
 Results: []common.Result{
 {
 Kind: "Deployment",
 Name: "test-deployment",
-Error: []string{"test-problem"},
+Error: []common.Failure{
+{
+Text: "test-problem",
+Sensitive: []common.Sensitive{},
+},
+},
 Details: "test-solution",
-ParentObject: "parent-resource",
-},
+ParentObject: "parent-resource"},
 },
 Namespace: "default",
 }
@@ -58,12 +62,16 @@ func TestAnalysis_ProblemJsonOutput(t *testing.T) {
 Problems: 1,
 Results: []common.Result{
 {
 Kind: "Deployment",
 Name: "test-deployment",
-Error: []string{"test-problem"},
+Error: []common.Failure{
+{
+Text: "test-problem",
+Sensitive: []common.Sensitive{},
+},
+},
 Details: "test-solution",
-ParentObject: "parent-resource",
-},
+ParentObject: "parent-resource"},
 },
 }
@@ -88,12 +96,20 @@ func TestAnalysis_MultipleProblemJsonOutput(t *testing.T) {
 analysis := Analysis{
 Results: []common.Result{
 {
 Kind: "Deployment",
 Name: "test-deployment",
-Error: []string{"test-problem", "another-test-problem"},
+Error: []common.Failure{
+{
+Text: "test-problem",
+Sensitive: []common.Sensitive{},
+},
+{
+Text: "another-test-problem",
+Sensitive: []common.Sensitive{},
+},
+},
 Details: "test-solution",
-ParentObject: "parent-resource",
-},
+ParentObject: "parent-resource"},
 },
 Namespace: "default",
 }
@@ -103,12 +119,20 @@ func TestAnalysis_MultipleProblemJsonOutput(t *testing.T) {
 Problems: 2,
 Results: []common.Result{
 {
 Kind: "Deployment",
 Name: "test-deployment",
-Error: []string{"test-problem", "another-test-problem"},
+Error: []common.Failure{
+{
+Text: "test-problem",
+Sensitive: []common.Sensitive{},
+},
+{
+Text: "another-test-problem",
+Sensitive: []common.Sensitive{},
+},
+},
 Details: "test-solution",
-ParentObject: "parent-resource",
-},
+ParentObject: "parent-resource"},
 },
 }

View File

@@ -2,6 +2,7 @@ package analyzer
 import (
 "fmt"
 "github.com/k8sgpt-ai/k8sgpt/pkg/common"
 "github.com/k8sgpt-ai/k8sgpt/pkg/util"
 metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
@@ -19,7 +20,7 @@ func (HpaAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 var preAnalysis = map[string]common.PreAnalysis{}
 for _, hpa := range list.Items {
-var failures []string
+var failures []common.Failure
 // check ScaleTargetRef exist
 scaleTargetRef := hpa.Spec.ScaleTargetRef
@@ -47,11 +48,22 @@ func (HpaAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 scaleTargetRefNotFound = true
 }
 default:
-failures = append(failures, fmt.Sprintf("HorizontalPodAutoscaler uses %s as ScaleTargetRef which does not possible option.", scaleTargetRef.Kind))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("HorizontalPodAutoscaler uses %s as ScaleTargetRef which is not an option.", scaleTargetRef.Kind),
+Sensitive: []common.Sensitive{},
+})
 }
 if scaleTargetRefNotFound {
-failures = append(failures, fmt.Sprintf("HorizontalPodAutoscaler uses %s/%s as ScaleTargetRef which does not exist.", scaleTargetRef.Kind, scaleTargetRef.Name))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("HorizontalPodAutoscaler uses %s/%s as ScaleTargetRef which does not exist.", scaleTargetRef.Kind, scaleTargetRef.Name),
+Sensitive: []common.Sensitive{
+{
+Unmasked: scaleTargetRef.Name,
+Masked: util.MaskString(scaleTargetRef.Name),
+},
+},
+})
 }
 if len(failures) > 0 {

View File

@@ -102,7 +102,7 @@ func TestHPAAnalyzerWithUnsuportedScaleTargetRef(t *testing.T) {
 var errorFound bool
 for _, analysis := range analysisResults {
 for _, err := range analysis.Error {
-if strings.Contains(err, "does not possible option.") {
+if strings.Contains(err.Text, "which is not an option.") {
 errorFound = true
 break
 }
@@ -149,7 +149,7 @@ func TestHPAAnalyzerWithNonExistentScaleTargetRef(t *testing.T) {
 var errorFound bool
 for _, analysis := range analysisResults {
 for _, err := range analysis.Error {
-if strings.Contains(err, "does not exist.") {
+if strings.Contains(err.Text, "does not exist.") {
 errorFound = true
 break
 }

View File

@@ -20,14 +20,26 @@ func (IngressAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 var preAnalysis = map[string]common.PreAnalysis{}
 for _, ing := range list.Items {
-var failures []string
+var failures []common.Failure
 // get ingressClassName
 ingressClassName := ing.Spec.IngressClassName
 if ingressClassName == nil {
 ingClassValue := ing.Annotations["kubernetes.io/ingress.class"]
 if ingClassValue == "" {
-failures = append(failures, fmt.Sprintf("Ingress %s/%s does not specify an Ingress class.", ing.Namespace, ing.Name))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("Ingress %s/%s does not specify an Ingress class.", ing.Namespace, ing.Name),
+Sensitive: []common.Sensitive{
+{
+Unmasked: ing.Namespace,
+Masked: util.MaskString(ing.Namespace),
+},
+{
+Unmasked: ing.Name,
+Masked: util.MaskString(ing.Name),
+},
+},
+})
 } else {
 ingressClassName = &ingClassValue
 }
@@ -37,7 +49,15 @@ func (IngressAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 if ingressClassName != nil {
 _, err := a.Client.GetClient().NetworkingV1().IngressClasses().Get(a.Context, *ingressClassName, metav1.GetOptions{})
 if err != nil {
-failures = append(failures, fmt.Sprintf("Ingress uses the ingress class %s which does not exist.", *ingressClassName))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("Ingress uses the ingress class %s which does not exist.", *ingressClassName),
+Sensitive: []common.Sensitive{
+{
+Unmasked: *ingressClassName,
+Masked: util.MaskString(*ingressClassName),
+},
+},
+})
 }
 }
@@ -47,7 +67,19 @@ func (IngressAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 for _, path := range rule.HTTP.Paths {
 _, err := a.Client.GetClient().CoreV1().Services(ing.Namespace).Get(a.Context, path.Backend.Service.Name, metav1.GetOptions{})
 if err != nil {
-failures = append(failures, fmt.Sprintf("Ingress uses the service %s/%s which does not exist.", ing.Namespace, path.Backend.Service.Name))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("Ingress uses the service %s/%s which does not exist.", ing.Namespace, path.Backend.Service.Name),
+Sensitive: []common.Sensitive{
+{
+Unmasked: ing.Namespace,
+Masked: util.MaskString(ing.Namespace),
+},
+{
+Unmasked: path.Backend.Service.Name,
+Masked: util.MaskString(path.Backend.Service.Name),
+},
+},
+})
 }
 }
 }
@@ -55,7 +87,19 @@ func (IngressAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 for _, tls := range ing.Spec.TLS {
 _, err := a.Client.GetClient().CoreV1().Secrets(ing.Namespace).Get(a.Context, tls.SecretName, metav1.GetOptions{})
 if err != nil {
-failures = append(failures, fmt.Sprintf("Ingress uses the secret %s/%s as a TLS certificate which does not exist.", ing.Namespace, tls.SecretName))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("Ingress uses the secret %s/%s as a TLS certificate which does not exist.", ing.Namespace, tls.SecretName),
+Sensitive: []common.Sensitive{
+{
+Unmasked: ing.Namespace,
+Masked: util.MaskString(ing.Namespace),
+},
+{
+Unmasked: tls.SecretName,
+Masked: util.MaskString(tls.SecretName),
+},
+},
+})
 }
 }
 if len(failures) > 0 {

View File

@@ -100,7 +100,7 @@ func TestIngressAnalyzerWithoutIngressClassAnnotation(t *testing.T) {
 var errorFound bool
 for _, analysis := range analysisResults {
 for _, err := range analysis.Error {
-if strings.Contains(err, "does not specify an Ingress class") {
+if strings.Contains(err.Text, "does not specify an Ingress class") {
 errorFound = true
 break
 }

View File

@@ -20,7 +20,7 @@ func (PdbAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 var preAnalysis = map[string]common.PreAnalysis{}
 for _, pdb := range list.Items {
-var failures []string
+var failures []common.Failure
 evt, err := FetchLatestEvent(a.Context, a.Client, pdb.Namespace, pdb.Name)
 if err != nil || evt == nil {
@@ -30,13 +30,31 @@ func (PdbAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 if evt.Reason == "NoPods" && evt.Message != "" {
 if pdb.Spec.Selector != nil {
 for k, v := range pdb.Spec.Selector.MatchLabels {
-failures = append(failures, fmt.Sprintf("%s, expected label %s=%s", evt.Message, k, v))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("%s, expected label %s=%s", evt.Message, k, v),
+Sensitive: []common.Sensitive{
+{
+Unmasked: k,
+Masked: util.MaskString(k),
+},
+{
+Unmasked: v,
+Masked: util.MaskString(v),
+},
+},
+})
 }
 for _, v := range pdb.Spec.Selector.MatchExpressions {
-failures = append(failures, fmt.Sprintf("%s, expected expression %s", evt.Message, v))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("%s, expected expression %s", evt.Message, v),
+Sensitive: []common.Sensitive{},
+})
 }
 } else {
-failures = append(failures, fmt.Sprintf("%s, selector is nil", evt.Message))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("%s, selector is nil", evt.Message),
+Sensitive: []common.Sensitive{},
+})
 }
 }

View File

@@ -20,7 +20,7 @@ func (PodAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 var preAnalysis = map[string]common.PreAnalysis{}
 for _, pod := range list.Items {
-var failures []string
+var failures []common.Failure
 // Check for pending pods
 if pod.Status.Phase == "Pending" {
@@ -28,7 +28,10 @@ func (PodAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 for _, containerStatus := range pod.Status.Conditions {
 if containerStatus.Type == "PodScheduled" && containerStatus.Reason == "Unschedulable" {
 if containerStatus.Message != "" {
-failures = []string{containerStatus.Message}
+failures = append(failures, common.Failure{
+Text: containerStatus.Message,
+Sensitive: []common.Sensitive{},
+})
 }
 }
 }
@@ -39,7 +42,10 @@ func (PodAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 if containerStatus.State.Waiting != nil {
 if containerStatus.State.Waiting.Reason == "CrashLoopBackOff" || containerStatus.State.Waiting.Reason == "ImagePullBackOff" {
 if containerStatus.State.Waiting.Message != "" {
-failures = append(failures, containerStatus.State.Waiting.Message)
+failures = append(failures, common.Failure{
+Text: containerStatus.State.Waiting.Message,
+Sensitive: []common.Sensitive{},
+})
 }
 }
 // This represents a container that is still being created or blocked due to conditions such as OOMKilled
@@ -51,7 +57,10 @@ func (PodAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 continue
 }
 if evt.Reason == "FailedCreatePodSandBox" && evt.Message != "" {
-failures = append(failures, evt.Message)
+failures = append(failures, common.Failure{
+Text: evt.Message,
+Sensitive: []common.Sensitive{},
+})
 }
 }
 }

View File

@@ -21,7 +21,7 @@ func (PvcAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 var preAnalysis = map[string]common.PreAnalysis{}
 for _, pvc := range list.Items {
-var failures []string
+var failures []common.Failure
 // Check for empty rs
 if pvc.Status.Phase == "Pending" {
@@ -32,7 +32,10 @@ func (PvcAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 continue
 }
 if evt.Reason == "ProvisioningFailed" && evt.Message != "" {
-failures = append(failures, evt.Message)
+failures = append(failures, common.Failure{
+Text: evt.Message,
+Sensitive: []common.Sensitive{},
+})
 }
 }
 if len(failures) > 0 {

View File

@@ -21,7 +21,7 @@ func (ReplicaSetAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 var preAnalysis = map[string]common.PreAnalysis{}
 for _, rs := range list.Items {
-var failures []string
+var failures []common.Failure
 // Check for empty rs
 if rs.Status.Replicas == 0 {
@@ -29,7 +29,11 @@ func (ReplicaSetAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 // Check through container status to check for crashes
 for _, rsStatus := range rs.Status.Conditions {
 if rsStatus.Type == "ReplicaFailure" && rsStatus.Reason == "FailedCreate" {
-failures = []string{rsStatus.Message}
+failures = append(failures, common.Failure{
+Text: rsStatus.Message,
+Sensitive: []common.Sensitive{},
+})
 }
 }
 }

View File

@@ -22,7 +22,7 @@ func (ServiceAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 var preAnalysis = map[string]common.PreAnalysis{}
 for _, ep := range list.Items {
-var failures []string
+var failures []common.Failure
 // Check for empty service
 if len(ep.Subsets) == 0 {
@@ -33,7 +33,19 @@ func (ServiceAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 }
 for k, v := range svc.Spec.Selector {
-failures = append(failures, fmt.Sprintf("Service has no endpoints, expected label %s=%s", k, v))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("Service has no endpoints, expected label %s=%s", k, v),
+Sensitive: []common.Sensitive{
+{
+Unmasked: k,
+Masked: util.MaskString(k),
+},
+{
+Unmasked: v,
+Masked: util.MaskString(v),
+},
+},
+})
 }
 } else {
 count := 0
@@ -46,7 +58,10 @@ func (ServiceAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 count++
 pods = append(pods, addresses.TargetRef.Kind+"/"+addresses.TargetRef.Name)
 }
-failures = append(failures, fmt.Sprintf("Service has not ready endpoints, pods: %s, expected %d", pods, count))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("Service has not ready endpoints, pods: %s, expected %d", pods, count),
+Sensitive: []common.Sensitive{},
+})
 }
 }
 }

View File

@@ -18,20 +18,40 @@ func (StatefulSetAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 var preAnalysis = map[string]common.PreAnalysis{}
 for _, sts := range list.Items {
-var failures []string
+var failures []common.Failure
 // get serviceName
 serviceName := sts.Spec.ServiceName
 _, err := a.Client.GetClient().CoreV1().Services(sts.Namespace).Get(a.Context, serviceName, metav1.GetOptions{})
 if err != nil {
-failures = append(failures, fmt.Sprintf("StatefulSet uses the service %s/%s which does not exist.", sts.Namespace, serviceName))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("StatefulSet uses the service %s/%s which does not exist.", sts.Namespace, serviceName),
+Sensitive: []common.Sensitive{
+{
+Unmasked: sts.Namespace,
+Masked: util.MaskString(sts.Namespace),
+},
+{
+Unmasked: serviceName,
+Masked: util.MaskString(serviceName),
+},
+},
+})
 }
 if len(sts.Spec.VolumeClaimTemplates) > 0 {
 for _, volumeClaimTemplate := range sts.Spec.VolumeClaimTemplates {
 if volumeClaimTemplate.Spec.StorageClassName != nil {
 _, err := a.Client.GetClient().StorageV1().StorageClasses().Get(a.Context, *volumeClaimTemplate.Spec.StorageClassName, metav1.GetOptions{})
 if err != nil {
-failures = append(failures, fmt.Sprintf("StatefulSet uses the storage class %s which does not exist.", *volumeClaimTemplate.Spec.StorageClassName))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("StatefulSet uses the storage class %s which does not exist.", *volumeClaimTemplate.Spec.StorageClassName),
+Sensitive: []common.Sensitive{
+{
+Unmasked: *volumeClaimTemplate.Spec.StorageClassName,
+Masked: util.MaskString(*volumeClaimTemplate.Spec.StorageClassName),
+},
+},
+})
 }
 }
 }

View File

@@ -67,7 +67,7 @@ func TestStatefulSetAnalyzerWithoutService(t *testing.T) {
 for _, analysis := range analysisResults {
 for _, got := range analysis.Error {
-if want == got {
+if want == got.Text {
 errorFound = true
 }
 }
@@ -132,7 +132,7 @@ func TestStatefulSetAnalyzerMissingStorageClass(t *testing.T) {
 for _, analysis := range analysisResults {
 for _, got := range analysis.Error {
-if want == got {
+if want == got.Text {
 errorFound = true
 }
 }

View File

@@ -28,7 +28,7 @@ type Analyzer struct {
 type PreAnalysis struct {
 Pod v1.Pod
-FailureDetails []string
+FailureDetails []Failure
 ReplicaSet appsv1.ReplicaSet
 PersistentVolumeClaim v1.PersistentVolumeClaim
 Endpoint v1.Endpoints
@@ -41,9 +41,19 @@ type PreAnalysis struct {
 }
 type Result struct {
 Kind string `json:"kind"`
 Name string `json:"name"`
-Error []string `json:"error"`
+Error []Failure `json:"error"`
 Details string `json:"details"`
 ParentObject string `json:"parentObject"`
+}
+type Failure struct {
+Text string
+Sensitive []Sensitive
+}
+type Sensitive struct {
+Unmasked string
+Masked string
 }
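Since `Failure` and `Sensitive` carry no JSON tags, the `error` field in `--output=json` changes from a list of strings to a list of objects. Assuming Go's default field names, a single result from the tests above would serialize roughly as:

```json
{
  "kind": "Deployment",
  "name": "test-deployment",
  "error": [
    {
      "Text": "test-problem",
      "Sensitive": []
    }
  ],
  "details": "test-solution",
  "parentObject": "parent-resource"
}
```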

View File

@@ -38,12 +38,15 @@ func (TrivyAnalyzer) Analyze(a common.Analyzer) ([]common.Result, error) {
 for _, report := range result.Items {
 // For each pod there may be multiple vulnerabilities
-var failures []string
+var failures []common.Failure
 for _, vuln := range report.Report.Vulnerabilities {
 if vuln.Severity == "CRITICAL" {
 // get the vulnerability ID
 // get the vulnerability description
-failures = append(failures, fmt.Sprintf("critical Vulnerability found ID: %s (learn more at: %s)", vuln.VulnerabilityID, vuln.PrimaryLink))
+failures = append(failures, common.Failure{
+Text: fmt.Sprintf("critical Vulnerability found ID: %s (learn more at: %s)", vuln.VulnerabilityID, vuln.PrimaryLink),
+Sensitive: []common.Sensitive{},
+})
 }
 }
 if len(failures) > 0 {

View File

@@ -2,11 +2,17 @@ package util
 import (
 "context"
+"encoding/base64"
+"fmt"
+"math/rand"
+"regexp"
 "github.com/k8sgpt-ai/k8sgpt/pkg/kubernetes"
 metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
 )
+var anonymizePattern = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()-_=+[]{}|;':\",./<>?")
 func SliceContainsString(slice []string, s string) bool {
 for _, item := range slice {
 if item == s {
@@ -105,3 +111,21 @@ func SliceDiff(source, dest []string) []string {
 }
 return diff
 }
+func MaskString(input string) string {
+key := make([]byte, len(input))
+result := make([]rune, len(input))
+rand.Read(key)
+for i := range result {
+result[i] = anonymizePattern[int(key[i])%len(anonymizePattern)]
+}
+return base64.StdEncoding.EncodeToString([]byte(string(result)))
+}
+func ReplaceIfMatch(text string, pattern string, replacement string) string {
+re := regexp.MustCompile(fmt.Sprintf(`%s(\b)`, pattern))
+if re.MatchString(text) {
+text = re.ReplaceAllString(text, replacement)
+}
+return text
+}
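A hypothetical test sketch for the two new helpers (not part of this change set); `MaskString` output is random and base64-encoded, so only the replacement round trip is asserted:

```go
package util

import (
	"strings"
	"testing"
)

// Illustrative test: MaskString should produce a different value and
// ReplaceIfMatch should swap the sensitive value for the masked one.
func TestMaskStringAndReplaceIfMatch(t *testing.T) {
	unmasked := "fake-deployment"
	masked := MaskString(unmasked)
	if masked == "" || masked == unmasked {
		t.Fatalf("expected a masked value different from %q, got %q", unmasked, masked)
	}

	text := "StatefulSet/fake-deployment as ScaleTargetRef which does not exist."
	got := ReplaceIfMatch(text, unmasked, masked)
	if strings.Contains(got, unmasked) || !strings.Contains(got, masked) {
		t.Errorf("expected %q to be replaced by %q, got %q", unmasked, masked, got)
	}
}
```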