diff --git a/README.md b/README.md index 8b53eb1..c84d17a 100644 --- a/README.md +++ b/README.md @@ -399,6 +399,26 @@ _Serve mode_ k8sgpt serve ``` +_Serve mode with MCP (Model Context Protocol)_ + +``` +# Enable MCP server on default port 8089 +k8sgpt serve --mcp --mcp-http + +# Enable MCP server on custom port +k8sgpt serve --mcp --mcp-http --mcp-port 9090 + +# Full serve mode with MCP +k8sgpt serve --mcp --mcp-http --port 8080 --metrics-port 8081 --mcp-port 8089 +``` + +The MCP server enables integration with tools like Claude Desktop and other MCP-compatible clients. It runs on port 8089 by default and provides: +- Kubernetes cluster analysis via MCP protocol +- Resource information and health status +- AI-powered issue explanations and recommendations + +For Helm chart deployment with MCP support, see the `charts/k8sgpt/values-mcp-example.yaml` file. + _Analysis with serve mode_ ``` diff --git a/charts/k8sgpt/Chart.yaml b/charts/k8sgpt/Chart.yaml index a6f56e0..38fe55b 100644 --- a/charts/k8sgpt/Chart.yaml +++ b/charts/k8sgpt/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: v0.3.0 #x-release-please-version +appVersion: v0.4.23 #x-release-please-version description: A Helm chart for K8SGPT name: k8sgpt type: application diff --git a/charts/k8sgpt/templates/deployment.yaml b/charts/k8sgpt/templates/deployment.yaml index fcba48e..13c1e01 100644 --- a/charts/k8sgpt/templates/deployment.yaml +++ b/charts/k8sgpt/templates/deployment.yaml @@ -32,7 +32,13 @@ spec: image: {{ .Values.deployment.image.repository }}:{{ .Values.deployment.image.tag | default .Chart.AppVersion }} ports: - containerPort: 8080 - args: ["serve"] + {{- if .Values.deployment.mcp.enabled }} + - containerPort: {{ .Values.deployment.mcp.port | int }} + {{- end }} + args: ["serve" + {{- if .Values.deployment.mcp.enabled }}, "--mcp", "--mcp-http", "--mcp-port", {{ .Values.deployment.mcp.port | quote }} + {{- end }} + ] {{- if .Values.deployment.resources }} resources: {{- toYaml 
.Values.deployment.resources | nindent 10 }} diff --git a/charts/k8sgpt/templates/service.yaml b/charts/k8sgpt/templates/service.yaml index 6491289..c1d83a4 100644 --- a/charts/k8sgpt/templates/service.yaml +++ b/charts/k8sgpt/templates/service.yaml @@ -19,4 +19,9 @@ spec: - name: metrics port: 8081 targetPort: 8081 + {{- if .Values.deployment.mcp.enabled }} + - name: mcp + port: {{ .Values.deployment.mcp.port | int }} + targetPort: {{ .Values.deployment.mcp.port | int }} + {{- end }} type: {{ .Values.service.type }} diff --git a/charts/k8sgpt/values-mcp-example.yaml b/charts/k8sgpt/values-mcp-example.yaml new file mode 100644 index 0000000..5c97bc3 --- /dev/null +++ b/charts/k8sgpt/values-mcp-example.yaml @@ -0,0 +1,39 @@ +# Example values file to enable MCP (Model Context Protocol) service +# Copy this file and modify as needed, then use: helm install k8sgpt ./charts/k8sgpt -f values-mcp-example.yaml + +deployment: + # Enable MCP server + mcp: + enabled: true + port: "8089" # Port for MCP server (default: 8089) + http: true # Enable HTTP mode for MCP server + + # Other deployment settings remain the same + image: + repository: ghcr.io/k8sgpt-ai/k8sgpt + tag: "" # defaults to Chart.appVersion if unspecified + imagePullPolicy: Always + env: + model: "gpt-3.5-turbo" + backend: "openai" + resources: + limits: + cpu: "1" + memory: "512Mi" + requests: + cpu: "0.2" + memory: "156Mi" + +# Service configuration +service: + type: ClusterIP + annotations: {} + +# Secret configuration for AI backend +secret: + secretKey: "" # base64 encoded OpenAI token + +# ServiceMonitor for Prometheus metrics +serviceMonitor: + enabled: false + additionalLabels: {} diff --git a/charts/k8sgpt/values.yaml b/charts/k8sgpt/values.yaml index 7a48bd1..9f5b74f 100644 --- a/charts/k8sgpt/values.yaml +++ b/charts/k8sgpt/values.yaml @@ -7,6 +7,11 @@ deployment: env: model: "gpt-3.5-turbo" backend: "openai" # one of: [ openai | llama ] + # MCP (Model Context Protocol) server configuration + mcp: + enabled: false # Enable 
MCP server + port: "8089" # Port for MCP server + http: true # Enable HTTP mode for MCP server resources: limits: cpu: "1" diff --git a/pkg/server/mcp.go b/pkg/server/mcp.go index e9a7b36..28aa463 100644 --- a/pkg/server/mcp.go +++ b/pkg/server/mcp.go @@ -141,6 +141,8 @@ func (s *K8sGptMCPServer) registerToolsAndResources() error { ), mcp.WithArray("filters", mcp.Description("Provide filters to narrow down the analysis (e.g. ['Pods', 'Deployments'])"), + // Without WithStringItems, the MCP server fails with the Google Agent Development Kit (ADK); interestingly, it works fine with the MCP Inspector. + mcp.WithStringItems(), ), ) s.server.AddTool(analyzeTool, s.handleAnalyze)