Mirror of https://github.com/kubeshark/kubeshark.git (synced 2025-09-16 06:46:12 +00:00)
Refactor Mizu, define an extension API and add new protocols: AMQP, Kafka (#224)
* Separate HTTP-related code into `extensions/http` as a Go plugin
* Move the `extensions` folder into the `tap` folder
* Move HTTP files into `tap/extensions/lib` for now
* Replace `orcaman/concurrent-map` with `sync.Map`
* Remove `grpc_assembler.go`
* Remove `github.com/up9inc/mizu/tap/extensions/http/lib`
* Add a build script to automatically build extensions from a known path and load them
* Start to define the extension API
* Implement the `run()` function for the TCP stream
* Add support for defining multiple ports in the extension API
* Set the extension name inside the extension
* Declare the `Dissect` function in the extension API
* Dissect HTTP requests from inside the HTTP extension
* Distinguish between outbound and inbound ports
* Dissect HTTP responses from inside the HTTP extension
* Bring back the HTTP request-response pair matcher
* Return a `*api.RequestResponsePair` from the dissection
* Bring back the gRPC-HTTP/2 parser
* Fix the issues in `handleHTTP1ClientStream` and `handleHTTP1ServerStream`
* Call a function pointer to emit dissected data back to the `tap` package
* roee changes: trying to fix the agent to work with the "api" object (still not working)
* Fix a small mistake from the merge conflicts
* Fix the issues introduced by the merge conflict
* Add the `Emitter` interface to the API and send `OutputChannelItem`(s) to `OutputChannel`
* Fix the `HTTP1` handlers
* Set `ConnectionInfo` in the HTTP handlers
* Fix the `Dockerfile` to build the extensions
* Remove some unwanted code
* no message
* Re-enable the `getStreamProps` function
* Migrate back from `gopacket/tcpassembly` to `gopacket/reassembly`
* Introduce the `HTTPPayload` struct and `HTTPPayloader` interface to `MarshalJSON()` all the data structures returned by the HTTP protocol
* Read `socketHarOutChannel` instead of `filteredHarChannel`
* Connect `OutputChannelItem` to the last WebSocket, which means the web UI finally works again
* Add `.env.example` to the React app
* Marshal and unmarshal `*http.Request`, `*http.Response` pairs
* Move `loadExtensions` into `main.go` and map extensions into `extensionsMap`
* Add the `Summarize()` method to the `Dissector` interface
* Add the `Analyze` method to the `Dissector` interface and `MizuEntry` to the extension API
* Add the `Protocol` struct and make it affect the UI
* Refactor the `BaseEntryDetails` struct and display the source and destination ports in the UI
* Display the protocol name inside the details layout
* Add the `Represent` method to the `Dissector` interface and manipulate the UI through this method
* Make the protocol color affect the details layout color and write the protocol abbreviation vertically
* Remove everything HTTP-related from the `tap` package and make the extension system fully functional
* Fix the TypeScript warnings
* Bring the AMQP-related files into the `amqp` directory
* Add the `--nodefrag` flag to the tapper and bring in the main AMQP code
* Implement the AMQP `BasicPublish` and fix some issues in the UI when the response payload is missing
* Implement the `representBasicPublish` method
* Fix several minor issues
* Implement the AMQP `BasicDeliver`
* Implement the AMQP `QueueDeclare`
* Implement the AMQP `ExchangeDeclare`
* Implement the AMQP `ConnectionStart`
* Implement the AMQP `ConnectionClose`
* Implement the AMQP `QueueBind`
* Implement the AMQP `BasicConsume`
* Fix an issue in `ConnectionStart`
* Fix a linter error
* Bring the Kafka-related files into the `kafka` directory
* Fix the build errors in the Kafka Go files
* Implement the `Dissect` method of Kafka and adapt the request-response pair matcher to the asynchronous client-server stream
* Do the "Is reversed?" check inside `getStreamProps` and fix an issue in the Kafka `Dissect` method
* Implement the `Analyze` and `Summarize` methods of Kafka
* Implement the representations for Kafka `Metadata`, `RequestHeader` and `ResponseHeader`
* Refactor the AMQP and Kafka implementations to create the summary string only inside the `Analyze` method
* Implement the representations for Kafka `ApiVersions`
* Implement the representations for Kafka `Produce`
* Implement the representations for Kafka `Fetch`
* Implement the representations for Kafka `ListOffsets`, `CreateTopics` and `DeleteTopics`
* Fix the encoding of the AMQP `BasicPublish` and `BasicDeliver` body
* Remove the unnecessary logging
* Remove more logging
* Introduce the `Version` field to the `Protocol` struct for dynamically switching the HTTP protocol to HTTP/2
* Fix the issues in analysis and representation of the HTTP/2 (gRPC) protocol
* Fix the issues in the summary section of the details layout for the HTTP/2 (gRPC) protocol
* Fix the read errors that freeze the sniffer in HTTP and Kafka
* Fix the issues in HTTP POST data
* Fix one more issue in HTTP POST data
* Fix an infinite loop in Kafka
* Fix another freezing issue in Kafka
* Revert "UI Infra - Support multiple entry types + refactoring (#211)" (this reverts commit f74a52d4dc)
* Fix more issues introduced by the merge
* Fix the status code in the summary section
* Add the cleaner again (why was it removed?) and add a TODO on the extension loop
* Fix the Dockerfile (remove deleting the .env file): it is listed in .dockerignore and the build fails if the file does not exist
* Fix GetEntries (the "/entries" endpoint) to work with `tapApi.BaseEntryDetails` (moved from shared)
* Fix an issue in the UI summary section
* Refactor the protocol payload structs
* Fix a log message in the passive tapper
* Adapt the `APP_PORTS` environment variable to the new extension system and change its format to `APP_PORTS='{"http": ["8001"]}'`
* Revert "fix dockerfile (remove deleting .env file)" (this reverts commit 4f514ae1f4)
* Bring in the necessary changes from f74a52d4dc
* Open the API server URL in the web browser as soon as Mizu is ready
* Make the TCP reader consist of a single goroutine (instead of two) and try to dissect in both client and server mode by rewinding
* Swap `TcpID` without overwriting it
* Sort extensions by priority
* Try to dissect by looping through all the extensions
* Fix the `getStreamProps` function (it should be passed from the CLI as it was before)
* Turn the TCP reader back into two goroutines (client and server)
* Fix a typo
* Learn `isClient` from the TCP stream
* Set the `viewer` style `overflow: "auto"`
* Fix the memory leaks in the AMQP and Kafka dissectors
* Revert some of the changes in be7c65eb6d
* Remove `allExtensionPorts` since it's no longer needed
* Remove `APP_PORTS` since it's no longer needed
* Fix all of the minor issues in the React code
* Check the Kafka header size and fail fast
* Break the dissector loop upon a successful dissection
* Don't break the dissector loop; protocols might collide
* Improve the HTTP request-response counter (still not perfect)
* Make the HTTP request-response counter perfect
* Revert "Revert some of the changes in be7c65eb6d3fb657a059707da3ca559937e59739" (this reverts commit 08e7d786d8)
* Bring back the `filterItems` and `isHealthCheckByUserAgent` functions
* Remove some development artifacts
* Remove unused and commented lines that are not relevant
* Fix the performance of the TCP stream factory; make it create two `tcpReader`(s) per extension
* Change a log to debug level
* Make `*api.CounterPair` a field of `tcpReader`
* Set `isTapTarget` to always `true` again since the `filterAuthorities` implementation has problems
* Remove a variable that's only used for logging, even though it was not introduced by this branch
* Bring back the `NumberOfRules` field of the `ApplicableRules` struct
* Remove the unused `NewEntry` function
* Move the `k8sResolver == nil` check to a more appropriate place
* Default `healthChecksUserAgentHeaders` should be an empty array (like the default config value)
* Remove spam `console.log`
* Fix the Rules button causing the app to crash (it accessed the service via an incorrect property)
* Ignore all `.env*` files in the Docker build
* Better caching in the Dockerfile: only copy `go.mod` before `go mod download`
* Check for errors while loading an extension
* Add a comment about why `Protocol` is not a pointer
* Bring back the call to `deleteOlderThan`
* Remove the `nil` check
* Reduce the maximum allowed AMQP message from 128MB to 1MB
* Fix an error that only occurs when a Kafka broker is initiating
* Revert the change in b2abd7b990
* Fix the service name resolution in all protocols
* Remove the `anydirection` flag and fix the issue in `filterAuthorities`
* Pass `sync.Map` by reference to the `deleteOlderThan` method
* Fix the packet capture issue in standalone mode that was introduced by the removal of `anydirection`
* Temporarily resolve the memory exhaustion in AMQP
* Fix a nil pointer dereference error
* Fix the CLI build error
* Fix a memory leak identified by `pprof`

Co-authored-by: Roee Gadot <roee.gadot@up9.com>
Co-authored-by: Nimrod Gilboa Markevich <nimrod@up9.com>
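For orientation, the extension API described above comes down to a dissector contract that every protocol plugin (HTTP, AMQP, Kafka) satisfies. The sketch below is reconstructed from the methods the HTTP plugin implements in the file that follows; the authoritative definitions live in the `tap/api` package, so treat this only as an illustration.

// Illustrative sketch of the dissector contract, reconstructed from the methods implemented below.
// The referenced types (Extension, TcpID, CounterPair, Emitter, OutputChannelItem, MizuEntry,
// BaseEntryDetails, Protocol) are defined in the tap/api package.
type Dissector interface {
    Register(extension *Extension)
    Ping()
    Dissect(b *bufio.Reader, isClient bool, tcpID *TcpID, counterPair *CounterPair, emitter Emitter) error
    Analyze(item *OutputChannelItem, entryId string, resolvedSource string, resolvedDestination string) *MizuEntry
    Summarize(entry *MizuEntry) *BaseEntryDetails
    Represent(entry *MizuEntry) (Protocol, []byte, error)
}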
tap/extensions/http/main.go (new file, 384 lines)
@@ -0,0 +1,384 @@
package main

import (
    "bufio"
    "encoding/json"
    "fmt"
    "io"
    "log"
    "net/url"

    "github.com/romana/rlog"

    "github.com/up9inc/mizu/tap/api"
)

var protocol api.Protocol = api.Protocol{
    Name:            "http",
    LongName:        "Hypertext Transfer Protocol -- HTTP/1.1",
    Abbreviation:    "HTTP",
    Version:         "1.1",
    BackgroundColor: "#205cf5",
    ForegroundColor: "#ffffff",
    FontSize:        12,
    ReferenceLink:   "https://datatracker.ietf.org/doc/html/rfc2616",
    Ports:           []string{"80", "8080", "50051"},
    Priority:        0,
}

var http2Protocol api.Protocol = api.Protocol{
    Name:            "http",
    LongName:        "Hypertext Transfer Protocol Version 2 (HTTP/2) (gRPC)",
    Abbreviation:    "HTTP/2",
    Version:         "2.0",
    BackgroundColor: "#244c5a",
    ForegroundColor: "#ffffff",
    FontSize:        11,
    ReferenceLink:   "https://datatracker.ietf.org/doc/html/rfc7540",
    Ports:           []string{"80", "8080"},
    Priority:        0,
}

const (
    TypeHttpRequest = iota
    TypeHttpResponse
)

func init() {
    log.Println("Initializing HTTP extension...")
}

// dissecting implements the dissector contract of the extension API for HTTP/1.1 and HTTP/2 (gRPC).
type dissecting string

// Register attaches the HTTP protocol descriptor and the request-response matcher's open-messages map to the extension.
func (d dissecting) Register(extension *api.Extension) {
    extension.Protocol = protocol
    extension.MatcherMap = reqResMatcher.openMessagesMap
}

func (d dissecting) Ping() {
    log.Printf("pong %s\n", protocol.Name)
}

// Dissect reads the TCP stream from either the client or the server side, detects whether it speaks
// HTTP/2 or HTTP/1.x, and keeps dissecting messages until the stream ends, emitting each dissected
// item through the emitter.
func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, counterPair *api.CounterPair, emitter api.Emitter) error {
    ident := fmt.Sprintf("%s->%s:%s->%s", tcpID.SrcIP, tcpID.DstIP, tcpID.SrcPort, tcpID.DstPort)
    isHTTP2, err := checkIsHTTP2Connection(b, isClient)
    if err != nil {
        rlog.Debugf("[HTTP/2-Prepare-Connection] stream %s Failed to check if client is HTTP/2: %s (%v,%+v)", ident, err, err, err)
        // Do something?
    }

    var grpcAssembler *GrpcAssembler
    if isHTTP2 {
        err := prepareHTTP2Connection(b, isClient)
        if err != nil {
            rlog.Debugf("[HTTP/2-Prepare-Connection-After-Check] stream %s error: %s (%v,%+v)", ident, err, err, err)
        }
        grpcAssembler = createGrpcAssembler(b)
    }

    success := false
    for {
        if isHTTP2 {
            err = handleHTTP2Stream(grpcAssembler, tcpID, emitter)
            if err == io.EOF || err == io.ErrUnexpectedEOF {
                break
            } else if err != nil {
                rlog.Debugf("[HTTP/2] stream %s error: %s (%v,%+v)", ident, err, err, err)
                continue
            }
            success = true
        } else if isClient {
            err = handleHTTP1ClientStream(b, tcpID, counterPair, emitter)
            if err == io.EOF || err == io.ErrUnexpectedEOF {
                break
            } else if err != nil {
                rlog.Debugf("[HTTP-request] stream %s Request error: %s (%v,%+v)", ident, err, err, err)
                continue
            }
            success = true
        } else {
            err = handleHTTP1ServerStream(b, tcpID, counterPair, emitter)
            if err == io.EOF || err == io.ErrUnexpectedEOF {
                break
            } else if err != nil {
                rlog.Debugf("[HTTP-response] stream %s Response error: %s (%v,%+v)", ident, err, err, err)
                continue
            }
            success = true
        }
    }

    if !success {
        return err
    }
    return nil
}

// SetHostname replaces the host part of address with newHostname; on a parse error the original address is returned unchanged.
func SetHostname(address, newHostname string) string {
    replacedUrl, err := url.Parse(address)
    if err != nil {
        log.Printf("error replacing hostname to %s in address %s, returning original %v", newHostname, address, err)
        return address
    }
    replacedUrl.Host = newHostname
    return replacedUrl.String()
}

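// Illustrative usage of SetHostname (hypothetical values, not part of the original file): the
// resolved Kubernetes service name replaces the raw host, while the scheme and path are kept.
// Note that url.URL.Host carries both host and port, so any original port is dropped as well:
//
//	SetHostname("http://10.0.0.7:8080/health", "catalogue.sock-shop")
//	// -> "http://catalogue.sock-shop/health"
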
// Analyze converts an emitted request-response pair into a MizuEntry, deriving the service name
// from the Host header (HTTP/1.1) or the :scheme/:authority pseudo-headers (HTTP/2) and applying
// the resolved source/destination names when available.
func (d dissecting) Analyze(item *api.OutputChannelItem, entryId string, resolvedSource string, resolvedDestination string) *api.MizuEntry {
    var host, scheme, authority, path, service string

    request := item.Pair.Request.Payload.(map[string]interface{})
    response := item.Pair.Response.Payload.(map[string]interface{})
    reqDetails := request["details"].(map[string]interface{})
    resDetails := response["details"].(map[string]interface{})

    for _, header := range reqDetails["headers"].([]interface{}) {
        h := header.(map[string]interface{})
        if h["name"] == "Host" {
            host = h["value"].(string)
        }
        if h["name"] == ":authority" {
            authority = h["value"].(string)
        }
        if h["name"] == ":scheme" {
            scheme = h["value"].(string)
        }
        if h["name"] == ":path" {
            path = h["value"].(string)
        }
    }

    if item.Protocol.Version == "2.0" {
        service = fmt.Sprintf("%s://%s", scheme, authority)
    } else {
        service = fmt.Sprintf("http://%s", host)
        path = reqDetails["url"].(string)
    }

    request["url"] = path
    if resolvedDestination != "" {
        service = SetHostname(service, resolvedDestination)
    } else if resolvedSource != "" {
        service = SetHostname(service, resolvedSource)
    }

    entryBytes, _ := json.Marshal(item.Pair)
    return &api.MizuEntry{
        ProtocolName:        protocol.Name,
        ProtocolVersion:     item.Protocol.Version,
        EntryId:             entryId,
        Entry:               string(entryBytes),
        Url:                 fmt.Sprintf("%s%s", service, path),
        Method:              reqDetails["method"].(string),
        Status:              int(resDetails["status"].(float64)),
        RequestSenderIp:     item.ConnectionInfo.ClientIP,
        Service:             service,
        Timestamp:           item.Timestamp,
        Path:                path,
        ResolvedSource:      resolvedSource,
        ResolvedDestination: resolvedDestination,
        SourceIp:            item.ConnectionInfo.ClientIP,
        DestinationIp:       item.ConnectionInfo.ServerIP,
        SourcePort:          item.ConnectionInfo.ClientPort,
        DestinationPort:     item.ConnectionInfo.ServerPort,
        IsOutgoing:          item.ConnectionInfo.IsOutgoing,
    }
}

// Summarize builds the BaseEntryDetails shown in the UI entry list, switching the protocol
// descriptor to HTTP/2 when the entry was captured with protocol version 2.0.
func (d dissecting) Summarize(entry *api.MizuEntry) *api.BaseEntryDetails {
    var p api.Protocol
    if entry.ProtocolVersion == "2.0" {
        p = http2Protocol
    } else {
        p = protocol
    }
    return &api.BaseEntryDetails{
        Id:              entry.EntryId,
        Protocol:        p,
        Url:             entry.Url,
        RequestSenderIp: entry.RequestSenderIp,
        Service:         entry.Service,
        Summary:         entry.Path,
        StatusCode:      entry.Status,
        Method:          entry.Method,
        Timestamp:       entry.Timestamp,
        SourceIp:        entry.SourceIp,
        DestinationIp:   entry.DestinationIp,
        SourcePort:      entry.SourcePort,
        DestinationPort: entry.DestinationPort,
        IsOutgoing:      entry.IsOutgoing,
        Latency:         0,
        Rules: api.ApplicableRules{
            Latency: 0,
            Status:  false,
        },
    }
}

// representRequest turns the request details into the generic table/body sections that the UI's
// details layout renders.
func representRequest(request map[string]interface{}) []interface{} {
    repRequest := make([]interface{}, 0)

    details, _ := json.Marshal([]map[string]string{
        {
            "name":  "Method",
            "value": request["method"].(string),
        },
        {
            "name":  "URL",
            "value": request["url"].(string),
        },
        {
            "name":  "Body Size",
            "value": fmt.Sprintf("%g bytes", request["bodySize"].(float64)),
        },
    })
    repRequest = append(repRequest, map[string]string{
        "type":  "table",
        "title": "Details",
        "data":  string(details),
    })

    headers, _ := json.Marshal(request["headers"].([]interface{}))
    repRequest = append(repRequest, map[string]string{
        "type":  "table",
        "title": "Headers",
        "data":  string(headers),
    })

    cookies, _ := json.Marshal(request["cookies"].([]interface{}))
    repRequest = append(repRequest, map[string]string{
        "type":  "table",
        "title": "Cookies",
        "data":  string(cookies),
    })

    queryString, _ := json.Marshal(request["queryString"].([]interface{}))
    repRequest = append(repRequest, map[string]string{
        "type":  "table",
        "title": "Query String",
        "data":  string(queryString),
    })

    postData, _ := request["postData"].(map[string]interface{})
    mimeType, _ := postData["mimeType"]
    if mimeType == nil || len(mimeType.(string)) == 0 {
        mimeType = "text/html"
    }
    text, _ := postData["text"]
    if text != nil {
        repRequest = append(repRequest, map[string]string{
            "type":      "body",
            "title":     "POST Data (text/plain)",
            "encoding":  "",
            "mime_type": mimeType.(string),
            "data":      text.(string),
        })
    }

    if postData["params"] != nil {
        params, _ := json.Marshal(postData["params"].([]interface{}))
        if len(params) > 0 {
            if mimeType == "multipart/form-data" {
                multipart, _ := json.Marshal([]map[string]string{
                    {
                        "name":  "Files",
                        "value": string(params),
                    },
                })
                repRequest = append(repRequest, map[string]string{
                    "type":  "table",
                    "title": "POST Data (multipart/form-data)",
                    "data":  string(multipart),
                })
            } else {
                repRequest = append(repRequest, map[string]string{
                    "type":  "table",
                    "title": "POST Data (application/x-www-form-urlencoded)",
                    "data":  string(params),
                })
            }
        }
    }

    return repRequest
}

// representResponse does the same for the response details, including the response body with its
// encoding and MIME type.
func representResponse(response map[string]interface{}) []interface{} {
    repResponse := make([]interface{}, 0)

    details, _ := json.Marshal([]map[string]string{
        {
            "name":  "Status",
            "value": fmt.Sprintf("%g", response["status"].(float64)),
        },
        {
            "name":  "Status Text",
            "value": response["statusText"].(string),
        },
        {
            "name":  "Body Size",
            "value": fmt.Sprintf("%g bytes", response["bodySize"].(float64)),
        },
    })
    repResponse = append(repResponse, map[string]string{
        "type":  "table",
        "title": "Details",
        "data":  string(details),
    })

    headers, _ := json.Marshal(response["headers"].([]interface{}))
    repResponse = append(repResponse, map[string]string{
        "type":  "table",
        "title": "Headers",
        "data":  string(headers),
    })

    cookies, _ := json.Marshal(response["cookies"].([]interface{}))
    repResponse = append(repResponse, map[string]string{
        "type":  "table",
        "title": "Cookies",
        "data":  string(cookies),
    })

    content, _ := response["content"].(map[string]interface{})
    mimeType, _ := content["mimeType"]
    if mimeType == nil || len(mimeType.(string)) == 0 {
        mimeType = "text/html"
    }
    encoding, _ := content["encoding"]
    text, _ := content["text"]
    if text != nil {
        repResponse = append(repResponse, map[string]string{
            "type":      "body",
            "title":     "Body",
            "encoding":  encoding.(string),
            "mime_type": mimeType.(string),
            "data":      text.(string),
        })
    }

    return repResponse
}

// Represent unmarshals the stored request-response pair and produces the JSON representation that
// drives the details layout, together with the protocol descriptor (HTTP/1.1 or HTTP/2).
func (d dissecting) Represent(entry *api.MizuEntry) (api.Protocol, []byte, error) {
    var p api.Protocol
    if entry.ProtocolVersion == "2.0" {
        p = http2Protocol
    } else {
        p = protocol
    }
    var root map[string]interface{}
    json.Unmarshal([]byte(entry.Entry), &root)
    representation := make(map[string]interface{}, 0)
    request := root["request"].(map[string]interface{})["payload"].(map[string]interface{})
    response := root["response"].(map[string]interface{})["payload"].(map[string]interface{})
    reqDetails := request["details"].(map[string]interface{})
    resDetails := response["details"].(map[string]interface{})
    repRequest := representRequest(reqDetails)
    repResponse := representResponse(resDetails)
    representation["request"] = repRequest
    representation["response"] = repResponse
    object, err := json.Marshal(representation)
    return p, object, err
}

// Dissector is the value exported by this Go plugin; the host process looks this symbol up when it
// loads the extension.
var Dissector dissecting
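Since each extension is compiled as a Go plugin and loaded from a known path (see the commit message above), the exported `Dissector` value is the symbol the host process resolves at load time. Below is a minimal, hypothetical sketch of such a loader; it assumes the `api.Dissector` interface sketched earlier and an assumed extensions output directory, and is not the agent's actual loading code.

package main

import (
    "fmt"
    "log"
    "plugin"

    "github.com/up9inc/mizu/tap/api"
)

// loadExtension opens a compiled extension shared object and resolves its exported Dissector symbol.
func loadExtension(path string) (api.Dissector, error) {
    p, err := plugin.Open(path)
    if err != nil {
        return nil, err
    }
    sym, err := p.Lookup("Dissector")
    if err != nil {
        return nil, err
    }
    dissector, ok := sym.(api.Dissector)
    if !ok {
        return nil, fmt.Errorf("symbol Dissector in %s does not implement api.Dissector", path)
    }
    return dissector, nil
}

func main() {
    // "./extensions/http.so" is an assumed build output path, not one taken from the source.
    dissector, err := loadExtension("./extensions/http.so")
    if err != nil {
        log.Fatal(err)
    }
    dissector.Ping() // expected to log "pong http"
}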