Mirror of https://github.com/kubeshark/kubeshark.git, synced 2025-09-12 12:49:57 +00:00
Always derive the summary and method fields from the entry in the database on read (#877)

* Always derive the summary and method fields from the entry in the database on read
* Update the expected JSONs in the protocol unit tests
* Add test cases for `Summarize` method
* Remove unused `GetEntry` method, `DataUnmarshaler` struct and `UnmarshalData` method
* Temporarily enable the acceptance tests
* Temporarily disable Slack notification on failure
* Update the Cypress tests
* Fix an issue in Redis
* Fix a typo and the Cypress tests
* Revert "Temporarily disable Slack notification on failure". This reverts commit cad1901ea4.
* Revert "Temporarily enable the acceptance tests". This reverts commit bad7706c9b.
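The change in a nutshell: the Kafka dissector stops persisting the human-readable summary and method strings with each entry and instead derives them on every read via the new `Summarize` method shown further down in the diff. Below is a minimal, hypothetical Go sketch of that read-time derivation; the `storedEntry` and `baseEntry` types and the tiny `apiNames` table are simplified stand-ins for `api.Entry`, `api.BaseEntry`, and the extension's full API-name map, not the repository's actual definitions.

package main

import "fmt"

// storedEntry is a hypothetical, trimmed-down stand-in for api.Entry: only the
// raw request map is persisted, with no pre-computed summary or method columns.
type storedEntry struct {
    Id      string
    Request map[string]interface{}
}

// baseEntry stands in for api.BaseEntry, the lightweight row sent to the UI.
type baseEntry struct {
    Id      string
    Method  string
    Summary string
}

// apiNames is a small illustrative subset of the Kafka API-key-to-name table.
var apiNames = map[int]string{0: "Produce", 1: "Fetch", 3: "Metadata", 18: "ApiVersions"}

// summarize derives Method and Summary from the stored request on read,
// mirroring the pattern this commit introduces with dissecting.Summarize.
func summarize(e *storedEntry) *baseEntry {
    apiKey := int(e.Request["apiKey"].(float64)) // JSON numbers unmarshal as float64
    summary, _ := e.Request["clientID"].(string) // e.g. the ApiVersions case
    return &baseEntry{Id: e.Id, Method: apiNames[apiKey], Summary: summary}
}

func main() {
    e := &storedEntry{
        Id:      "42",
        Request: map[string]interface{}{"apiKey": 18.0, "clientID": "consumer-1"},
    }
    fmt.Printf("%+v\n", summarize(e)) // &{Id:42 Method:ApiVersions Summary:consumer-1}
}

In the real extension, `Summarize` also builds the matching filter expressions (`methodQuery`, `summaryQuery`) so the UI can filter on these fields even though they are no longer stored.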
@@ -13,4 +13,4 @@ test-pull-bin:
 test-pull-expect:
 	@mkdir -p expect
-	@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect/kafka/\* expect
+	@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect3/kafka/\* expect
@@ -61,83 +61,7 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string, resolvedDestination string, namespace string) *api.Entry {
    request := item.Pair.Request.Payload.(map[string]interface{})
    reqDetails := request["details"].(map[string]interface{})
    apiKey := ApiKey(reqDetails["apiKey"].(float64))

    summary := ""
    switch apiKey {
    case Metadata:
        _topics := reqDetails["payload"].(map[string]interface{})["topics"]
        if _topics == nil {
            break
        }
        topics := _topics.([]interface{})
        for _, topic := range topics {
            summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
        }
    case ApiVersions:
        summary = reqDetails["clientID"].(string)
    case Produce:
        _topics := reqDetails["payload"].(map[string]interface{})["topicData"]
        if _topics == nil {
            break
        }
        topics := _topics.([]interface{})
        for _, topic := range topics {
            summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["topic"].(string))
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
        }
    case Fetch:
        _topics := reqDetails["payload"].(map[string]interface{})["topics"]
        if _topics == nil {
            break
        }
        topics := _topics.([]interface{})
        for _, topic := range topics {
            summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["topic"].(string))
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
        }
    case ListOffsets:
        _topics := reqDetails["payload"].(map[string]interface{})["topics"]
        if _topics == nil {
            break
        }
        topics := _topics.([]interface{})
        for _, topic := range topics {
            summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
        }
    case CreateTopics:
        _topics := reqDetails["payload"].(map[string]interface{})["topics"]
        if _topics == nil {
            break
        }
        topics := _topics.([]interface{})
        for _, topic := range topics {
            summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
        }
    case DeleteTopics:
        if reqDetails["topicNames"] == nil {
            break
        }
        topicNames := reqDetails["topicNames"].([]string)
        for _, name := range topicNames {
            summary += fmt.Sprintf("%s, ", name)
        }
    }

    request["url"] = summary
    elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
    if elapsedTime < 0 {
        elapsedTime = 0
@@ -158,13 +82,127 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
        Outgoing:    item.ConnectionInfo.IsOutgoing,
        Request:     reqDetails,
        Response:    item.Pair.Response.Payload.(map[string]interface{})["details"].(map[string]interface{}),
        Method:      apiNames[apiKey],
        Status:      0,
        Timestamp:   item.Timestamp,
        StartTime:   item.Pair.Request.CaptureTime,
        ElapsedTime: elapsedTime,
        Summary:     summary,
        IsOutgoing:  item.ConnectionInfo.IsOutgoing,
    }
}

func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
    status := 0
    statusQuery := ""

    apiKey := ApiKey(entry.Request["apiKey"].(float64))
    method := apiNames[apiKey]
    methodQuery := fmt.Sprintf("request.apiKey == %d", int(entry.Request["apiKey"].(float64)))

    summary := ""
    summaryQuery := ""
    switch apiKey {
    case Metadata:
        _topics := entry.Request["payload"].(map[string]interface{})["topics"]
        if _topics == nil {
            break
        }
        topics := _topics.([]interface{})
        for i, topic := range topics {
            summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
            summaryQuery += fmt.Sprintf(`request.payload.topics[%d].name == "%s" and`, i, summary)
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
            summaryQuery = summaryQuery[:len(summaryQuery)-4]
        }
    case ApiVersions:
        summary = entry.Request["clientID"].(string)
        summaryQuery = fmt.Sprintf(`request.clientID == "%s"`, summary)
    case Produce:
        _topics := entry.Request["payload"].(map[string]interface{})["topicData"]
        if _topics == nil {
            break
        }
        topics := _topics.([]interface{})
        for i, topic := range topics {
            summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["topic"].(string))
            summaryQuery += fmt.Sprintf(`request.payload.topicData[%d].topic == "%s" and`, i, summary)
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
            summaryQuery = summaryQuery[:len(summaryQuery)-4]
        }
    case Fetch:
        _topics := entry.Request["payload"].(map[string]interface{})["topics"]
        if _topics == nil {
            break
        }
        topics := _topics.([]interface{})
        for i, topic := range topics {
            summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["topic"].(string))
            summaryQuery += fmt.Sprintf(`request.payload.topics[%d].topic == "%s" and`, i, summary)
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
            summaryQuery = summaryQuery[:len(summaryQuery)-4]
        }
    case ListOffsets:
        _topics := entry.Request["payload"].(map[string]interface{})["topics"]
        if _topics == nil {
            break
        }
        topics := _topics.([]interface{})
        for i, topic := range topics {
            summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
            summaryQuery += fmt.Sprintf(`request.payload.topics[%d].name == "%s" and`, i, summary)
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
            summaryQuery = summaryQuery[:len(summaryQuery)-4]
        }
    case CreateTopics:
        _topics := entry.Request["payload"].(map[string]interface{})["topics"]
        if _topics == nil {
            break
        }
        topics := _topics.([]interface{})
        for i, topic := range topics {
            summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
            summaryQuery += fmt.Sprintf(`request.payload.topics[%d].name == "%s" and`, i, summary)
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
            summaryQuery = summaryQuery[:len(summaryQuery)-4]
        }
    case DeleteTopics:
        if entry.Request["topicNames"] == nil {
            break
        }
        topicNames := entry.Request["topicNames"].([]string)
        for i, name := range topicNames {
            summary += fmt.Sprintf("%s, ", name)
            summaryQuery += fmt.Sprintf(`request.topicNames[%d] == "%s" and`, i, summary)
        }
        if len(summary) > 0 {
            summary = summary[:len(summary)-2]
            summaryQuery = summaryQuery[:len(summaryQuery)-4]
        }
    }

    return &api.BaseEntry{
        Id:             entry.Id,
        Protocol:       entry.Protocol,
        Summary:        summary,
        SummaryQuery:   summaryQuery,
        Status:         status,
        StatusQuery:    statusQuery,
        Method:         method,
        MethodQuery:    methodQuery,
        Timestamp:      entry.Timestamp,
        Source:         entry.Source,
        Destination:    entry.Destination,
        IsOutgoing:     entry.Outgoing,
        Latency:        entry.ElapsedTime,
        Rules:          entry.Rules,
        ContractStatus: entry.ContractStatus,
    }
}
@@ -21,14 +21,16 @@ import (
const (
    binDir          = "bin"
    patternBin      = "*_req.bin"
    patternDissect  = "*.json"
    patternExpect   = "*.json"
    msgDissecting   = "Dissecting:"
    msgAnalyzing    = "Analyzing:"
    msgSummarizing  = "Summarizing:"
    msgRepresenting = "Representing:"
    respSuffix      = "_res.bin"
    expectDir       = "expect"
    dissectDir      = "dissect"
    analyzeDir      = "analyze"
    summarizeDir    = "summarize"
    representDir    = "represent"
    testUpdate      = "TEST_UPDATE"
)
@@ -187,7 +189,7 @@ func TestAnalyze(t *testing.T) {
 	}
 
 	dissector := NewDissector()
-	paths, err := filepath.Glob(path.Join(expectDirDissect, patternDissect))
+	paths, err := filepath.Glob(path.Join(expectDirDissect, patternExpect))
 	if err != nil {
 		log.Fatal(err)
 	}
@@ -231,6 +233,63 @@ func TestAnalyze(t *testing.T) {
    }
}

func TestSummarize(t *testing.T) {
    _, testUpdateEnabled := os.LookupEnv(testUpdate)

    expectDirAnalyze := path.Join(expectDir, analyzeDir)
    expectDirSummarize := path.Join(expectDir, summarizeDir)

    if testUpdateEnabled {
        os.RemoveAll(expectDirSummarize)
        err := os.MkdirAll(expectDirSummarize, 0775)
        assert.Nil(t, err)
    }

    dissector := NewDissector()
    paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
    if err != nil {
        log.Fatal(err)
    }

    for _, _path := range paths {
        fmt.Printf("%s %s\n", msgSummarizing, _path)

        bytes, err := ioutil.ReadFile(_path)
        assert.Nil(t, err)

        var entries []*api.Entry
        err = json.Unmarshal(bytes, &entries)
        assert.Nil(t, err)

        var baseEntries []*api.BaseEntry
        for _, entry := range entries {
            baseEntry := dissector.Summarize(entry)
            baseEntries = append(baseEntries, baseEntry)
        }

        pathExpect := path.Join(expectDirSummarize, filepath.Base(_path))

        marshaled, err := json.Marshal(baseEntries)
        assert.Nil(t, err)

        if testUpdateEnabled {
            if len(baseEntries) > 0 {
                err = os.WriteFile(pathExpect, marshaled, 0644)
                assert.Nil(t, err)
            }
        } else {
            if _, err := os.Stat(pathExpect); errors.Is(err, os.ErrNotExist) {
                assert.Len(t, entries, 0)
            } else {
                expectedBytes, err := ioutil.ReadFile(pathExpect)
                assert.Nil(t, err)

                assert.JSONEq(t, string(expectedBytes), string(marshaled))
            }
        }
    }
}

func TestRepresent(t *testing.T) {
    _, testUpdateEnabled := os.LookupEnv(testUpdate)

@@ -244,7 +303,7 @@ func TestRepresent(t *testing.T) {
 	}
 
 	dissector := NewDissector()
-	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternDissect))
+	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
 	if err != nil {
 		log.Fatal(err)
 	}