Mirror of https://github.com/kubeshark/kubeshark.git (synced 2025-06-21 22:08:59 +00:00)
Always derive the summary and method fields from the entry in the database on read (#877)

* Always derive the summary and method fields from the entry in the database on read
* Update the expected JSONs in the protocol unit tests
* Add test cases for `Summarize` method
* Remove unused `GetEntry` method, `DataUnmarshaler` struct and `UnmarshalData` method
* Temporarily enable the acceptance tests
* Temporarily disable Slack notification on failure
* Update the Cypress tests
* Fix an issue in Redis
* Fix a typo and the Cypress tests
* Revert "Temporarily disable Slack notification on failure". This reverts commit cad1901ea4.
* Revert "Temporarily enable the acceptance tests". This reverts commit bad7706c9b.
This commit is contained in:
parent c1d774e53c
commit dd430c31d5
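The core of the change: `Summary`, `Method` and `Status` are no longer stored on the `Entry` at insertion time; instead each protocol extension implements `Dissector.Summarize(entry) *BaseEntry`, and the agent derives those fields whenever an entry is read (the WebSocket stream, `GetEntries`, `GetEntry`). Below is a minimal, self-contained sketch of that read path. The map and method names mirror the diff (`extensionsMap`, `Summarize`, `BaseEntry`), but the stub types here are simplified stand-ins, not the real `tap/api` definitions.

package main

import "fmt"

// Simplified stand-ins for the tap/api types touched by this commit.
type BaseEntry struct {
	Summary      string
	SummaryQuery string
	Method       string
	MethodQuery  string
}

type Entry struct {
	Protocol string
	Request  map[string]interface{}
}

// Dissector is reduced here to the single method this commit adds.
type Dissector interface {
	Summarize(entry *Entry) *BaseEntry
}

type httpDissector struct{}

func (httpDissector) Summarize(entry *Entry) *BaseEntry {
	path := entry.Request["path"].(string)
	method := entry.Request["method"].(string)
	return &BaseEntry{
		Summary:      path,
		SummaryQuery: fmt.Sprintf(`request.path == "%s"`, path),
		Method:       method,
		MethodQuery:  fmt.Sprintf(`request.method == "%s"`, method),
	}
}

// extensionsMap mirrors InitExtensionsMap in the diff: the dissector is
// looked up by protocol name on every read, never at write time.
var extensionsMap = map[string]Dissector{"http": httpDissector{}}

func main() {
	entry := &Entry{Protocol: "http", Request: map[string]interface{}{"path": "/get", "method": "GET"}}
	base := extensionsMap[entry.Protocol].Summarize(entry)
	fmt.Println(base.Summary, base.MethodQuery) // /get request.method == "GET"
}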
@@ -65,14 +65,14 @@ export function checkThatAllEntriesShown() {
     }
 }
 
 export function checkFilterByMethod(funcDict) {
-    const {protocol, method, summary, hugeMizu} = funcDict;
-    const summaryDict = getSummeryDict(summary);
-    const methodDict = getMethodDict(method);
+    const {protocol, method, methodQuery, summary, summaryQuery} = funcDict;
+    const summaryDict = getSummaryDict(summary, summaryQuery);
+    const methodDict = getMethodDict(method, methodQuery);
     const protocolDict = getProtocolDict(protocol.name, protocol.text);
 
     it(`Testing the method: ${method}`, function () {
         // applying filter
-        cy.get('.w-tc-editor-text').clear().type(`method == "${method}"`);
+        cy.get('.w-tc-editor-text').clear().type(methodQuery);
         cy.get('[type="submit"]').click();
         cy.get('.w-tc-editor').should('have.attr', 'style').and('include', Cypress.env('greenFilterColor'));
 
@@ -121,7 +121,7 @@ function resizeIfNeeded(entriesLen) {
 function deepCheck(generalDict, protocolDict, methodDict, entry) {
     const entryNum = getEntryNumById(entry.id);
     const {summary, value} = generalDict;
-    const summaryDict = getSummeryDict(summary);
+    const summaryDict = getSummaryDict(summary);
 
     leftOnHoverCheck(entryNum, methodDict.pathLeft, methodDict.expectedOnHover);
     leftOnHoverCheck(entryNum, protocolDict.pathLeft, protocolDict.expectedOnHover);
@@ -149,13 +149,13 @@ function deepCheck(generalDict, protocolDict, methodDict, entry) {
     }
 }
 
-function getSummeryDict(summary) {
-    if (summary) {
+function getSummaryDict(value, query) {
+    if (value) {
         return {
             pathLeft: '> :nth-child(2) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
             pathRight: '> :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
-            expectedText: summary,
-            expectedOnHover: `summary == "${summary}"`
+            expectedText: value,
+            expectedOnHover: query
         };
     }
     else {
@@ -163,12 +163,12 @@ function getSummeryDict(summary) {
     }
 }
 
-function getMethodDict(method) {
+function getMethodDict(value, query) {
     return {
         pathLeft: '> :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
         pathRight: '> :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
-        expectedText: method,
-        expectedOnHover: `method == "${method}"`
+        expectedText: value,
+        expectedOnHover: query
     };
 }
 
@@ -9,41 +9,53 @@ const rabbitProtocolDetails = {name: 'AMQP', text: 'Advanced Message Queuing Pro
 checkFilterByMethod({
     protocol: rabbitProtocolDetails,
     method: 'exchange declare',
+    methodQuery: 'request.method == "exchange declare"',
     summary: 'exchange',
+    summaryQuery: 'request.exchange == "exchange"',
     value: null
 });
 
 checkFilterByMethod({
     protocol: rabbitProtocolDetails,
     method: 'queue declare',
+    methodQuery: 'request.method == "queue declare"',
     summary: 'queue',
+    summaryQuery: 'request.queue == "queue"',
     value: null
 });
 
 checkFilterByMethod({
     protocol: rabbitProtocolDetails,
     method: 'queue bind',
+    methodQuery: 'request.method == "queue bind"',
     summary: 'queue',
+    summaryQuery: 'request.queue == "queue"',
     value: null
 });
 
 checkFilterByMethod({
     protocol: rabbitProtocolDetails,
     method: 'basic publish',
+    methodQuery: 'request.method == "basic publish"',
     summary: 'exchange',
+    summaryQuery: 'request.exchange == "exchange"',
     value: {tab: valueTabs.request, regex: /^message$/mg}
 });
 
 checkFilterByMethod({
     protocol: rabbitProtocolDetails,
     method: 'basic consume',
+    methodQuery: 'request.method == "basic consume"',
     summary: 'queue',
+    summaryQuery: 'request.queue == "queue"',
     value: null
 });
 
 checkFilterByMethod({
     protocol: rabbitProtocolDetails,
     method: 'basic deliver',
+    methodQuery: 'request.method == "basic deliver"',
     summary: 'exchange',
+    summaryQuery: 'request.queue == "exchange"',
     value: {tab: valueTabs.request, regex: /^message$/mg}
 });
@@ -9,34 +9,44 @@ const redisProtocolDetails = {name: 'redis', text: 'Redis Serialization Protocol
 checkFilterByMethod({
     protocol: redisProtocolDetails,
     method: 'PING',
+    methodQuery: 'request.command == "PING"',
     summary: null,
+    summaryQuery: '',
     value: null
 })
 
 checkFilterByMethod({
     protocol: redisProtocolDetails,
     method: 'SET',
+    methodQuery: 'request.command == "SET"',
     summary: 'key',
+    summaryQuery: 'request.key == "key"',
     value: {tab: valueTabs.request, regex: /^\[value, keepttl]$/mg}
 })
 
 checkFilterByMethod({
     protocol: redisProtocolDetails,
     method: 'EXISTS',
+    methodQuery: 'request.command == "EXISTS"',
     summary: 'key',
+    summaryQuery: 'request.key == "key"',
     value: {tab: valueTabs.response, regex: /^1$/mg}
 })
 
 checkFilterByMethod({
     protocol: redisProtocolDetails,
     method: 'GET',
+    methodQuery: 'request.command == "GET"',
     summary: 'key',
+    summaryQuery: 'request.key == "key"',
     value: {tab: valueTabs.response, regex: /^value$/mg}
 })
 
 checkFilterByMethod({
     protocol: redisProtocolDetails,
     method: 'DEL',
+    methodQuery: 'request.command == "DEL"',
     summary: 'key',
+    summaryQuery: 'request.key == "key"',
     value: {tab: valueTabs.response, regex: /^1$|^0$/mg}
 })
@@ -113,7 +113,7 @@ if (Cypress.env('shouldCheckSrcAndDest')) {
 }
 
 checkFilter({
-    name: 'method == "GET"',
+    name: 'request.method == "GET"',
     leftSidePath: '> :nth-child(3) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
     leftSideExpectedText: 'GET',
     rightSidePath: '> :nth-child(2) > :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
@@ -122,7 +122,7 @@ checkFilter({
 });
 
 checkFilter({
-    name: 'summary == "/get"',
+    name: 'request.path == "/get"',
     leftSidePath: '> :nth-child(3) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
     leftSideExpectedText: '/get',
     rightSidePath: '> :nth-child(2) > :nth-child(2) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
@@ -139,7 +139,7 @@ checkFilter({
     applyByEnter: false
 });
 
-checkFilterNoResults('method == "POST"');
+checkFilterNoResults('request.method == "POST"');
 
 function checkFilterNoResults(filterName) {
     it(`checking the filter: ${filterName}. Expecting no results`, function () {
@@ -17,6 +17,12 @@ import (
 	tapApi "github.com/up9inc/mizu/tap/api"
 )
 
+var extensionsMap map[string]*tapApi.Extension // global
+
+func InitExtensionsMap(ref map[string]*tapApi.Extension) {
+	extensionsMap = ref
+}
+
 type EventHandlers interface {
 	WebSocketConnect(socketId int, isTapper bool)
 	WebSocketDisconnect(socketId int, isTapper bool)
@@ -165,7 +171,8 @@ func websocketHandler(w http.ResponseWriter, r *http.Request, eventHandlers Even
 		if params.EnableFullEntries {
 			message, _ = models.CreateFullEntryWebSocketMessage(entry)
 		} else {
-			base := tapApi.Summarize(entry)
+			extension := extensionsMap[entry.Protocol.Name]
+			base := extension.Dissector.Summarize(entry)
 			message, _ = models.CreateBaseEntryWebSocketMessage(base)
 		}
 
@@ -60,6 +60,7 @@ func LoadExtensions() {
 	})
 
 	controllers.InitExtensionsMap(ExtensionsMap)
+	api.InitExtensionsMap(ExtensionsMap)
 }
 
 func ConfigureBasenineServer(host string, port string, dbSize int64, logLevel logging.Level, insertionFilter string) {
@@ -77,7 +77,8 @@ func GetEntries(c *gin.Context) {
 			return // exit
 		}
 
-		base := tapApi.Summarize(entry)
+		extension := extensionsMap[entry.Protocol.Name]
+		base := extension.Dissector.Summarize(entry)
 
 		dataSlice = append(dataSlice, base)
 	}
@@ -123,6 +124,7 @@ func GetEntry(c *gin.Context) {
 	}
 
 	extension := extensionsMap[entry.Protocol.Name]
+	base := extension.Dissector.Summarize(entry)
 	representation, bodySize, _ := extension.Dissector.Represent(entry.Request, entry.Response)
 
 	var rules []map[string]interface{}
@@ -142,6 +144,7 @@ func GetEntry(c *gin.Context) {
 		Representation: string(representation),
 		BodySize: bodySize,
 		Data: entry,
+		Base: base,
 		Rules: rules,
 		IsRulesEnabled: isRulesEnabled,
 	})
@@ -80,11 +80,7 @@ type httpEntry struct {
 	CreatedAt time.Time `json:"createdAt"`
 	Request map[string]interface{} `json:"request"`
 	Response map[string]interface{} `json:"response"`
-	Summary string `json:"summary"`
-	Method string `json:"method"`
-	Status int `json:"status"`
 	ElapsedTime int64 `json:"elapsedTime"`
-	Path string `json:"path"`
 }
 
 func (client *client) PushEntry(entry *api.Entry) {
@@ -103,11 +99,7 @@ func (client *client) PushEntry(entry *api.Entry) {
 		CreatedAt: entry.StartTime,
 		Request: entry.Request,
 		Response: entry.Response,
-		Summary: entry.Summary,
-		Method: entry.Method,
-		Status: entry.Status,
 		ElapsedTime: entry.ElapsedTime,
-		Path: entry.Path,
 	}
 
 	entryJson, err := json.Marshal(entryToPush)
@@ -12,10 +12,6 @@ import (
 	"github.com/up9inc/mizu/tap"
 )
 
-func GetEntry(r *tapApi.Entry, v tapApi.DataUnmarshaler) error {
-	return v.UnmarshalData(r)
-}
-
 type TapConfig struct {
 	TappedNamespaces map[string]bool `json:"tappedNamespaces"`
 }
@@ -100,6 +100,7 @@ type Dissector interface {
 	Ping()
 	Dissect(b *bufio.Reader, isClient bool, tcpID *TcpID, counterPair *CounterPair, superTimer *SuperTimer, superIdentifier *SuperIdentifier, emitter Emitter, options *TrafficFilteringOptions, reqResMatcher RequestResponseMatcher) error
 	Analyze(item *OutputChannelItem, resolvedSource string, resolvedDestination string, namespace string) *Entry
+	Summarize(entry *Entry) *BaseEntry
 	Represent(request map[string]interface{}, response map[string]interface{}) (object []byte, bodySize int64, err error)
 	Macros() map[string]string
 	NewResponseRequestMatcher() RequestResponseMatcher
@@ -135,12 +136,7 @@ type Entry struct {
 	StartTime time.Time `json:"startTime"`
 	Request map[string]interface{} `json:"request"`
 	Response map[string]interface{} `json:"response"`
-	Summary string `json:"summary"`
-	Method string `json:"method"`
-	Status int `json:"status"`
 	ElapsedTime int64 `json:"elapsedTime"`
-	Path string `json:"path"`
-	IsOutgoing bool `json:"isOutgoing,omitempty"`
 	Rules ApplicableRules `json:"rules,omitempty"`
 	ContractStatus ContractStatus `json:"contractStatus,omitempty"`
 	ContractRequestReason string `json:"contractRequestReason,omitempty"`
@@ -154,6 +150,7 @@ type EntryWrapper struct {
 	Representation string `json:"representation"`
 	BodySize int64 `json:"bodySize"`
 	Data *Entry `json:"data"`
+	Base *BaseEntry `json:"base"`
 	Rules []map[string]interface{} `json:"rulesMatched,omitempty"`
 	IsRulesEnabled bool `json:"isRulesEnabled"`
 }
@@ -161,11 +158,12 @@ type EntryWrapper struct {
 type BaseEntry struct {
 	Id uint `json:"id"`
 	Protocol Protocol `json:"proto,omitempty"`
-	Url string `json:"url,omitempty"`
-	Path string `json:"path,omitempty"`
 	Summary string `json:"summary,omitempty"`
-	StatusCode int `json:"status"`
+	SummaryQuery string `json:"summaryQuery,omitempty"`
+	Status int `json:"status"`
+	StatusQuery string `json:"statusQuery"`
 	Method string `json:"method,omitempty"`
+	MethodQuery string `json:"methodQuery,omitempty"`
 	Timestamp int64 `json:"timestamp,omitempty"`
 	Source *TCP `json:"src"`
 	Destination *TCP `json:"dst"`
@@ -190,44 +188,6 @@ type Contract struct {
 	Content string `json:"content"`
 }
 
-func Summarize(entry *Entry) *BaseEntry {
-	return &BaseEntry{
-		Id: entry.Id,
-		Protocol: entry.Protocol,
-		Path: entry.Path,
-		Summary: entry.Summary,
-		StatusCode: entry.Status,
-		Method: entry.Method,
-		Timestamp: entry.Timestamp,
-		Source: entry.Source,
-		Destination: entry.Destination,
-		IsOutgoing: entry.IsOutgoing,
-		Latency: entry.ElapsedTime,
-		Rules: entry.Rules,
-		ContractStatus: entry.ContractStatus,
-	}
-}
-
-type DataUnmarshaler interface {
-	UnmarshalData(*Entry) error
-}
-
-func (bed *BaseEntry) UnmarshalData(entry *Entry) error {
-	bed.Protocol = entry.Protocol
-	bed.Id = entry.Id
-	bed.Path = entry.Path
-	bed.Summary = entry.Summary
-	bed.StatusCode = entry.Status
-	bed.Method = entry.Method
-	bed.Timestamp = entry.Timestamp
-	bed.Source = entry.Source
-	bed.Destination = entry.Destination
-	bed.IsOutgoing = entry.IsOutgoing
-	bed.Latency = entry.ElapsedTime
-	bed.ContractStatus = entry.ContractStatus
-	return nil
-}
-
 const (
 	TABLE string = "table"
 	BODY string = "body"
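With the generic `Summarize`, `GetEntry` and `UnmarshalData` gone, the ready-made filter queries now ride along on `BaseEntry` (`summaryQuery`, `methodQuery`, `statusQuery`), which is exactly what the updated Cypress helpers type into the filter editor. A small sketch of the resulting JSON, using a stand-in struct with the same json tags as the `BaseEntry` hunk above (the field values are illustrative, matching what the HTTP `Summarize` in this diff would emit for a GET of /get with status 200):

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in carrying only the fields this commit adds or renames.
type baseEntry struct {
	Summary      string `json:"summary,omitempty"`
	SummaryQuery string `json:"summaryQuery,omitempty"`
	Status       int    `json:"status"`
	StatusQuery  string `json:"statusQuery"`
	Method       string `json:"method,omitempty"`
	MethodQuery  string `json:"methodQuery,omitempty"`
}

func main() {
	b := baseEntry{
		Summary:      "/get",
		SummaryQuery: `request.path == "/get"`,
		Status:       200,
		StatusQuery:  "response.status == 200",
		Method:       "GET",
		MethodQuery:  `request.method == "GET"`,
	}
	out, _ := json.MarshalIndent(b, "", "  ")
	fmt.Println(string(out)) // the shape the UI consumes for hover/click filters
}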
@@ -13,4 +13,4 @@ test-pull-bin:
 
 test-pull-expect:
 	@mkdir -p expect
-	@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect/amqp/\* expect
+	@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect3/amqp/\* expect
@@ -219,31 +219,6 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
 	request := item.Pair.Request.Payload.(map[string]interface{})
 	reqDetails := request["details"].(map[string]interface{})
 
-	summary := ""
-	switch request["method"] {
-	case basicMethodMap[40]:
-		summary = reqDetails["exchange"].(string)
-	case basicMethodMap[60]:
-		summary = reqDetails["exchange"].(string)
-	case exchangeMethodMap[10]:
-		summary = reqDetails["exchange"].(string)
-	case queueMethodMap[10]:
-		summary = reqDetails["queue"].(string)
-	case connectionMethodMap[10]:
-		summary = fmt.Sprintf(
-			"%s.%s",
-			strconv.Itoa(int(reqDetails["versionMajor"].(float64))),
-			strconv.Itoa(int(reqDetails["versionMinor"].(float64))),
-		)
-	case connectionMethodMap[50]:
-		summary = reqDetails["replyText"].(string)
-	case queueMethodMap[20]:
-		summary = reqDetails["queue"].(string)
-	case basicMethodMap[20]:
-		summary = reqDetails["queue"].(string)
-	}
-
-	request["url"] = summary
 	reqDetails["method"] = request["method"]
 	return &api.Entry{
 		Protocol: protocol,
@@ -260,17 +235,70 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
 		Namespace: namespace,
 		Outgoing: item.ConnectionInfo.IsOutgoing,
 		Request: reqDetails,
-		Method: request["method"].(string),
-		Status: 0,
 		Timestamp: item.Timestamp,
 		StartTime: item.Pair.Request.CaptureTime,
 		ElapsedTime: 0,
-		Summary: summary,
-		IsOutgoing: item.ConnectionInfo.IsOutgoing,
 	}
 
 }
 
+func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
+	summary := ""
+	summaryQuery := ""
+	method := entry.Request["method"].(string)
+	methodQuery := fmt.Sprintf(`request.method == "%s"`, method)
+	switch method {
+	case basicMethodMap[40]:
+		summary = entry.Request["exchange"].(string)
+		summaryQuery = fmt.Sprintf(`request.exchange == "%s"`, summary)
+	case basicMethodMap[60]:
+		summary = entry.Request["exchange"].(string)
+		summaryQuery = fmt.Sprintf(`request.exchange == "%s"`, summary)
+	case exchangeMethodMap[10]:
+		summary = entry.Request["exchange"].(string)
+		summaryQuery = fmt.Sprintf(`request.exchange == "%s"`, summary)
+	case queueMethodMap[10]:
+		summary = entry.Request["queue"].(string)
+		summaryQuery = fmt.Sprintf(`request.queue == "%s"`, summary)
+	case connectionMethodMap[10]:
+		versionMajor := int(entry.Request["versionMajor"].(float64))
+		versionMinor := int(entry.Request["versionMinor"].(float64))
+		summary = fmt.Sprintf(
+			"%s.%s",
+			strconv.Itoa(versionMajor),
+			strconv.Itoa(versionMinor),
+		)
+		summaryQuery = fmt.Sprintf(`request.versionMajor == %d and request.versionMinor == %d`, versionMajor, versionMinor)
+	case connectionMethodMap[50]:
+		summary = entry.Request["replyText"].(string)
+		summaryQuery = fmt.Sprintf(`request.replyText == "%s"`, summary)
+	case queueMethodMap[20]:
+		summary = entry.Request["queue"].(string)
+		summaryQuery = fmt.Sprintf(`request.queue == "%s"`, summary)
+	case basicMethodMap[20]:
+		summary = entry.Request["queue"].(string)
+		summaryQuery = fmt.Sprintf(`request.queue == "%s"`, summary)
+	}
+
+	return &api.BaseEntry{
+		Id: entry.Id,
+		Protocol: entry.Protocol,
+		Summary: summary,
+		SummaryQuery: summaryQuery,
+		Status: 0,
+		StatusQuery: "",
+		Method: method,
+		MethodQuery: methodQuery,
+		Timestamp: entry.Timestamp,
+		Source: entry.Source,
+		Destination: entry.Destination,
+		IsOutgoing: entry.Outgoing,
+		Latency: entry.ElapsedTime,
+		Rules: entry.Rules,
+		ContractStatus: entry.ContractStatus,
+	}
+}
+
 func (d dissecting) Represent(request map[string]interface{}, response map[string]interface{}) (object []byte, bodySize int64, err error) {
 	bodySize = 0
 	representation := make(map[string]interface{})
@@ -21,14 +21,16 @@ import (
 const (
 	binDir = "bin"
 	patternBin = "*_req.bin"
-	patternDissect = "*.json"
+	patternExpect = "*.json"
 	msgDissecting = "Dissecting:"
 	msgAnalyzing = "Analyzing:"
+	msgSummarizing = "Summarizing:"
 	msgRepresenting = "Representing:"
 	respSuffix = "_res.bin"
 	expectDir = "expect"
 	dissectDir = "dissect"
 	analyzeDir = "analyze"
+	summarizeDir = "summarize"
 	representDir = "represent"
 	testUpdate = "TEST_UPDATE"
 )
@@ -186,7 +188,7 @@ func TestAnalyze(t *testing.T) {
 	}
 
 	dissector := NewDissector()
-	paths, err := filepath.Glob(path.Join(expectDirDissect, patternDissect))
+	paths, err := filepath.Glob(path.Join(expectDirDissect, patternExpect))
 	if err != nil {
 		log.Fatal(err)
 	}
@@ -230,6 +232,63 @@ func TestAnalyze(t *testing.T) {
 	}
 }
 
+func TestSummarize(t *testing.T) {
+	_, testUpdateEnabled := os.LookupEnv(testUpdate)
+
+	expectDirAnalyze := path.Join(expectDir, analyzeDir)
+	expectDirSummarize := path.Join(expectDir, summarizeDir)
+
+	if testUpdateEnabled {
+		os.RemoveAll(expectDirSummarize)
+		err := os.MkdirAll(expectDirSummarize, 0775)
+		assert.Nil(t, err)
+	}
+
+	dissector := NewDissector()
+	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	for _, _path := range paths {
+		fmt.Printf("%s %s\n", msgSummarizing, _path)
+
+		bytes, err := ioutil.ReadFile(_path)
+		assert.Nil(t, err)
+
+		var entries []*api.Entry
+		err = json.Unmarshal(bytes, &entries)
+		assert.Nil(t, err)
+
+		var baseEntries []*api.BaseEntry
+		for _, entry := range entries {
+			baseEntry := dissector.Summarize(entry)
+			baseEntries = append(baseEntries, baseEntry)
+		}
+
+		pathExpect := path.Join(expectDirSummarize, filepath.Base(_path))
+
+		marshaled, err := json.Marshal(baseEntries)
+		assert.Nil(t, err)
+
+		if testUpdateEnabled {
+			if len(baseEntries) > 0 {
+				err = os.WriteFile(pathExpect, marshaled, 0644)
+				assert.Nil(t, err)
+			}
+		} else {
+			if _, err := os.Stat(pathExpect); errors.Is(err, os.ErrNotExist) {
+				assert.Len(t, entries, 0)
+			} else {
+				expectedBytes, err := ioutil.ReadFile(pathExpect)
+				assert.Nil(t, err)
+
+				assert.JSONEq(t, string(expectedBytes), string(marshaled))
+			}
+		}
+	}
+}
+
 func TestRepresent(t *testing.T) {
 	_, testUpdateEnabled := os.LookupEnv(testUpdate)
 
@@ -243,7 +302,7 @@ func TestRepresent(t *testing.T) {
 	}
 
 	dissector := NewDissector()
-	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternDissect))
+	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
 	if err != nil {
 		log.Fatal(err)
 	}
@@ -13,4 +13,4 @@ test-pull-bin:
 
 test-pull-expect:
 	@mkdir -p expect
-	@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect2/http/\* expect
+	@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect3/http/\* expect
@@ -231,7 +231,6 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
 	reqDetails["targetUri"] = reqDetails["url"]
 	reqDetails["path"] = path
 	reqDetails["pathSegments"] = strings.Split(path, "/")[1:]
-	reqDetails["summary"] = path
 
 	// Rearrange the maps for the querying
 	reqDetails["_headers"] = reqDetails["headers"]
@@ -248,17 +247,11 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
 	reqDetails["_queryStringMerged"] = mapSliceMergeRepeatedKeys(reqDetails["_queryString"].([]interface{}))
 	reqDetails["queryString"] = mapSliceRebuildAsMap(reqDetails["_queryStringMerged"].([]interface{}))
 
-	method := reqDetails["method"].(string)
 	statusCode := int(resDetails["status"].(float64))
 	if item.Protocol.Abbreviation == "gRPC" {
 		resDetails["statusText"] = grpcStatusCodes[statusCode]
 	}
 
-	if item.Protocol.Version == "2.0" && !isRequestUpgradedH2C {
-		reqDetails["url"] = path
-		request["url"] = path
-	}
-
 	elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
 	if elapsedTime < 0 {
 		elapsedTime = 0
@@ -280,17 +273,40 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
 		Outgoing: item.ConnectionInfo.IsOutgoing,
 		Request: reqDetails,
 		Response: resDetails,
-		Method: method,
-		Status: statusCode,
 		Timestamp: item.Timestamp,
 		StartTime: item.Pair.Request.CaptureTime,
 		ElapsedTime: elapsedTime,
-		Summary: path,
-		IsOutgoing: item.ConnectionInfo.IsOutgoing,
 		HTTPPair: string(httpPair),
 	}
 }
 
+func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
+	summary := entry.Request["path"].(string)
+	summaryQuery := fmt.Sprintf(`request.path == "%s"`, summary)
+	method := entry.Request["method"].(string)
+	methodQuery := fmt.Sprintf(`request.method == "%s"`, method)
+	status := int(entry.Response["status"].(float64))
+	statusQuery := fmt.Sprintf(`response.status == %d`, status)
+
+	return &api.BaseEntry{
+		Id: entry.Id,
+		Protocol: entry.Protocol,
+		Summary: summary,
+		SummaryQuery: summaryQuery,
+		Status: status,
+		StatusQuery: statusQuery,
+		Method: method,
+		MethodQuery: methodQuery,
+		Timestamp: entry.Timestamp,
+		Source: entry.Source,
+		Destination: entry.Destination,
+		IsOutgoing: entry.Outgoing,
+		Latency: entry.ElapsedTime,
+		Rules: entry.Rules,
+		ContractStatus: entry.ContractStatus,
+	}
+}
+
 func representRequest(request map[string]interface{}) (repRequest []interface{}) {
 	details, _ := json.Marshal([]api.TableData{
 		{
@@ -21,14 +21,16 @@ import (
 const (
 	binDir = "bin"
 	patternBin = "*_req.bin"
-	patternDissect = "*.json"
+	patternExpect = "*.json"
 	msgDissecting = "Dissecting:"
 	msgAnalyzing = "Analyzing:"
+	msgSummarizing = "Summarizing:"
 	msgRepresenting = "Representing:"
 	respSuffix = "_res.bin"
 	expectDir = "expect"
 	dissectDir = "dissect"
 	analyzeDir = "analyze"
+	summarizeDir = "summarize"
 	representDir = "represent"
 	testUpdate = "TEST_UPDATE"
 )
@@ -188,7 +190,7 @@ func TestAnalyze(t *testing.T) {
 	}
 
 	dissector := NewDissector()
-	paths, err := filepath.Glob(path.Join(expectDirDissect, patternDissect))
+	paths, err := filepath.Glob(path.Join(expectDirDissect, patternExpect))
 	if err != nil {
 		log.Fatal(err)
 	}
@@ -232,6 +234,63 @@ func TestAnalyze(t *testing.T) {
 	}
 }
 
+func TestSummarize(t *testing.T) {
+	_, testUpdateEnabled := os.LookupEnv(testUpdate)
+
+	expectDirAnalyze := path.Join(expectDir, analyzeDir)
+	expectDirSummarize := path.Join(expectDir, summarizeDir)
+
+	if testUpdateEnabled {
+		os.RemoveAll(expectDirSummarize)
+		err := os.MkdirAll(expectDirSummarize, 0775)
+		assert.Nil(t, err)
+	}
+
+	dissector := NewDissector()
+	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	for _, _path := range paths {
+		fmt.Printf("%s %s\n", msgSummarizing, _path)
+
+		bytes, err := ioutil.ReadFile(_path)
+		assert.Nil(t, err)
+
+		var entries []*api.Entry
+		err = json.Unmarshal(bytes, &entries)
+		assert.Nil(t, err)
+
+		var baseEntries []*api.BaseEntry
+		for _, entry := range entries {
+			baseEntry := dissector.Summarize(entry)
+			baseEntries = append(baseEntries, baseEntry)
+		}
+
+		pathExpect := path.Join(expectDirSummarize, filepath.Base(_path))
+
+		marshaled, err := json.Marshal(baseEntries)
+		assert.Nil(t, err)
+
+		if testUpdateEnabled {
+			if len(baseEntries) > 0 {
+				err = os.WriteFile(pathExpect, marshaled, 0644)
+				assert.Nil(t, err)
+			}
+		} else {
+			if _, err := os.Stat(pathExpect); errors.Is(err, os.ErrNotExist) {
+				assert.Len(t, entries, 0)
+			} else {
+				expectedBytes, err := ioutil.ReadFile(pathExpect)
+				assert.Nil(t, err)
+
+				assert.JSONEq(t, string(expectedBytes), string(marshaled))
+			}
+		}
+	}
+}
+
 func TestRepresent(t *testing.T) {
 	_, testUpdateEnabled := os.LookupEnv(testUpdate)
 
@@ -245,7 +304,7 @@ func TestRepresent(t *testing.T) {
 	}
 
 	dissector := NewDissector()
-	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternDissect))
+	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
 	if err != nil {
 		log.Fatal(err)
 	}
@@ -13,4 +13,4 @@ test-pull-bin:
 
 test-pull-expect:
 	@mkdir -p expect
-	@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect/kafka/\* expect
+	@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect3/kafka/\* expect
@@ -61,83 +61,7 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
 func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string, resolvedDestination string, namespace string) *api.Entry {
 	request := item.Pair.Request.Payload.(map[string]interface{})
 	reqDetails := request["details"].(map[string]interface{})
-	apiKey := ApiKey(reqDetails["apiKey"].(float64))
 
-	summary := ""
-	switch apiKey {
-	case Metadata:
-		_topics := reqDetails["payload"].(map[string]interface{})["topics"]
-		if _topics == nil {
-			break
-		}
-		topics := _topics.([]interface{})
-		for _, topic := range topics {
-			summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
-		}
-		if len(summary) > 0 {
-			summary = summary[:len(summary)-2]
-		}
-	case ApiVersions:
-		summary = reqDetails["clientID"].(string)
-	case Produce:
-		_topics := reqDetails["payload"].(map[string]interface{})["topicData"]
-		if _topics == nil {
-			break
-		}
-		topics := _topics.([]interface{})
-		for _, topic := range topics {
-			summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["topic"].(string))
-		}
-		if len(summary) > 0 {
-			summary = summary[:len(summary)-2]
-		}
-	case Fetch:
-		_topics := reqDetails["payload"].(map[string]interface{})["topics"]
-		if _topics == nil {
-			break
-		}
-		topics := _topics.([]interface{})
-		for _, topic := range topics {
-			summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["topic"].(string))
-		}
-		if len(summary) > 0 {
-			summary = summary[:len(summary)-2]
-		}
-	case ListOffsets:
-		_topics := reqDetails["payload"].(map[string]interface{})["topics"]
-		if _topics == nil {
-			break
-		}
-		topics := _topics.([]interface{})
-		for _, topic := range topics {
-			summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
-		}
-		if len(summary) > 0 {
-			summary = summary[:len(summary)-2]
-		}
-	case CreateTopics:
-		_topics := reqDetails["payload"].(map[string]interface{})["topics"]
-		if _topics == nil {
-			break
-		}
-		topics := _topics.([]interface{})
-		for _, topic := range topics {
-			summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
-		}
-		if len(summary) > 0 {
-			summary = summary[:len(summary)-2]
-		}
-	case DeleteTopics:
-		if reqDetails["topicNames"] == nil {
-			break
-		}
-		topicNames := reqDetails["topicNames"].([]string)
-		for _, name := range topicNames {
-			summary += fmt.Sprintf("%s, ", name)
-		}
-	}
-
-	request["url"] = summary
 	elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
 	if elapsedTime < 0 {
 		elapsedTime = 0
@@ -158,13 +82,127 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
 		Outgoing: item.ConnectionInfo.IsOutgoing,
 		Request: reqDetails,
 		Response: item.Pair.Response.Payload.(map[string]interface{})["details"].(map[string]interface{}),
-		Method: apiNames[apiKey],
-		Status: 0,
 		Timestamp: item.Timestamp,
 		StartTime: item.Pair.Request.CaptureTime,
 		ElapsedTime: elapsedTime,
-		Summary: summary,
-		IsOutgoing: item.ConnectionInfo.IsOutgoing,
+	}
+}
+
+func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
+	status := 0
+	statusQuery := ""
+
+	apiKey := ApiKey(entry.Request["apiKey"].(float64))
+	method := apiNames[apiKey]
+	methodQuery := fmt.Sprintf("request.apiKey == %d", int(entry.Request["apiKey"].(float64)))
+
+	summary := ""
+	summaryQuery := ""
+	switch apiKey {
+	case Metadata:
+		_topics := entry.Request["payload"].(map[string]interface{})["topics"]
+		if _topics == nil {
+			break
+		}
+		topics := _topics.([]interface{})
+		for i, topic := range topics {
+			summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
+			summaryQuery += fmt.Sprintf(`request.payload.topics[%d].name == "%s" and`, i, summary)
+		}
+		if len(summary) > 0 {
+			summary = summary[:len(summary)-2]
+			summaryQuery = summaryQuery[:len(summaryQuery)-4]
+		}
+	case ApiVersions:
+		summary = entry.Request["clientID"].(string)
+		summaryQuery = fmt.Sprintf(`request.clientID == "%s"`, summary)
+	case Produce:
+		_topics := entry.Request["payload"].(map[string]interface{})["topicData"]
+		if _topics == nil {
+			break
+		}
+		topics := _topics.([]interface{})
+		for i, topic := range topics {
+			summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["topic"].(string))
+			summaryQuery += fmt.Sprintf(`request.payload.topicData[%d].topic == "%s" and`, i, summary)
+		}
+		if len(summary) > 0 {
+			summary = summary[:len(summary)-2]
+			summaryQuery = summaryQuery[:len(summaryQuery)-4]
+		}
+	case Fetch:
+		_topics := entry.Request["payload"].(map[string]interface{})["topics"]
+		if _topics == nil {
+			break
+		}
+		topics := _topics.([]interface{})
+		for i, topic := range topics {
+			summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["topic"].(string))
+			summaryQuery += fmt.Sprintf(`request.payload.topics[%d].topic == "%s" and`, i, summary)
+		}
+		if len(summary) > 0 {
+			summary = summary[:len(summary)-2]
+			summaryQuery = summaryQuery[:len(summaryQuery)-4]
+		}
+	case ListOffsets:
+		_topics := entry.Request["payload"].(map[string]interface{})["topics"]
+		if _topics == nil {
+			break
+		}
+		topics := _topics.([]interface{})
+		for i, topic := range topics {
+			summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
+			summaryQuery += fmt.Sprintf(`request.payload.topics[%d].name == "%s" and`, i, summary)
+		}
+		if len(summary) > 0 {
+			summary = summary[:len(summary)-2]
+			summaryQuery = summaryQuery[:len(summaryQuery)-4]
+		}
+	case CreateTopics:
+		_topics := entry.Request["payload"].(map[string]interface{})["topics"]
+		if _topics == nil {
+			break
+		}
+		topics := _topics.([]interface{})
+		for i, topic := range topics {
+			summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
+			summaryQuery += fmt.Sprintf(`request.payload.topics[%d].name == "%s" and`, i, summary)
+		}
+		if len(summary) > 0 {
+			summary = summary[:len(summary)-2]
+			summaryQuery = summaryQuery[:len(summaryQuery)-4]
+		}
+	case DeleteTopics:
+		if entry.Request["topicNames"] == nil {
+			break
+		}
+		topicNames := entry.Request["topicNames"].([]string)
+		for i, name := range topicNames {
+			summary += fmt.Sprintf("%s, ", name)
+			summaryQuery += fmt.Sprintf(`request.topicNames[%d] == "%s" and`, i, summary)
+		}
+		if len(summary) > 0 {
+			summary = summary[:len(summary)-2]
+			summaryQuery = summaryQuery[:len(summaryQuery)-4]
+		}
+	}
+
+	return &api.BaseEntry{
+		Id: entry.Id,
+		Protocol: entry.Protocol,
+		Summary: summary,
+		SummaryQuery: summaryQuery,
+		Status: status,
+		StatusQuery: statusQuery,
+		Method: method,
+		MethodQuery: methodQuery,
+		Timestamp: entry.Timestamp,
+		Source: entry.Source,
+		Destination: entry.Destination,
+		IsOutgoing: entry.Outgoing,
+		Latency: entry.ElapsedTime,
+		Rules: entry.Rules,
+		ContractStatus: entry.ContractStatus,
 	}
 }
 
@@ -21,14 +21,16 @@ import (
 const (
 	binDir = "bin"
 	patternBin = "*_req.bin"
-	patternDissect = "*.json"
+	patternExpect = "*.json"
 	msgDissecting = "Dissecting:"
 	msgAnalyzing = "Analyzing:"
+	msgSummarizing = "Summarizing:"
 	msgRepresenting = "Representing:"
 	respSuffix = "_res.bin"
 	expectDir = "expect"
 	dissectDir = "dissect"
 	analyzeDir = "analyze"
+	summarizeDir = "summarize"
 	representDir = "represent"
 	testUpdate = "TEST_UPDATE"
 )
@@ -187,7 +189,7 @@ func TestAnalyze(t *testing.T) {
 	}
 
 	dissector := NewDissector()
-	paths, err := filepath.Glob(path.Join(expectDirDissect, patternDissect))
+	paths, err := filepath.Glob(path.Join(expectDirDissect, patternExpect))
 	if err != nil {
 		log.Fatal(err)
 	}
@@ -231,6 +233,63 @@ func TestAnalyze(t *testing.T) {
 	}
 }
 
+func TestSummarize(t *testing.T) {
+	_, testUpdateEnabled := os.LookupEnv(testUpdate)
+
+	expectDirAnalyze := path.Join(expectDir, analyzeDir)
+	expectDirSummarize := path.Join(expectDir, summarizeDir)
+
+	if testUpdateEnabled {
+		os.RemoveAll(expectDirSummarize)
+		err := os.MkdirAll(expectDirSummarize, 0775)
+		assert.Nil(t, err)
+	}
+
+	dissector := NewDissector()
+	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	for _, _path := range paths {
+		fmt.Printf("%s %s\n", msgSummarizing, _path)
+
+		bytes, err := ioutil.ReadFile(_path)
+		assert.Nil(t, err)
+
+		var entries []*api.Entry
+		err = json.Unmarshal(bytes, &entries)
+		assert.Nil(t, err)
+
+		var baseEntries []*api.BaseEntry
+		for _, entry := range entries {
+			baseEntry := dissector.Summarize(entry)
+			baseEntries = append(baseEntries, baseEntry)
+		}
+
+		pathExpect := path.Join(expectDirSummarize, filepath.Base(_path))
+
+		marshaled, err := json.Marshal(baseEntries)
+		assert.Nil(t, err)
+
+		if testUpdateEnabled {
+			if len(baseEntries) > 0 {
+				err = os.WriteFile(pathExpect, marshaled, 0644)
+				assert.Nil(t, err)
+			}
+		} else {
+			if _, err := os.Stat(pathExpect); errors.Is(err, os.ErrNotExist) {
+				assert.Len(t, entries, 0)
+			} else {
+				expectedBytes, err := ioutil.ReadFile(pathExpect)
+				assert.Nil(t, err)
+
+				assert.JSONEq(t, string(expectedBytes), string(marshaled))
+			}
+		}
+	}
+}
+
 func TestRepresent(t *testing.T) {
 	_, testUpdateEnabled := os.LookupEnv(testUpdate)
 
@@ -244,7 +303,7 @@ func TestRepresent(t *testing.T) {
 	}
 
 	dissector := NewDissector()
-	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternDissect))
+	paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
 	if err != nil {
 		log.Fatal(err)
 	}
@@ -13,4 +13,4 @@ test-pull-bin:
 
 test-pull-expect:
 	@mkdir -p expect
-	@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect/redis/\* expect
+	@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect3/redis/\* expect
@ -65,17 +65,6 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
|
|||||||
reqDetails := request["details"].(map[string]interface{})
|
reqDetails := request["details"].(map[string]interface{})
|
||||||
resDetails := response["details"].(map[string]interface{})
|
resDetails := response["details"].(map[string]interface{})
|
||||||
|
|
||||||
method := ""
|
|
||||||
if reqDetails["command"] != nil {
|
|
||||||
method = reqDetails["command"].(string)
|
|
||||||
}
|
|
||||||
|
|
||||||
summary := ""
|
|
||||||
if reqDetails["key"] != nil {
|
|
||||||
summary = reqDetails["key"].(string)
|
|
||||||
}
|
|
||||||
|
|
||||||
request["url"] = summary
|
|
||||||
elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
|
elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
|
||||||
if elapsedTime < 0 {
|
if elapsedTime < 0 {
|
||||||
elapsedTime = 0
|
elapsedTime = 0
|
||||||
@@ -96,17 +85,50 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
         Outgoing:    item.ConnectionInfo.IsOutgoing,
         Request:     reqDetails,
         Response:    resDetails,
-        Method:      method,
-        Status:      0,
         Timestamp:   item.Timestamp,
         StartTime:   item.Pair.Request.CaptureTime,
         ElapsedTime: elapsedTime,
-        Summary:     summary,
-        IsOutgoing:  item.ConnectionInfo.IsOutgoing,
     }
 
 }
 
+func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
+    status := 0
+    statusQuery := ""
+
+    method := ""
+    methodQuery := ""
+    if entry.Request["command"] != nil {
+        method = entry.Request["command"].(string)
+        methodQuery = fmt.Sprintf(`request.command == "%s"`, method)
+    }
+
+    summary := ""
+    summaryQuery := ""
+    if entry.Request["key"] != nil {
+        summary = entry.Request["key"].(string)
+        summaryQuery = fmt.Sprintf(`request.key == "%s"`, summary)
+    }
+
+    return &api.BaseEntry{
+        Id:             entry.Id,
+        Protocol:       entry.Protocol,
+        Summary:        summary,
+        SummaryQuery:   summaryQuery,
+        Status:         status,
+        StatusQuery:    statusQuery,
+        Method:         method,
+        MethodQuery:    methodQuery,
+        Timestamp:      entry.Timestamp,
+        Source:         entry.Source,
+        Destination:    entry.Destination,
+        IsOutgoing:     entry.Outgoing,
+        Latency:        entry.ElapsedTime,
+        Rules:          entry.Rules,
+        ContractStatus: entry.ContractStatus,
+    }
+}
+
 func (d dissecting) Represent(request map[string]interface{}, response map[string]interface{}) (object []byte, bodySize int64, err error) {
     bodySize = 0
     representation := make(map[string]interface{})
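For orientation, a minimal usage sketch of the new read-time derivation. The entry literal below is illustrative and not part of the commit; only the Summarize signature, the request.command and request.key lookups, and the query formats come from the hunk above.

    // Hypothetical Redis entry: only the fields Summarize actually reads are populated.
    dissector := NewDissector()
    entry := &api.Entry{
        Request: map[string]interface{}{"command": "GET", "key": "user:42"},
    }
    base := dissector.Summarize(entry)
    // base.Method  == "GET"       base.MethodQuery  == `request.command == "GET"`
    // base.Summary == "user:42"   base.SummaryQuery == `request.key == "user:42"`
    // Status and StatusQuery keep their zero values, matching the method above.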
@@ -22,14 +22,16 @@ import (
 const (
     binDir          = "bin"
     patternBin      = "*_req.bin"
-    patternDissect  = "*.json"
+    patternExpect   = "*.json"
     msgDissecting   = "Dissecting:"
     msgAnalyzing    = "Analyzing:"
+    msgSummarizing  = "Summarizing:"
     msgRepresenting = "Representing:"
     respSuffix      = "_res.bin"
     expectDir       = "expect"
     dissectDir      = "dissect"
     analyzeDir      = "analyze"
+    summarizeDir    = "summarize"
     representDir    = "represent"
     testUpdate      = "TEST_UPDATE"
 )
@@ -187,7 +189,7 @@ func TestAnalyze(t *testing.T) {
     }
 
     dissector := NewDissector()
-    paths, err := filepath.Glob(path.Join(expectDirDissect, patternDissect))
+    paths, err := filepath.Glob(path.Join(expectDirDissect, patternExpect))
     if err != nil {
         log.Fatal(err)
     }
@@ -231,6 +233,63 @@ func TestAnalyze(t *testing.T) {
     }
 }
 
+func TestSummarize(t *testing.T) {
+    _, testUpdateEnabled := os.LookupEnv(testUpdate)
+
+    expectDirAnalyze := path.Join(expectDir, analyzeDir)
+    expectDirSummarize := path.Join(expectDir, summarizeDir)
+
+    if testUpdateEnabled {
+        os.RemoveAll(expectDirSummarize)
+        err := os.MkdirAll(expectDirSummarize, 0775)
+        assert.Nil(t, err)
+    }
+
+    dissector := NewDissector()
+    paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
+    if err != nil {
+        log.Fatal(err)
+    }
+
+    for _, _path := range paths {
+        fmt.Printf("%s %s\n", msgSummarizing, _path)
+
+        bytes, err := ioutil.ReadFile(_path)
+        assert.Nil(t, err)
+
+        var entries []*api.Entry
+        err = json.Unmarshal(bytes, &entries)
+        assert.Nil(t, err)
+
+        var baseEntries []*api.BaseEntry
+        for _, entry := range entries {
+            baseEntry := dissector.Summarize(entry)
+            baseEntries = append(baseEntries, baseEntry)
+        }
+
+        pathExpect := path.Join(expectDirSummarize, filepath.Base(_path))
+
+        marshaled, err := json.Marshal(baseEntries)
+        assert.Nil(t, err)
+
+        if testUpdateEnabled {
+            if len(baseEntries) > 0 {
+                err = os.WriteFile(pathExpect, marshaled, 0644)
+                assert.Nil(t, err)
+            }
+        } else {
+            if _, err := os.Stat(pathExpect); errors.Is(err, os.ErrNotExist) {
+                assert.Len(t, entries, 0)
+            } else {
+                expectedBytes, err := ioutil.ReadFile(pathExpect)
+                assert.Nil(t, err)
+
+                assert.JSONEq(t, string(expectedBytes), string(marshaled))
+            }
+        }
+    }
+}
+
 func TestRepresent(t *testing.T) {
     _, testUpdateEnabled := os.LookupEnv(testUpdate)
 
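A hedged note on maintaining these expectations: the test globs the analyze JSONs and compares the summarized output against expect/summarize/*.json, and regeneration is gated on the TEST_UPDATE environment variable (the testUpdate constant above). The invocation below is an assumption about how the suite is run, not something stated in the commit.

    // Illustrative only; run from the Redis extension's test directory (path assumed):
    //
    //   TEST_UPDATE=1 go test -run TestSummarize ./...
    //
    // With TEST_UPDATE unset, the same test falls back to assert.JSONEq against the
    // stored expectations, as in the hunk above.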
@@ -244,7 +303,7 @@ func TestRepresent(t *testing.T) {
     }
 
     dissector := NewDissector()
-    paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternDissect))
+    paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
     if err != nil {
         log.Fatal(err)
     }
@@ -119,7 +119,7 @@ export const EntryDetailed = () => {
             bodySize={entryData.bodySize}
             elapsedTime={entryData.data.elapsedTime}
         />}
-        {entryData && <EntrySummary entry={entryData.data}/>}
+        {entryData && <EntrySummary entry={entryData.base}/>}
         <>
             {entryData && <EntryViewer
                 representation={entryData.representation}
@@ -25,9 +25,12 @@ interface TCPInterface {
 interface Entry {
     proto: ProtocolInterface,
     method?: string,
+    methodQuery?: string,
     summary: string,
+    summaryQuery: string,
     id: number,
     status?: number;
+    statusQuery?: string;
     timestamp: Date;
     src: TCPInterface,
     dst: TCPInterface,
@@ -152,10 +155,10 @@ export const EntryItem: React.FC<EntryProps> = ({entry, style, headingMode}) =>
             horizontal={false}
         /> : null}
         {isStatusCodeEnabled && <div>
-            <StatusCode statusCode={entry.status}/>
+            <StatusCode statusCode={entry.status} statusQuery={entry.statusQuery}/>
         </div>}
         <div className={styles.endpointServiceContainer} style={{paddingLeft: endpointServiceContainer}}>
-            <Summary method={entry.method} summary={entry.summary}/>
+            <Summary method={entry.method} methodQuery={entry.methodQuery} summary={entry.summary} summaryQuery={entry.summaryQuery}/>
             <div className={styles.resolvedName}>
                 <Queryable
                     query={`src.name == "${entry.src.name}"`}
@@ -10,14 +10,15 @@ export enum StatusCodeClassification {
 
 interface EntryProps {
     statusCode: number
+    statusQuery: string
 }
 
-const StatusCode: React.FC<EntryProps> = ({statusCode}) => {
+const StatusCode: React.FC<EntryProps> = ({statusCode, statusQuery}) => {
 
     const classification = getClassification(statusCode)
 
     return <Queryable
-        query={`response.status == ${statusCode}`}
+        query={statusQuery}
         displayIconOnMouseOver={true}
         flipped={true}
         iconStyle={{marginTop: "40px", paddingLeft: "10px"}}
@@ -5,14 +5,16 @@ import Queryable from "./Queryable";
 
 interface SummaryProps {
     method: string
+    methodQuery: string
     summary: string
+    summaryQuery: string
 }
 
-export const Summary: React.FC<SummaryProps> = ({method, summary}) => {
+export const Summary: React.FC<SummaryProps> = ({method, methodQuery, summary, summaryQuery}) => {
 
     return <div className={styles.container}>
         {method && <Queryable
-            query={`method == "${method}"`}
+            query={methodQuery}
             className={`${miscStyles.protocol} ${miscStyles.method}`}
             displayIconOnMouseOver={true}
             style={{whiteSpace: "nowrap"}}
@@ -24,7 +26,7 @@ export const Summary: React.FC<SummaryProps> = ({method, summary}) => {
             </span>
         </Queryable>}
         {summary && <Queryable
-            query={`summary == "${summary}"`}
+            query={summaryQuery}
             displayIconOnMouseOver={true}
             flipped={true}
             iconStyle={{zIndex:"5",position:"relative",right:"14px"}}