Mirror of https://github.com/kubeshark/kubeshark.git (synced 2025-09-29 14:26:18 +00:00)
* Fix the OOMKilled error by calling `debug.FreeOSMemory` periodically (a minimal sketch of this pattern follows this list)
* Remove `MAX_NUMBER_OF_GOROUTINES` environment variable
* Change the line
* Increase the default value of `TCP_STREAM_CHANNEL_TIMEOUT_MS` to `10000`
* Write the client and integrate it with the new real-time database
* Refactor the WebSocket implementation for `/ws`
* Adapt the UI to the new filtering system
* Fix the rest of the issues in the UI
* Increase the buffer of the scanner
* Implement accessing single records
* Increase the buffer of another scanner
* Populate `Request` and `Response` fields of `MizuEntry`
* Add syntax highlighting for the query
* Add database to `Dockerfile`
* Fix some issues
* Update the `realtime_dbms` Git module commit hash
* Upgrade Gin version and print the query string
* Revert "Upgrade Gin version and print the query string" (reverts commit aa09f904ee)
* Use the WebSocket itself to query instead of the query string
* Fix some errors related to conversion to HAR
* Fix the issues caused by the latest merge
* Fix the build error
* Fix PR validation GitHub workflow
* Replace the Git submodule with the latest Basenine version `0.1.0`; remove `realtime_client.go` and use the official client library `github.com/up9inc/basenine/client/go` instead
* Move Basenine host and port constants to `shared` module
* Reliably execute and wait for Basenine to become available
* Upgrade Basenine version
* Properly close WebSocket and data channel
* Fix the issues caused by the recent merge commit
* Clean up the TypeScript code
* Update `.gitignore`
* Limit the database size
* Add `Macros` method signature to `Dissector` interface and set the macros provided by the protocol extensions
* Run `go mod tidy` on `agent`
* Upgrade `github.com/up9inc/basenine/client/go` version
* Implement a mechanism to update the query using click events in the UI and use it for protocol macros
* Update the query on clicks to timestamps
* Fix some issues in the WebSocket and channel handling
* Update the query on clicks to status code
* Update the query on clicks to method, path and service
* Update the query on clicks to the outgoing flag, source and destination ports
* Add an API endpoint to validate the query against syntax errors
* Move the query background color state into `TrafficPage`
* Fix the logic in `setQuery`
* Display a toast message in case of a syntax error in the query
* Remove a call to `fmt.Printf`
* Upgrade Basenine version to `0.1.3`
* Fix an issue related to getting the `MAX_ENTRIES_DB_BYTES` environment variable
* Have the `path` key in the request details, in HTTP
* Rearrange the HTTP headers for querying
* Do the same for `cookies` and `queryString`
* Update the query on clicks to table elements; add the selectors for `TABLE` type representations in the HTTP extension
* Update the query on clicks to `bodySize` and `elapsedTime` in `EntryTitle`
* Add the selectors for `TABLE` type representations in the AMQP extension
* Add the selectors for `TABLE` type representations in the Kafka extension
* Add the selectors for `TABLE` type representations in the Redis extension
* Define a struct in `tap/api.go` for the section representation data
* Add the selectors for `BODY` type representations
* Add `request.path` to the HTTP request details
* Change the summary string's field name from `path` to `summary`
* Introduce a `queryable` CSS class for queryable UI elements and underline them on hover
* Instead of `N requests` at the bottom, show `Displaying N results (queried X/Y)` and live-update the values; upgrade Basenine version to `0.2.0`
* Verify the sha256sum of the Basenine executable inside `Dockerfile`
* Pass the start time to the web UI through the WebSocket and always show the `EntriesList` footer
* Pipe the `stderr` of Basenine as well
* Fix the layout issues related to `CodeEditor` in the UI
* Use the correct `shasum` command in `Dockerfile`
* Upgrade Basenine version to `0.2.1`
* Limit the height of the `CodeEditor` container
* Remove the `Paused` enum from `ConnectionStatus` in the UI
* Fix the issue caused by the recent merge
* Add the filtering guide (cheatsheet)
* Update the open cheatsheet button's title
* Update the cheatsheet content
* Remove the old SQLite code, adapt the `--analyze` related code to Basenine
* Change the method signature of `NewEntry`
* Change the method signature of `Represent`
* Introduce `HTTPPair` field in `MizuEntry` specific to HTTP
* Remove `Entry`, `EntryId` and `EstimatedSizeBytes` fields from `MizuEntry`; also remove the `getEstimatedEntrySizeBytes` method
* Remove `gorm.io/gorm` dependency
* Remove unused `sensitiveDataFiltering` folder
* Increase the left margin of the open cheatsheet button
* Add `overflow: auto` to the cheatsheet `Modal`
* Fix `GetEntry` method
* Fix the macro for gRPC
* Fix an interface conversion in case of AMQP
* Fix two more interface conversion errors in AMQP
* Make the `syncEntriesImpl` method blocking
* Fix a grammar mistake in the cheatsheet
* Adapt to the changes in the recent merge commit
* Improve the cheatsheet text
* Always display the timestamp in `en-US`
* Upgrade Basenine version to `0.2.2`
* Fix the order of closing the Basenine connections and channels
* Don't close the Basenine channels at all
* Upgrade Basenine version to `0.2.3`
* Set the initial filter to `rlimit(100)`
* Make Basenine persistent
* Upgrade Basenine version to `0.2.4`
* Update `debug.Dockerfile`
* Fix a failing test
* Upgrade Basenine version to `0.2.5`
* Revert "Do not show play icon when disconnected (#428)" (reverts commit 8af2e562f8)
* Upgrade Basenine version to `0.2.6`
* Make all non-informative things informative
* Make `100` a constant
* Use `===` in JavaScript no matter what
* Remove a forgotten `console.log`
* Add a comment and update the `query` in `syncEntriesImpl`
* Don't call `panic` in `GetEntry`
* Replace `panic` calls in `startBasenineServer` with `logger.Log.Panicf`
* Remove unnecessary `\n` characters in the logs
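
A minimal sketch of the periodic `debug.FreeOSMemory` call mentioned in the first item above. The interval, the function name and where the goroutine is started are illustrative assumptions, not details taken from this change:

package main

import (
	"runtime/debug"
	"time"
)

// startMemoryReleaser returns freed heap memory to the OS at a fixed interval so the
// container's resident set stays below its limit; the 10-second interval is an assumption.
func startMemoryReleaser() {
	go func() {
		ticker := time.NewTicker(10 * time.Second)
		defer ticker.Stop()
		for range ticker.C {
			debug.FreeOSMemory()
		}
	}()
}

func main() {
	startMemoryReleaser()
	select {} // block forever; stand-in for the agent's real work
}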
348 lines
10 KiB
Go
package api

import (
	"bufio"
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"net/http"
	"plugin"
	"sync"
	"time"

	"github.com/google/martian/har"
)
type Protocol struct {
	Name            string   `json:"name"`
	LongName        string   `json:"longName"`
	Abbreviation    string   `json:"abbr"`
	Macro           string   `json:"macro"`
	Version         string   `json:"version"`
	BackgroundColor string   `json:"backgroundColor"`
	ForegroundColor string   `json:"foregroundColor"`
	FontSize        int8     `json:"fontSize"`
	ReferenceLink   string   `json:"referenceLink"`
	Ports           []string `json:"ports"`
	Priority        uint8    `json:"priority"`
}

type TCP struct {
	IP   string `json:"ip"`
	Port string `json:"port"`
	Name string `json:"name"`
}

type Extension struct {
	Protocol   *Protocol
	Path       string
	Plug       *plugin.Plugin
	Dissector  Dissector
	MatcherMap *sync.Map
}

type ConnectionInfo struct {
	ClientIP   string
	ClientPort string
	ServerIP   string
	ServerPort string
	IsOutgoing bool
}

type TcpID struct {
	SrcIP   string
	DstIP   string
	SrcPort string
	DstPort string
	Ident   string
}

type CounterPair struct {
	Request  uint
	Response uint
}

type GenericMessage struct {
	IsRequest   bool        `json:"isRequest"`
	CaptureTime time.Time   `json:"captureTime"`
	Payload     interface{} `json:"payload"`
}

type RequestResponsePair struct {
	Request  GenericMessage `json:"request"`
	Response GenericMessage `json:"response"`
}
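
// OutputChannelItem is what a dissector hands to the Emitter for every matched request-response pair.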
// `Protocol` is modified in the later stages of data propagation. Therefore it's not a pointer.
type OutputChannelItem struct {
	Protocol       Protocol
	Timestamp      int64
	ConnectionInfo *ConnectionInfo
	Pair           *RequestResponsePair
	Summary        *BaseEntryDetails
}

type SuperTimer struct {
	CaptureTime time.Time
}

type SuperIdentifier struct {
	Protocol       *Protocol
	IsClosedOthers bool
}
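
// Dissector is the interface that every protocol extension (HTTP, AMQP, Kafka, Redis, etc.)
// implements in order to register itself, dissect TCP streams into request-response pairs,
// and produce the entry, summary, representation and query macros for its protocol.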
type Dissector interface {
	Register(*Extension)
	Ping()
	Dissect(b *bufio.Reader, isClient bool, tcpID *TcpID, counterPair *CounterPair, superTimer *SuperTimer, superIdentifier *SuperIdentifier, emitter Emitter, options *TrafficFilteringOptions) error
	Analyze(item *OutputChannelItem, resolvedSource string, resolvedDestination string) *MizuEntry
	Summarize(entry *MizuEntry) *BaseEntryDetails
	Represent(pIn Protocol, request map[string]interface{}, response map[string]interface{}) (pOut Protocol, object []byte, bodySize int64, err error)
	Macros() map[string]string
}

type Emitting struct {
	AppStats      *AppStats
	OutputChannel chan *OutputChannelItem
}

type Emitter interface {
	Emit(item *OutputChannelItem)
}

func (e *Emitting) Emit(item *OutputChannelItem) {
	e.OutputChannel <- item
	e.AppStats.IncMatchedPairs()
}
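
// MizuEntry is the entry that a dissector's Analyze produces for a request-response pair and
// that the agent stores in the database.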
type MizuEntry struct {
	Id                     uint                   `json:"id"`
	Protocol               Protocol               `json:"proto"`
	Source                 *TCP                   `json:"src"`
	Destination            *TCP                   `json:"dst"`
	Outgoing               bool                   `json:"outgoing"`
	Timestamp              int64                  `json:"timestamp"`
	StartTime              time.Time              `json:"startTime"`
	Request                map[string]interface{} `json:"request"`
	Response               map[string]interface{} `json:"response"`
	Base                   *BaseEntryDetails      `json:"base"`
	Summary                string                 `json:"summary"`
	Url                    string                 `json:"url"`
	Method                 string                 `json:"method"`
	Status                 int                    `json:"status"`
	RequestSenderIp        string                 `json:"requestSenderIp"`
	Service                string                 `json:"service"`
	ElapsedTime            int64                  `json:"elapsedTime"`
	Path                   string                 `json:"path"`
	ResolvedSource         string                 `json:"resolvedSource,omitempty"`
	ResolvedDestination    string                 `json:"resolvedDestination,omitempty"`
	SourceIp               string                 `json:"sourceIp,omitempty"`
	DestinationIp          string                 `json:"destinationIp,omitempty"`
	SourcePort             string                 `json:"sourcePort,omitempty"`
	DestinationPort        string                 `json:"destinationPort,omitempty"`
	IsOutgoing             bool                   `json:"isOutgoing,omitempty"`
	ContractStatus         ContractStatus         `json:"contractStatus,omitempty"`
	ContractRequestReason  string                 `json:"contractRequestReason,omitempty"`
	ContractResponseReason string                 `json:"contractResponseReason,omitempty"`
	ContractContent        string                 `json:"contractContent,omitempty"`
	HTTPPair               string                 `json:"httpPair,omitempty"`
}

type MizuEntryWrapper struct {
	Protocol       Protocol                 `json:"protocol"`
	Representation string                   `json:"representation"`
	BodySize       int64                    `json:"bodySize"`
	Data           MizuEntry                `json:"data"`
	Rules          []map[string]interface{} `json:"rulesMatched,omitempty"`
	IsRulesEnabled bool                     `json:"isRulesEnabled"`
}

type BaseEntryDetails struct {
	Id              uint            `json:"id"`
	Protocol        Protocol        `json:"protocol,omitempty"`
	Url             string          `json:"url,omitempty"`
	RequestSenderIp string          `json:"requestSenderIp,omitempty"`
	Service         string          `json:"service,omitempty"`
	Path            string          `json:"path,omitempty"`
	Summary         string          `json:"summary,omitempty"`
	StatusCode      int             `json:"statusCode"`
	Method          string          `json:"method,omitempty"`
	Timestamp       int64           `json:"timestamp,omitempty"`
	SourceIp        string          `json:"sourceIp,omitempty"`
	DestinationIp   string          `json:"destinationIp,omitempty"`
	SourcePort      string          `json:"sourcePort,omitempty"`
	DestinationPort string          `json:"destinationPort,omitempty"`
	IsOutgoing      bool            `json:"isOutgoing,omitempty"`
	Latency         int64           `json:"latency"`
	Rules           ApplicableRules `json:"rules,omitempty"`
	ContractStatus  ContractStatus  `json:"contractStatus"`
}
type ApplicableRules struct {
	Latency       int64 `json:"latency,omitempty"`
	Status        bool  `json:"status,omitempty"`
	NumberOfRules int   `json:"numberOfRules,omitempty"`
}

type ContractStatus int

type Contract struct {
	Status         ContractStatus `json:"status"`
	RequestReason  string         `json:"requestReason"`
	ResponseReason string         `json:"responseReason"`
	Content        string         `json:"content"`
}

type DataUnmarshaler interface {
	UnmarshalData(*MizuEntry) error
}

func (bed *BaseEntryDetails) UnmarshalData(entry *MizuEntry) error {
	bed.Protocol = entry.Protocol
	bed.Id = entry.Id
	bed.Url = entry.Url
	bed.RequestSenderIp = entry.RequestSenderIp
	bed.Service = entry.Service
	bed.Path = entry.Path
	bed.Summary = entry.Path
	bed.StatusCode = entry.Status
	bed.Method = entry.Method
	bed.Timestamp = entry.Timestamp
	bed.SourceIp = entry.SourceIp
	bed.DestinationIp = entry.DestinationIp
	bed.SourcePort = entry.SourcePort
	bed.DestinationPort = entry.DestinationPort
	bed.IsOutgoing = entry.IsOutgoing
	bed.Latency = entry.ElapsedTime
	bed.ContractStatus = entry.ContractStatus
	return nil
}

const (
	TABLE string = "table"
	BODY  string = "body"
)
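
// SectionData is a single section (TABLE or BODY) of an entry's representation, as returned by
// a dissector's Represent; Selector holds the query fragment used by the UI's click-to-filter.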
type SectionData struct {
	Type     string `json:"type"`
	Title    string `json:"title"`
	Data     string `json:"data"`
	Encoding string `json:"encoding,omitempty"`
	MimeType string `json:"mimeType,omitempty"`
	Selector string `json:"selector,omitempty"`
}

type TableData struct {
	Name     string      `json:"name"`
	Value    interface{} `json:"value"`
	Selector string      `json:"selector"`
}

const (
	TypeHttpRequest = iota
	TypeHttpResponse
)

type HTTPPayload struct {
	Type uint8
	Data interface{}
}

type HTTPPayloader interface {
	MarshalJSON() ([]byte, error)
}

type HTTPWrapper struct {
	Method      string               `json:"method"`
	Url         string               `json:"url"`
	Details     interface{}          `json:"details"`
	RawRequest  *HTTPRequestWrapper  `json:"rawRequest"`
	RawResponse *HTTPResponseWrapper `json:"rawResponse"`
}
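
// MarshalJSON converts the wrapped http.Request or http.Response into its HAR form and
// serializes it alongside the raw message.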
func (h HTTPPayload) MarshalJSON() ([]byte, error) {
	switch h.Type {
	case TypeHttpRequest:
		harRequest, err := har.NewRequest(h.Data.(*http.Request), true)
		if err != nil {
			return nil, errors.New("Failed converting request to HAR")
		}
		return json.Marshal(&HTTPWrapper{
			Method:     harRequest.Method,
			Url:        "",
			Details:    harRequest,
			RawRequest: &HTTPRequestWrapper{Request: h.Data.(*http.Request)},
		})
	case TypeHttpResponse:
		harResponse, err := har.NewResponse(h.Data.(*http.Response), true)
		if err != nil {
			return nil, errors.New("Failed converting response to HAR")
		}
		return json.Marshal(&HTTPWrapper{
			Method:      "",
			Url:         "",
			Details:     harResponse,
			RawResponse: &HTTPResponseWrapper{Response: h.Data.(*http.Response)},
		})
	default:
		panic(fmt.Sprintf("HTTP payload cannot be marshaled: %v\n", h.Type))
	}
}
type HTTPWrapperTricky struct {
	Method      string         `json:"method"`
	Url         string         `json:"url"`
	Details     interface{}    `json:"details"`
	RawRequest  *http.Request  `json:"rawRequest"`
	RawResponse *http.Response `json:"rawResponse"`
}

type HTTPMessage struct {
	IsRequest   bool              `json:"isRequest"`
	CaptureTime time.Time         `json:"captureTime"`
	Payload     HTTPWrapperTricky `json:"payload"`
}

type HTTPRequestResponsePair struct {
	Request  HTTPMessage `json:"request"`
	Response HTTPMessage `json:"response"`
}

type HTTPRequestWrapper struct {
	*http.Request
}
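
// MarshalJSON drains the request body and puts back a fresh reader so that downstream consumers
// can still read it, then serializes the request with the body inlined as a string and the
// non-serializable GetBody and Cancel fields omitted.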
func (r *HTTPRequestWrapper) MarshalJSON() ([]byte, error) {
	body, _ := ioutil.ReadAll(r.Request.Body)
	r.Request.Body = ioutil.NopCloser(bytes.NewBuffer(body))
	return json.Marshal(&struct {
		Body    string `json:"Body,omitempty"`
		GetBody string `json:"GetBody,omitempty"`
		Cancel  string `json:"Cancel,omitempty"`
		*http.Request
	}{
		Body:    string(body),
		Request: r.Request,
	})
}
type HTTPResponseWrapper struct {
	*http.Response
}

func (r *HTTPResponseWrapper) MarshalJSON() ([]byte, error) {
	body, _ := ioutil.ReadAll(r.Response.Body)
	r.Response.Body = ioutil.NopCloser(bytes.NewBuffer(body))
	return json.Marshal(&struct {
		Body    string `json:"Body,omitempty"`
		GetBody string `json:"GetBody,omitempty"`
		Cancel  string `json:"Cancel,omitempty"`
		*http.Response
	}{
		Body:     string(body),
		Response: r.Response,
	})
}