Mirror of https://github.com/kubeshark/kubeshark.git, synced 2025-09-27 21:38:06 +00:00.
* modified Dockerfile to work for both amd64 (Intel) and arm64 (M1)
* added changelog
* Update `Dockerfile` to have `ARCH` build argument
* Remove `docs/CHANGES.md`
* Upgrade the Basenine version from `v0.3.0` to `v0.4.6`
* Update `publish.yml` to have `ARCH` build argument
* Switch `BasenineImageRepo` to Docker Hub
* Have separate build arguments for `ARCH` and `GOARCH`
* Upgrade the Basenine version from `v0.4.6` to `v0.4.10`
* Oops forgot to update the 10th duplicated shell script
* Fix the oopsie and reduce duplications
* Fix `Dockerfile`
* Fix the incompatibility issue between Go plugins and gold linker in Alpine inside `Dockerfile`
* Fix `asm: xxhash_amd64.s:120: when dynamic linking, R15 is clobbered by a global variable access` error
* Update `Dockerfile` to have cross-compilation on an AMD64 machine. Also revert changes in the shell scripts
* Delete `debug.Dockerfile`
* Create a custom base (`debian:buster-slim` based) image for the shipped image
* Replace `mertyildiran/debian-pcap` with `up9inc/debian-pcap`
* Upgrade Basenine version to `v0.4.12`
* Use `debian:stable-slim` as the base
* Fix the indentation in the `Dockerfile`
* Update `publish.yml`
* Enable `publish.yml` for `feature/multiarch_build` branch
* Tag correctly and set `ARCH` Docker argument
* Remove the lines that are forgotten to be removed from the shell scripts
* Add `MizuAgentImageRepo` constant and use it as default `AgentImage` value
* Bring back `Set up Cloud SDK` step to `Build the CLI and publish` job
* Build ARM64 CLI for Linux as well
* Revert "Enable `publish.yml` for `feature/multiarch_build` branch". This reverts commit d30be4c1f0.
* Revert Go 1.17 upgrade
* Remove `build_extensions_debug.sh` as well
* Make the `Dockerfile` compile the agent statically
* Statically link the protocol extensions
* Fix `Dockerfile`
* Bring back `-s -w` flags
* Verify the signatures of the downloads in `dockcross/linux-arm64-musl`
* Revert modifications in some shell scripts
* Make the `BUILDARCH` and `TARGETARCH` separation in the `Dockerfile`
* Separate cross-compilation builder image into a separate repo named `up9inc/linux-arm64-musl-go-libpcap`
* Fill the shell script and specify the tag for `dockcross/linux-arm64-musl`
* Remove the unnecessary dependencies from `builder-native-base`
* Improve the comments in the `Dockerfile`
* Upgrade Basenine version to `v0.4.13`
* Fix `Dockerfile`
* Revert "Revert "Enable `publish.yml` for `feature/multiarch_build` branch"". This reverts commit 303e466bdc.
* Revert "Revert "Revert "Enable `publish.yml` for `feature/multiarch_build` branch""". This reverts commit 0fe252bbdb.
* Remove `push-docker-debug` from the `Makefile`
* Rename `publish.yml` to `release.yml`

Co-authored-by: Alex Haiut <alex@up9.com>
144 lines · 2.8 KiB · Go
package kafka

import (
	"fmt"
	"sort"
	"strings"
	"text/tabwriter"
)

// Cluster describes the metadata of a Kafka cluster: its brokers, its topics,
// and the ID of the controller broker.
type Cluster struct {
	ClusterID  string
	Controller int32
	Brokers    map[int32]Broker
	Topics     map[string]Topic
}

// BrokerIDs returns the IDs of the cluster's brokers in ascending order.
func (c Cluster) BrokerIDs() []int32 {
	brokerIDs := make([]int32, 0, len(c.Brokers))
	for id := range c.Brokers {
		brokerIDs = append(brokerIDs, id)
	}
	sort.Slice(brokerIDs, func(i, j int) bool {
		return brokerIDs[i] < brokerIDs[j]
	})
	return brokerIDs
}

// TopicNames returns the names of the cluster's topics in alphabetical order.
func (c Cluster) TopicNames() []string {
	topicNames := make([]string, 0, len(c.Topics))
	for name := range c.Topics {
		topicNames = append(topicNames, name)
	}
	sort.Strings(topicNames)
	return topicNames
}

// IsZero reports whether the Cluster value holds no metadata (its zero value).
func (c Cluster) IsZero() bool {
	return c.ClusterID == "" && c.Controller == 0 && len(c.Brokers) == 0 && len(c.Topics) == 0
}

// Format implements fmt.Formatter. It writes broker and topic summary tables
// through a tabwriter; when the '+' flag is set (e.g. %+v), it also writes a
// per-partition table for each topic.
func (c Cluster) Format(w fmt.State, _ rune) {
	tw := new(tabwriter.Writer)
	fmt.Fprintf(w, "CLUSTER: %q\n\n", c.ClusterID)

	tw.Init(w, 0, 8, 2, ' ', 0)
	fmt.Fprint(tw, " BROKER\tHOST\tPORT\tRACK\tCONTROLLER\n")

	for _, id := range c.BrokerIDs() {
		broker := c.Brokers[id]
		fmt.Fprintf(tw, " %d\t%s\t%d\t%s\t%t\n", broker.ID, broker.Host, broker.Port, broker.Rack, broker.ID == c.Controller)
	}

	tw.Flush()
	fmt.Fprintln(w)

	tw.Init(w, 0, 8, 2, ' ', 0)
	fmt.Fprint(tw, " TOPIC\tPARTITIONS\tBROKERS\n")
	topicNames := c.TopicNames()
	brokers := make(map[int32]struct{}, len(c.Brokers))
	brokerIDs := make([]int32, 0, len(c.Brokers))

	for _, name := range topicNames {
		topic := c.Topics[name]

		// Collect the set of brokers that hold a replica of any partition of this topic.
		for _, p := range topic.Partitions {
			for _, id := range p.Replicas {
				brokers[id] = struct{}{}
			}
		}

		for id := range brokers {
			brokerIDs = append(brokerIDs, id)
		}

		fmt.Fprintf(tw, " %s\t%d\t%s\n", topic.Name, len(topic.Partitions), formatBrokerIDs(brokerIDs, -1))

		// Reset the scratch set and slice before the next topic.
		for id := range brokers {
			delete(brokers, id)
		}

		brokerIDs = brokerIDs[:0]
	}

	tw.Flush()
	fmt.Fprintln(w)

	if w.Flag('+') {
		for _, name := range topicNames {
			fmt.Fprintf(w, " TOPIC: %q\n\n", name)

			tw.Init(w, 0, 8, 2, ' ', 0)
			fmt.Fprint(tw, " PARTITION\tREPLICAS\tISR\tOFFLINE\n")

			for _, p := range c.Topics[name].Partitions {
				fmt.Fprintf(tw, " %d\t%s\t%s\t%s\n", p.ID,
					formatBrokerIDs(p.Replicas, -1),
					formatBrokerIDs(p.ISR, p.Leader),
					formatBrokerIDs(p.Offline, -1),
				)
			}

			tw.Flush()
			fmt.Fprintln(w)
		}
	}
}

// formatBrokerIDs joins broker IDs into a comma-separated string, placing the
// leader (when one is given) first and sorting the remaining IDs in ascending order.
func formatBrokerIDs(brokerIDs []int32, leader int32) string {
	if len(brokerIDs) == 0 {
		return ""
	}

	if len(brokerIDs) == 1 {
		return itoa(brokerIDs[0])
	}

	sort.Slice(brokerIDs, func(i, j int) bool {
		id1 := brokerIDs[i]
		id2 := brokerIDs[j]

		if id1 == leader {
			return true
		}

		if id2 == leader {
			return false
		}

		return id1 < id2
	})

	brokerNames := make([]string, len(brokerIDs))

	for i, id := range brokerIDs {
		brokerNames[i] = itoa(id)
	}

	return strings.Join(brokerNames, ",")
}

// Compile-time check that Cluster implements fmt.Formatter.
var (
	_ fmt.Formatter = Cluster{}
)
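Usage note (an added sketch, not part of the original file): because Cluster satisfies fmt.Formatter, every fmt verb is routed through the Format method above. The plain verb prints the CLUSTER header plus the broker and topic summary tables, while the '+' flag (e.g. %+v) additionally prints the per-topic PARTITION/REPLICAS/ISR/OFFLINE breakdown when topics are present. A hypothetical example file in the same package could exercise it like this; the cluster value and its ID are made up for illustration:

package kafka

import "fmt"

// ExampleCluster_format is a hypothetical illustration, not part of the original file.
// Printing a Cluster goes through its fmt.Formatter implementation: the plain verb
// emits the broker and topic tables, and the '+' flag adds per-partition detail
// for each topic (empty here, since no brokers or topics are populated).
func ExampleCluster_format() {
	c := Cluster{ClusterID: "example-cluster"} // made-up ID; Brokers and Topics left empty
	fmt.Printf("%v\n", c)
	fmt.Printf("%+v\n", c)
}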