Port Kubernetes Response Engine to AWS Technology (#460)

* Add a falco-sns utility which publishes to an AWS SNS topic

* Add a script for deploying functions in AWS Lambda

* Bump dependencies

* Use an empty topic and pass the AWS_DEFAULT_REGION environment variable

* Add gitignore

* Install ca-certificates.

They are used when we publish to an SNS topic.

* Add myself as a maintainer

* Decode events from SNS-based messages

* Add Terraform manifests for getting an EKS cluster up and running

Please pay attention to how to set up kubectl and how to join the workers:

https://www.terraform.io/docs/providers/aws/guides/eks-getting-started.html#obtaining-kubectl-configuration-from-terraform
https://www.terraform.io/docs/providers/aws/guides/eks-getting-started.html#required-kubernetes-configuration-to-join-worker-nodes

* Ignore terraform generated files

* Remove autogenerated files

* Also publish MessageAttributes, which allows the use of Filter Policies

This allows subscribing only to errors, or warnings, or several
priorities, or by rule name.

It covers the same functionality as the NATS publisher does.
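For illustration, a minimal sketch of such a filtered subscription using boto3 (boto3 and the ARNs are assumptions, not part of this change; falco-sns itself uses the Go SDK):

```python
# Sketch only: subscribe a consumer to the falco-alerts topic so it receives
# just high-priority alerts, filtering on the MessageAttributes (priority,
# rule) that falco-sns publishes. Both ARNs below are hypothetical examples.
import json
import boto3

sns = boto3.client("sns")
sns.subscribe(
    TopicArn="arn:aws:sns:us-east-1:123456789012:falco-alerts",   # example ARN
    Protocol="sqs",
    Endpoint="arn:aws:sqs:us-east-1:123456789012:falco-errors",   # example ARN
    Attributes={
        # Deliver only alerts whose priority attribute matches.
        "FilterPolicy": json.dumps({"priority": ["Error", "Critical"]})
    },
)
```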

* Add kubeconfig and aws-iam-authenticator from heptio to Lambda environment

* Add role trust from cluster creator to lambda role

* Enable CloudWatch for Lambda stuff

* Generate the kubeconfig, the kubeconfig for lambdas and the Lambda ARN

These are used by the deployment script

* Just a cosmetic change

* Add a Makefile which creates the cluster and configures it

* Use Terraform and artifacts which belong to this repository for deploying

* Move CNCF related deployment to its own directory

* Create only SNS and Lambda stuff.

Assume that the EKS cluster will be created externally

* Bridge IAM with RBAC

This allows using the Lambda role to authenticate against
Kubernetes

* Do not rely on Terraform for deploying a playbook in Lambda

* Clean whitespace

* Move rebased playbooks to functions

* Fix rebase issues with deployment and rbac stuff

* Add a clean target to Makefile

* Inject sys.path modification to Kubeless function deployment

* Add documentation and instructions

Authored by Néstor Salceda on 2018-11-07 17:34:13 +01:00; committed by Mark Stemm
parent 32f8e304eb, commit 071e8de075
34 changed files with 392 additions and 85 deletions

@@ -0,0 +1,4 @@
.terraform/*
.terraform.*
terraform.*
*.yaml

@@ -0,0 +1,11 @@
all: create configure

create:
	terraform apply

configure:
	kubectl get -n kube-system configmap/aws-auth -o yaml | awk "/mapRoles: \|/{print;print \"$(shell terraform output patch_for_aws_auth)\";next}1" > aws-auth-patch.yml
	kubectl -n kube-system replace -f aws-auth-patch.yml

clean:
	terraform destroy
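For readers who find the awk one-liner in the configure target dense, here is a rough Python equivalent of what it does; a sketch only, assuming PyYAML (already among the playbooks' dependencies) and glossing over the exact whitespace handling of the awk version:

```python
# Sketch: insert the Terraform-generated role mapping into the aws-auth
# ConfigMap, which is what the awk pipeline in `make configure` does.
import subprocess
import yaml

config = yaml.safe_load(subprocess.check_output(
    ["kubectl", "get", "-n", "kube-system", "configmap/aws-auth", "-o", "yaml"]))

patch = subprocess.check_output(["terraform", "output", "patch_for_aws_auth"])
# The Terraform output carries literal \n escapes; turn them into newlines
# (awk does this implicitly when it interprets its string literal).
patch = patch.decode().replace("\\n", "\n")

# awk prints the patch right after the "mapRoles: |" line, i.e. it prepends
# the new entry to the existing role mappings.
config["data"]["mapRoles"] = patch + config["data"]["mapRoles"]

with open("aws-auth-patch.yml", "w") as out:
    yaml.safe_dump(config, out)
# Followed by: kubectl -n kube-system replace -f aws-auth-patch.yml
```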

@@ -0,0 +1,23 @@
# Terraform manifests for Kubernetes Response Engine running on AWS

In this directory are the Terraform manifests for creating the required infrastructure
for the Kubernetes Response Engine running with AWS technology: SNS for messaging
and Lambda for executing the playbooks.

## Deploy

For creating the resources, just run the default Makefile target:

```
make
```

This will ask for an IAM user ARN, which creates the bridge between EKS RBAC and AWS IAM.

## Clean

You can clean up the IAM roles and SNS topics with:

```
make clean
```

@@ -0,0 +1,25 @@
resource "aws_iam_role" "iam-for-lambda" {
name = "iam_for_lambda"
assume_role_policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Action": "sts:AssumeRole",
"Principal": {
"Service": "lambda.amazonaws.com",
"AWS": "${var.iam-user-arn}"
},
"Effect": "Allow",
"Sid": ""
}
]
}
EOF
}
resource "aws_iam_role_policy_attachment" "iam-for-lambda" {
policy_arn = "arn:aws:iam::aws:policy/CloudWatchFullAccess"
role = "${aws_iam_role.iam-for-lambda.name}"
}

@@ -0,0 +1,16 @@
locals {
  patch_for_aws_auth = <<CONFIGMAPAWSAUTH
- rolearn: ${aws_iam_role.iam-for-lambda.arn}\n
  username: kubernetes-admin\n
  groups:\n
  - system:masters
CONFIGMAPAWSAUTH
}

output "patch_for_aws_auth" {
  value = "${local.patch_for_aws_auth}"
}

output "iam_for_lambda" {
  value = "${aws_iam_role.iam-for-lambda.arn}"
}

@@ -0,0 +1,3 @@
resource "aws_sns_topic" "falco-alerts" {
name = "falco-alerts"
}

@@ -0,0 +1,3 @@
variable "iam-user-arn" {
type = "string"
}

@@ -0,0 +1 @@
falco-sns

@@ -0,0 +1,8 @@
FROM alpine:latest

MAINTAINER Néstor Salceda <nestor.salceda@sysdig.com>

RUN apk add --no-cache ca-certificates

COPY ./falco-sns /bin/

CMD ["/bin/falco-sns"]

@@ -0,0 +1,12 @@
build:
	GOOS=linux GOARCH=amd64 CGO_ENABLED=0 go build -ldflags="-s" -o falco-sns main.go

deps:
	go get -u github.com/aws/aws-sdk-go/

clean:
	rm falco-sns

docker: build
	docker build -t sysdig/falco-sns .
	docker push sysdig/falco-sns

@@ -0,0 +1,26 @@
# SNS output for Sysdig Falco

As Falco does not support AWS SNS output natively, we have created this small
golang utility which reads Falco alerts from a named pipe and sends them to an
SNS topic.

This utility is designed to be run in a sidecar container in the same
Pod as Falco.

## Configuration

You have a [complete Kubernetes manifest available](https://github.com/draios/falco/tree/kubernetes-response-engine/deployment/falco/falco-daemonset.yaml) for further reading.
Take a look at the sidecar container and at the initContainers directive which
creates the shared pipe between containers.

### Container image

You have this adapter available as a container image. Its name is *sysdig/falco-sns*.

### Parameters Reference

* -t: Specifies the ARN of the SNS topic where messages will be published.
* -f: Specifies the named pipe path where Falco publishes its alerts. By default
it is */var/run/falco/nats* (see the smoke-test sketch below).
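As a quick smoke test, one can push a Falco-style alert into the pipe by hand; a minimal sketch in Python (the pipe path matches the default above, and the alert fields are illustrative):

```python
# Illustrative only: write a fake Falco alert into the named pipe so that
# falco-sns picks it up and publishes it to the configured SNS topic.
# Assumes the pipe already exists (it is created by the initContainer).
import json

alert = {
    "priority": "Error",
    "rule": "Terminal shell in container",
    "output": "A shell was spawned in a container (illustrative output)",
}

with open("/var/run/falco/nats", "w") as pipe:
    pipe.write(json.dumps(alert) + "\n")
    pipe.flush()
```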

@@ -0,0 +1,101 @@
// Copyright 2012-2018 The Sysdig Tech Marketing Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// +build ignore

package main

import (
	"bufio"
	"encoding/json"
	"flag"
	"log"
	"os"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/sns"
)

func main() {
	var topic = flag.String("t", "", "The AWS SNS topic ARN")
	var pipePath = flag.String("f", "/var/run/falco/nats", "The named pipe path")

	log.SetFlags(0)
	flag.Usage = usage
	flag.Parse()

	session, err := session.NewSession(&aws.Config{Region: aws.String(os.Getenv("AWS_DEFAULT_REGION"))})
	if err != nil {
		log.Fatal(err)
	}
	svc := sns.New(session)

	pipe, err := os.OpenFile(*pipePath, os.O_RDONLY, 0600)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("Opened pipe %s", *pipePath)

	reader := bufio.NewReader(pipe)
	scanner := bufio.NewScanner(reader)
	log.Printf("Scanning %s", *pipePath)

	for scanner.Scan() {
		msg := []byte(scanner.Text())
		alert := parseAlert(msg)

		// Publish the raw alert as the message body, and expose priority
		// and rule as MessageAttributes so subscribers can filter on them.
		params := &sns.PublishInput{
			Message: aws.String(string(msg)),
			MessageAttributes: map[string]*sns.MessageAttributeValue{
				"priority": &sns.MessageAttributeValue{
					DataType:    aws.String("String"),
					StringValue: aws.String(alert.Priority),
				},
				"rule": &sns.MessageAttributeValue{
					DataType:    aws.String("String"),
					StringValue: aws.String(alert.Rule),
				},
			},
			TopicArn: aws.String(*topic),
		}

		_, err := svc.Publish(params)
		if err != nil {
			log.Fatal(err)
		} else {
			log.Printf("Published [%s] : '%s'\n", *topic, msg)
		}
	}
}

func usage() {
	log.Fatalf("Usage: falco-sns -t topic [-f pipe_path]\n")
}

type parsedAlert struct {
	Priority string `json:"priority"`
	Rule     string `json:"rule"`
}

func parseAlert(alert []byte) *parsedAlert {
	var result parsedAlert
	err := json.Unmarshal(alert, &result)
	if err != nil {
		log.Fatal(err)
	}
	return &result
}

@@ -32,10 +32,10 @@
},
"cachetools": {
"hashes": [
"sha256:90f1d559512fc073483fe573ef5ceb39bf6ad3d39edc98dc55178a2b2b176fa3",
"sha256:d1c398969c478d336f767ba02040fa22617333293fb0b8968e79b16028dfee35"
"sha256:0a258d82933a1dd18cb540aca4ac5d5690731e24d1239a08577b814998f49785",
"sha256:4621965b0d9d4c82a79a29edbad19946f5e7702df4afae7d1ed2df951559a8cc"
],
"version": "==2.1.0"
"version": "==3.0.0"
},
"certifi": {
"hashes": [
@@ -123,16 +123,6 @@
"editable": true,
"path": "."
},
"enum34": {
"hashes": [
"sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
"sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
"sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
"sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
],
"markers": "python_version < '3'",
"version": "==1.1.6"
},
"google-auth": {
"hashes": [
"sha256:9ca363facbf2622d9ba828017536ccca2e0f58bd15e659b52f312172f8815530",
@@ -153,21 +143,13 @@
],
"version": "==2.7"
},
"ipaddress": {
"hashes": [
"sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794",
"sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c"
],
"markers": "python_version < '3'",
"version": "==1.0.22"
},
"kubernetes": {
"hashes": [
"sha256:5ee6e2e949ca800ad8a73da6f67c2a637c2c803945b006e6105beae83e43b273",
"sha256:84dfb4319afac189e8327b71b9332b5329d2a78074f58958c5f06a870edf32ba"
"sha256:0cc9ce02d838da660efa0a67270b4b7d47e6beb8889673cd45c86f897e2d6821",
"sha256:54f8e7bb1dd9a55cf416dff76a63c4ae441764280942d9913f2243676f29d02c"
],
"index": "pypi",
"version": "==7.0.0"
"version": "==8.0.0"
},
"maya": {
"hashes": [
@@ -228,24 +210,24 @@
},
"python-dateutil": {
"hashes": [
"sha256:1adb80e7a782c12e52ef9a8182bebeb73f1d7e24e374397af06fb4956c8dc5c0",
"sha256:e27001de32f627c22380a688bcc43ce83504a7bc5da472209b4c70f02829f0b8"
"sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93",
"sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"
],
"version": "==2.7.3"
"version": "==2.7.5"
},
"pytz": {
"hashes": [
"sha256:a061aa0a9e06881eb8b3b2b43f05b9439d6583c206d0a6c340ff72a7b6669053",
"sha256:ffb9ef1de172603304d9d2819af6f5ece76f2e85ec10692a524dd876e72bf277"
"sha256:31cb35c89bd7d333cd32c5f278fca91b523b0834369e757f4c5641ea252236ca",
"sha256:8e0f8568c118d3077b46be7d654cc8167fa916092e28320cde048e54bfc9f1e6"
],
"version": "==2018.5"
"version": "==2018.7"
},
"pytzdata": {
"hashes": [
"sha256:1d936da41ee06216d89fdc7ead1ee9a5da2811a8787515a976b646e110c3f622",
"sha256:e4ef42e82b0b493c5849eed98b5ab49d6767caf982127e9a33167f1153b36cc5"
"sha256:10c74b0cfc51a9269031f86ecd11096c9c6a141f5bb15a3b8a88f9979f6361e2",
"sha256:279cbd9900d5da9a8f9053e60db0db7f42d9a799673744b76aaeb6b4f14abe77"
],
"version": "==2018.5"
"version": "==2018.7"
},
"pyyaml": {
"hashes": [
@@ -265,23 +247,17 @@
},
"regex": {
"hashes": [
"sha256:22d7ef8c2df344328a8a3c61edade2ee714e5de9360911d22a9213931c769faa",
"sha256:3a699780c6b712c67dc23207b129ccc6a7e1270233f7aadead3ea3f83c893702",
"sha256:42f460d349baebd5faec02a0c920988fb0300b24baf898d9c139886565b66b6c",
"sha256:43bf3d79940cbdf19adda838d8b26b28b47bec793cda46590b5b25703742f440",
"sha256:47d6c7f0588ef33464e00023067c4e7cce68e0d6a686a73c7ee15abfdad503d4",
"sha256:5b879f59f25ed9b91bc8693a9a994014b431f224f492519ad0255ce6b54b83e5",
"sha256:8ba0093c412900f636b0f826c597a0c3ea0e395344bc99894ddefe88b76c9c7e",
"sha256:a4789254a1a0bd7a637036cce0b7ed72d8cc864e93f2e9cfd10ac00ae27bb7b0",
"sha256:b73cea07117dca888b0c3671770b501bef19aac9c45c8ffdb5bea2cca2377b0a",
"sha256:d3eb59fa3e5b5438438ec97acd9dc86f077428e020b015b43987e35bea68ef4c",
"sha256:d51d232b4e2f106deaf286001f563947fee255bc5bd209a696f027e15cf0a1e7",
"sha256:d59b03131a8e35061b47a8f186324a95eaf30d5f6ee9cc0637e7b87d29c7c9b5",
"sha256:dd705df1b47470388fc4630e4df3cbbe7677e2ab80092a1c660cae630a307b2d",
"sha256:e87fffa437a4b00afb17af785da9b01618425d6cd984c677639deb937037d8f2",
"sha256:ed40e0474ab5ab228a8d133759d451b31d3ccdebaff698646e54aff82c3de4f8"
"sha256:384c78351ceb08b9f04e28552edea9af837d05ad4fda9a187a7bbd82759f29b6",
"sha256:41b70db2608726396de185e7571a70391507ab47a64b564f59861ff13f2c50a5",
"sha256:50f4b57696883fdbb0494cf1ff1cf6e04790d5e1848dff0b2cf28a2b97614351",
"sha256:81515123132f9ab0cc8128d035ba7db7783206e4616bdabd3faba335b9add185",
"sha256:91e965833a9f93b3e6abfef815026ccb8a9abe12c0958c723fc6c0d396384602",
"sha256:9cb058e53c2488b6cba85a7e6ce6d659b3f33ebe00f613dc9fda46de788a1298",
"sha256:b41a81228c3994789d4785d9fef96770f9a6b564a30c10af671bd5a4078da6f4",
"sha256:cf20d6539e00021793df23c2a98d57aff84f9402f81ac5896fffb4f8c8a08897",
"sha256:f937fdbcdb1e455c23709f5cf6df91a0ecfe8c23268f601606173232958daa8d"
],
"version": "==2018.8.29"
"version": "==2018.11.6"
},
"requests": {
"hashes": [
@@ -326,17 +302,17 @@
},
"urllib3": {
"hashes": [
"sha256:41c3db2fc01e5b907288010dec72f9d0a74e37d6994e6eb56849f59fea2265ae",
"sha256:8819bba37a02d143296a4d032373c4dd4aca11f6d4c9973335ca75f9c8475f59"
"sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
"sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
],
"version": "==1.24"
"version": "==1.24.1"
},
"websocket-client": {
"hashes": [
"sha256:c42b71b68f9ef151433d6dcc6a7cb98ac72d2ad1e3a74981ca22bc5d9134f166",
"sha256:f5889b1d0a994258cfcbc8f2dc3e457f6fc7b32a8d74873033d12e4eab4bdf63"
"sha256:8c8bf2d4f800c3ed952df206b18c28f7070d9e3dcbd6ca6291127574f57ee786",
"sha256:e51562c91ddb8148e791f0155fdb01325d99bb52c4cdbb291aee7a3563fd0849"
],
"version": "==0.53.0"
"version": "==0.54.0"
}
},
"develop": {
@@ -356,6 +332,7 @@
"hashes": [
"sha256:03481e81d558d30d230bc12999e3edffe392d244349a90f4ef9b88425fac74ba",
"sha256:0b136648de27201056c1869a6c0d4e23f464750fd9a9ba9750b8336a244429ed",
"sha256:0bf8cbbd71adfff0ef1f3a1531e6402d13b7b01ac50a79c97ca15f030dba6306",
"sha256:10a46017fef60e16694a30627319f38a2b9b52e90182dddb6e37dcdab0f4bf95",
"sha256:198626739a79b09fa0a2f06e083ffd12eb55449b5f8bfdbeed1df4910b2ca640",
"sha256:23d341cdd4a0371820eb2b0bd6b88f5003a7438bbedb33688cd33b8eae59affd",
@@ -384,6 +361,7 @@
"sha256:c1bb572fab8208c400adaf06a8133ac0712179a334c09224fb11393e920abcdd",
"sha256:de4418dadaa1c01d497e539210cb6baa015965526ff5afc078c57ca69160108d",
"sha256:e05cb4d9aad6233d67e0541caa7e511fa4047ed7750ec2510d466e806e0255d6",
"sha256:f05a636b4564104120111800021a92e43397bc12a5c72fed7036be8556e0029e",
"sha256:f3f501f345f24383c0000395b26b726e46758b71393267aeae0bd36f8b3ade80"
],
"version": "==4.5.1"
@@ -404,17 +382,17 @@
},
"expects": {
"hashes": [
"sha256:37538d7b0fa9c0d53e37d07b0e8c07d89754d3deec1f0f8ed1be27f4f10363dd"
"sha256:419902ccafe81b7e9559eeb6b7a07ef9d5c5604eddb93000f0642b3b2d594f4c"
],
"index": "pypi",
"version": "==0.8.0"
"version": "==0.9.0"
},
"mamba": {
"hashes": [
"sha256:63e70a8666039cf143a255000e23f29be4ea4b5b8169f2b053f94eb73a2ea9e2"
"sha256:25328151ea94d97a0b461d7256dc7350c99b5f8d2de22d355978378edfeac545"
],
"index": "pypi",
"version": "==0.9.3"
"version": "==0.10"
},
"playbooks": {
"path": "."

@@ -33,7 +33,7 @@ trigger.
* -p: The playbook to deploy, it must match with the top-level script. In this
example *slack.py* that contains the wiring between playbooks and Kubeless
-functions
+functions.
* -e: Sets configuration settings for Playbook. In this case the URL where we
have to post messages. You can specify multiple *-e* flags.
@@ -214,3 +214,15 @@ This playbook creates a container in Phantom

* VERIFY_SSL: Verify SSL certificates for HTTPS requests. By default it is enabled.

In this example, when Falco raises any kind of alert, the alert will be created in Phantom.

## Deploying playbooks to AWS Lambda

You can deploy functions to AWS Lambda using the `./deploy_playbook_aws` script; a quick
invocation test is sketched after the parameter list.

### Parameters

* -p: The playbook to deploy, it must match the top-level script.
* -e: Sets configuration settings for the playbook. You can specify multiple *-e* flags.
* -k: The EKS cluster name which the playbook is going to connect to via the K8s API.
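Once deployed, a hedged sketch of how one might smoke-test the resulting function with boto3 (boto3 is an assumption; the function name follows the falco-<playbook> convention used by the script, and the payload mimics an SNS notification):

```python
# Illustrative smoke test: invoke the deployed falco-slack function with a
# hand-crafted SNS-style event, the shape the Lambda handler receives.
# Assumes boto3 and AWS credentials allowing lambda:InvokeFunction.
import json
import boto3

event = {
    "Records": [
        {"Sns": {"Message": json.dumps({
            "priority": "Error",
            "rule": "Terminal shell in container",
            "output": "Illustrative Falco alert output",
        })}}
    ]
}

client = boto3.client("lambda")
response = client.invoke(
    FunctionName="falco-slack",
    Payload=json.dumps(event),
)
print(response["StatusCode"])
```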

@@ -16,7 +16,7 @@ You must pass the playbook and at least one topic to subscribe.
Example:

-  deploy_playbook -r slack -t "falco.error.*" -e SLACK_WEBHOOK_URL=http://foobar.com/...
+  deploy_playbook -p slack -t "falco.error.*" -e SLACK_WEBHOOK_URL=http://foobar.com/...
EOF
exit 1
}
@@ -54,7 +54,22 @@ fi
pipenv lock --requirements | sed '/^-/ d' > requirements.txt
zip "${playbook}".zip -r playbooks/*.py "${playbook}".py

mkdir -p kubeless-function
cp -r playbooks kubeless-function/

cat > kubeless-function/"${playbook}".py <<EOL
import sys
import os.path

sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__))))
EOL
cat functions/"${playbook}".py >> kubeless-function/"${playbook}".py

cd kubeless-function
zip ../"${playbook}".zip -r *
cd ..

kubeless function deploy --from-file "${playbook}".zip \
  --dependencies requirements.txt \
@@ -63,7 +78,7 @@ kubeless function deploy --from-file "${playbook}".zip \
  --handler "${playbook}".handler \
  falco-"${playbook}"
-rm requirements.txt ${playbook}.zip
+rm -fr requirements.txt ${playbook}.zip kubeless-function
for index in ${!topics[*]}; do
  kubeless trigger nats create falco-"${playbook}"-trigger-"${index}" \

@@ -0,0 +1,76 @@
#!/bin/bash
#
# Deploys a playbook to AWS Lambda

set -e

function usage() {
  cat<<EOF
Usage: $0 [options]

-p playbook            Playbook to be deployed: slack, taint, isolate. It must match a script under functions/.
-e environment         Environment variables for the Lambda function. You can pass multiple environment variables passing several -e parameters.
-k kubernetes_cluster  Kubernetes cluster from aws eks list-clusters where function will be applied.

You must pass the playbook and the EKS cluster name.

Example:

deploy_playbook_aws -p slack -e SLACK_WEBHOOK_URL=http://foobar.com/... -k sysdig_eks
EOF
  exit 1
}

function join { local IFS="$1"; shift; echo "$*"; }

playbook=""
environment=("KUBECONFIG=kubeconfig" "KUBERNETES_LOAD_KUBE_CONFIG=1")
eks_cluster="${EKS_CLUSTER}"

while getopts "p:e:k:" arg; do
  case $arg in
    p)
      playbook="${OPTARG}"
      ;;
    e)
      environment+=("${OPTARG}")
      ;;
    k)
      eks_cluster="${OPTARG}"
      ;;
    *)
      usage
      ;;
  esac
done

if [[ "${playbook}" == "" ]] || [[ "${eks_cluster}" == "" ]]; then
  usage
fi

pipenv lock --requirements | sed '/^-/ d' > requirements.txt

mkdir -p lambda
pip install -t lambda -r requirements.txt
pip install -t lambda .

aws eks update-kubeconfig --name "${eks_cluster}" --kubeconfig lambda/kubeconfig
sed -i "s/command: aws-iam-authenticator/command: .\/aws-iam-authenticator/g" lambda/kubeconfig
cp extra/aws-iam-authenticator lambda/
cp functions/"${playbook}".py lambda/

cd lambda
zip ../"${playbook}".zip -r *
cd ..

aws lambda create-function \
  --function-name falco-"${playbook}" \
  --runtime python2.7 \
  --role $(terraform output --state=../deployment/aws/terraform.tfstate iam_for_lambda) \
  --environment Variables={"$(join , ${environment[*]})"} \
  --handler "${playbook}".handler \
  --zip-file fileb://./"${playbook}".zip

rm -fr "${playbook}".zip lambda requirements.txt

@@ -1,8 +1,3 @@
-import sys
-import os.path
-
-sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__))))
-
import os
import playbooks
from playbooks import infrastructure
@@ -13,4 +8,4 @@ playbook = playbooks.DeletePod(
def handler(event, context):
-    playbook.run(event['data'])
+    playbook.run(playbooks.falco_alert(event))

@@ -1,8 +1,3 @@
-import sys
-import os.path
-
-sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__))))
-
import os
import playbooks
from playbooks import infrastructure
@@ -13,4 +8,4 @@ playbook = playbooks.NetworkIsolatePod(
def handler(event, context):
-    playbook.run(event['data'])
+    playbook.run(playbooks.falco_alert(event))

@@ -1,7 +1,3 @@
-import sys
-import os.path
-
-sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__))))
import os
import playbooks
from playbooks import infrastructure
@@ -13,4 +9,4 @@ playbook = playbooks.AddMessageToSlack(
def handler(event, context):
-    playbook.run(event['data'])
+    playbook.run(playbooks.falco_alert(event))

@@ -1,7 +1,3 @@
-import sys
-import os.path
-
-sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__))))
import os
import playbooks
from playbooks import infrastructure
@@ -16,4 +12,4 @@ playbook = playbooks.TaintNode(
def handler(event, context):
-    playbook.run(event['data'])
+    playbook.run(playbooks.falco_alert(event))

@@ -1,3 +1,4 @@
+import json
import maya
@@ -197,3 +198,13 @@ class CreateContainerInPhantom(object):
        'Informational': 'low',
        'Debug': 'low',
    }
+
+
+def falco_alert(event):
+    if 'data' in event:
+        return event['data']
+
+    if 'Records' in event:
+        return json.loads(event['Records'][0]['Sns']['Message'])
+
+    return event
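To make the two supported event shapes concrete, a tiny illustrative check (the alert fields are made up):

```python
# Illustrative: falco_alert() normalizes both trigger formats to the same dict.
import json
from playbooks import falco_alert

alert = {"rule": "Terminal shell in container", "priority": "Error"}

kubeless_event = {"data": alert}                                     # NATS/Kubeless shape
sns_event = {"Records": [{"Sns": {"Message": json.dumps(alert)}}]}   # SNS/Lambda shape

assert falco_alert(kubeless_event) == falco_alert(sns_event) == alert
```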