Compare commits

..

3 Commits

Author SHA1 Message Date
Mark Stemm
0e3121b17c Embed .lua files into falco executable
Instead of having .lua files external to the program responsible for
loading rules, embed the contents of those files into the executable
and load them as strings instead of as files:

Add a cmake custom command below userspace/engine/lua that calls a
bash script lua-to-cpp.sh to generate falco_engine_lua_files.{cpp,hh}
that are compiled into the falco engine library.

The script creates a .cpp file that has const char * symbols for each
file, as well as lists of files that should be loaded when the falco
engine is loaded. There are actually two lists:

- lua_module_strings: these are loaded and also added to the lua
  runtime package.preload table, so they are available when lua code
  require()s them.

- lua_code_strings: these are loaded *and* evaluated, so the functions
  in them are available to be called from C++.

This simplifies some of the falco_common methods, as there's no need
to keep track of a "main" lua file to load or paths from which the lua
loader should find files for modules, and there's no need to keep
track of an "alternate" lua directory that occurs for debug builds.

Also, there's no need to include any .lua files in the installed
packages, as they're built into the falco binary.

Signed-off-by: Mark Stemm <mark.stemm@gmail.com>
2022-01-12 12:14:47 -08:00
Mark Stemm
b05b252100 Clean up lyaml build a bit
change LYAML_SRC to LYAML_ROOT, which points to the top source
directory now.

LYAML_LIB and (new) LYAML_LUA_DIR are defined relative to that
directory.

There's no install step at all now--the static library and the .lua
files are now used directly from the source tree.

Signed-off-by: Mark Stemm <mark.stemm@gmail.com>
2022-01-12 12:13:06 -08:00
Mark Stemm
2df9a68140 Move compiler/parser lua files to a "modules" subdir
This will distinguish it from rule_loader.lua, which is *not* a module
but lua code with functions that can be called directly.

Signed-off-by: Mark Stemm <mark.stemm@gmail.com>
2022-01-12 09:59:42 -08:00
12 changed files with 140 additions and 164 deletions

View File

@@ -392,8 +392,8 @@ jobs:
- run: - run:
name: Publish bin-dev name: Publish bin-dev
command: | command: |
FALCO_VERSION=$(cat /build/release/userspace/falco/config_falco.h | grep 'FALCO_VERSION ' | cut -d' ' -f3 | sed -e 's/^"//' -e 's/"$//') FALCO_VERSION=$(cat /build-static/release/userspace/falco/config_falco.h | grep 'FALCO_VERSION ' | cut -d' ' -f3 | sed -e 's/^"//' -e 's/"$//')
/source/falco/scripts/publish-bin -f /build/release/falco-${FALCO_VERSION}-x86_64.tar.gz -r bin-dev -a x86_64 /source/falco/scripts/publish-bin -f /build-static/release/falco-${FALCO_VERSION}-x86_64.tar.gz -r bin-dev -a x86_64
"publish/packages-deb-dev": "publish/packages-deb-dev":
docker: docker:
- image: docker.io/debian:stable - image: docker.io/debian:stable
@@ -506,8 +506,8 @@ jobs:
- run: - run:
name: Publish bin name: Publish bin
command: | command: |
FALCO_VERSION=$(cat /build/release/userspace/falco/config_falco.h | grep 'FALCO_VERSION ' | cut -d' ' -f3 | sed -e 's/^"//' -e 's/"$//') FALCO_VERSION=$(cat /build-static/release/userspace/falco/config_falco.h | grep 'FALCO_VERSION ' | cut -d' ' -f3 | sed -e 's/^"//' -e 's/"$//')
/source/falco/scripts/publish-bin -f /build/release/falco-${FALCO_VERSION}-x86_64.tar.gz -r bin -a x86_64 /source/falco/scripts/publish-bin -f /build-static/release/falco-${FALCO_VERSION}-x86_64.tar.gz -r bin -a x86_64
"publish/packages-deb": "publish/packages-deb":
docker: docker:
- image: docker.io/debian:stable - image: docker.io/debian:stable
@@ -711,6 +711,7 @@ workflows:
- falco - falco
- test-infra - test-infra
requires: requires:
- "build/musl"
- "rpm/sign" - "rpm/sign"
filters: filters:
tags: tags:

5
.gitignore vendored
View File

@@ -10,8 +10,11 @@ test/.phoronix-test-suite
test/results*.json.* test/results*.json.*
test/build test/build
userspace/engine/lua/lyaml
userspace/engine/lua/lyaml.lua
.vscode/* .vscode/*
.luacheckcache .luacheckcache
*.idea* *.idea*

View File

@@ -10,4 +10,5 @@ endif()
if(CPACK_GENERATOR MATCHES "TGZ") if(CPACK_GENERATOR MATCHES "TGZ")
set(CPACK_SET_DESTDIR "ON") set(CPACK_SET_DESTDIR "ON")
set(CPACK_STRIP_FILES "OFF")
endif() endif()

View File

@@ -24,8 +24,8 @@ else()
# default below In case you want to test against another falcosecurity/libs version just pass the variable - ie., `cmake # default below In case you want to test against another falcosecurity/libs version just pass the variable - ie., `cmake
# -DFALCOSECURITY_LIBS_VERSION=dev ..` # -DFALCOSECURITY_LIBS_VERSION=dev ..`
if(NOT FALCOSECURITY_LIBS_VERSION) if(NOT FALCOSECURITY_LIBS_VERSION)
set(FALCOSECURITY_LIBS_VERSION "4de7ad2857fb55439eb10455aacd1d262b70551b") set(FALCOSECURITY_LIBS_VERSION "bb9bee8e522fc953c2a79093d688d3d82b925e8b")
set(FALCOSECURITY_LIBS_CHECKSUM "SHA256=3769e410fc0e31d5c7c37f33a7a73dfe52418a850d8f166fbafc67a723c619b6") set(FALCOSECURITY_LIBS_CHECKSUM "SHA256=ab2f18ff9c8d92dd06088ccfa73d4230fce3617613229f5afd839a37c13b0459")
endif() endif()
# cd /path/to/build && cmake /path/to/source # cd /path/to/build && cmake /path/to/source

View File

@@ -13,10 +13,12 @@
include(ExternalProject) include(ExternalProject)
set(PLUGINS_VERSION "0.1.0-rc1-28-g019437e")
ExternalProject_Add( ExternalProject_Add(
cloudtrail-plugin cloudtrail-plugin
URL "https://download.falco.org/plugins/stable/cloudtrail-0.2.0-${CMAKE_HOST_SYSTEM_PROCESSOR}.tar.gz" URL "https://download.falco.org/plugins/dev/cloudtrail-${PLUGINS_VERSION}-${CMAKE_HOST_SYSTEM_PROCESSOR}.tar.gz"
URL_HASH "SHA256=917ebc5c3b1ad78d959372baa73ac2e9b18b38f51e1e42bd0974166dc04a964c" URL_HASH "SHA256=ad9692957c5435238e07d1625e1b247eabe98b85f54de9218367fdd73a6f3f0b"
CONFIGURE_COMMAND "" CONFIGURE_COMMAND ""
BUILD_COMMAND "" BUILD_COMMAND ""
INSTALL_COMMAND "") INSTALL_COMMAND "")
@@ -25,8 +27,8 @@ install(FILES "${PROJECT_BINARY_DIR}/cloudtrail-plugin-prefix/src/cloudtrail-plu
ExternalProject_Add( ExternalProject_Add(
json-plugin json-plugin
URL "https://download.falco.org/plugins/stable/json-0.2.0-${CMAKE_HOST_SYSTEM_PROCESSOR}.tar.gz" URL "https://download.falco.org/plugins/dev/json-${PLUGINS_VERSION}-${CMAKE_HOST_SYSTEM_PROCESSOR}.tar.gz"
URL_HASH "SHA256=250f0b04db7ab08f3bfa5ecd90cc9b39a6992fc2e96b887ed6f319a6ba156fd7" URL_HASH "SHA256=721ea5226b0f623915d0d5c34870589ad33a8ff795b0daa1af72f21a67430077"
CONFIGURE_COMMAND "" CONFIGURE_COMMAND ""
BUILD_COMMAND "" BUILD_COMMAND ""
INSTALL_COMMAND "") INSTALL_COMMAND "")

View File

@@ -20,7 +20,7 @@ RUN curl -L -o falco.tar.gz \
RUN sed -e 's/time_format_iso_8601: false/time_format_iso_8601: true/' < /falco/etc/falco/falco.yaml > /falco/etc/falco/falco.yaml.new \ RUN sed -e 's/time_format_iso_8601: false/time_format_iso_8601: true/' < /falco/etc/falco/falco.yaml > /falco/etc/falco/falco.yaml.new \
&& mv /falco/etc/falco/falco.yaml.new /falco/etc/falco/falco.yaml && mv /falco/etc/falco/falco.yaml.new /falco/etc/falco/falco.yaml
FROM debian:11-slim FROM scratch
LABEL maintainer="cncf-falco-dev@lists.cncf.io" LABEL maintainer="cncf-falco-dev@lists.cncf.io"

View File

@@ -82,7 +82,7 @@ trace_files: !mux
incompat_plugin_rules_version: incompat_plugin_rules_version:
exit_status: 1 exit_status: 1
stderr_contains: "Runtime error: Plugin cloudtrail version .* not compatible with required plugin version 100000.0.0. Exiting." stderr_contains: "Runtime error: Plugin cloudtrail version 0.1.0 not compatible with required plugin version 100000.0.0. Exiting."
conf_file: BUILD_DIR/test/confs/plugins/cloudtrail_json_create_instances.yaml conf_file: BUILD_DIR/test/confs/plugins/cloudtrail_json_create_instances.yaml
rules_file: rules_file:
- rules/plugins/cloudtrail_incompat_plugin_version.yaml - rules/plugins/cloudtrail_incompat_plugin_version.yaml

View File

@@ -136,12 +136,6 @@ void falco_engine::list_fields(std::string &source, bool verbose, bool names_onl
{ {
for(auto &field : fld_class.fields) for(auto &field : fld_class.fields)
{ {
// Skip fields with the EPF_TABLE_ONLY flag.
if(field.tags.find("EPF_TABLE_ONLY") != field.tags.end())
{
continue;
}
printf("%s\n", field.name.c_str()); printf("%s\n", field.name.c_str());
} }
} }

View File

@@ -22,3 +22,4 @@ limitations under the License.
// represents the fields supported by this version of Falco. It's used // represents the fields supported by this version of Falco. It's used
// at build time to detect a changed set of fields. // at build time to detect a changed set of fields.
#define FALCO_FIELDS_CHECKSUM "4de812495f8529ac20bda2b9774462b15911a51df293d59fe9ccb6b922fdeb9d" #define FALCO_FIELDS_CHECKSUM "4de812495f8529ac20bda2b9774462b15911a51df293d59fe9ccb6b922fdeb9d"

View File

@@ -1529,9 +1529,12 @@ void json_event_formatter::set_format(output_format of, const std::string &forma
bool json_event_formatter::tostring_withformat(gen_event *gevt, std::string &output, gen_event_formatter::output_format of) bool json_event_formatter::tostring_withformat(gen_event *gevt, std::string &output, gen_event_formatter::output_format of)
{ {
json_event *ev = static_cast<json_event *>(gevt); json_event *ev = static_cast<json_event *>(gevt);
std::string ret;
if(of == OF_JSON) if(of == OF_JSON)
{ {
output = tojson(ev); ret = tojson(ev);
return true; return true;
} }
else else

View File

@@ -293,20 +293,19 @@ function split_lines(rules_content)
end end
function get_orig_yaml_obj(rules_lines, row) function get_orig_yaml_obj(rules_lines, row)
idx = row local ret = ""
local t = {}
while (idx <= #rules_lines) do
t[#t + 1] = rules_lines[idx]
idx = idx + 1
if idx > #rules_lines or rules_lines[idx] == "" or string.sub(rules_lines[idx], 1, 1) == '-' then idx = row
break while (idx <= #rules_lines) do
end ret = ret..rules_lines[idx].."\n"
end idx = idx + 1
t[#t + 1] = ""
local ret = "" if idx > #rules_lines or rules_lines[idx] == "" or string.sub(rules_lines[idx], 1, 1) == '-' then
ret = table.concat(t, "\n") break
return ret end
end
return ret
end end
function get_lines(rules_lines, row, num_lines) function get_lines(rules_lines, row, num_lines)
@@ -755,69 +754,65 @@ end
-- Populates exfields with all fields used -- Populates exfields with all fields used
function build_exception_condition_string_multi_fields(eitem, exfields) function build_exception_condition_string_multi_fields(eitem, exfields)
local fields = eitem['fields'] local fields = eitem['fields']
local comps = eitem['comps'] local comps = eitem['comps']
local icond = {} local icond = "("
icond[#icond + 1] = "(" for i, values in ipairs(eitem['values']) do
local lcount = 0 if #fields ~= #values then
for i, values in ipairs(eitem['values']) do return nil, "Exception item "..eitem['name']..": fields and values lists must have equal length"
if #fields ~= #values then end
return nil, "Exception item " .. eitem['name'] .. ": fields and values lists must have equal length"
end
if lcount ~= 0 then if icond ~= "(" then
icond[#icond + 1] = " or " icond=icond.." or "
end end
lcount = lcount + 1
icond[#icond + 1] = "(" icond=icond.."("
for k = 1, #fields do for k=1,#fields do
if k > 1 then if k > 1 then
icond[#icond + 1] = " and " icond=icond.." and "
end end
local ival = values[k] local ival = values[k]
local istr = "" local istr = ""
-- If ival is a table, express it as (titem1, titem2, etc) -- If ival is a table, express it as (titem1, titem2, etc)
if type(ival) == "table" then if type(ival) == "table" then
istr = "(" istr = "("
for _, item in ipairs(ival) do for _, item in ipairs(ival) do
if istr ~= "(" then if istr ~= "(" then
istr = istr .. ", " istr = istr..", "
end end
istr = istr .. quote_item(item) istr = istr..quote_item(item)
end end
istr = istr .. ")" istr = istr..")"
else else
-- If the corresponding operator is one that works on lists, possibly add surrounding parentheses. -- If the corresponding operator is one that works on lists, possibly add surrounding parentheses.
if defined_list_comp_operators[comps[k]] then if defined_list_comp_operators[comps[k]] then
istr = paren_item(ival) istr = paren_item(ival)
else else
-- Quote the value if not already quoted -- Quote the value if not already quoted
istr = quote_item(ival) istr = quote_item(ival)
end end
end end
icond[#icond + 1] = fields[k] .. " " .. comps[k] .. " " .. istr icond = icond..fields[k].." "..comps[k].." "..istr
exfields[fields[k]] = true exfields[fields[k]] = true
end end
icond[#icond + 1] = ")" icond=icond..")"
end end
icond[#icond + 1] = ")" icond = icond..")"
-- Don't return a trivially empty condition string -- Don't return a trivially empty condition string
local ret = table.concat(icond) if icond == "()" then
if ret == "()" then icond = ""
return "", nil end
end
return ret, nil return icond, nil
end end

View File

@@ -168,10 +168,6 @@ private:
int nodeIdx = std::stoi(key.substr(i + 1, close_param_idx - i - 1)); int nodeIdx = std::stoi(key.substr(i + 1, close_param_idx - i - 1));
ret.reset(ret[nodeIdx]); ret.reset(ret[nodeIdx]);
i = close_param_idx; i = close_param_idx;
if (i < key.size() - 1 && key[i + 1] == '.')
{
i++;
}
} }
} }
} }
@@ -280,61 +276,49 @@ private:
}; };
namespace YAML { namespace YAML {
template<>
struct convert<nlohmann::json> {
static bool decode(const Node& node, nlohmann::json& res)
{
int int_val;
double double_val;
bool bool_val;
std::string str_val;
nlohmann::json sub{};
switch (node.Type()) {
case YAML::NodeType::Map:
for (auto &&it: node)
{
YAML::convert<nlohmann::json>::decode(it.second, sub);
res[it.first.as<std::string>()] = sub;
}
break;
case YAML::NodeType::Sequence:
for (auto &&it : node)
{
YAML::convert<nlohmann::json>::decode(it, sub);
res.emplace_back(sub);
}
break;
case YAML::NodeType::Scalar:
if (YAML::convert<int>::decode(node, int_val))
{
res = int_val;
}
else if (YAML::convert<double>::decode(node, double_val))
{
res = double_val;
}
else if (YAML::convert<bool>::decode(node, bool_val))
{
res = bool_val;
}
else if (YAML::convert<std::string>::decode(node, str_val))
{
res = str_val;
}
default:
break;
}
return true;
}
};
template<> template<>
struct convert<falco_configuration::plugin_config> { struct convert<falco_configuration::plugin_config> {
// Note that this loses the distinction between init configs static bool read_file_from_key(const Node &node, const std::string &prefix, std::string &value)
// defined as YAML maps or as opaque strings. {
std::string key = prefix;
if(node[key])
{
value = node[key].as<std::string>();
return true;
}
key += "_file";
if(node[key])
{
std::string path = node[key].as<std::string>();
// prepend share dir if path is not absolute
if(path.at(0) != '/')
{
path = string(FALCO_ENGINE_PLUGINS_DIR) + path;
}
// Intentionally letting potential
// exception be thrown, will get
// caught when reading config.
std::ifstream f(path);
std::string str((std::istreambuf_iterator<char>(f)),
std::istreambuf_iterator<char>());
value = str;
return true;
}
return false;
}
// Note that the distinction between
// init_config/init_config_file and
// open_params/open_params_file is lost. But also,
// this class doesn't write yaml config anyway.
static Node encode(const falco_configuration::plugin_config & rhs) { static Node encode(const falco_configuration::plugin_config & rhs) {
Node node; Node node;
node["name"] = rhs.m_name; node["name"] = rhs.m_name;
@@ -354,44 +338,36 @@ namespace YAML {
{ {
return false; return false;
} }
rhs.m_name = node["name"].as<std::string>(); else
{
rhs.m_name = node["name"].as<std::string>();
}
if(!node["library_path"]) if(!node["library_path"])
{ {
return false; return false;
} }
rhs.m_library_path = node["library_path"].as<std::string>(); else
if(rhs.m_library_path.at(0) != '/')
{ {
rhs.m_library_path = node["library_path"].as<std::string>();
// prepend share dir if path is not absolute // prepend share dir if path is not absolute
rhs.m_library_path = string(FALCO_ENGINE_PLUGINS_DIR) + rhs.m_library_path; if(rhs.m_library_path.at(0) != '/')
{
rhs.m_library_path = string(FALCO_ENGINE_PLUGINS_DIR) + rhs.m_library_path;
}
} }
if(!node["init_config"]) if(!read_file_from_key(node, string("init_config"), rhs.m_init_config))
{ {
return false; return false;
} }
// By convention, if the init config is a YAML map we convert it
// in a JSON object string. This is useful for plugins implementing
// the `get_init_schema` API symbol, which right now support the
// JSON Schema specific. If we ever support other schema/data types,
// we may want to bundle the conversion logic in an ad-hoc class.
// The benefit of this is being able of parsing/editing the config as
// a YAML map instead of having an opaque string.
if (node["init_config"].IsMap())
{
nlohmann::json json;
YAML::convert<nlohmann::json>::decode(node["init_config"], json);
rhs.m_init_config = json.dump();
}
else
{
rhs.m_init_config = node["init_config"].as<std::string>();
}
if(node["open_params"]) if(node["open_params"] &&
!read_file_from_key(node, string("open_params"), rhs.m_open_params))
{ {
rhs.m_open_params = node["open_params"].as<std::string>(); return false;
} }
return true; return true;