Mirror of https://github.com/mudler/luet.git (synced 2025-09-03 00:06:36 +00:00)

Compare commits: 11 commits

Commits (SHA1):

- 356350f724
- 9d2ee1b760
- fd12227d53
- 1e617b0c67
- 77b49d9c4a
- 4c3532e3c6
- f2ec065a89
- 7193ea03f9
- beeb0dcaaa
- 0de3177ddd
- 45c8dfa19f

@@ -247,11 +247,12 @@ Build packages specifying multiple definition trees:
 	}

 	for _, sp := range toCalculate {
-		packs, err := luetCompiler.ComputeDepTree(sp)
+		ht := compiler.NewHashTree(generalRecipe.GetDatabase())
+		hashTree, err := ht.Query(luetCompiler, sp)
 		if err != nil {
 			errs = append(errs, err)
 		}
-		for _, p := range packs {
+		for _, p := range hashTree.Dependencies {
 			results.Packages = append(results.Packages,
 				PackageResult{
 					Name: p.Package.GetName(),
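
The hunk above moves a call site from ComputeDepTree to the new hash-tree query added in pkg/compiler/imagehashtree.go further down. A minimal end-to-end sketch of the new pattern, modeled on the new imagehashtree_test.go in this compare; the fixture path and the package selected are illustrative, not part of the diff:

package main

import (
	"fmt"

	"github.com/mudler/luet/pkg/compiler"
	sd "github.com/mudler/luet/pkg/compiler/backend"
	"github.com/mudler/luet/pkg/compiler/types/options"
	pkg "github.com/mudler/luet/pkg/package"
	"github.com/mudler/luet/pkg/tree"
)

func main() {
	// Load a compiler recipe from a definition tree (path is illustrative).
	recipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
	if err := recipe.Load("tests/fixtures/buildable"); err != nil {
		panic(err)
	}

	luetCompiler := compiler.NewLuetCompiler(sd.NewSimpleDockerBackend(), recipe.GetDatabase(), options.Concurrency(2))

	spec, err := luetCompiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
	if err != nil {
		panic(err)
	}

	// New pattern: query the hash tree instead of calling ComputeDepTree directly.
	ht := compiler.NewHashTree(recipe.GetDatabase())
	hashTree, err := ht.Query(luetCompiler, spec)
	if err != nil {
		panic(err)
	}
	for _, a := range hashTree.Dependencies {
		// Each assertion carries the package plus its computed hashes.
		fmt.Println(a.Package.GetName(), a.Hash.PackageHash)
	}
}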

@@ -40,7 +40,7 @@ var Verbose bool
 var LockedCommands = []string{"install", "uninstall", "upgrade"}

 const (
-	LuetCLIVersion = "0.14.3"
+	LuetCLIVersion = "0.14.7"
 	LuetEnvPrefix  = "LUET"
 )


@@ -97,7 +97,7 @@ To build a package, from a tree definition:

 	plugin := viper.GetStringSlice("plugin")

-	bus.Manager.Load(plugin...).Register()
+	bus.Manager.Initialize(plugin...)
 	if len(bus.Manager.Plugins) != 0 {
 		Info(":lollipop:Enabled plugins:")
 		for _, p := range bus.Manager.Plugins {

go.mod (2 lines changed)

@@ -36,7 +36,7 @@ require (
 	github.com/moby/sys/mount v0.2.0 // indirect
 	github.com/mudler/cobra-extensions v0.0.0-20200612154940-31a47105fe3d
 	github.com/mudler/docker-companion v0.4.6-0.20200418093252-41846f112d87
-	github.com/mudler/go-pluggable v0.0.0-20210510180427-ba09243a8c65
+	github.com/mudler/go-pluggable v0.0.0-20210513155700-54c6443073af
 	github.com/mudler/topsort v0.0.0-20201103161459-db5c7901c290
 	github.com/onsi/ginkgo v1.14.2
 	github.com/onsi/gomega v1.10.3

go.sum (4 lines changed)

@@ -758,8 +758,8 @@ github.com/mudler/cobra-extensions v0.0.0-20200612154940-31a47105fe3d h1:fKh+rvw
 github.com/mudler/cobra-extensions v0.0.0-20200612154940-31a47105fe3d/go.mod h1:puRUWSwyecW2V355tKncwPVPRAjQBduPsFjG0mrV/Nw=
 github.com/mudler/docker-companion v0.4.6-0.20200418093252-41846f112d87 h1:mGz7T8KvmHH0gLWPI5tQne8xl2cO3T8wrrb6Aa16Jxo=
 github.com/mudler/docker-companion v0.4.6-0.20200418093252-41846f112d87/go.mod h1:1w4zI1LYXDeiUXqedPcrT5eQJnmKR6dbg5iJMgSIP/Y=
-github.com/mudler/go-pluggable v0.0.0-20210510180427-ba09243a8c65 h1:Xmfr2g3QU/Ci1mvLswuOhrzXnJ5OXGqAk/skUD1aOsY=
-github.com/mudler/go-pluggable v0.0.0-20210510180427-ba09243a8c65/go.mod h1:WmKcT8ONmhDQIqQ+HxU+tkGWjzBEyY/KFO8LTGCu4AI=
+github.com/mudler/go-pluggable v0.0.0-20210513155700-54c6443073af h1:jixIxEgLSqu24eMiyzfCI+roa5IaOUhF546ePSFyHeY=
+github.com/mudler/go-pluggable v0.0.0-20210513155700-54c6443073af/go.mod h1:WmKcT8ONmhDQIqQ+HxU+tkGWjzBEyY/KFO8LTGCu4AI=
 github.com/mudler/topsort v0.0.0-20201103161459-db5c7901c290 h1:426hFyXMpXeqIeGJn2cGAW9ogvM2Jf+Jv23gtVPvBLM=
 github.com/mudler/topsort v0.0.0-20201103161459-db5c7901c290/go.mod h1:uP5BBgFxq2wNWo7n1vnY5SSbgL0WDshVJrOO12tZ/lA=
 github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=

@@ -1,6 +1,8 @@
 package bus

 import (
+	. "github.com/mudler/luet/pkg/logger"
+
 	"github.com/mudler/go-pluggable"
 )


@@ -47,21 +49,47 @@ var (
 )

 // Manager is the bus instance manager, which subscribes plugins to events emitted by Luet
-var Manager *pluggable.Manager = pluggable.NewManager(
-	[]pluggable.EventType{
-		EventPackageInstall,
-		EventPackageUnInstall,
-		EventPackagePreBuild,
-		EventPackagePreBuildArtifact,
-		EventPackagePostBuildArtifact,
-		EventPackagePostBuild,
-		EventRepositoryPreBuild,
-		EventRepositoryPostBuild,
-		EventImagePreBuild,
-		EventImagePrePull,
-		EventImagePrePush,
-		EventImagePostBuild,
-		EventImagePostPull,
-		EventImagePostPush,
-	},
-)
+var Manager *Bus = &Bus{
+	Manager: pluggable.NewManager(
+		[]pluggable.EventType{
+			EventPackageInstall,
+			EventPackageUnInstall,
+			EventPackagePreBuild,
+			EventPackagePreBuildArtifact,
+			EventPackagePostBuildArtifact,
+			EventPackagePostBuild,
+			EventRepositoryPreBuild,
+			EventRepositoryPostBuild,
+			EventImagePreBuild,
+			EventImagePrePull,
+			EventImagePrePush,
+			EventImagePostBuild,
+			EventImagePostPull,
+			EventImagePostPush,
+		},
+	),
+}
+
+type Bus struct {
+	*pluggable.Manager
+}
+
+func (b *Bus) Initialize(plugin ...string) {
+	b.Manager.Load(plugin...).Register()
+
+	for _, e := range b.Manager.Events {
+		b.Manager.Response(e, func(p *pluggable.Plugin, r *pluggable.EventResponse) {
+			if r.Errored() {
+				Fatal("Plugin", p.Name, "at", p.Executable, "Error", r.Error)
+			}
+			Debug(
+				"plugin_event",
+				"received from",
+				p.Name,
+				"at",
+				p.Executable,
+				r,
+			)
+		})
+	}
+}
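
Callers keep using the exported bus.Manager, and only the bootstrap changes, as the cmd hunk above shows. A minimal usage sketch, assuming the pkg/bus import path; the plugin identifier and the payload shape are illustrative:

package main

import (
	"github.com/mudler/luet/pkg/bus"
)

func main() {
	// Load and register the plugins passed on the CLI (luet forwards the values
	// of its --plugin flag here), wiring the default response handler that logs
	// plugin replies and aborts on plugin errors.
	bus.Manager.Initialize("myplugin") // plugin identifier is illustrative

	// Publishing is unchanged, since *pluggable.Manager stays embedded.
	bus.Manager.Publish(bus.EventPackageInstall, map[string]string{"package": "cat/foo"})
}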

@@ -146,11 +146,6 @@ func (cs *LuetCompiler) CompileParallel(keepPermissions bool, ps *compilerspec.L
 	}

 	for _, p := range ps.All() {
-		asserts, err := cs.ComputeDepTree(p)
-		if err != nil {
-			panic(err)
-		}
-		p.SetSourceAssertion(asserts)
 		all <- p
 	}


@@ -295,17 +290,6 @@ func (cs *LuetCompiler) unpackDelta(concurrency int, keepPermissions bool, p *co
 	return artifact, nil
 }

-func (cs *LuetCompiler) genBuilderImageTag(p *compilerspec.LuetCompilationSpec, packageImage string) string {
-	// Use packageImage as salt into the fp being used
-	// so the hash is unique also in cases where
-	// some package deps does have completely different
-	// depgraphs
-	// TODO: We should use the image tag, or pass by the package assertion hash which is unique
-	// and identifies the deptree of the package.
-	return fmt.Sprintf("builder-%s", p.GetPackage().HashFingerprint(helpers.StripRegistryFromImage(packageImage)))
-
-}
-
 func (cs *LuetCompiler) buildPackageImage(image, buildertaggedImage, packageImage string,
 	concurrency int, keepPermissions bool,
 	p *compilerspec.LuetCompilationSpec) (backend.Options, backend.Options, error) {

@@ -680,33 +664,7 @@ func (cs *LuetCompiler) FromDatabase(db pkg.PackageDatabase, minimum bool, dst s
 	}
 }

-// ComputeMinimumCompilableSet strips specs that are eventually compiled by leafs
-func (cs *LuetCompiler) ComputeMinimumCompilableSet(p ...*compilerspec.LuetCompilationSpec) ([]*compilerspec.LuetCompilationSpec, error) {
-	// Generate a set with all the deps of the provided specs
-	// we will use that set to remove the deps from the list of provided compilation specs
-	allDependencies := solver.PackagesAssertions{} // Get all packages that will be in deps
-	result := []*compilerspec.LuetCompilationSpec{}
-	for _, spec := range p {
-		ass, err := cs.ComputeDepTree(spec)
-		if err != nil {
-			return result, errors.Wrap(err, "computin specs deptree")
-		}
-
-		allDependencies = append(allDependencies, ass.Drop(spec.GetPackage())...)
-	}
-
-	for _, spec := range p {
-		if found := allDependencies.Search(spec.GetPackage().GetFingerPrint()); found == nil {
-			result = append(result, spec)
-		}
-	}
-	return result, nil
-}
-
-// ComputeDepTree computes the dependency tree of a compilation spec and returns solver assertions
-// in order to be able to compile the spec.
 func (cs *LuetCompiler) ComputeDepTree(p *compilerspec.LuetCompilationSpec) (solver.PackagesAssertions, error) {

 	s := solver.NewResolver(cs.Options.SolverOptions.Options, pkg.NewInMemoryDatabase(false), cs.Database, pkg.NewInMemoryDatabase(false), cs.Options.SolverOptions.Resolver())

 	solution, err := s.Install(pkg.Packages{p.GetPackage()})

@@ -718,31 +676,34 @@ func (cs *LuetCompiler) ComputeDepTree(p *compilerspec.LuetCompilationSpec) (sol
 	if err != nil {
 		return nil, errors.Wrap(err, "While order a solution for "+p.GetPackage().HumanReadableString())
 	}
+	return dependencies, nil
+}

-	assertions := solver.PackagesAssertions{}
-	for _, assertion := range dependencies { //highly dependent on the order
-		if assertion.Value {
-			nthsolution := dependencies.Cut(assertion.Package)
-			assertion.Hash = solver.PackageHash{
-				BuildHash:   nthsolution.HashFrom(assertion.Package),
-				PackageHash: nthsolution.AssertionHash(),
-			}
-			assertion.Package.SetTreeDir(p.Package.GetTreeDir())
-			assertions = append(assertions, assertion)
+// ComputeMinimumCompilableSet strips specs that are eventually compiled by leafs
+func (cs *LuetCompiler) ComputeMinimumCompilableSet(p ...*compilerspec.LuetCompilationSpec) ([]*compilerspec.LuetCompilationSpec, error) {
+	// Generate a set with all the deps of the provided specs
+	// we will use that set to remove the deps from the list of provided compilation specs
+	allDependencies := solver.PackagesAssertions{} // Get all packages that will be in deps
+	result := []*compilerspec.LuetCompilationSpec{}
+	for _, spec := range p {
+		sol, err := cs.ComputeDepTree(spec)
+		if err != nil {
+			return nil, errors.Wrap(err, "failed querying hashtree")
+		}
+		allDependencies = append(allDependencies, sol.Drop(spec.GetPackage())...)
+	}
+
+	for _, spec := range p {
+		if found := allDependencies.Search(spec.GetPackage().GetFingerPrint()); found == nil {
+			result = append(result, spec)
 		}
 	}
-	p.SetSourceAssertion(assertions)
-	return assertions, nil
+	return result, nil
 }

 // Compile is a non-parallel version of CompileParallel. It builds the compilation specs and generates
 // an artifact
 func (cs *LuetCompiler) Compile(keepPermissions bool, p *compilerspec.LuetCompilationSpec) (*artifact.PackageArtifact, error) {
-	asserts, err := cs.ComputeDepTree(p)
-	if err != nil {
-		return nil, err
-	}
-	p.SetSourceAssertion(asserts)
 	return cs.compile(cs.Options.Concurrency, keepPermissions, p)
 }


@@ -773,11 +734,6 @@ func (cs *LuetCompiler) inheritSpecBuildOptions(p *compilerspec.LuetCompilationS
 }

 func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p *compilerspec.LuetCompilationSpec) (*artifact.PackageArtifact, error) {
-	// TODO: Racy, remove it
-	// Inherit build options from compilation specs metadata
-	// orig := cs.Options.PullImageRepository
-	// defer func() { cs.Options.PullImageRepository = orig }()
-
 	Info(":package: Compiling", p.GetPackage().HumanReadableString(), ".... :coffee:")

 	Debug(fmt.Sprintf("%s: has images %t, empty package: %t", p.GetPackage().HumanReadableString(), p.HasImageSource(), p.EmptyPackage()))

@@ -789,36 +745,42 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p *compil
 		)
 	}

-	targetAssertion := p.GetSourceAssertion().Search(p.GetPackage().GetFingerPrint())
+	ht := NewHashTree(cs.Database)
+
+	packageHashTree, err := ht.Query(cs, p)
+	if err != nil {
+		return nil, errors.Wrap(err, "failed querying hashtree")
+	}
+
+	// This is in order to have the metadata in the yaml
+	p.SetSourceAssertion(packageHashTree.Solution)
+	targetAssertion := packageHashTree.Target

 	bus.Manager.Publish(bus.EventPackagePreBuild, struct {
 		CompileSpec *compilerspec.LuetCompilationSpec
 		Assert      solver.PackageAssert
+		PackageHashTree *PackageImageHashTree
 	}{
 		CompileSpec: p,
 		Assert:      *targetAssertion,
+		PackageHashTree: packageHashTree,
 	})

 	// Update compilespec build options - it will be then serialized into the compilation metadata file
-	//p.SetBuildOptions(cs.Options)
 	p.BuildOptions.PushImageRepository = cs.Options.PushImageRepository
-	//p.BuildOptions.BuildValues = cs.Options.BuildValues
-	//p.BuildOptions.BuildValuesFile = cs.Options.BuildValuesFile

 	// - If image is set we just generate a plain dockerfile
 	// Treat last case (easier) first. The image is provided and we just compute a plain dockerfile with the images listed as above
 	if p.GetImage() != "" {
-		return cs.compileWithImage(p.GetImage(), cs.genBuilderImageTag(p, targetAssertion.Hash.PackageHash), targetAssertion.Hash.PackageHash, concurrency, keepPermissions, cs.Options.KeepImg, p, true)
+		return cs.compileWithImage(p.GetImage(), packageHashTree.BuilderImageHash, targetAssertion.Hash.PackageHash, concurrency, keepPermissions, cs.Options.KeepImg, p, true)
 	}

 	// - If image is not set, we read a base_image. Then we will build one image from it to kick-off our build based
 	// on how we compute the resolvable tree.
 	// This means to recursively build all the build-images needed to reach that tree part.
 	// - We later on compute an hash used to identify the image, so each similar deptree keeps the same build image.
-	dependencies := p.GetSourceAssertion().Drop(p.GetPackage()) // at this point we should have a flattened list of deps to build, including all of them (with all constraints propagated already)
-	departifacts := []*artifact.PackageArtifact{}               // TODO: Return this somehow
-	var lastHash string
+	dependencies := packageHashTree.Dependencies  // at this point we should have a flattened list of deps to build, including all of them (with all constraints propagated already)
+	departifacts := []*artifact.PackageArtifact{} // TODO: Return this somehow
 	depsN := 0
 	currentN := 0


@@ -845,8 +807,6 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p *compil
 		Debug("PullImage repos:", compileSpec.BuildOptions.PullImageRepository)

 		compileSpec.SetOutputPath(p.GetOutputPath())
-		Debug(pkgTag, " :arrow_right_hook: :whale: Builder image from hash", assertion.Hash.BuildHash)
-		Debug(pkgTag, " :arrow_right_hook: :whale: Package image from hash", assertion.Hash.PackageHash)

 		bus.Manager.Publish(bus.EventPackagePreBuild, struct {
 			CompileSpec *compilerspec.LuetCompilationSpec

@@ -856,29 +816,43 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p *compil
 			Assert:      assertion,
 		})

-		lastHash = assertion.Hash.PackageHash
-		// for the source instead, pick an image and a buildertaggedImage from hashes if they exists.
-		// otherways fallback to the pushed repo
-		// Resolve images from the hashtree
-		resolvedBuildImage := cs.resolveExistingImageHash(assertion.Hash.BuildHash, compileSpec)
-		if compileSpec.GetImage() != "" {
-			Debug(pkgTag, " :wrench: Compiling "+compileSpec.GetPackage().HumanReadableString()+" from image")
-
-			a, err := cs.compileWithImage(compileSpec.GetImage(), assertion.Hash.BuildHash, assertion.Hash.PackageHash, concurrency, keepPermissions, cs.Options.KeepImg, compileSpec, packageDeps)
-			if err != nil {
-				return nil, errors.Wrap(err, "Failed compiling "+compileSpec.GetPackage().HumanReadableString())
-			}
-			departifacts = append(departifacts, a)
-			Info(pkgTag, ":white_check_mark: Done")
-			continue
+		buildHash, err := packageHashTree.DependencyBuildImage(assertion.Package)
+		if err != nil {
+			return nil, errors.Wrap(err, "failed looking for dependency in hashtree")
 		}

-		Debug(pkgTag, " :wrench: Compiling "+compileSpec.GetPackage().HumanReadableString()+" from tree")
-		a, err := cs.compileWithImage(resolvedBuildImage, cs.genBuilderImageTag(compileSpec, targetAssertion.Hash.PackageHash), assertion.Hash.PackageHash, concurrency, keepPermissions, cs.Options.KeepImg, compileSpec, packageDeps)
+		Debug(pkgTag, " :arrow_right_hook: :whale: Builder image from hash", assertion.Hash.BuildHash)
+		Debug(pkgTag, " :arrow_right_hook: :whale: Package image from hash", assertion.Hash.PackageHash)
+
+		var sourceImage string
+
+		if compileSpec.GetImage() != "" {
+			Debug(pkgTag, " :wrench: Compiling "+compileSpec.GetPackage().HumanReadableString()+" from image")
+			sourceImage = compileSpec.GetImage()
+		} else {
+			// for the source instead, pick an image and a buildertaggedImage from hashes if they exists.
+			// otherways fallback to the pushed repo
+			// Resolve images from the hashtree
+			sourceImage = cs.resolveExistingImageHash(assertion.Hash.BuildHash, compileSpec)
+			Debug(pkgTag, " :wrench: Compiling "+compileSpec.GetPackage().HumanReadableString()+" from tree")
+		}
+
+		a, err := cs.compileWithImage(
+			sourceImage,
+			buildHash,
+			assertion.Hash.PackageHash,
+			concurrency,
+			keepPermissions,
+			cs.Options.KeepImg,
+			compileSpec,
+			packageDeps,
+		)
 		if err != nil {
 			return nil, errors.Wrap(err, "Failed compiling "+compileSpec.GetPackage().HumanReadableString())
 		}

+		Info(pkgTag, ":white_check_mark: Done")
+
 		bus.Manager.Publish(bus.EventPackagePostBuild, struct {
 			CompileSpec *compilerspec.LuetCompilationSpec
 			Artifact    *artifact.PackageArtifact

@@ -888,18 +862,14 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p *compil
 		})

 		departifacts = append(departifacts, a)
-		Info(pkgTag, ":white_check_mark: Done")
 	}

-	} else if len(dependencies) > 0 {
-		lastHash = dependencies[len(dependencies)-1].Hash.PackageHash
 	}

 	if buildTarget {
-		resolvedBuildImage := cs.resolveExistingImageHash(lastHash, p)
+		resolvedSourceImage := cs.resolveExistingImageHash(packageHashTree.SourceHash, p)
 		Info(":rocket: All dependencies are satisfied, building package requested by the user", p.GetPackage().HumanReadableString())
-		Info(":package:", p.GetPackage().HumanReadableString(), " Using image: ", resolvedBuildImage)
-		a, err := cs.compileWithImage(resolvedBuildImage, cs.genBuilderImageTag(p, targetAssertion.Hash.PackageHash), targetAssertion.Hash.PackageHash, concurrency, keepPermissions, cs.Options.KeepImg, p, true)
+		Info(":package:", p.GetPackage().HumanReadableString(), " Using image: ", resolvedSourceImage)
+		a, err := cs.compileWithImage(resolvedSourceImage, packageHashTree.BuilderImageHash, targetAssertion.Hash.PackageHash, concurrency, keepPermissions, cs.Options.KeepImg, p, true)
 		if err != nil {
 			return a, err
 		}

pkg/compiler/imagehashtree.go (new file, 126 lines)

// Copyright © 2021 Ettore Di Giacinto <mudler@mocaccino.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.

package compiler

import (
	"fmt"

	compilerspec "github.com/mudler/luet/pkg/compiler/types/spec"
	"github.com/mudler/luet/pkg/config"
	pkg "github.com/mudler/luet/pkg/package"
	"github.com/mudler/luet/pkg/solver"
	"github.com/pkg/errors"
)

type ImageHashTree struct {
	Database      pkg.PackageDatabase
	SolverOptions config.LuetSolverOptions
}

type PackageImageHashTree struct {
	Target                       *solver.PackageAssert
	Dependencies                 solver.PackagesAssertions
	Solution                     solver.PackagesAssertions
	dependencyBuilderImageHashes map[string]string
	SourceHash                   string
	BuilderImageHash             string
}

func NewHashTree(db pkg.PackageDatabase) *ImageHashTree {
	return &ImageHashTree{
		Database: db,
	}
}

func (ht *PackageImageHashTree) DependencyBuildImage(p pkg.Package) (string, error) {
	found, ok := ht.dependencyBuilderImageHashes[p.GetFingerPrint()]
	if !ok {
		return "", errors.New("package hash not found")
	}
	return found, nil
}

// TODO: ___ When computing the hash per package (and evaluating the sat solver solution tree part)
// we should use the hash of each package + its fingerprint instead as a salt.
// That's because the hash will be salted with its `build.yaml`.
// In this way, we trigger recompilations if some dep of a target changes
// a build.yaml, without touching the version
func (ht *ImageHashTree) Query(cs *LuetCompiler, p *compilerspec.LuetCompilationSpec) (*PackageImageHashTree, error) {
	assertions, err := ht.resolve(cs, p)
	if err != nil {
		return nil, err
	}
	targetAssertion := assertions.Search(p.GetPackage().GetFingerPrint())

	dependencies := assertions.Drop(p.GetPackage())
	var sourceHash string
	imageHashes := map[string]string{}
	for _, assertion := range dependencies {
		var depbuildImageTag string
		compileSpec, err := cs.FromPackage(assertion.Package)
		if err != nil {
			return nil, errors.Wrap(err, "Error while generating compilespec for "+assertion.Package.GetName())
		}
		if compileSpec.GetImage() != "" {
			depbuildImageTag = assertion.Hash.BuildHash
		} else {
			depbuildImageTag = ht.genBuilderImageTag(compileSpec, targetAssertion.Hash.PackageHash)
		}
		imageHashes[assertion.Package.GetFingerPrint()] = depbuildImageTag
		sourceHash = assertion.Hash.PackageHash
	}

	return &PackageImageHashTree{
		Dependencies:                 dependencies,
		Target:                       targetAssertion,
		SourceHash:                   sourceHash,
		BuilderImageHash:             ht.genBuilderImageTag(p, targetAssertion.Hash.PackageHash),
		dependencyBuilderImageHashes: imageHashes,
		Solution:                     assertions,
	}, nil
}

func (ht *ImageHashTree) genBuilderImageTag(p *compilerspec.LuetCompilationSpec, packageImage string) string {
	// Use packageImage as salt into the fp being used
	// so the hash is unique also in cases where
	// some package deps does have completely different
	// depgraphs
	return fmt.Sprintf("builder-%s", p.GetPackage().HashFingerprint(packageImage))
}

// resolve computes the dependency tree of a compilation spec and returns solver assertions
// in order to be able to compile the spec.
func (ht *ImageHashTree) resolve(cs *LuetCompiler, p *compilerspec.LuetCompilationSpec) (solver.PackagesAssertions, error) {
	dependencies, err := cs.ComputeDepTree(p)
	if err != nil {
		return nil, errors.Wrap(err, "While computing a solution for "+p.GetPackage().HumanReadableString())
	}

	assertions := solver.PackagesAssertions{}
	for _, assertion := range dependencies { //highly dependent on the order
		if assertion.Value {
			nthsolution := dependencies.Cut(assertion.Package)
			assertion.Hash = solver.PackageHash{
				BuildHash:   nthsolution.HashFrom(assertion.Package),
				PackageHash: nthsolution.AssertionHash(),
			}
			assertion.Package.SetTreeDir(p.Package.GetTreeDir())
			assertions = append(assertions, assertion)
		}
	}

	return assertions, nil
}
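
For consumers, the PackageImageHashTree returned by Query bundles everything compile() previously recomputed inline. A sketch of how the fields are read, based on the file above; it is not part of the diff, and the package name is illustrative:

package compilerexample

import (
	"fmt"

	"github.com/mudler/luet/pkg/compiler"
)

// describeHashTree shows the parts of a query result the refactored compile()
// path consumes (sketch only; field meanings follow the file above).
func describeHashTree(pht *compiler.PackageImageHashTree) {
	fmt.Println("target package hash:", pht.Target.Hash.PackageHash)
	fmt.Println("builder image tag:  ", pht.BuilderImageHash) // "builder-<fingerprint salted with the target hash>"
	fmt.Println("source image hash:  ", pht.SourceHash)       // hash of the last dependency, used as the source image

	for _, a := range pht.Dependencies { // flattened build order of the dependencies
		tag, err := pht.DependencyBuildImage(a.Package)
		if err != nil {
			continue // dependency not tracked in the hash map
		}
		fmt.Println(a.Package.GetName(), "-> builder image", tag, "-> package image", a.Hash.PackageHash)
	}
}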

pkg/compiler/imagehashtree_test.go (new file, 94 lines)

// Copyright © 2021 Ettore Di Giacinto <mudler@mocaccino.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.

package compiler_test

import (
	. "github.com/mudler/luet/pkg/compiler"
	sd "github.com/mudler/luet/pkg/compiler/backend"
	"github.com/mudler/luet/pkg/compiler/types/options"
	pkg "github.com/mudler/luet/pkg/package"
	"github.com/mudler/luet/pkg/tree"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

var _ = Describe("ImageHashTree", func() {
	generalRecipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
	compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), options.Concurrency(2))
	hashtree := NewHashTree(generalRecipe.GetDatabase())

	Context("Simple package definition", func() {
		BeforeEach(func() {
			generalRecipe = tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
			err := generalRecipe.Load("../../tests/fixtures/buildable")
			Expect(err).ToNot(HaveOccurred())
			compiler = NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), options.Concurrency(2))
			hashtree = NewHashTree(generalRecipe.GetDatabase())
		})

		It("Calculates the hash correctly", func() {
			spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
			Expect(err).ToNot(HaveOccurred())

			packageHash, err := hashtree.Query(compiler, spec)
			Expect(err).ToNot(HaveOccurred())
			Expect(packageHash.Target.Hash.BuildHash).To(Equal("6490e800fe443b99328fc363529aee74bda513930fb27ce6ab814d692bba068e"))
			Expect(packageHash.Target.Hash.PackageHash).To(Equal("79d7107d13d578b362e6a7bf10ec850efce26316405b8d732ce8f9e004d64281"))
			Expect(packageHash.BuilderImageHash).To(Equal("builder-79462b60bf899ad79db63f194a3c9c2a"))
		})
	})

	Context("complex package definition", func() {
		BeforeEach(func() {
			generalRecipe = tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))

			err := generalRecipe.Load("../../tests/fixtures/upgrade_old_repo_revision")
			Expect(err).ToNot(HaveOccurred())
			compiler = NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), options.Concurrency(2))
			hashtree = NewHashTree(generalRecipe.GetDatabase())
		})

		It("Calculates the hash correctly", func() {
			spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "c", Category: "test", Version: "1.0"})
			Expect(err).ToNot(HaveOccurred())

			packageHash, err := hashtree.Query(compiler, spec)
			Expect(err).ToNot(HaveOccurred())

			Expect(packageHash.Dependencies[len(packageHash.Dependencies)-1].Hash.PackageHash).To(Equal("c46e653125d71ee3fd696b3941ec1ed6e8a0268f896204c7a222a5aa03eb9982"))
			Expect(packageHash.SourceHash).To(Equal("c46e653125d71ee3fd696b3941ec1ed6e8a0268f896204c7a222a5aa03eb9982"))
			Expect(packageHash.BuilderImageHash).To(Equal("builder-37f4d05ba8a39525742ca364f69b4090"))

			//Expect(packageHash.Target.Hash.BuildHash).To(Equal("79d7107d13d578b362e6a7bf10ec850efce26316405b8d732ce8f9e004d64281"))
			Expect(packageHash.Target.Hash.PackageHash).To(Equal("bb1d9a99c0c309a297c75b436504e664a42121fadbb4e035bda403cd418117aa"))

			a := &pkg.DefaultPackage{Name: "a", Category: "test", Version: "1.1"}
			hash, err := packageHash.DependencyBuildImage(a)
			Expect(err).ToNot(HaveOccurred())
			Expect(hash).To(Equal("79d7107d13d578b362e6a7bf10ec850efce26316405b8d732ce8f9e004d64281"))

			assertionA := packageHash.Dependencies.Search(a.GetFingerPrint())
			Expect(assertionA.Hash.PackageHash).To(Equal("c46e653125d71ee3fd696b3941ec1ed6e8a0268f896204c7a222a5aa03eb9982"))

			b := &pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"}
			assertionB := packageHash.Dependencies.Search(b.GetFingerPrint())
			Expect(assertionB.Hash.PackageHash).To(Equal("79d7107d13d578b362e6a7bf10ec850efce26316405b8d732ce8f9e004d64281"))
			hashB, err := packageHash.DependencyBuildImage(b)
			Expect(err).ToNot(HaveOccurred())
			Expect(hashB).To(Equal("6490e800fe443b99328fc363529aee74bda513930fb27ce6ab814d692bba068e"))
		})
	})
})

@@ -19,6 +19,8 @@ import (
 	"archive/tar"
 	"bufio"
 	"bytes"
+	"crypto/sha1"
+	"encoding/base64"
 	"fmt"
 	"io"
 	"io/ioutil"

@@ -316,7 +318,6 @@ func (a *PackageArtifact) Compress(src string, concurrency int) error {
 	default:
 		return helpers.Tar(src, a.getCompressedName())
 	}
-	return errors.New("Compression type must be supplied")
 }

 func (a *PackageArtifact) getCompressedName() string {

@@ -339,6 +340,28 @@ func (a *PackageArtifact) GetUncompressedName() string {
 	return a.Path
 }

+func hashContent(bv []byte) string {
+	hasher := sha1.New()
+	hasher.Write(bv)
+	sha := base64.URLEncoding.EncodeToString(hasher.Sum(nil))
+	return sha
+}
+
+func hashFileContent(path string) (string, error) {
+	f, err := os.Open(path)
+	if err != nil {
+		return "", err
+	}
+	defer f.Close()
+
+	h := sha1.New()
+	if _, err := io.Copy(h, f); err != nil {
+		return "", err
+	}
+
+	return base64.URLEncoding.EncodeToString(h.Sum(nil)), nil
+}
+
 func tarModifierWrapperFunc(dst, path string, header *tar.Header, content io.Reader) (*tar.Header, []byte, error) {
 	// If the destination path already exists I rename target file name with postfix.
 	var destPath string

@@ -350,6 +373,7 @@ func tarModifierWrapperFunc(dst, path string, header *tar.Header, content io.Rea
 			return nil, nil, err
 		}
 	}
+	tarHash := hashContent(buffer.Bytes())

 	// If file is not present on archive but is defined on mods
 	// I receive the callback. Prevent nil exception.

@@ -362,8 +386,21 @@ func tarModifierWrapperFunc(dst, path string, header *tar.Header, content io.Rea
 		return header, buffer.Bytes(), nil
 	}

+	existingHash := ""
+	f, err := os.Lstat(destPath)
+	if err == nil {
+		Debug("File exists already, computing hash for", destPath)
+		hash, herr := hashFileContent(destPath)
+		if herr == nil {
+			existingHash = hash
+		}
+	}
+
+	Debug("Existing file hash: ", existingHash, "Tar file hashsum: ", tarHash)
+	// We want to protect file only if the hash of the files are differing OR the file size are
+	differs := (existingHash != "" && existingHash != tarHash) || (err != nil && f != nil && header.Size != f.Size())
 	// Check if exists
-	if helpers.Exists(destPath) {
+	if helpers.Exists(destPath) && differs {
 		for i := 1; i < 1000; i++ {
 			name := filepath.Join(filepath.Join(filepath.Dir(path),
 				fmt.Sprintf("._cfg%04d_%s", i, filepath.Base(path))))
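
The net effect of these hunks is that a protected config file is only renamed to a ._cfgNNNN_ sibling when its content actually changed. A standalone sketch of the same comparison, with the helper bodies mirrored from the hunk above; the path and payload values are illustrative:

package main

import (
	"crypto/sha1"
	"encoding/base64"
	"fmt"
	"io"
	"os"
)

// hashContent mirrors the helper added above: SHA1 of a byte slice, base64-URL encoded.
func hashContent(bv []byte) string {
	hasher := sha1.New()
	hasher.Write(bv)
	return base64.URLEncoding.EncodeToString(hasher.Sum(nil))
}

// hashFileContent mirrors the file-based variant.
func hashFileContent(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()
	h := sha1.New()
	if _, err := io.Copy(h, f); err != nil {
		return "", err
	}
	return base64.URLEncoding.EncodeToString(h.Sum(nil)), nil
}

func main() {
	payload := []byte("config\n") // content coming from the package tar (illustrative)
	destPath := "/etc/a/conf"     // file already present on the rootfs (illustrative)

	tarHash := hashContent(payload)
	existingHash := ""
	if _, err := os.Lstat(destPath); err == nil {
		if h, herr := hashFileContent(destPath); herr == nil {
			existingHash = h
		}
	}

	// Same rule as the diff: only write a ._cfgNNNN_ copy when both hashes are
	// available and they differ.
	differs := existingHash != "" && existingHash != tarHash
	fmt.Println("needs config-protect copy:", differs)
}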

@@ -16,6 +16,7 @@
 package compilerspec

 import (
+	"fmt"
 	"io/ioutil"
 	"path/filepath"


@@ -85,6 +86,13 @@ func (specs *LuetCompilationspecs) Unique() *LuetCompilationspecs {
 	return &newSpecs
 }

+type CopyField struct {
+	Package     *pkg.DefaultPackage `json:"package"`
+	Image       string              `json:"image"`
+	Source      string              `json:"src"`
+	Destination string              `json:"dst"`
+}
+
 type LuetCompilationSpec struct {
 	Steps []string `json:"steps"` // Are run inside a container and the result layer diff is saved
 	Env   []string `json:"env"`

@@ -103,6 +111,8 @@ type LuetCompilationSpec struct {
 	Excludes []string `json:"excludes"`

 	BuildOptions *options.Compiler `json:"build_options"`
+
+	Copy []CopyField `json:"copy"`
 }

 func NewLuetCompilationSpec(b []byte, p pkg.Package) (*LuetCompilationSpec, error) {

@@ -256,6 +266,12 @@ ADD ` + s + ` /luetbuild/`
 		}
 	}

+	for _, c := range cs.Copy {
+		if c.Image != "" {
+			spec = spec + fmt.Sprintf("\nCOPY --from=%s %s %s\n", c.Image, c.Source, c.Destination)
+		}
+	}
+
 	for _, s := range cs.Env {
 		spec = spec + `
 ENV ` + s
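
The new Copy entries become multi-stage COPY --from lines in the generated Dockerfile. A small sketch of that rendering, using a trimmed copy of the CopyField struct; the image and path values are illustrative:

package main

import "fmt"

// CopyField mirrors the struct added to compilerspec, trimmed to the fields
// used for rendering (json tags: image, src, dst).
type CopyField struct {
	Image       string `json:"image"`
	Source      string `json:"src"`
	Destination string `json:"dst"`
}

func main() {
	copies := []CopyField{
		{Image: "quay.io/example/builder:latest", Source: "/usr/bin/tool", Destination: "/usr/bin/tool"},
	}

	dockerfile := "FROM alpine\n" // illustrative base image
	for _, c := range copies {
		if c.Image != "" {
			// Same rendering as the diff: a multi-stage COPY --from line.
			dockerfile += fmt.Sprintf("\nCOPY --from=%s %s %s\n", c.Image, c.Source, c.Destination)
		}
	}
	fmt.Print(dockerfile)
}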

@@ -90,8 +90,7 @@ func UntarProtect(src, dst string, sameOwner bool, protectedFiles []string, modi
 	}

 	if sameOwner {
-		// PRE: i have root privileged.
-
+		// we do have root permissions, so we can extract keeping the same permissions.
 		replacerArchive := archive.ReplaceFileTarWrapper(in, mods)

 		opts := &archive.TarOptions{

tests/fixtures/plugin/test-foo (4 lines changed)

@@ -1,3 +1,5 @@
 #!/bin/bash
 echo "$1" >> $EVENT_FILE
 echo "$2" >> $PAYLOAD_FILE
+
+echo "{}"

tests/integration/12_config_protect_samefile.sh (new executable file, 107 lines)

#!/bin/bash

export LUET_NOLOCK=true

oneTimeSetUp() {
    export tmpdir="$(mktemp -d)"
}

oneTimeTearDown() {
    rm -rf "$tmpdir"
}

testBuild() {
    mkdir $tmpdir/testrootfs/testbuild -p
    luet build --tree "$ROOT_DIR/tests/fixtures/config_protect" \
        --destination $tmpdir/testrootfs/testbuild --compression gzip test/a
    buildst=$?
    assertEquals 'builds successfully' "$buildst" "0"
    assertTrue 'create package' "[ -e '$tmpdir/testrootfs/testbuild/a-test-1.0.package.tar.gz' ]"
}

testRepo() {
    assertTrue 'no repository' "[ ! -e '$tmpdir/testbuild/repository.yaml' ]"
    luet create-repo --tree "$ROOT_DIR/tests/fixtures/config_protect" \
        --output $tmpdir/testrootfs/testbuild \
        --packages $tmpdir/testrootfs/testbuild \
        --name "test" \
        --descr "Test Repo" \
        --urls $tmpdir/testrootfs \
        --type disk > /dev/null

    createst=$?
    assertEquals 'create repo successfully' "$createst" "0"
    assertTrue 'create repository' "[ -e '$tmpdir/testrootfs/testbuild/repository.yaml' ]"
}

testConfig() {

    mkdir $tmpdir/testrootfs/etc/luet/config.protect.d -p

    cat <<EOF > $tmpdir/testrootfs/etc/luet/config.protect.d/conf1.yml
name: "protect1"
dirs:
- /etc/
EOF

    cat <<EOF > $tmpdir/luet.yaml
general:
  debug: true
system:
  rootfs: $tmpdir/testrootfs
  database_path: "/"
  database_engine: "boltdb"
config_protect_confdir:
  - /etc/luet/config.protect.d
config_from_host: false
repositories:
  - name: "main"
    type: "disk"
    enable: true
    urls:
      - "/testbuild"
EOF
    luet config --config $tmpdir/luet.yaml
    res=$?
    assertEquals 'config test successfully' "$res" "0"
}

testInstall() {

    # Simulate previous installation
    mkdir $tmpdir/testrootfs/etc/a -p
    echo config > $tmpdir/testrootfs/etc/a/conf

    luet install -y --config $tmpdir/luet.yaml test/a
    installst=$?
    assertEquals 'install test successfully' "$installst" "0"

    # Simulate config protect
    assertTrue 'package A installed' "[ -e '$tmpdir/testrootfs/c' ]"
    assertTrue 'config protect not created, file is the same' "[ ! -e '$tmpdir/testrootfs/etc/a/._cfg0001_conf' ]"
    assertEquals 'config protect content' "$(cat $tmpdir/testrootfs/etc/a/conf)" "config"
}

testUnInstall() {
    luet uninstall -y --full --config $tmpdir/luet.yaml test/a
    installst=$?
    assertEquals 'uninstall test successfully' "$installst" "0"
    assertTrue 'package uninstalled' "[ ! -e '$tmpdir/testrootfs/c' ]"
    assertTrue 'config protect maintains the protected files' "[ -e '$tmpdir/testrootfs/etc/a/conf' ]"
}

testCleanup() {
    luet cleanup --config $tmpdir/luet.yaml
    installst=$?
    assertEquals 'install test successfully' "$installst" "0"
    assertTrue 'package installed' "[ ! -e '$tmpdir/testrootfs/packages/a-test-1.0.package.tar.gz' ]"
}

# Load shUnit2.
. "$ROOT_DIR/tests/integration/shunit2"/shunit2

vendor/github.com/mudler/go-pluggable/events.go (generated, vendored; 7 lines changed)

@@ -29,6 +29,7 @@ type EventType string
 type Event struct {
 	Name EventType `json:"name"`
 	Data string    `json:"data"`
+	File string    `json:"file"` // If Data >> 10K write content to file instead
 }

 // EventResponse describes the event response structure

@@ -45,6 +46,12 @@ func (e Event) JSON() (string, error) {
 	return string(dat), err
 }

+// Copy returns a copy of Event
+func (e Event) Copy() *Event {
+	copy := &e
+	return copy
+}
+
 func (e Event) ResponseEventName(s string) EventType {
 	return EventType(fmt.Sprintf("%s-%s", e.Name, s))
 }

vendor/github.com/mudler/go-pluggable/plugin.go (generated, vendored; 31 lines changed)

@@ -16,7 +16,9 @@
 package pluggable

 import (
+	"bytes"
 	"encoding/json"
+	"io/ioutil"
 	"os"
 	"os/exec"

@@ -29,19 +31,42 @@ type Plugin struct {
 	Executable string
 }

+// A safe threshold to avoid unpleasant exec buffer fill for argv too big. Seems 128K is the limit on Linux.
+const maxMessageSize = 1 << 13
+
 // Run runs the Event on the plugin, and returns an EventResponse
 func (p Plugin) Run(e Event) (EventResponse, error) {
 	r := EventResponse{}
-	k, err := e.JSON()
+
+	eventToprocess := &e
+
+	if len(e.Data) > maxMessageSize {
+		copy := e.Copy()
+		copy.Data = ""
+		f, err := ioutil.TempFile(os.TempDir(), "pluggable")
+		if err != nil {
+			return r, errors.Wrap(err, "while creating temporary file")
+		}
+		if err := ioutil.WriteFile(f.Name(), []byte(e.Data), os.ModePerm); err != nil {
+			return r, errors.Wrap(err, "while writing to temporary file")
+		}
+		copy.File = f.Name()
+		eventToprocess = copy
+		defer os.RemoveAll(f.Name())
+	}
+
+	k, err := eventToprocess.JSON()
 	if err != nil {
 		return r, errors.Wrap(err, "while marshalling event")
 	}
 	cmd := exec.Command(p.Executable, string(e.Name), k)
 	cmd.Env = os.Environ()
+	var b bytes.Buffer
+	cmd.Stderr = &b
 	out, err := cmd.Output()
 	if err != nil {
-		r.Error = err.Error()
-		return r, errors.Wrap(err, "while executing plugin")
+		r.Error = "error while executing plugin: " + err.Error() + string(b.String())
+		return r, errors.Wrap(err, "while executing plugin: "+string(b.String()))
 	}

 	if err := json.Unmarshal(out, &r); err != nil {
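
On the plugin side, an executable still receives the event name and the JSON-encoded event as its two arguments; with this change a payload larger than maxMessageSize (8 KiB) arrives through the new File field instead of Data. A hedged sketch of a plugin reading either form and answering with an empty JSON response, as the updated test fixture does; the struct is limited to the fields shown in events.go and everything else is illustrative:

package main

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
)

type event struct {
	Name string `json:"name"`
	Data string `json:"data"`
	File string `json:"file"`
}

func main() {
	// argv[1] is the event name, argv[2] the JSON-encoded event.
	var e event
	if len(os.Args) > 2 {
		_ = json.Unmarshal([]byte(os.Args[2]), &e)
	}

	payload := e.Data
	// Large payloads are spilled to a temporary file referenced by File.
	if payload == "" && e.File != "" {
		if b, err := ioutil.ReadFile(e.File); err == nil {
			payload = string(b)
		}
	}

	_ = payload // act on the event here

	// The plugin must answer with a JSON EventResponse on stdout ("{}" is enough,
	// as tests/fixtures/plugin/test-foo now does).
	fmt.Println("{}")
}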

vendor/modules.txt (vendored, 2 lines changed)

@@ -475,7 +475,7 @@ github.com/mudler/cobra-extensions
 # github.com/mudler/docker-companion v0.4.6-0.20200418093252-41846f112d87
 ## explicit
 github.com/mudler/docker-companion/api
-# github.com/mudler/go-pluggable v0.0.0-20210510180427-ba09243a8c65
+# github.com/mudler/go-pluggable v0.0.0-20210513155700-54c6443073af
 ## explicit
 github.com/mudler/go-pluggable
 # github.com/mudler/topsort v0.0.0-20201103161459-db5c7901c290