Compare commits

...

103 Commits
0.8.8 ... 0.9.8

Author SHA1 Message Date
Ettore Di Giacinto
87004c8e78 Tag 0.9.8 2020-11-28 16:29:38 +01:00
Ettore Di Giacinto
0fe30ddcfd Add ability to interpolate during build
Now build takes a --values argument, which is a yaml file that can be
used to interpolate the specs that are going to be compiled.
2020-11-28 15:47:29 +01:00
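A minimal sketch of how the new flag could be used; the file names, the template key and the Go-template style syntax are illustrative assumptions, not taken from this changeset:

# values.yaml: arbitrary keys made available to the spec templates
cat > values.yaml <<EOF
distro_version: "1.2.3"
EOF

# a package's build.yaml could then reference the value, e.g.
#   version: "{{ .Values.distro_version }}"   (assumed template syntax)

# interpolate the specs with the values file while building
luet build --tree ./tree --values values.yaml category/package
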
Ettore Di Giacinto
44d33eceba Set workdir also on step image
Otherwise, with DOCKER_SQUASH=true it wouldn't be consistent about where to
find the package files
2020-11-28 12:07:07 +01:00
Ettore Di Giacinto
ca994b07ab Tag 0.9.7 2020-11-28 00:34:46 +01:00
Ettore Di Giacinto
8ce135fe12 Add DOCKER_SQUASH 2020-11-27 23:38:31 +01:00
Ettore Di Giacinto
18d9366bca Minor fixes 2020-11-24 18:27:49 +01:00
Ettore Di Giacinto
c0206e5849 Tag 0.9.6 2020-11-23 20:18:42 +01:00
Ettore Di Giacinto
9fab46aa9e Add also description 2020-11-23 19:15:54 +01:00
Ettore Di Giacinto
5b54aeb822 Update vendor 2020-11-23 19:14:07 +01:00
Ettore Di Giacinto
7a10ff2742 Enhance search output with tables and alias to '.' when no args are specified 2020-11-23 19:13:54 +01:00
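A short usage sketch of the enhanced search (the --table and -o flags appear in the search diff further below; the search term is illustrative):

# with no arguments the term now defaults to '.', i.e. list everything
luet search

# render results as a table (wider screens) instead of the default list
luet search --table utils

# machine-readable output is still available
luet search -o json utils
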
Ettore Di Giacinto
db1b190fb5 Minor fixup and cleanups around the new prompt feature 2020-11-23 18:20:30 +01:00
Ettore Di Giacinto
b349665ff2 Add user prompts
Fixes #106
2020-11-22 23:43:29 +01:00
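Prompts can be skipped with the -y/--yes flag added to install, uninstall and upgrade in the diffs below; the package names are illustrative:

# ask for confirmation before applying changes (new default behaviour)
luet install category/package

# skip the prompt, e.g. in CI or scripts
luet install -y category/package
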
Ettore Di Giacinto
3959cfd623 Tag 0.9.5 2020-11-20 19:02:54 +01:00
Ettore Di Giacinto
53ab0e0dd2 Merge pull request #151 from mudler/download-progress-bar
Download progress bar
2020-11-20 19:00:25 +01:00
Daniele Rondina
651ea17548 Update vendor/ (progress bar deps) 2020-11-20 18:16:49 +01:00
Daniele Rondina
60d5c9dfd5 Add download progress bar 2020-11-20 18:12:23 +01:00
Ettore Di Giacinto
1f807f369a Move revdeps computation to db 2020-11-20 17:23:21 +01:00
Ettore Di Giacinto
4e1b006a08 Cleanup vendor 2020-11-19 18:53:08 +01:00
Ettore Di Giacinto
47f0049efa Tag 0.9.4 2020-11-19 18:52:22 +01:00
Ettore Di Giacinto
0cc2b72831 Drop converter code, will be in a separate extension 2020-11-19 18:10:16 +01:00
Ettore Di Giacinto
f2df3faee5 Now Uninstall takes multiple packages 2020-11-19 18:05:27 +01:00
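Uninstall now accepts more than one package on the command line; the package names below are illustrative:

# remove several packages in one call
luet uninstall category/foo category/bar
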
Daniele Rondina
287098f101 Update vendor github.com/cavaliercoder/grab 2020-11-19 00:56:59 +01:00
Daniele Rondina
f9a7113ab9 client/http: Add experimental download info 2020-11-19 00:56:28 +01:00
Ettore Di Giacinto
c3559d952c Tag 0.9.3 2020-11-15 13:38:30 +01:00
Ettore Di Giacinto
fc863fc8e5 Add collections integration test 2020-11-15 13:22:21 +01:00
Ettore Di Giacinto
ac149e9336 Use candidate for search, as it doesn't have a selector 2020-11-15 11:47:32 +01:00
Ettore Di Giacinto
b9c8e50e42 Allow to define multiple templated packages with collections
Collections, similarly to packages, have a `build.yaml` and
a `finalize.yaml` that are templated for each package.
They have a `collection.yaml` containing a list of
packages that are part of the tree.
2020-11-15 00:13:46 +01:00
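A sketch of the collection layout described above; the directory name and the collection.yaml keys are assumptions, only the three file names come from the commit message:

# a collection ships one build.yaml and one finalize.yaml that are
# templated for every entry of collection.yaml
mkdir -p tree/my-collection
cat > tree/my-collection/collection.yaml <<EOF
packages:
  - name: "foo"
    category: "apps"
    version: "1.0"
  - name: "bar"
    category: "apps"
    version: "2.0"
EOF
# build.yaml and finalize.yaml live next to it and apply to every package above
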
Ettore Di Giacinto
cf7df00a65 Add luet tree images command to show images tree 2020-11-14 14:51:11 +01:00
Daniele Rondina
83f924da35 spectools: Add DefaultPackageSanitized.Clone() 2020-11-14 12:42:49 +01:00
Ettore Di Giacinto
c82d23f9f2 Update go-pluggable 2020-11-13 19:50:10 +01:00
Ettore Di Giacinto
0e46e763d5 Move bus implementation to a separate repo, hook to events in luet 2020-11-13 18:25:44 +01:00
Ettore Di Giacinto
a793b44e83 Wip 2020-11-12 23:21:10 +01:00
Ettore Di Giacinto
19e6054574 Tag 0.9.2 2020-11-10 20:20:27 +01:00
Ettore Di Giacinto
a8624fe451 Move image removal in compileWithImage and further cleanup 2020-11-10 18:48:39 +01:00
Ettore Di Giacinto
14c1d6ef24 Refactor and optimize build process 2020-11-10 18:14:18 +01:00
Ettore Di Giacinto
36c58307e2 Don't export unless needed 2020-11-10 16:57:24 +01:00
Ettore Di Giacinto
665261e526 Tag 0.9.1 2020-11-09 19:42:34 +01:00
Ettore Di Giacinto
794c5984a2 Add pack command 2020-11-09 18:16:22 +01:00
Ettore Di Giacinto
a765147c1d Add templated finalizers 2020-11-08 21:14:19 +01:00
Ettore Di Giacinto
088adf6f3a Tag 0.9 2020-11-08 18:25:59 +01:00
Ettore Di Giacinto
cead09fb9f Merge pull request #148 from mudler/respect_rootfs4conf
Respect rootfs path for configs and url
2020-11-08 18:25:29 +01:00
Daniele Rondina
9a1787ddaf client/local: Handle config_from_host on DownloadFile 2020-11-08 17:06:05 +01:00
Ettore Di Giacinto
b1316b50b4 Add excludes tests 2020-11-08 16:02:11 +01:00
Ettore Di Giacinto
d92ee9e1d9 Add preliminar support for excludes 2020-11-08 15:35:24 +01:00
Ettore Di Giacinto
e7b58eec41 Use sane default for installer script 2020-11-08 14:33:34 +01:00
Ettore Di Giacinto
6a1b64acea Order files before uninstall
Fixes #149
2020-11-08 12:36:41 +01:00
Ettore Di Giacinto
df14fe60fc Tag 0.8.15 2020-11-08 11:07:33 +01:00
Ettore Di Giacinto
459eb01a59 Don't write err to stdout if not present 2020-11-08 10:02:00 +01:00
Daniele Rondina
e6c597c7d3 test-integration/12_config_protect.sh: Use repo url related with rootfs path 2020-11-08 00:05:06 +01:00
Daniele Rondina
e70cdbaaf7 Respect rootfs on repositories urls 2020-11-08 00:00:15 +01:00
Daniele Rondina
eea9dad2c6 tests/integration: Add option config_from_host 2020-11-07 19:14:44 +01:00
Daniele Rondina
513f441bb3 Add option config_from_host 2020-11-07 18:56:25 +01:00
Daniele Rondina
ebe7466fdc Respect rootfs path for load config 2020-11-07 18:28:23 +01:00
Ettore Di Giacinto
76328176c1 Tag 0.8.14 2020-11-07 12:29:07 +01:00
Ettore Di Giacinto
46ed6423ad Merge pull request #147 from mudler/fix-protect-uninstall
Fix protect uninstall
2020-11-07 12:28:24 +01:00
Daniele Rondina
d5df40512b installer: Improve message for protected files 2020-11-07 12:27:18 +01:00
Daniele Rondina
d219a2e0fb Run travis task with/without buildkit 2020-11-07 11:41:44 +01:00
Daniele Rondina
4048138dcb Add test suite for ConfigProtect 2020-11-07 11:39:31 +01:00
Daniele Rondina
e5f44eee09 ConfigProtect: support annotation without initial / 2020-11-07 11:39:13 +01:00
Daniele Rondina
6819a28f07 Add support to DOCKER_BUILDKIT on test 2020-11-07 11:37:58 +01:00
Daniele Rondina
24eb6eaef5 Fix test with docker buildkit 2020-11-07 11:37:58 +01:00
Daniele Rondina
58c4866289 .travis.yml: Enable Docker buildkit 2020-11-07 11:37:58 +01:00
Daniele Rondina
c72565e019 Integrate tests for config protects with uninstall 2020-11-06 23:30:37 +01:00
Daniele Rondina
0f59c207b0 Load config protect files on upgrade/uninstall 2020-11-06 23:30:08 +01:00
Daniele Rondina
68bc8d4d27 ConfigProtect: Permit to obtain the list of files without initial / 2020-11-06 23:29:08 +01:00
Daniele Rondina
b24d335538 GetProtectFiles() is used also for tree tarball without specs 2020-11-06 23:00:37 +01:00
Ettore Di Giacinto
dcc5aae3cd Tag 0.8.13 2020-11-06 22:25:26 +01:00
Ettore Di Giacinto
99bf9e291d Use LStat and attempt removing before bailing out on first failure 2020-11-06 21:34:56 +01:00
Daniele Rondina
51417ecb5d pkg/compiler/artifact.go: permit to support config protect with only annotation 2020-11-06 20:23:46 +01:00
Daniele Rondina
130eb8de1a Integrate config protection on uninstall too 2020-11-06 20:14:25 +01:00
Daniele Rondina
f1604c3b6f contrib: Add get_luet_root.sh script 2020-11-06 07:46:00 +01:00
Ettore Di Giacinto
5b5735266a Calculate provides for parallel solver too 2020-11-05 21:00:24 +01:00
Ettore Di Giacinto
984366d3a5 Consider provides during upgrades 2020-11-05 20:52:02 +01:00
Ettore Di Giacinto
55ec38ffc7 Tag 0.8.12 2020-11-03 20:02:44 +01:00
Ettore Di Giacinto
9aa352dec8 Add json output to build 2020-11-03 18:06:56 +01:00
Ettore Di Giacinto
d7a04465fd update vendor/ 2020-11-03 17:21:32 +01:00
Ettore Di Giacinto
25f69d4f1c Bump topsort 2020-11-03 17:20:52 +01:00
Ettore Di Giacinto
102a788c91 Revert "Revert "Stabilize ordering graph""
This reverts commit 2b23016a51.
2020-11-02 15:43:35 +01:00
Ettore Di Giacinto
2b23016a51 Revert "Stabilize ordering graph"
This reverts commit 940f553e1c.
2020-11-02 15:43:15 +01:00
Ettore Di Giacinto
940f553e1c Stabilize ordering graph
In this way when we order, we always return the same solution order in
case there are weak deps.

The following is optional - it doesn't change the "correctness" of the
solver results: we add an extra edge between deps that
share common dependencies. This makes the link stronger and
balances the graph so it doesn't show different results for the same query,
as those deps could otherwise be shuffled since they don't have a direct connection.
2020-11-02 14:30:41 +01:00
Ettore Di Giacinto
c3ef549673 Warn user only when required when uninstalling directories 2020-10-31 11:56:03 +01:00
Ettore Di Giacinto
0e764e525e Filter packages to install instead of looping solver result 2020-10-31 01:25:18 +01:00
Ettore Di Giacinto
f401e2b37f Add install benchmark test for solver 2020-10-30 22:20:08 +01:00
Ettore Di Giacinto
2b67b8dd24 Bump version 2020-10-30 19:15:10 +01:00
Ettore Di Giacinto
91dfb8ce3a Enhance CLI output 2020-10-30 19:15:04 +01:00
Ettore Di Giacinto
f6a4b634c1 Don't always walk all World() packages
With this change the solver during install considers only the part
of the tree which is required to calculate the solution; it no longer
uses World() as the search space.

The search space is now narrowed down to the packages related to
the one we are considering.

In this subset of changes we also optimize the parallel solver,
avoiding a useless loop.

This change boosts overall performance on large datasets which don't
necessarily have relations touching the whole tree.
2020-10-30 19:12:12 +01:00
Ettore Di Giacinto
2fa58fc7db Bump gophersat 2020-10-30 18:37:26 +01:00
Ettore Di Giacinto
529a827c5f Tag 0.8.10 2020-10-29 16:50:17 +01:00
Ettore Di Giacinto
39bc74fc73 Add boltDB test and fixup range over interface cast 2020-10-29 16:32:13 +01:00
Ettore Di Giacinto
99c59643a1 Add benchmarks tests 2020-10-29 16:32:13 +01:00
Ettore Di Giacinto
ffea4d8cf9 Fix priority constraint formula
The parallel solver made the issue more visible: the constraints needed
to be less relaxed and to be exclusive, so that our candidate is looked
up first
2020-10-29 16:32:13 +01:00
Ettore Di Giacinto
e459ddf470 Optimize BoltDB World() call 2020-10-29 16:32:13 +01:00
Ettore Di Giacinto
eb2c240e84 Adapt installer tests 2020-10-29 16:32:13 +01:00
Ettore Di Giacinto
1956f476cc Set concurrency when building 2020-10-29 16:32:13 +01:00
Ettore Di Giacinto
a216f71d53 Inverted options 2020-10-29 16:32:13 +01:00
Ettore Di Giacinto
95e640c9d0 Make solver type switchable 2020-10-29 16:32:13 +01:00
Ettore Di Giacinto
9f1a182eee Add tests and various fixes to parallel implementation 2020-10-29 16:32:13 +01:00
Ettore Di Giacinto
9a7d92b02e Make the parallel solver completely parallel in building formulas from dataset 2020-10-29 16:32:13 +01:00
Ettore Di Giacinto
c5ed36b2bd Sketch concurrent solver when building formulas 2020-10-29 16:32:13 +01:00
Daniele Rondina
5f8b836335 Update vendor github.com/Sabayon/pkgs-checker@v0.7.2 2020-10-28 17:48:30 +01:00
Daniele Rondina
6806103b3e installer: Start spinner of upgrade calculation 2020-10-25 14:01:45 +01:00
Ettore Di Giacinto
4e9313ed55 Tag 0.8.9 2020-10-22 18:36:37 +02:00
Ettore Di Giacinto
c9952b12a8 Get file list from parsed yaml 2020-10-22 17:33:37 +02:00
789 changed files with 104734 additions and 128540 deletions


@@ -1,15 +1,31 @@
dist: bionic
language: go
services:
- docker
go:
- "1.14"
env:
- "GO15VENDOREXPERIMENT=1"
global:
- "GO15VENDOREXPERIMENT=1"
jobs:
- "DOCKER_BUILDKIT=0"
- "DOCKER_BUILDKIT=1"
before_install:
- sudo rm -rf /var/lib/apt/lists/*
- curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
- sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) edge"
- sudo apt-get update
- echo '{"experimental":true}' | sudo tee /etc/docker/daemon.json
- export DOCKER_CLI_EXPERIMENTAL=enabled
- sudo apt-get -y -o Dpkg::Options::="--force-confnew" install docker-ce
- mkdir -vp ~/.docker/cli-plugins/
- curl --silent -L "https://github.com/docker/buildx/releases/download/v0.3.0/buildx-v0.3.0.linux-amd64" > ~/.docker/cli-plugins/docker-buildx
- chmod a+x ~/.docker/cli-plugins/docker-buildx
- docker buildx version
- sudo -E env "PATH=$PATH" apt-get install -y libcap2-bin
- sudo -E env "PATH=$PATH" make deps
script:
- sudo -E env "PATH=$PATH" make multiarch-build test-integration test-coverage
#after_success:
# - |
# if [ -n "$TRAVIS_TAG" ] && [ "$TRAVIS_PULL_REQUEST" == "false" ]; then


@@ -15,15 +15,18 @@
package cmd
import (
"fmt"
"io/ioutil"
"os"
"github.com/ghodss/yaml"
helpers "github.com/mudler/luet/cmd/helpers"
"github.com/mudler/luet/pkg/compiler"
"github.com/mudler/luet/pkg/compiler/backend"
. "github.com/mudler/luet/pkg/config"
. "github.com/mudler/luet/pkg/logger"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/solver"
tree "github.com/mudler/luet/pkg/tree"
"github.com/spf13/cobra"
@@ -46,6 +49,7 @@ var buildCmd = &cobra.Command{
viper.BindPFlag("compression", cmd.Flags().Lookup("compression"))
viper.BindPFlag("nodeps", cmd.Flags().Lookup("nodeps"))
viper.BindPFlag("onlydeps", cmd.Flags().Lookup("onlydeps"))
viper.BindPFlag("values", cmd.Flags().Lookup("values"))
viper.BindPFlag("image-repository", cmd.Flags().Lookup("image-repository"))
viper.BindPFlag("push", cmd.Flags().Lookup("push"))
@@ -72,6 +76,8 @@ var buildCmd = &cobra.Command{
databaseType := viper.GetString("database")
compressionType := viper.GetString("compression")
imageRepository := viper.GetString("image-repository")
values := viper.GetString("values")
push := viper.GetBool("push")
pull := viper.GetBool("pull")
keepImages := viper.GetBool("keep-images")
@@ -81,7 +87,14 @@ var buildCmd = &cobra.Command{
onlyTarget, _ := cmd.Flags().GetBool("only-target-package")
full, _ := cmd.Flags().GetBool("full")
skip, _ := cmd.Flags().GetBool("skip-if-metadata-exists")
concurrent, _ := cmd.Flags().GetBool("solver-concurrent")
var results Results
out, _ := cmd.Flags().GetString("output")
if out != "terminal" {
LuetCfg.GetLogging().SetLogLevel("error")
}
pretend, _ := cmd.Flags().GetBool("pretend")
compilerSpecs := compiler.NewLuetCompilationspecs()
var compilerBackend compiler.CompilerBackend
var db pkg.PackageDatabase
@@ -147,8 +160,15 @@ var buildCmd = &cobra.Command{
opts.KeepImageExport = keepExportedImages
opts.SkipIfMetadataExists = skip
opts.PackageTargetOnly = onlyTarget
opts.BuildValuesFile = values
var solverOpts solver.Options
if concurrent {
solverOpts = solver.Options{Type: solver.ParallelSimple, Concurrency: concurrency}
} else {
solverOpts = solver.Options{Type: solver.SingleCoreSimple, Concurrency: concurrency}
}
luetCompiler := compiler.NewLuetCompiler(compilerBackend, generalRecipe.GetDatabase(), opts)
luetCompiler := compiler.NewLuetCompiler(compilerBackend, generalRecipe.GetDatabase(), opts, solverOpts)
luetCompiler.SetConcurrency(concurrency)
luetCompiler.SetCompressionType(compiler.CompressionImplementation(compressionType))
if full {
@@ -196,9 +216,58 @@ var buildCmd = &cobra.Command{
if revdeps {
artifact, errs = luetCompiler.CompileWithReverseDeps(privileged, compilerSpecs)
} else if pretend {
toCalculate := []compiler.CompilationSpec{}
if full {
var err error
toCalculate, err = luetCompiler.ComputeMinimumCompilableSet(compilerSpecs.All()...)
if err != nil {
errs = append(errs, err)
}
} else {
toCalculate = compilerSpecs.All()
}
for _, sp := range toCalculate {
packs, err := luetCompiler.ComputeDepTree(sp)
if err != nil {
errs = append(errs, err)
}
for _, p := range packs {
results.Packages = append(results.Packages,
PackageResult{
Name: p.Package.GetName(),
Version: p.Package.GetVersion(),
Category: p.Package.GetCategory(),
Repository: "",
Hidden: p.Package.IsHidden(),
Target: sp.GetPackage().HumanReadableString(),
})
}
}
y, err := yaml.Marshal(results)
if err != nil {
fmt.Printf("err: %v\n", err)
return
}
switch out {
case "yaml":
fmt.Println(string(y))
case "json":
j2, err := yaml.YAMLToJSON(y)
if err != nil {
fmt.Printf("err: %v\n", err)
return
}
fmt.Println(string(j2))
case "terminal":
for _, p := range results.Packages {
Info(p.String())
}
}
} else {
artifact, errs = luetCompiler.CompileParallel(privileged, compilerSpecs)
}
if len(errs) != 0 {
for _, e := range errs {
@@ -225,6 +294,7 @@ func init() {
buildCmd.Flags().Bool("revdeps", false, "Build with revdeps")
buildCmd.Flags().Bool("all", false, "Build all specfiles in the tree")
buildCmd.Flags().Bool("full", false, "Build all packages (optimized)")
buildCmd.Flags().String("values", "", "Build values file to interpolate with each package")
buildCmd.Flags().String("destination", path, "Destination folder")
buildCmd.Flags().String("compression", "none", "Compression alg: none, gzip")
@@ -241,6 +311,11 @@ func init() {
buildCmd.Flags().Float32("solver-rate", 0.7, "Solver learning rate")
buildCmd.Flags().Float32("solver-discount", 1.0, "Solver discount rate")
buildCmd.Flags().Int("solver-attempts", 9000, "Solver maximum attempts")
buildCmd.Flags().Bool("solver-concurrent", false, "Use concurrent solver (experimental)")
buildCmd.Flags().Bool("pretend", false, "Just print what packages will be compiled")
buildCmd.Flags().StringP("output", "o", "terminal", "Output format ( Defaults: terminal, available: json,yaml )")
RootCmd.AddCommand(buildCmd)
}
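
Given the flags added in this diff, a dry-run of the build could look like the following; the tree path and package name are illustrative:

# print the packages that would be compiled, without building anything
luet build --tree ./tree --pretend category/package

# same, but emit the list as JSON for tooling
luet build --tree ./tree --pretend -o json category/package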


@@ -1,101 +0,0 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package cmd
import (
"io/ioutil"
. "github.com/mudler/luet/pkg/config"
. "github.com/mudler/luet/pkg/logger"
pkg "github.com/mudler/luet/pkg/package"
tree "github.com/mudler/luet/pkg/tree"
"github.com/mudler/luet/pkg/tree/builder/gentoo"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var convertCmd = &cobra.Command{
Use: "convert [portage-tree] [luet-tree]",
Short: "convert other package manager tree into luet",
Long: `Parses external PM and produces a luet parsable tree`,
PreRun: func(cmd *cobra.Command, args []string) {
viper.BindPFlag("type", cmd.Flags().Lookup("type"))
viper.BindPFlag("database", cmd.Flags().Lookup("database"))
},
Run: func(cmd *cobra.Command, args []string) {
t := viper.GetString("type")
databaseType := viper.GetString("database")
var db pkg.PackageDatabase
if len(args) != 2 {
Fatal("Incorrect number of arguments")
}
input := args[0]
output := args[1]
Info("Converting trees from " + input + " [" + t + "]")
var builder tree.Parser
switch t {
case "gentoo":
builder = gentoo.NewGentooBuilder(
&gentoo.SimpleEbuildParser{},
LuetCfg.GetGeneral().Concurrency,
gentoo.InMemory)
default: // dup
builder = gentoo.NewGentooBuilder(
&gentoo.SimpleEbuildParser{},
LuetCfg.GetGeneral().Concurrency,
gentoo.InMemory)
}
switch databaseType {
case "memory":
db = pkg.NewInMemoryDatabase(false)
case "boltdb":
tmpdir, err := ioutil.TempDir("", "package")
if err != nil {
Fatal(err)
}
db = pkg.NewBoltDatabase(tmpdir)
}
defer db.Clean()
packageTree, err := builder.Generate(input)
if err != nil {
Fatal("Error: " + err.Error())
}
defer packageTree.Clean()
Info("Tree generated")
generalRecipe := tree.NewGeneralRecipe(packageTree)
Info("Saving generated tree to " + output)
err = generalRecipe.Save(output)
if err != nil {
Fatal("Error: " + err.Error())
}
},
}
func init() {
convertCmd.Flags().String("type", "gentoo", "source type")
convertCmd.Flags().String("database", "memory", "database used for solving (memory,boltdb)")
RootCmd.AddCommand(convertCmd)
}


@@ -59,10 +59,7 @@ func NewDatabaseCreateCommand() *cobra.Command {
systemDB = pkg.NewInMemoryDatabase(true)
}
files, err := art.FileList()
if err != nil {
Fatal("Failed getting file list for ", a, ": ", err.Error())
}
files := art.GetFiles()
if _, err := systemDB.CreatePackage(art.GetCompileSpec().GetPackage()); err != nil {
Fatal("Failed to create ", a, ": ", err.Error())


@@ -19,6 +19,7 @@ import (
"path/filepath"
installer "github.com/mudler/luet/pkg/installer"
"github.com/mudler/luet/pkg/solver"
helpers "github.com/mudler/luet/cmd/helpers"
. "github.com/mudler/luet/pkg/config"
@@ -42,6 +43,7 @@ var installCmd = &cobra.Command{
LuetCfg.Viper.BindPFlag("onlydeps", cmd.Flags().Lookup("onlydeps"))
LuetCfg.Viper.BindPFlag("nodeps", cmd.Flags().Lookup("nodeps"))
LuetCfg.Viper.BindPFlag("force", cmd.Flags().Lookup("force"))
LuetCfg.Viper.BindPFlag("yes", cmd.Flags().Lookup("yes"))
},
Long: `Install packages in parallel`,
Run: func(cmd *cobra.Command, args []string) {
@@ -73,11 +75,20 @@ var installCmd = &cobra.Command{
force := LuetCfg.Viper.GetBool("force")
nodeps := LuetCfg.Viper.GetBool("nodeps")
onlydeps := LuetCfg.Viper.GetBool("onlydeps")
concurrent, _ := cmd.Flags().GetBool("solver-concurrent")
yes := LuetCfg.Viper.GetBool("yes")
LuetCfg.GetSolverOptions().Type = stype
LuetCfg.GetSolverOptions().LearnRate = float32(rate)
LuetCfg.GetSolverOptions().Discount = float32(discount)
LuetCfg.GetSolverOptions().MaxAttempts = attempts
if concurrent {
LuetCfg.GetSolverOptions().Implementation = solver.ParallelSimple
} else {
LuetCfg.GetSolverOptions().Implementation = solver.SingleCoreSimple
}
Debug("Solver", LuetCfg.GetSolverOptions().CompactString())
// Load config protect configs
@@ -90,6 +101,7 @@ var installCmd = &cobra.Command{
Force: force,
OnlyDeps: onlydeps,
PreserveSystemEssentialData: true,
Ask: !yes,
})
inst.Repositories(repos)
@@ -121,6 +133,8 @@ func init() {
installCmd.Flags().Bool("nodeps", false, "Don't consider package dependencies (harmful!)")
installCmd.Flags().Bool("onlydeps", false, "Consider **only** package dependencies")
installCmd.Flags().Bool("force", false, "Skip errors and keep going (potentially harmful)")
installCmd.Flags().Bool("solver-concurrent", false, "Use concurrent solver (experimental)")
installCmd.Flags().BoolP("yes", "y", false, "Don't ask questions")
RootCmd.AddCommand(installCmd)
}
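
Combining the new install flags from this diff (package name illustrative; the concurrent solver is marked experimental):

# install without prompting, using the experimental parallel solver
luet install -y --solver-concurrent category/package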

cmd/pack.go (new file)

@@ -0,0 +1,89 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package cmd
import (
"os"
"path/filepath"
"time"
helpers "github.com/mudler/luet/cmd/helpers"
"github.com/mudler/luet/pkg/compiler"
. "github.com/mudler/luet/pkg/config"
. "github.com/mudler/luet/pkg/logger"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var packCmd = &cobra.Command{
Use: "pack <package name>",
Short: "pack a custom package",
Long: `pack and creates metadata directly from a source path`,
PreRun: func(cmd *cobra.Command, args []string) {
viper.BindPFlag("destination", cmd.Flags().Lookup("destination"))
viper.BindPFlag("compression", cmd.Flags().Lookup("compression"))
viper.BindPFlag("source", cmd.Flags().Lookup("source"))
},
Run: func(cmd *cobra.Command, args []string) {
sourcePath := viper.GetString("source")
dst := viper.GetString("destination")
compressionType := viper.GetString("compression")
concurrency := LuetCfg.GetGeneral().Concurrency
if len(args) != 1 {
Fatal("You must specify a package name")
}
packageName := args[0]
p, err := helpers.ParsePackageStr(packageName)
if err != nil {
Fatal("Invalid package string ", packageName, ": ", err.Error())
}
spec := &compiler.LuetCompilationSpec{Package: p}
artifact := compiler.NewPackageArtifact(filepath.Join(dst, p.GetFingerPrint()+".package.tar"))
artifact.SetCompressionType(compiler.CompressionImplementation(compressionType))
err = artifact.Compress(sourcePath, concurrency)
if err != nil {
Fatal("failed compressing ", packageName, ": ", err.Error())
}
artifact.SetCompileSpec(spec)
filelist, err := artifact.FileList()
if err != nil {
Fatal("failed generating file list for ", packageName, ": ", err.Error())
}
artifact.SetFiles(filelist)
artifact.GetCompileSpec().GetPackage().SetBuildTimestamp(time.Now().String())
err = artifact.WriteYaml(dst)
if err != nil {
Fatal("failed writing metadata yaml file for ", packageName, ": ", err.Error())
}
},
}
func init() {
path, err := os.Getwd()
if err != nil {
Fatal(err)
}
packCmd.Flags().String("source", path, "Source folder")
packCmd.Flags().String("destination", path, "Destination folder")
packCmd.Flags().String("compression", "gzip", "Compression alg: none, gzip")
RootCmd.AddCommand(packCmd)
}
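
A hypothetical invocation of the new pack command; the paths and the package string accepted by ParsePackageStr are assumptions:

# package the contents of ./rootfs as apps/foo, writing the .package.tar
# and its metadata yaml into ./out
luet pack --source ./rootfs --destination ./out --compression gzip apps/foo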


@@ -24,6 +24,8 @@ import (
"strings"
"github.com/marcsauter/single"
bus "github.com/mudler/luet/pkg/bus"
extensions "github.com/mudler/cobra-extensions"
config "github.com/mudler/luet/pkg/config"
helpers "github.com/mudler/luet/pkg/helpers"
@@ -38,7 +40,7 @@ var Verbose bool
var LockedCommands = []string{"install", "uninstall", "upgrade"}
const (
LuetCLIVersion = "0.8.8"
LuetCLIVersion = "0.9.8"
LuetEnvPrefix = "LUET"
)
@@ -52,9 +54,31 @@ var (
// RootCmd represents the base command when called without any subcommands
var RootCmd = &cobra.Command{
Use: "luet",
Short: "Package manager for the XXth century!",
Long: `Package manager which uses containers to build packages`,
Use: "luet",
Short: "Package manager for the XXth century!",
Long: `Luet is a single-binary package manager based on containers to build packages.
To install a package:
$ luet install package
To search for a package in the repositories:
$ luet search package
To list all packages installed in the system:
$ luet search --installed .
To show hidden packages:
$ luet search --hidden package
To build a package, from a tree definition:
$ luet build --tree tree/path package
`,
Version: fmt.Sprintf("%s-g%s %s", LuetCLIVersion, BuildCommit, BuildTime),
PersistentPreRun: func(cmd *cobra.Command, args []string) {
err := LoadConfig(config.LuetCfg)
@@ -68,6 +92,18 @@ var RootCmd = &cobra.Command{
if err != nil {
Fatal("failed on init tmp basedir:", err.Error())
}
viper.BindPFlag("plugin", cmd.Flags().Lookup("plugin"))
plugin := viper.GetStringSlice("plugin")
bus.Manager.Load(plugin...).Register()
if len(bus.Manager.Plugins) != 0 {
Info(":lollipop:Enabled plugins:")
for _, p := range bus.Manager.Plugins {
Info("\t:arrow_right:", p.Name)
}
}
},
PersistentPostRun: func(cmd *cobra.Command, args []string) {
// Cleanup all tmp directories used by luet
@@ -156,6 +192,7 @@ func init() {
"Disable config protect analysis.")
pflags.StringP("logfile", "l", config.LuetCfg.GetLogging().Path,
"Logfile path. Empty value disable log to file.")
pflags.StringSlice("plugin", []string{}, "A list of runtime plugins to load")
// os/user doesn't work in from scratch environments.
// Check if i can retrieve user informations.
@@ -175,6 +212,8 @@ func init() {
config.LuetCfg.Viper.BindPFlag("general.debug", pflags.Lookup("debug"))
config.LuetCfg.Viper.BindPFlag("general.fatal_warnings", pflags.Lookup("fatal"))
config.LuetCfg.Viper.BindPFlag("general.same_owner", pflags.Lookup("same-owner"))
config.LuetCfg.Viper.BindPFlag("plugin", pflags.Lookup("plugin"))
// Currently I maintain this only from cli.
config.LuetCfg.Viper.BindPFlag("no_spinner", pflags.Lookup("no-spinner"))
config.LuetCfg.Viper.BindPFlag("config_protect_skip", pflags.Lookup("skip-config-protect"))


@@ -18,8 +18,11 @@ import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/ghodss/yaml"
"github.com/jedib0t/go-pretty/table"
"github.com/jedib0t/go-pretty/v6/list"
. "github.com/mudler/luet/pkg/config"
installer "github.com/mudler/luet/pkg/installer"
. "github.com/mudler/luet/pkg/logger"
@@ -32,6 +35,7 @@ type PackageResult struct {
Category string `json:"category"`
Version string `json:"version"`
Repository string `json:"repository"`
Target string `json:"target"`
Hidden bool `json:"hidden"`
}
@@ -39,6 +43,28 @@ type Results struct {
Packages []PackageResult `json:"packages"`
}
func (r PackageResult) String() string {
return fmt.Sprintf("%s/%s-%s required for %s", r.Category, r.Name, r.Version, r.Target)
}
var rows table.Row = table.Row{"Package", "Category", "Name", "Version", "Repository", "Description", "License", "URI"}
func packageToRow(repo string, p pkg.Package) table.Row {
return table.Row{p.HumanReadableString(), p.GetCategory(), p.GetName(), p.GetVersion(), repo, p.GetDescription(), p.GetLicense(), strings.Join(p.GetURI(), "\n")}
}
func packageToList(l list.Writer, repo string, p pkg.Package) {
l.AppendItem(p.HumanReadableString())
l.Indent()
l.AppendItem(fmt.Sprintf("Category: %s", p.GetCategory()))
l.AppendItem(fmt.Sprintf("Name: %s", p.GetName()))
l.AppendItem(fmt.Sprintf("Version: %s", p.GetVersion()))
l.AppendItem(fmt.Sprintf("Description: %s", p.GetDescription()))
l.AppendItem(fmt.Sprintf("Repository: %s ", repo))
l.AppendItem(fmt.Sprintf("Uri: %s ", strings.Join(p.GetURI(), "\n")))
l.UnIndent()
}
var searchCmd = &cobra.Command{
Use: "search <term>",
Short: "Search packages",
@@ -56,10 +82,11 @@ var searchCmd = &cobra.Command{
Run: func(cmd *cobra.Command, args []string) {
var systemDB pkg.PackageDatabase
var results Results
if len(args) != 1 {
if len(args) > 1 {
Fatal("Wrong number of arguments (expected 1)")
} else if len(args) == 0 {
args = []string{"."}
}
hidden, _ := cmd.Flags().GetBool("hidden")
installed := LuetCfg.Viper.GetBool("installed")
@@ -70,6 +97,7 @@ var searchCmd = &cobra.Command{
searchWithLabel, _ := cmd.Flags().GetBool("by-label")
searchWithLabelMatch, _ := cmd.Flags().GetBool("by-label-regex")
revdeps, _ := cmd.Flags().GetBool("revdeps")
tableMode, _ := cmd.Flags().GetBool("table")
out, _ := cmd.Flags().GetString("output")
if out != "terminal" {
@@ -81,6 +109,9 @@ var searchCmd = &cobra.Command{
LuetCfg.GetSolverOptions().Discount = float32(discount)
LuetCfg.GetSolverOptions().MaxAttempts = attempts
l := list.NewWriter()
t := table.NewWriter()
t.AppendHeader(rows)
Debug("Solver", LuetCfg.GetSolverOptions().CompactString())
if !installed {
@@ -119,7 +150,8 @@ var searchCmd = &cobra.Command{
for _, m := range matches {
if !revdeps {
if !m.Package.IsHidden() || m.Package.IsHidden() && hidden {
Info(fmt.Sprintf(":file_folder:%s", m.Repo.GetName()), fmt.Sprintf(":package:%s", m.Package.HumanReadableString()))
t.AppendRow(packageToRow(m.Repo.GetName(), m.Package))
packageToList(l, m.Repo.GetName(), m.Package)
results.Packages = append(results.Packages,
PackageResult{
Name: m.Package.GetName(),
@@ -130,10 +162,11 @@ var searchCmd = &cobra.Command{
})
}
} else {
visited := make(map[string]interface{})
for _, revdep := range m.Package.ExpandedRevdeps(m.Repo.GetTree().GetDatabase(), visited) {
packs, _ := m.Repo.GetTree().GetDatabase().GetRevdeps(m.Package)
for _, revdep := range packs {
if !revdep.IsHidden() || revdep.IsHidden() && hidden {
Info(fmt.Sprintf(":file_folder:%s", m.Repo.GetName()), fmt.Sprintf(":package:%s", revdep.HumanReadableString()))
t.AppendRow(packageToRow(m.Repo.GetName(), revdep))
packageToList(l, m.Repo.GetName(), revdep)
results.Packages = append(results.Packages,
PackageResult{
Name: revdep.GetName(),
@@ -173,7 +206,8 @@ var searchCmd = &cobra.Command{
for _, pack := range iMatches {
if !revdeps {
if !pack.IsHidden() || pack.IsHidden() && hidden {
Info(fmt.Sprintf(":package:%s", pack.HumanReadableString()))
t.AppendRow(packageToRow("system", pack))
packageToList(l, "system", pack)
results.Packages = append(results.Packages,
PackageResult{
Name: pack.GetName(),
@@ -184,11 +218,11 @@ var searchCmd = &cobra.Command{
})
}
} else {
visited := make(map[string]interface{})
for _, revdep := range pack.ExpandedRevdeps(system.Database, visited) {
packs, _ := system.Database.GetRevdeps(pack)
for _, revdep := range packs {
if !revdep.IsHidden() || revdep.IsHidden() && hidden {
Info(fmt.Sprintf(":package:%s", pack.HumanReadableString()))
t.AppendRow(packageToRow("system", pack))
packageToList(l, "system", pack)
results.Packages = append(results.Packages,
PackageResult{
Name: revdep.GetName(),
@@ -203,6 +237,16 @@ var searchCmd = &cobra.Command{
}
}
t.AppendFooter(rows)
t.SetStyle(table.StyleColoredBright)
l.SetStyle(list.StyleConnectedRounded)
if tableMode {
Info(t.Render())
} else {
Info(l.Render())
}
y, err := yaml.Marshal(results)
if err != nil {
fmt.Printf("err: %v\n", err)
@@ -240,6 +284,7 @@ func init() {
searchCmd.Flags().Bool("by-label-regex", false, "Search packages through label regex")
searchCmd.Flags().Bool("revdeps", false, "Search package reverse dependencies")
searchCmd.Flags().Bool("hidden", false, "Include hidden packages")
searchCmd.Flags().Bool("table", false, "show output in a table (wider screens)")
RootCmd.AddCommand(searchCmd)
}


@@ -34,5 +34,6 @@ func init() {
NewTreePkglistCommand(),
NewTreeValidateCommand(),
NewTreeBumpCommand(),
NewTreeImageCommand(),
)
}

cmd/tree/images.go (new file)

@@ -0,0 +1,136 @@
// Copyright © 2020 Ettore Di Giacinto <mudler@gentoo.org>
// Daniele Rondina <geaaru@sabayonlinux.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package cmd_tree
import (
"fmt"
//. "github.com/mudler/luet/pkg/config"
"github.com/ghodss/yaml"
helpers "github.com/mudler/luet/cmd/helpers"
"github.com/mudler/luet/pkg/compiler"
"github.com/mudler/luet/pkg/compiler/backend"
. "github.com/mudler/luet/pkg/config"
. "github.com/mudler/luet/pkg/logger"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/solver"
tree "github.com/mudler/luet/pkg/tree"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func NewTreeImageCommand() *cobra.Command {
var ans = &cobra.Command{
Use: "images [OPTIONS]",
Short: "List of the images of a package",
PreRun: func(cmd *cobra.Command, args []string) {
t, _ := cmd.Flags().GetStringArray("tree")
if len(t) == 0 {
Fatal("Mandatory tree param missing.")
}
if len(args) != 1 {
Fatal("Expects one package as parameter")
}
viper.BindPFlag("image-repository", cmd.Flags().Lookup("image-repository"))
},
Run: func(cmd *cobra.Command, args []string) {
var results TreeResults
treePath, _ := cmd.Flags().GetStringArray("tree")
imageRepository := viper.GetString("image-repository")
out, _ := cmd.Flags().GetString("output")
if out != "terminal" {
LuetCfg.GetLogging().SetLogLevel("error")
}
reciper := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
for _, t := range treePath {
err := reciper.Load(t)
if err != nil {
Fatal("Error on load tree ", err)
}
}
compilerBackend := backend.NewSimpleDockerBackend()
opts := compiler.NewDefaultCompilerOptions()
opts.SolverOptions = *LuetCfg.GetSolverOptions()
opts.ImageRepository = imageRepository
solverOpts := solver.Options{Type: solver.SingleCoreSimple, Concurrency: 1}
luetCompiler := compiler.NewLuetCompiler(compilerBackend, reciper.GetDatabase(), opts, solverOpts)
a := args[0]
pack, err := helpers.ParsePackageStr(a)
if err != nil {
Fatal("Invalid package string ", a, ": ", err.Error())
}
spec, err := luetCompiler.FromPackage(pack)
if err != nil {
Fatal("Error: " + err.Error())
}
asserts, err := luetCompiler.ComputeDepTree(spec)
for _, assertion := range asserts { //highly dependent on the order
//buildImageHash := imageRepository + ":" + assertion.Hash.BuildHash
currentPackageImageHash := imageRepository + ":" + assertion.Hash.PackageHash
results.Packages = append(results.Packages, TreePackageResult{
Name: assertion.Package.GetName(),
Version: assertion.Package.GetVersion(),
Category: assertion.Package.GetCategory(),
Image: currentPackageImageHash,
})
}
y, err := yaml.Marshal(results)
if err != nil {
fmt.Printf("err: %v\n", err)
return
}
switch out {
case "yaml":
fmt.Println(string(y))
case "json":
j2, err := yaml.YAMLToJSON(y)
if err != nil {
fmt.Printf("err: %v\n", err)
return
}
fmt.Println(string(j2))
default:
for _, p := range results.Packages {
fmt.Println(fmt.Sprintf("%s/%s-%s: %s", p.Category, p.Name, p.Version, p.Image))
}
}
},
}
ans.Flags().StringP("output", "o", "terminal", "Output format ( Defaults: terminal, available: json,yaml )")
ans.Flags().StringArrayP("tree", "t", []string{}, "Path of the tree to use.")
ans.Flags().String("image-repository", "luet/cache", "Default base image string for generated image")
return ans
}
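
Usage of the new tree images subcommand, based on the flags defined above; the tree path and package name are illustrative:

# show the cache image tags computed for a package and its dependency tree
luet tree images --tree ./tree apps/foo

# or as JSON for scripting
luet tree images --tree ./tree -o json apps/foo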


@@ -37,6 +37,7 @@ type TreePackageResult struct {
Category string `json:"category"`
Version string `json:"version"`
Path string `json:"path"`
Image string `json:"image"`
}
type TreeResults struct {
@@ -116,7 +117,7 @@ func NewTreePkglistCommand() *cobra.Command {
if deps {
emptyInstallationDb := pkg.NewInMemoryDatabase(false)
depSolver = solver.NewSolver(pkg.NewInMemoryDatabase(false),
depSolver = solver.NewSolver(solver.Options{Type: solver.SingleCoreSimple}, pkg.NewInMemoryDatabase(false),
reciper.GetDatabase(),
emptyInstallationDb)
@@ -167,9 +168,8 @@ func NewTreePkglistCommand() *cobra.Command {
if addPkg {
if revdeps {
visited := make(map[string]interface{})
for _, revdep := range p.ExpandedRevdeps(reciper.GetDatabase(), visited) {
packs, _ := reciper.GetDatabase().GetRevdeps(p)
for _, revdep := range packs {
if full {
pkgstr = pkgDetail(revdep)
} else if verbose {


@@ -85,7 +85,7 @@ func validatePackage(p pkg.Package, checkType string, opts *ValidateOpts, recipe
if opts.WithSolver {
emptyInstallationDb := pkg.NewInMemoryDatabase(false)
depSolver = solver.NewSolver(pkg.NewInMemoryDatabase(false),
depSolver = solver.NewSolver(solver.Options{Type: solver.SingleCoreSimple}, pkg.NewInMemoryDatabase(false),
reciper.GetDatabase(),
emptyInstallationDb)
}


@@ -23,6 +23,7 @@ import (
installer "github.com/mudler/luet/pkg/installer"
. "github.com/mudler/luet/pkg/logger"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/solver"
"github.com/spf13/cobra"
)
@@ -41,6 +42,7 @@ var uninstallCmd = &cobra.Command{
LuetCfg.Viper.BindPFlag("solver.max_attempts", cmd.Flags().Lookup("solver-attempts"))
LuetCfg.Viper.BindPFlag("nodeps", cmd.Flags().Lookup("nodeps"))
LuetCfg.Viper.BindPFlag("force", cmd.Flags().Lookup("force"))
LuetCfg.Viper.BindPFlag("yes", cmd.Flags().Lookup("yes"))
},
Run: func(cmd *cobra.Command, args []string) {
var systemDB pkg.PackageDatabase
@@ -61,14 +63,23 @@ var uninstallCmd = &cobra.Command{
full, _ := cmd.Flags().GetBool("full")
checkconflicts, _ := cmd.Flags().GetBool("conflictscheck")
fullClean, _ := cmd.Flags().GetBool("full-clean")
concurrent, _ := cmd.Flags().GetBool("solver-concurrent")
yes := LuetCfg.Viper.GetBool("yes")
LuetCfg.GetSolverOptions().Type = stype
LuetCfg.GetSolverOptions().LearnRate = float32(rate)
LuetCfg.GetSolverOptions().Discount = float32(discount)
LuetCfg.GetSolverOptions().MaxAttempts = attempts
if concurrent {
LuetCfg.GetSolverOptions().Implementation = solver.ParallelSimple
} else {
LuetCfg.GetSolverOptions().Implementation = solver.SingleCoreSimple
}
Debug("Solver", LuetCfg.GetSolverOptions().CompactString())
// Load config protect configs
installer.LoadConfigProtectConfs(LuetCfg)
inst := installer.NewLuetInstaller(installer.LuetInstallerOptions{
Concurrency: LuetCfg.GetGeneral().Concurrency,
SolverOptions: *LuetCfg.GetSolverOptions(),
@@ -77,6 +88,7 @@ var uninstallCmd = &cobra.Command{
FullUninstall: full,
FullCleanUninstall: fullClean,
CheckConflicts: checkconflicts,
Ask: !yes,
})
if LuetCfg.GetSystem().DatabaseEngine == "boltdb" {
@@ -110,6 +122,8 @@ func init() {
uninstallCmd.Flags().Bool("full", false, "Attempts to remove as much packages as possible which aren't required (slow)")
uninstallCmd.Flags().Bool("conflictscheck", true, "Check if the package marked for deletion is required by other packages")
uninstallCmd.Flags().Bool("full-clean", false, "(experimental) Uninstall packages and all the other deps/revdeps of it.")
uninstallCmd.Flags().Bool("solver-concurrent", false, "Use concurrent solver (experimental)")
uninstallCmd.Flags().BoolP("yes", "y", false, "Don't ask questions")
RootCmd.AddCommand(uninstallCmd)
}


@@ -22,6 +22,7 @@ import (
installer "github.com/mudler/luet/pkg/installer"
. "github.com/mudler/luet/pkg/logger"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/solver"
"github.com/spf13/cobra"
)
@@ -38,6 +39,7 @@ var upgradeCmd = &cobra.Command{
LuetCfg.Viper.BindPFlag("solver.rate", cmd.Flags().Lookup("solver-rate"))
LuetCfg.Viper.BindPFlag("solver.max_attempts", cmd.Flags().Lookup("solver-attempts"))
LuetCfg.Viper.BindPFlag("force", cmd.Flags().Lookup("force"))
LuetCfg.Viper.BindPFlag("yes", cmd.Flags().Lookup("yes"))
},
Long: `Upgrades packages in parallel`,
Run: func(cmd *cobra.Command, args []string) {
@@ -63,14 +65,24 @@ var upgradeCmd = &cobra.Command{
universe, _ := cmd.Flags().GetBool("universe")
clean, _ := cmd.Flags().GetBool("clean")
sync, _ := cmd.Flags().GetBool("sync")
concurrent, _ := cmd.Flags().GetBool("solver-concurrent")
yes := LuetCfg.Viper.GetBool("yes")
LuetCfg.GetSolverOptions().Type = stype
LuetCfg.GetSolverOptions().LearnRate = float32(rate)
LuetCfg.GetSolverOptions().Discount = float32(discount)
LuetCfg.GetSolverOptions().MaxAttempts = attempts
if concurrent {
LuetCfg.GetSolverOptions().Implementation = solver.ParallelSimple
} else {
LuetCfg.GetSolverOptions().Implementation = solver.SingleCoreSimple
}
Debug("Solver", LuetCfg.GetSolverOptions().String())
// Load config protect configs
installer.LoadConfigProtectConfs(LuetCfg)
inst := installer.NewLuetInstaller(installer.LuetInstallerOptions{
Concurrency: LuetCfg.GetGeneral().Concurrency,
SolverOptions: *LuetCfg.GetSolverOptions(),
@@ -80,12 +92,9 @@ var upgradeCmd = &cobra.Command{
SolverUpgrade: universe,
RemoveUnavailableOnUpgrade: clean,
UpgradeNewRevisions: sync,
Ask: !yes,
})
inst.Repositories(repos)
_, err := inst.SyncRepositories(false)
if err != nil {
Fatal("Error: " + err.Error())
}
if LuetCfg.GetSystem().DatabaseEngine == "boltdb" {
systemDB = pkg.NewBoltDatabase(
@@ -94,8 +103,8 @@ var upgradeCmd = &cobra.Command{
systemDB = pkg.NewInMemoryDatabase(true)
}
system := &installer.System{Database: systemDB, Target: LuetCfg.GetSystem().Rootfs}
err = inst.Upgrade(system)
if err != nil {
if err := inst.Upgrade(system); err != nil {
Fatal("Error: " + err.Error())
}
},
@@ -118,6 +127,8 @@ func init() {
upgradeCmd.Flags().Bool("universe", false, "Use ONLY the SAT solver to compute upgrades (experimental)")
upgradeCmd.Flags().Bool("clean", false, "Try to drop removed packages (experimental, only when --universe is enabled)")
upgradeCmd.Flags().Bool("sync", false, "Upgrade packages with new revisions (experimental)")
upgradeCmd.Flags().Bool("solver-concurrent", false, "Use concurrent solver (experimental)")
upgradeCmd.Flags().BoolP("yes", "y", false, "Don't ask questions")
RootCmd.AddCommand(upgradeCmd)
}
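
Putting the new upgrade flags from this diff together (all of them are marked experimental except -y):

# let the SAT solver compute the whole upgrade, drop packages that are no
# longer available, and skip the confirmation prompt
luet upgrade --universe --clean -y

# only pick up new revisions of already-installed packages
luet upgrade --sync -y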

contrib/config/get_luet_root.sh (new executable file)

@@ -0,0 +1,37 @@
#!/bin/bash
set -ex
export LUET_NOLOCK=true
LUET_VERSION=0.8.6
LUET_ROOTFS=${LUET_ROOTFS:-/}
LUET_DATABASE_PATH=${LUET_DATABASE_PATH:-/var/luet/db}
LUET_DATABASE_ENGINE=${LUET_DATABASE_ENGINE:-boltdb}
LUET_CONFIG_PROTECT=${LUET_CONFIG_PROTECT:-1}
wget -q https://github.com/mudler/luet/releases/download/0.8.6/luet-0.8.6-linux-amd64 -O luet
chmod +x luet
mkdir -p /etc/luet/repos.conf.d || true
mkdir -p $LUET_DATABASE_PATH || true
mkdir -p /var/tmp/luet || true
if [ "${LUET_CONFIG_PROTECT}" = "1" ] ; then
mkdir -p /etc/luet/config.protect.d || true
wget -q https://raw.githubusercontent.com/mudler/luet/master/contrib/config/config.protect.d/01_etc.yml.example -O /etc/luet/config.protect.d/01_etc.yml
fi
wget -q https://raw.githubusercontent.com/mocaccinoOS/repository-index/master/packages/mocaccino-repository-index/mocaccino-repository-index.yml -O /etc/luet/repos.conf.d/mocaccino-repository-index.yml
cat > /etc/luet/luet.yaml <<EOF
general:
debug: false
system:
rootfs: ${LUET_ROOTFS}
database_path: "${LUET_DATABASE_PATH}"
database_engine: "${LUET_DATABASE_ENGINE}"
tmpdir_base: "/var/tmp/luet"
EOF
./luet install repository/luet repository/mocaccino-repository-index
./luet install system/luet system/luet-extensions
rm -rf luet
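
One way to run the bootstrap script, assuming it is fetched from the repository path shown above (the raw URL is an assumption) and customised through its environment variables:

# install luet and the default repositories into an alternate rootfs
curl -fsSL https://raw.githubusercontent.com/mudler/luet/master/contrib/config/get_luet_root.sh | \
  sudo LUET_ROOTFS=/mnt/target LUET_DATABASE_PATH=/mnt/target/var/luet/db sh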


@@ -69,6 +69,7 @@
# Default $TMPDIR/tmpluet
# tmpdir_base: "/tmp/tmpluet"
#
#
# ---------------------------------------------
# Repositories configurations directories.
# ---------------------------------------------
@@ -93,6 +94,11 @@
# annotation.
# config_protect_skip: false
#
# The paths used for load repositories and config
# protects are based on host rootfs.
# If set to false rootfs path is used as prefix.
# config_from_host: true
#
# System repositories
# ---------------------------------------------
# In alternative to define repositories files

go.mod

@@ -4,16 +4,19 @@ go 1.12
require (
github.com/DataDog/zstd v1.4.4 // indirect
github.com/Sabayon/pkgs-checker v0.6.3-0.20200912135508-97c41780e9b6
github.com/Sabayon/pkgs-checker v0.7.2
github.com/asdine/storm v0.0.0-20190418133842-e0f77eada154
github.com/briandowns/spinner v1.7.0
github.com/cavaliercoder/grab v2.0.0+incompatible
github.com/crillab/gophersat v1.1.9-0.20200211102949-9a8bf7f2f0a3
github.com/cavaliercoder/grab v1.0.1-0.20201108051000-98a5bfe305ec
github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765
github.com/docker/docker v17.12.0-ce-rc1.0.20200417035958-130b0bc6032c+incompatible
github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c // indirect
github.com/ecooper/qlearning v0.0.0-20160612200101-3075011a69fd
github.com/fsouza/go-dockerclient v1.6.4
github.com/ghodss/yaml v1.0.0
github.com/hashicorp/go-version v1.2.0
github.com/jedib0t/go-pretty v4.3.0+incompatible
github.com/jedib0t/go-pretty/v6 v6.0.5
github.com/jinzhu/copier v0.0.0-20180308034124-7e38e58719c3
github.com/klauspost/pgzip v1.2.1
github.com/knqyf263/go-deb-version v0.0.0-20190517075300-09fca494f03d
@@ -21,29 +24,28 @@ require (
github.com/kyokomi/emoji v2.1.0+incompatible
github.com/logrusorgru/aurora v0.0.0-20190417123914-21d75270181e
github.com/marcsauter/single v0.0.0-20181104081128-f8bf46f26ec0
github.com/mattn/go-isatty v0.0.10 // indirect
github.com/moby/sys/mount v0.1.1-0.20200320164225-6154f11e6840 // indirect
github.com/mudler/cobra-extensions v0.0.0-20200612154940-31a47105fe3d
github.com/mudler/docker-companion v0.4.6-0.20200418093252-41846f112d87
github.com/onsi/ginkgo v1.12.1
github.com/onsi/gomega v1.10.0
github.com/mudler/go-pluggable v0.0.0-20201113184918-d36448fc8f82
github.com/mudler/topsort v0.0.0-20201103161459-db5c7901c290
github.com/onsi/ginkgo v1.14.2
github.com/onsi/gomega v1.10.3
github.com/otiai10/copy v1.2.1-0.20200916181228-26f84a0b1578
github.com/pelletier/go-toml v1.6.0 // indirect
github.com/philopon/go-toposort v0.0.0-20170620085441-9be86dbd762f
github.com/pkg/errors v0.9.1
github.com/schollz/progressbar/v3 v3.7.1
github.com/spf13/cobra v1.0.0
github.com/spf13/viper v1.6.3
github.com/stevenle/topsort v0.0.0-20130922064739-8130c1d7596b
go.etcd.io/bbolt v1.3.4
go.uber.org/atomic v1.5.1 // indirect
go.uber.org/multierr v1.4.0 // indirect
go.uber.org/zap v1.13.0
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f // indirect
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553 // indirect
gopkg.in/yaml.v2 v2.2.8
gopkg.in/yaml.v2 v2.3.0
gotest.tools/v3 v3.0.2 // indirect
helm.sh/helm/v3 v3.3.4
mvdan.cc/sh/v3 v3.0.0-beta1
)
replace github.com/docker/docker => github.com/Luet-lab/moby v17.12.0-ce-rc1.0.20200605210607-749178b8f80d+incompatible

go.sum

@@ -42,8 +42,8 @@ github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbt
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/Sabayon/pkgs-checker v0.6.3-0.20200912135508-97c41780e9b6 h1:k4MhaAzNNMqNz2CCbVpbhBxhiXDNtTbk2BAAj/CzMEo=
github.com/Sabayon/pkgs-checker v0.6.3-0.20200912135508-97c41780e9b6/go.mod h1:GFGM6ZzSE5owdGgjLnulj0+Vt9UTd5LFGmB2AOVPYrE=
github.com/Sabayon/pkgs-checker v0.7.2 h1:mh53u5D7FTCeBJevYQA9cCxAWGTSuKqw7m/x7GsQVb0=
github.com/Sabayon/pkgs-checker v0.7.2/go.mod h1:GFGM6ZzSE5owdGgjLnulj0+Vt9UTd5LFGmB2AOVPYrE=
github.com/Sereal/Sereal v0.0.0-20181211220259-509a78ddbda3 h1:Xu7z47ZiE/J+sKXHZMGxEor/oY2q6dq51fkO0JqdSwY=
github.com/Sereal/Sereal v0.0.0-20181211220259-509a78ddbda3/go.mod h1:D0JMgToj/WdxCgd30Kc1UcA9E+WdZoJqeVOuYW7iTBM=
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
@@ -71,6 +71,7 @@ github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj
github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A=
github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
github.com/asaskevich/govalidator v0.0.0-20200428143746-21a406dcc535 h1:4daAzAu0S6Vi7/lbWECcX0j45yZReDZ56BQsrVBOEEY=
github.com/asaskevich/govalidator v0.0.0-20200428143746-21a406dcc535/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg=
github.com/asdine/storm v0.0.0-20190418133842-e0f77eada154 h1:2lbe+CPe6eQf2EA3jjLdLFZKGv3cbYqVIDjKnzcyOXg=
github.com/asdine/storm v0.0.0-20190418133842-e0f77eada154/go.mod h1:cMLKpjHSP4q0P133fV15ojQgwWWB2IMv+hrFsmBF/wI=
@@ -97,17 +98,21 @@ github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8n
github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50=
github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE=
github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ=
github.com/cavaliercoder/grab v2.0.0+incompatible h1:wZHbBQx56+Yxjx2TCGDcenhh3cJn7cCLMfkEPmySTSE=
github.com/cavaliercoder/grab v2.0.0+incompatible/go.mod h1:tTBkfNqSBfuMmMBFaO2phgyhdYhiZQ/+iXCZDzcDsMI=
github.com/cavaliercoder/grab v1.0.1-0.20201108051000-98a5bfe305ec h1:4XvMn0XuV7qxCH22gbnR79r+xTUaLOSA0GW/egpO3SQ=
github.com/cavaliercoder/grab v1.0.1-0.20201108051000-98a5bfe305ec/go.mod h1:NbXoa59CCAGqtRm7kRrcZIk2dTCJMRVF8QI3BOD7isY=
github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5/go.mod h1:/iP1qXHoty45bqomnu2LM+VVyAEdWN+vtSHGlQgyxbw=
github.com/chuckpreslar/emission v0.0.0-20170206194824-a7ddd980baf9 h1:xz6Nv3zcwO2Lila35hcb0QloCQsc38Al13RNEzWRpX4=
github.com/chuckpreslar/emission v0.0.0-20170206194824-a7ddd980baf9/go.mod h1:2wSM9zJkl1UQEFZgSd68NfCgRz1VL1jzy/RjCg+ULrs=
github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0 h1:sDMmm+q/3+BukdIpxwO365v/Rbspp2Nt5XntgQRXq8Q=
github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM=
github.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f h1:tSNMc+rJDfmYntojat8lljbt1mgKNpTxUZJsSzJ9Y1s=
github.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f/go.mod h1:OApqhQ4XNSNC13gXIwDjhOQxjWa/NxkwZXJ1EvqT0ko=
github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw=
@@ -146,8 +151,8 @@ github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:ma
github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/crillab/gophersat v1.1.9-0.20200211102949-9a8bf7f2f0a3 h1:HO63LCf9kTXQgUnlvFeS2qSDQhZ/cLP8DAJO89CythY=
github.com/crillab/gophersat v1.1.9-0.20200211102949-9a8bf7f2f0a3/go.mod h1:S91tHga1PCZzYhCkStwZAhvp1rCc+zqtSi55I+vDWGc=
github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765 h1:EO5Dm7O50aaEwv1GTFWNLAj7vNQ1OjW3+DeJCy1vTMk=
github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765/go.mod h1:S91tHga1PCZzYhCkStwZAhvp1rCc+zqtSi55I+vDWGc=
github.com/cyphar/filepath-securejoin v0.2.2 h1:jCwT2GTP+PY5nBz3c/YL5PAIbusElVrPujOBSCj8xRg=
github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -201,6 +206,8 @@ github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVB
github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fsouza/go-dockerclient v1.6.4 h1:B+L+1lz1LUrNgEUUh8PSG76s70EYC49ssv2xvTefTMM=
github.com/fsouza/go-dockerclient v1.6.4/go.mod h1:GOdftxWLWIbIWKbIMDroKFJzPdg6Iw7r+jX1DDZdVsA=
github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
@@ -224,6 +231,7 @@ github.com/go-openapi/analysis v0.19.2/go.mod h1:3P1osvZa9jKjb8ed2TPng3f0i/UY9sn
github.com/go-openapi/analysis v0.19.5/go.mod h1:hkEAkxagaIvIP7VTn8ygJNkd4kAYON2rCu0v0ObL0AU=
github.com/go-openapi/errors v0.17.0/go.mod h1:LcZQpmvG4wyF5j4IhA73wkLFQg+QJXOQHVjmcZxhka0=
github.com/go-openapi/errors v0.18.0/go.mod h1:LcZQpmvG4wyF5j4IhA73wkLFQg+QJXOQHVjmcZxhka0=
github.com/go-openapi/errors v0.19.2 h1:a2kIyV3w+OS3S97zxUndRVD46+FhGOUBDFY7nmu4CsY=
github.com/go-openapi/errors v0.19.2/go.mod h1:qX0BLWsyaKfvhluLejVpVNwNRdXZhEbTA4kxxpKBC94=
github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0=
github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
@@ -251,6 +259,7 @@ github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8
github.com/go-openapi/strfmt v0.17.0/go.mod h1:P82hnJI0CXkErkXi8IKjPbNBM6lV6+5pLP5l494TcyU=
github.com/go-openapi/strfmt v0.18.0/go.mod h1:P82hnJI0CXkErkXi8IKjPbNBM6lV6+5pLP5l494TcyU=
github.com/go-openapi/strfmt v0.19.0/go.mod h1:+uW+93UVvGGq2qGaZxdDeJqSAqBqBdl+ZPMF/cC8nDY=
github.com/go-openapi/strfmt v0.19.3 h1:eRfyY5SkaNJCAwmmMcADjY31ow9+N7MCLW7oRkbsINA=
github.com/go-openapi/strfmt v0.19.3/go.mod h1:0yX7dbo8mKIvc3XSKp7MNfxw4JytCfCD6+bY1AVL9LU=
github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
@@ -262,6 +271,7 @@ github.com/go-openapi/validate v0.19.2/go.mod h1:1tRCw7m3jtI8eNWEEliiAqUIcBztB2K
github.com/go-openapi/validate v0.19.5/go.mod h1:8DJv2CVJQ6kGNpFW6eV9N3JviE1C85nY1c2z52x1Gk4=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI=
github.com/gobuffalo/envy v1.7.1/go.mod h1:FurDp9+EDPE4aIUS3ZLyD+7/9fpx7YRt/ukY6jIHf0w=
@@ -294,6 +304,13 @@ github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db h1:woRePGFeVFfLKN/pOkfl+p/TAqKOfFu+7KPlMVpok/w=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golangplus/bytes v0.0.0-20160111154220-45c989fe5450/go.mod h1:Bk6SMAONeMXrxql8uvOKuAZSu8aM5RUGv+1C6IJaEho=
@@ -379,6 +396,10 @@ github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJ
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
github.com/jedib0t/go-pretty v4.3.0+incompatible h1:CGs8AVhEKg/n9YbUenWmNStRW2PHJzaeDodcfvRAbIo=
github.com/jedib0t/go-pretty v4.3.0+incompatible/go.mod h1:XemHduiw8R651AF9Pt4FwCTKeG3oo7hrHJAoznj9nag=
github.com/jedib0t/go-pretty/v6 v6.0.5 h1:oOo0/jSb3NEYKT6l1hhFXoX2UZnkanMuCE2DVT1mqnE=
github.com/jedib0t/go-pretty/v6 v6.0.5/go.mod h1:MTr6FgcfNdnN5wPVBzJ6mhJeDyiF0yBvS2TMXEV/XSU=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jinzhu/copier v0.0.0-20180308034124-7e38e58719c3 h1:sHsPfNMAG70QAvKbddQ0uScZCHQoZsT5NykGRCeeeIs=
github.com/jinzhu/copier v0.0.0-20180308034124-7e38e58719c3/go.mod h1:yL958EeXv8Ylng6IfnvG4oflryUi3vgA3xPs9hmII1s=
@@ -398,6 +419,7 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213/go.mod h1:vNUNkEQ1e29fT/6vq2aBdFsgNPmy8qMdSay1npru+Sw=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
@@ -459,18 +481,23 @@ github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNx
github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
github.com/mattn/go-isatty v0.0.10 h1:qxFzApOv4WsAL965uUPIsXzAKCZxN2p9UqdhFS4ZW10=
github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84=
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-oci8 v0.0.7/go.mod h1:wjDx6Xm9q7dFtHJvIlrI99JytznLw5wQ4R+9mNXJwGI=
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-shellwords v1.0.10/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.12.0 h1:u/x3mp++qUxvYfulZ4HKOvVO0JWhk7HtE8lWhbGz/Do=
github.com/mattn/go-sqlite3 v1.12.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
github.com/mitchellh/copystructure v1.0.0 h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ=
github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
@@ -503,6 +530,10 @@ github.com/mudler/cobra-extensions v0.0.0-20200612154940-31a47105fe3d h1:fKh+rvw
github.com/mudler/cobra-extensions v0.0.0-20200612154940-31a47105fe3d/go.mod h1:puRUWSwyecW2V355tKncwPVPRAjQBduPsFjG0mrV/Nw=
github.com/mudler/docker-companion v0.4.6-0.20200418093252-41846f112d87 h1:mGz7T8KvmHH0gLWPI5tQne8xl2cO3T8wrrb6Aa16Jxo=
github.com/mudler/docker-companion v0.4.6-0.20200418093252-41846f112d87/go.mod h1:1w4zI1LYXDeiUXqedPcrT5eQJnmKR6dbg5iJMgSIP/Y=
github.com/mudler/go-pluggable v0.0.0-20201113184918-d36448fc8f82 h1:Hkefw2tzoKATVUTFsCtDlUnY180+OE851qGbq45ATxk=
github.com/mudler/go-pluggable v0.0.0-20201113184918-d36448fc8f82/go.mod h1:4P/ULate+2QxoAQtojaRjyO5VGMhV0KLnSdAS8nuBbo=
github.com/mudler/topsort v0.0.0-20201103161459-db5c7901c290 h1:426hFyXMpXeqIeGJn2cGAW9ogvM2Jf+Jv23gtVPvBLM=
github.com/mudler/topsort v0.0.0-20201103161459-db5c7901c290/go.mod h1:uP5BBgFxq2wNWo7n1vnY5SSbgL0WDshVJrOO12tZ/lA=
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
@@ -531,6 +562,8 @@ github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+
github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.1 h1:mFwc4LvZ0xpSvDZ3E+k8Yte0hLOMxXUlP+yXtJqkYfQ=
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
github.com/onsi/ginkgo v1.14.2 h1:8mVmC9kjFFmA8H4pKMUhcblgifdkOIXPvbhN1T36q1M=
github.com/onsi/ginkgo v1.14.2/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY=
github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
@@ -539,6 +572,9 @@ github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1Cpa
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.10.0 h1:Gwkk+PTu/nfOwNMtUB/mRUv0X7ewW5dO4AERT1ThVKo=
github.com/onsi/gomega v1.10.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA=
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/onsi/gomega v1.10.3 h1:gph6h/qe9GSUw1NhH1gp+qb+h8rXD8Cy60Z32Qw3ELA=
github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/openSUSE/umoci v0.1.1-0.20191030112807-c0dd46ae078f h1:G9hyzNrFbTgp9KEoGRcNYxAT41lo7hDy9oxXT1Y7WHI=
github.com/openSUSE/umoci v0.1.1-0.20191030112807-c0dd46ae078f/go.mod h1:3p4KA5nwyY65lVmQZxv7tm0YEylJ+t1fY91ORsVXv58=
@@ -589,7 +625,6 @@ github.com/philopon/go-toposort v0.0.0-20170620085441-9be86dbd762f h1:WyCn68lTiy
github.com/philopon/go-toposort v0.0.0-20170620085441-9be86dbd762f/go.mod h1:/iRjX3DdSK956SzsUdV55J+wIsQ+2IBWmBrB4RvZfk4=
github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/diff v0.0.0-20190930165518-531926345625/go.mod h1:kFj35MyHn14a6pIgWhm46KVjJr5CHys3eEYxkuKD1EI=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
@@ -638,7 +673,6 @@ github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.3.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.4.0/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.5.0/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rootless-containers/proto v0.1.0 h1:gS1JOMEtk1YDYHCzBAf/url+olMJbac7MTrgSeP6zh4=
github.com/rootless-containers/proto v0.1.0/go.mod h1:vgkUFZbQd0gcE/K/ZwtE4MYjZPu0UNHLXIQxhyqAFh8=
github.com/rubenv/sql-migrate v0.0.0-20200616145509-8d140a17f351/go.mod h1:DCgfY80j8GYL7MLEfvcpSFvjD0L5yZq/aZUJmhZklyg=
@@ -649,6 +683,8 @@ github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/schollz/progressbar/v3 v3.7.1 h1:aQR/t6d+1nURSdoMn6c7n0vJi5xQ3KndpF0n7R5wrik=
github.com/schollz/progressbar/v3 v3.7.1/go.mod h1:CG/f0JmacksUc6TkZToO7tVq4t03zIQSQUtTd7F9GR4=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
@@ -698,8 +734,6 @@ github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DM
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
github.com/spf13/viper v1.6.3 h1:pDDu1OyEDTKzpJwdq4TiuLyMsUgRa/BT5cn5O62NoHs=
github.com/spf13/viper v1.6.3/go.mod h1:jUMtyi0/lB5yZH/FjyGAoH7IMNrIhlBf6pXZmbMDvzw=
github.com/stevenle/topsort v0.0.0-20130922064739-8130c1d7596b h1:wJSBFlabo96ySlmSX0a02WAPyGxagzTo9c5sk3sHP3E=
github.com/stevenle/topsort v0.0.0-20130922064739-8130c1d7596b/go.mod h1:YIyOMT17IKD8FbLO8RfCJZd2qAZiOnIfuYePIeESwWc=
github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw=
github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw=
github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI=
@@ -716,6 +750,7 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tj/assert v0.0.0-20171129193455-018094318fb0/go.mod h1:mZ9/Rh9oLWpLLDRpvE+3b7gP/C2YyLFYxNmcLnPTMe0=
github.com/tj/go-elastic v0.0.0-20171221160941-36157cbbebc2/go.mod h1:WjeM0Oo1eNAjXGDx2yma7uG2XoyRZTq1uv3M/o7imD0=
@@ -756,6 +791,7 @@ go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg=
go.mongodb.org/mongo-driver v1.0.3/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
go.mongodb.org/mongo-driver v1.1.1/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
go.mongodb.org/mongo-driver v1.1.2 h1:jxcFYjlkl8xaERsgLo+RNquI0epW6zuy/ZRQs6jnrFA=
go.mongodb.org/mongo-driver v1.1.2/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
@@ -793,7 +829,6 @@ golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191002192127-34f69633bfdc/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200128174031-69ecbb4d6d5d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975 h1:/Tl7pH94bvbAAHBdZJT947M/+gp0+CqQXDtMRC0fseo=
@@ -801,6 +836,8 @@ golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9 h1:umElSU9WZirRdgu2yFHY0ayQkEnKiOC1TtM3fWXFnoU=
golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
@@ -837,8 +874,9 @@ golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20190912160710-24e19bdeb0f2 h1:4dVFTC832rPn4pomLSz1vA+are2+dU19w1H8OngV7nc=
golang.org/x/net v0.0.0-20190912160710-24e19bdeb0f2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553 h1:efeOvDhwQ29Dj3SdAV/MJf8oukgn+8D8WgaCaRMchF8=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0 h1:wBouT66WTYFXdxfVdz9sVWARVd/2vfGcmI45D2gj45M=
golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0=
@@ -851,6 +889,7 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180816055513-1c9583448a9c/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -877,19 +916,27 @@ golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190913121621-c3b328c6e5a7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191008105621-543471e840be h1:QAcqgptGM8IQBC9K/RC4o+O9YmqEm0diQn9QmZw/0mU=
golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae h1:/WDfKMnPU+m5M4xB+6x4kaepxRw6jWvR5iDRdvjHgy8=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201113135734-0a15ea8d9b02 h1:5Ftd3YbC/kANXWCBjvppvUmv1BMakgFcBKA7MpYYp4M=
golang.org/x/sys v0.0.0-20201113135734-0a15ea8d9b02/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -958,6 +1005,13 @@ google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1 h1:zvIju4sqAGvwKspUQOhwnpcqSbzi7/H6QomNNjTL4sk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.23.0 h1:4MY060fB1DLGMB/7MBTLnwQUY6+F09GEiz6SsrNqyzM=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -990,6 +1044,8 @@ gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
@@ -1028,9 +1084,6 @@ k8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk=
k8s.io/metrics v0.18.8/go.mod h1:j7JzZdiyhLP2BsJm/Fzjs+j5Lb1Y7TySjhPWqBPwRXA=
k8s.io/utils v0.0.0-20200324210504-a9aa75ae1b89 h1:d4vVOjXm687F1iLSP2q3lyPPuyvTUt3aVoBpi2DqRsU=
k8s.io/utils v0.0.0-20200324210504-a9aa75ae1b89/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew=
mvdan.cc/editorconfig v0.1.1-0.20191109213504-890940e3f00e/go.mod h1:Ge4atmRUYqueGppvJ7JNrtqpqokoJEFxYbP0Z+WeKS8=
mvdan.cc/sh/v3 v3.0.0-beta1 h1:UqiwBEXEPzelaGxuvixaOtzc7WzKtrElePJ8HqvW7K8=
mvdan.cc/sh/v3 v3.0.0-beta1/go.mod h1:rBIndNJFYPp8xSppiZcGIk6B5d1g3OEARxEaXjPxwVI=
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.7/go.mod h1:PHgbrJT7lCHcxMU+mDHEm+nx46H4zuuHZkDP6icnhu0=
sigs.k8s.io/kustomize v2.0.3+incompatible/go.mod h1:MkjgH3RdOWrievjo6c9T245dYlB5QeXV4WCbnt/PEpU=
sigs.k8s.io/structured-merge-diff/v3 v3.0.0-20200116222232-67a7b8c61874/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw=

46
pkg/bus/events.go Normal file
View File

@@ -0,0 +1,46 @@
package bus
import (
"github.com/mudler/go-pluggable"
)
var (
// Package events
// EventPackageInstall is the event fired when a new package is being installed
EventPackageInstall pluggable.EventType = "package.install"
// EventPackageUnInstall is the event fired when a package is being uninstalled
EventPackageUnInstall pluggable.EventType = "package.uninstall"
// Package build
// EventPackagePreBuild is the event fired before a package is being built
EventPackagePreBuild pluggable.EventType = "package.pre.build"
// EventPackagePreBuildArtifact is the event fired before a package artifact is being built
EventPackagePreBuildArtifact pluggable.EventType = "package.pre.build_artifact"
// EventPackagePostBuildArtifact is the event fired after a package artifact was built
EventPackagePostBuildArtifact pluggable.EventType = "package.post.build_artifact"
// EventPackagePostBuild is the event fired after a package was built
EventPackagePostBuild pluggable.EventType = "package.post.build"
// Repository events
// EventRepositoryPreBuild is the event fired before a repository is being built
EventRepositoryPreBuild pluggable.EventType = "repository.pre.build"
// EventRepositoryPostBuild is the event fired after a repository was built
EventRepositoryPostBuild pluggable.EventType = "repository.post.build"
)
// Manager is the bus instance manager, which subscribes plugins to events emitted by Luet
var Manager *pluggable.Manager = pluggable.NewManager(
[]pluggable.EventType{
EventPackageInstall,
EventPackageUnInstall,
EventPackagePreBuild,
EventPackagePreBuildArtifact,
EventPackagePostBuildArtifact,
EventPackagePostBuild,
EventRepositoryPreBuild,
EventRepositoryPostBuild,
},
)
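
The rest of this changeset emits on this bus through Manager.Publish, as in the artifact and compiler hunks further down. A minimal sketch of firing one of these events, assuming only the Publish(eventType, payload) call pattern visible in this diff; the payload struct and package name are made up for illustration:

package main

import (
	bus "github.com/mudler/luet/pkg/bus"
)

func main() {
	// Publish accepts an arbitrary payload, mirroring how the hunks below
	// pass structs and artifacts; this struct is purely illustrative.
	bus.Manager.Publish(bus.EventPackageInstall, struct {
		Name string
	}{Name: "app-admin/enman"})
}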

View File

@@ -34,6 +34,7 @@ import (
"strings"
"sync"
bus "github.com/mudler/luet/pkg/bus"
. "github.com/mudler/luet/pkg/config"
"github.com/mudler/luet/pkg/helpers"
. "github.com/mudler/luet/pkg/logger"
@@ -170,6 +171,8 @@ func (a *PackageArtifact) WriteYaml(dst string) error {
return errors.Wrap(err, "While marshalling for PackageArtifact YAML")
}
bus.Manager.Publish(bus.EventPackagePreBuildArtifact, a)
mangle, err := NewPackageArtifactFromYaml(data)
if err != nil {
return errors.Wrap(err, "Generated invalid artifact")
@@ -191,6 +194,7 @@ func (a *PackageArtifact) WriteYaml(dst string) error {
return errors.Wrap(err, "While writing PackageArtifact YAML")
}
//a.CompileSpec.GetPackage().SetPath(p)
bus.Manager.Publish(bus.EventPackagePostBuildArtifact, a)
return nil
}
@@ -330,35 +334,25 @@ func tarModifierWrapperFunc(dst, path string, header *tar.Header, content io.Rea
func (a *PackageArtifact) GetProtectFiles() []string {
ans := []string{}
annotationDir := ""
if !LuetCfg.ConfigProtectSkip &&
LuetCfg.GetConfigProtectConfFiles() != nil &&
len(LuetCfg.GetConfigProtectConfFiles()) > 0 {
if !LuetCfg.ConfigProtectSkip {
for _, file := range a.Files {
for _, conf := range LuetCfg.GetConfigProtectConfFiles() {
for _, dir := range conf.Directories {
// Note file is without / at begin.
if strings.HasPrefix("/"+file, filepath.Clean(dir)) {
// docker archive modifier works with path without / at begin.
ans = append(ans, file)
goto nextFile
}
}
// a.CompileSpec could be nil when artifact.Unpack is used for tree tarball
if a.CompileSpec != nil &&
a.CompileSpec.GetPackage().HasAnnotation(string(pkg.ConfigProtectAnnnotation)) {
dir, ok := a.CompileSpec.GetPackage().GetAnnotations()[string(pkg.ConfigProtectAnnnotation)]
if ok {
annotationDir = dir
}
if a.CompileSpec.GetPackage().HasAnnotation(string(pkg.ConfigProtectAnnnotation)) {
dir, ok := a.CompileSpec.GetPackage().GetAnnotations()[string(pkg.ConfigProtectAnnnotation)]
if ok {
if strings.HasPrefix("/"+file, filepath.Clean(dir)) {
ans = append(ans, file)
goto nextFile
}
}
}
nextFile:
}
// TODO: check whether to skip this when a.CompileSpec is nil
cp := NewConfigProtect(annotationDir)
cp.Map(a.Files)
// NOTE: for unpack we need file paths without the initial /
ans = cp.GetProtectFiles(false)
}
return ans
@@ -526,7 +520,7 @@ func worker(i int, wg *sync.WaitGroup, s <-chan CopyJob) {
}
// ExtractArtifactFromDelta extracts deltas from ArtifactLayer from an image in tar format
func ExtractArtifactFromDelta(src, dst string, layers []ArtifactLayer, concurrency int, keepPerms bool, includes []string, t CompressionImplementation) (Artifact, error) {
func ExtractArtifactFromDelta(src, dst string, layers []ArtifactLayer, concurrency int, keepPerms bool, includes []string, excludes []string, t CompressionImplementation) (Artifact, error) {
archive, err := LuetCfg.GetSystem().TempDir("archive")
if err != nil {
@@ -556,7 +550,8 @@ func ExtractArtifactFromDelta(src, dst string, layers []ArtifactLayer, concurren
}
// Handle includes in spec. If specified they filter what gets in the package
if len(includes) > 0 {
if len(includes) > 0 && len(excludes) == 0 {
var includeRegexp []*regexp.Regexp
for _, i := range includes {
r, e := regexp.Compile(i)
@@ -584,6 +579,81 @@ func ExtractArtifactFromDelta(src, dst string, layers []ArtifactLayer, concurren
Debug("File ", a.Name, " deleted")
}
}
} else if len(includes) == 0 && len(excludes) != 0 {
var excludeRegexp []*regexp.Regexp
for _, i := range excludes {
r, e := regexp.Compile(i)
if e != nil {
Warning("Failed compiling regex:", e)
continue
}
excludeRegexp = append(excludeRegexp, r)
}
for _, l := range layers {
// Consider d.Additions (and d.Changes? - warn at least) only
ADD:
for _, a := range l.Diffs.Additions {
for _, i := range excludeRegexp {
if i.MatchString(a.Name) {
continue ADD
}
}
toCopy <- CopyJob{Src: filepath.Join(src, a.Name), Dst: filepath.Join(archive, a.Name), Artifact: a.Name}
}
for _, a := range l.Diffs.Changes {
Debug("File ", a.Name, " changed")
}
for _, a := range l.Diffs.Deletions {
Debug("File ", a.Name, " deleted")
}
}
} else if len(includes) != 0 && len(excludes) != 0 {
var includeRegexp []*regexp.Regexp
for _, i := range includes {
r, e := regexp.Compile(i)
if e != nil {
Warning("Failed compiling regex:", e)
continue
}
includeRegexp = append(includeRegexp, r)
}
var excludeRegexp []*regexp.Regexp
for _, i := range excludes {
r, e := regexp.Compile(i)
if e != nil {
Warning("Failed compiling regex:", e)
continue
}
excludeRegexp = append(excludeRegexp, r)
}
for _, l := range layers {
// Consider d.Additions (and d.Changes? - warn at least) only
EXCLUDES:
for _, a := range l.Diffs.Additions {
for _, i := range includeRegexp {
if i.MatchString(a.Name) {
for _, e := range excludeRegexp {
if e.MatchString(a.Name) {
continue EXCLUDES
}
}
toCopy <- CopyJob{Src: filepath.Join(src, a.Name), Dst: filepath.Join(archive, a.Name), Artifact: a.Name}
continue EXCLUDES
}
}
}
for _, a := range l.Diffs.Changes {
Debug("File ", a.Name, " changed")
}
for _, a := range l.Diffs.Deletions {
Debug("File ", a.Name, " deleted")
}
}
} else {
// Otherwise just grab all
for _, l := range layers {

View File

@@ -21,6 +21,7 @@ import (
"path/filepath"
. "github.com/mudler/luet/pkg/compiler/backend"
"github.com/mudler/luet/pkg/solver"
. "github.com/mudler/luet/pkg/compiler"
helpers "github.com/mudler/luet/pkg/helpers"
@@ -41,7 +42,7 @@ var _ = Describe("Artifact", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "enman", Category: "app-admin", Version: "1.4.0"})
Expect(err).ToNot(HaveOccurred())
@@ -95,6 +96,7 @@ ENV PACKAGE_CATEGORY=app-admin`))
Expect(err).ToNot(HaveOccurred())
Expect(dockerfile).To(Equal(`
FROM luet/base
WORKDIR /luetbuild
ENV PACKAGE_NAME=enman
ENV PACKAGE_VERSION=1.4.0
ENV PACKAGE_CATEGORY=app-admin
@@ -111,21 +113,25 @@ RUN echo bar > /test2`))
diffs, err := b.Changes(filepath.Join(tmpdir2, "output1.tar"), filepath.Join(tmpdir, "output2.tar"))
Expect(err).ToNot(HaveOccurred())
artifacts := []ArtifactNode{}
if os.Getenv("DOCKER_BUILDKIT") == "1" {
artifacts = append(artifacts, ArtifactNode{Name: "/etc/resolv.conf", Size: 0})
}
artifacts = append(artifacts, ArtifactNode{Name: "/test", Size: 4})
artifacts = append(artifacts, ArtifactNode{Name: "/test2", Size: 4})
Expect(diffs).To(Equal(
[]ArtifactLayer{{
FromImage: filepath.Join(tmpdir2, "output1.tar"),
ToImage: filepath.Join(tmpdir, "output2.tar"),
Diffs: ArtifactDiffs{
Additions: []ArtifactNode{
{Name: "/test", Size: 4},
{Name: "/test2", Size: 4},
},
Additions: artifacts,
},
}}))
err = b.ExtractRootfs(CompilerBackendOptions{SourcePath: filepath.Join(tmpdir, "output2.tar"), Destination: rootfs}, false)
Expect(err).ToNot(HaveOccurred())
artifact, err := ExtractArtifactFromDelta(rootfs, filepath.Join(tmpdir, "package.tar"), diffs, 2, false, []string{}, None)
artifact, err := ExtractArtifactFromDelta(rootfs, filepath.Join(tmpdir, "package.tar"), diffs, 2, false, []string{}, []string{}, None)
Expect(err).ToNot(HaveOccurred())
Expect(helpers.Exists(filepath.Join(tmpdir, "package.tar"))).To(BeTrue())
err = helpers.Untar(artifact.GetPath(), unpacked, false)

View File

@@ -24,6 +24,7 @@ import (
"path/filepath"
"strings"
docker "github.com/fsouza/go-dockerclient"
capi "github.com/mudler/docker-companion/api"
"github.com/mudler/luet/pkg/compiler"
@@ -45,6 +46,7 @@ func (*SimpleDocker) BuildImage(opts compiler.CompilerBackendOptions) error {
name := opts.ImageName
path := opts.SourcePath
dockerfileName := opts.DockerFileName
buildarg := []string{"build", "-f", dockerfileName, "-t", name, "."}
Debug(":whale2: Building image " + name)
@@ -56,6 +58,21 @@ func (*SimpleDocker) BuildImage(opts compiler.CompilerBackendOptions) error {
}
Info(":whale: Building image " + name + " done")
if os.Getenv("DOCKER_SQUASH") == "true" {
Info(":whale: Squashing image " + name)
var client *docker.Client
client, err = docker.NewClientFromEnv()
if err != nil {
return errors.Wrap(err, "could not connect to the Docker daemon")
}
err = capi.Squash(client, name, name)
if err != nil {
return errors.Wrap(err, "Failed squashing image")
}
Info(":whale: Squashing image " + name + " done")
}
if config.LuetCfg.GetGeneral().ShowBuildOutput {
Info(string(out))
} else {
@@ -95,7 +112,7 @@ func (*SimpleDocker) ImageExists(imagename string) bool {
cmd := exec.Command("docker", buildarg...)
out, err := cmd.CombinedOutput()
if err != nil {
Warning("Image not present")
Debug("Image not present")
Debug(string(out))
return false
}
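
For reference, the DOCKER_SQUASH branch added above is small enough to read in isolation; a rough sketch of the same squash step on its own, assuming only the docker.NewClientFromEnv and capi.Squash(client, source, target) calls already used in this file (the image name is a placeholder):

package main

import (
	"log"

	docker "github.com/fsouza/go-dockerclient"
	capi "github.com/mudler/docker-companion/api"
)

// squashImage collapses the image's layers in place, as the backend does
// when DOCKER_SQUASH=true is set in the environment.
func squashImage(name string) error {
	client, err := docker.NewClientFromEnv() // connect to the local Docker daemon
	if err != nil {
		return err
	}
	return capi.Squash(client, name, name)
}

func main() {
	if err := squashImage("luet/base:latest"); err != nil { // placeholder image name
		log.Fatal(err)
	}
}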

View File

@@ -18,6 +18,7 @@ package backend_test
import (
. "github.com/mudler/luet/pkg/compiler"
. "github.com/mudler/luet/pkg/compiler/backend"
"github.com/mudler/luet/pkg/solver"
"io/ioutil"
"os"
@@ -40,7 +41,7 @@ var _ = Describe("Docker backend", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "enman", Category: "app-admin", Version: "1.4.0"})
Expect(err).ToNot(HaveOccurred())
@@ -86,6 +87,7 @@ ENV PACKAGE_CATEGORY=app-admin`))
Expect(err).ToNot(HaveOccurred())
Expect(dockerfile).To(Equal(`
FROM luet/base
WORKDIR /luetbuild
ENV PACKAGE_NAME=enman
ENV PACKAGE_VERSION=1.4.0
ENV PACKAGE_CATEGORY=app-admin
@@ -100,15 +102,19 @@ RUN echo bar > /test2`))
Expect(b.ImageDefinitionToTar(opts)).ToNot(HaveOccurred())
Expect(helpers.Exists(filepath.Join(tmpdir, "output2.tar"))).To(BeTrue())
artifacts := []ArtifactNode{}
if os.Getenv("DOCKER_BUILDKIT") == "1" {
artifacts = append(artifacts, ArtifactNode{Name: "/etc/resolv.conf", Size: 0})
}
artifacts = append(artifacts, ArtifactNode{Name: "/test", Size: 4})
artifacts = append(artifacts, ArtifactNode{Name: "/test2", Size: 4})
Expect(b.Changes(filepath.Join(tmpdir2, "output1.tar"), filepath.Join(tmpdir, "output2.tar"))).To(Equal(
[]ArtifactLayer{{
FromImage: filepath.Join(tmpdir2, "output1.tar"),
ToImage: filepath.Join(tmpdir, "output2.tar"),
Diffs: ArtifactDiffs{
Additions: []ArtifactNode{
{Name: "/test", Size: 4},
{Name: "/test2", Size: 4},
},
Additions: artifacts,
},
}}))

View File

@@ -21,13 +21,14 @@ import (
"os"
"path/filepath"
"github.com/ghodss/yaml"
"regexp"
"strings"
"sync"
"time"
bus "github.com/mudler/luet/pkg/bus"
yaml "gopkg.in/yaml.v2"
"github.com/mudler/luet/pkg/helpers"
. "github.com/mudler/luet/pkg/logger"
pkg "github.com/mudler/luet/pkg/package"
@@ -38,6 +39,7 @@ import (
const BuildFile = "build.yaml"
const DefinitionFile = "definition.yaml"
const CollectionFile = "collection.yaml"
type LuetCompiler struct {
*tree.CompilerRecipe
@@ -48,9 +50,10 @@ type LuetCompiler struct {
Concurrency int
CompressionType CompressionImplementation
Options CompilerOptions
SolverOptions solver.Options
}
func NewLuetCompiler(backend CompilerBackend, db pkg.PackageDatabase, opt *CompilerOptions) Compiler {
func NewLuetCompiler(backend CompilerBackend, db pkg.PackageDatabase, opt *CompilerOptions, solvopts solver.Options) Compiler {
// The CompilerRecipe will gives us a tree with only build deps listed.
return &LuetCompiler{
Backend: backend,
@@ -65,6 +68,7 @@ func NewLuetCompiler(backend CompilerBackend, db pkg.PackageDatabase, opt *Compi
Concurrency: opt.Concurrency,
Clean: opt.Clean,
Options: *opt,
SolverOptions: solvopts,
}
}
@@ -179,7 +183,7 @@ func (cs *LuetCompiler) CompileParallel(keepPermissions bool, ps CompilationSpec
return artifacts, allErrors
}
func (cs *LuetCompiler) stripIncludesFromRootfs(includes []string, rootfs string) error {
func (cs *LuetCompiler) stripFromRootfs(includes []string, rootfs string, include bool) error {
var includeRegexp []*regexp.Regexp
for _, i := range includes {
r, e := regexp.Compile(i)
@@ -211,7 +215,7 @@ func (cs *LuetCompiler) stripIncludesFromRootfs(includes []string, rootfs string
}
}
if !match {
if include && !match || !include && match {
toRemove = append(toRemove, currentpath)
}
@@ -233,9 +237,60 @@ func (cs *LuetCompiler) stripIncludesFromRootfs(includes []string, rootfs string
return nil
}
func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage string, concurrency int, keepPermissions, keepImg bool, p CompilationSpec, generateArtifact bool) (Artifact, error) {
func (cs *LuetCompiler) unpackFs(rootfs string, concurrency int, p CompilationSpec) (Artifact, error) {
if p.GetPackageDir() != "" {
Info(":tophat: Packing from output dir", p.GetPackageDir())
rootfs = filepath.Join(rootfs, p.GetPackageDir())
}
pkgTag := ":package: " + p.GetPackage().GetName()
if len(p.GetIncludes()) > 0 {
// strip from includes
cs.stripFromRootfs(p.GetIncludes(), rootfs, true)
}
if len(p.GetExcludes()) > 0 {
// strip from excludes
cs.stripFromRootfs(p.GetExcludes(), rootfs, false)
}
artifact := NewPackageArtifact(p.Rel(p.GetPackage().GetFingerPrint() + ".package.tar"))
artifact.SetCompressionType(cs.CompressionType)
if err := artifact.Compress(rootfs, concurrency); err != nil {
return nil, errors.Wrap(err, "Error met while creating package archive")
}
artifact.SetCompileSpec(p)
return artifact, nil
}
func (cs *LuetCompiler) unpackDelta(rootfs string, concurrency int, keepPermissions bool, p CompilationSpec, builderOpts, runnerOpts CompilerBackendOptions) (Artifact, error) {
pkgTag := ":package: " + p.GetPackage().HumanReadableString()
if err := cs.Backend.ExportImage(builderOpts); err != nil {
return nil, errors.Wrap(err, "Could not export image")
}
if !cs.Options.KeepImageExport {
defer os.Remove(builderOpts.Destination)
}
Info(pkgTag, ":hammer: Generating delta")
diffs, err := cs.Backend.Changes(builderOpts.Destination, runnerOpts.Destination)
if err != nil {
return nil, errors.Wrap(err, "Could not generate changes from layers")
}
artifact, err := ExtractArtifactFromDelta(rootfs, p.Rel(p.GetPackage().GetFingerPrint()+".package.tar"), diffs, concurrency, keepPermissions, p.GetIncludes(), p.GetExcludes(), cs.CompressionType)
if err != nil {
return nil, errors.Wrap(err, "Could not generate deltas")
}
artifact.SetCompileSpec(p)
return artifact, nil
}
func (cs *LuetCompiler) buildPackageImage(image, buildertaggedImage, packageImage string,
concurrency int, keepPermissions bool,
p CompilationSpec) (CompilerBackendOptions, CompilerBackendOptions, error) {
var runnerOpts, builderOpts CompilerBackendOptions
pkgTag := ":package: " + p.GetPackage().HumanReadableString()
// Use packageImage as salt into the fp being used
// so the hash is unique also in cases where
@@ -258,32 +313,23 @@ func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage
packageImage = cs.ImageRepository + "-" + fp
}
if !cs.Clean {
exists := cs.Backend.ImageExists(buildertaggedImage) && cs.Backend.ImageExists(packageImage)
if art, err := LoadArtifactFromYaml(p); err == nil && (cs.Options.SkipIfMetadataExists || exists) {
Debug("Artifact reloaded. Skipping build")
return art, err
}
}
p.SetSeedImage(image) // In this case, we ignore the build deps as we suppose that the image has them - otherwise we recompose the tree with a solver,
// and we build all the images first.
err := os.MkdirAll(p.Rel("build"), os.ModePerm)
if err != nil {
return nil, errors.Wrap(err, "Error met while creating tempdir for building")
return builderOpts, runnerOpts, errors.Wrap(err, "Error met while creating tempdir for building")
}
buildDir, err := ioutil.TempDir(p.Rel("build"), "pack")
if err != nil {
return nil, errors.Wrap(err, "Error met while creating tempdir for building")
return builderOpts, runnerOpts, errors.Wrap(err, "Error met while creating tempdir for building")
}
defer os.RemoveAll(buildDir) // clean up
// First we copy the source definitions into the output - we create a copy which the builds will need (we need to cache this phase somehow)
err = helpers.CopyDir(p.GetPackage().GetPath(), buildDir)
if err != nil {
return nil, errors.Wrap(err, "Could not copy package sources")
return builderOpts, runnerOpts, errors.Wrap(err, "Could not copy package sources")
}
// Copy file into the build context, the compilespec might have requested to do so.
@@ -294,66 +340,78 @@ func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage
}
}
Info(pkgTag, "Generating :whale: definition for builder image from", image)
Info(pkgTag, ":whale: Generating 'builder' image definition from", image)
// First we create the builder image
p.WriteBuildImageDefinition(filepath.Join(buildDir, p.GetPackage().GetFingerPrint()+"-builder.dockerfile"))
builderOpts := CompilerBackendOptions{
if err := p.WriteBuildImageDefinition(filepath.Join(buildDir, p.GetPackage().GetFingerPrint()+"-builder.dockerfile")); err != nil {
return builderOpts, runnerOpts, errors.Wrap(err, "Could not generate image definition")
}
// Then we write the step image, which uses the builder one
if err := p.WriteStepImageDefinition(buildertaggedImage, filepath.Join(buildDir, p.GetPackage().GetFingerPrint()+".dockerfile")); err != nil {
return builderOpts, runnerOpts, errors.Wrap(err, "Could not generate image definition")
}
builderOpts = CompilerBackendOptions{
ImageName: buildertaggedImage,
SourcePath: buildDir,
DockerFileName: p.GetPackage().GetFingerPrint() + "-builder.dockerfile",
Destination: p.Rel(p.GetPackage().GetFingerPrint() + "-builder.image.tar"),
}
buildBuilderImage := true
if cs.Options.PullFirst {
if err := cs.Backend.DownloadImage(builderOpts); err == nil {
buildBuilderImage = false
}
}
if buildBuilderImage {
if err = cs.Backend.BuildImage(builderOpts); err != nil {
return nil, errors.Wrap(err, "Could not build image: "+image+" "+builderOpts.DockerFileName)
}
}
if err = cs.Backend.ExportImage(builderOpts); err != nil {
return nil, errors.Wrap(err, "Could not export image")
}
if !cs.Options.KeepImageExport {
defer os.Remove(builderOpts.Destination)
}
if cs.Options.Push && buildBuilderImage {
if err = cs.Backend.Push(builderOpts); err != nil {
return nil, errors.Wrap(err, "Could not push image: "+image+" "+builderOpts.DockerFileName)
}
}
// Then we write the step image, which uses the builder one
p.WriteStepImageDefinition(buildertaggedImage, filepath.Join(buildDir, p.GetPackage().GetFingerPrint()+".dockerfile"))
runnerOpts := CompilerBackendOptions{
runnerOpts = CompilerBackendOptions{
ImageName: packageImage,
SourcePath: buildDir,
DockerFileName: p.GetPackage().GetFingerPrint() + ".dockerfile",
Destination: p.Rel(p.GetPackage().GetFingerPrint() + ".image.tar"),
}
buildPackageImage := true
if cs.Options.PullFirst {
//Best effort pull
if err := cs.Backend.DownloadImage(runnerOpts); err == nil {
buildPackageImage = false
buildAndPush := func(opts CompilerBackendOptions) error {
buildImage := true
if cs.Options.PullFirst {
if err := cs.Backend.DownloadImage(opts); err == nil {
buildImage = false
}
}
if buildImage {
if err := cs.Backend.BuildImage(opts); err != nil {
return errors.Wrap(err, "Could not build image: "+image+" "+opts.DockerFileName)
}
if cs.Options.Push {
if err = cs.Backend.Push(opts); err != nil {
return errors.Wrap(err, "Could not push image: "+image+" "+opts.DockerFileName)
}
}
}
return nil
}
if buildPackageImage {
if err := cs.Backend.BuildImage(runnerOpts); err != nil {
return nil, errors.Wrap(err, "Failed building image for "+runnerOpts.ImageName+" "+runnerOpts.DockerFileName)
}
if err := buildAndPush(builderOpts); err != nil {
return builderOpts, runnerOpts, errors.Wrap(err, "Could not push image: "+image+" "+builderOpts.DockerFileName)
}
if err := buildAndPush(runnerOpts); err != nil {
return builderOpts, runnerOpts, errors.Wrap(err, "Could not push image: "+image+" "+builderOpts.DockerFileName)
}
return builderOpts, runnerOpts, nil
}
func (cs *LuetCompiler) genArtifact(p CompilationSpec, builderOpts, runnerOpts CompilerBackendOptions, concurrency int, keepPermissions bool) (Artifact, error) {
// generate Artifact
var artifact Artifact
var rootfs string
var err error
unpack := p.ImageUnpack()
pkgTag := ":package: " + p.GetPackage().HumanReadableString()
// If package_dir was specified in the spec, we want to treat the content of the directory
// as the root of our archive. ImageUnpack is implied to be true. override it
if p.GetPackageDir() != "" {
unpack = true
}
// prepare folder content of the image with the package compiled inside
if err := cs.Backend.ExportImage(runnerOpts); err != nil {
return nil, errors.Wrap(err, "Failed exporting image")
}
@@ -362,23 +420,7 @@ func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage
defer os.Remove(runnerOpts.Destination)
}
if cs.Options.Push && buildPackageImage {
err = cs.Backend.Push(runnerOpts)
if err != nil {
return nil, errors.Wrap(err, "Could not push image: "+image+" "+builderOpts.DockerFileName)
}
}
var artifact Artifact
unpack := p.ImageUnpack()
// If package_dir was specified in the spec, we want to treat the content of the directory
// as the root of our archive. ImageUnpack is implied to be true. override it
if p.GetPackageDir() != "" {
unpack = true
}
rootfs, err := ioutil.TempDir(p.GetOutputPath(), "rootfs")
rootfs, err = ioutil.TempDir(p.GetOutputPath(), "rootfs")
if err != nil {
return nil, errors.Wrap(err, "Could not create tempdir")
}
@@ -386,62 +428,24 @@ func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage
// TODO: Compression and such
err = cs.Backend.ExtractRootfs(CompilerBackendOptions{
ImageName: packageImage,
ImageName: runnerOpts.ImageName,
SourcePath: runnerOpts.Destination, Destination: rootfs}, keepPermissions)
if err != nil {
return nil, errors.Wrap(err, "Could not extract rootfs")
}
if !keepImg {
// We keep them around, so to not reload them from the tar (which should be the "correct way") and we automatically share the same layers
// TODO: Handle caching and optionally do not remove things
err = cs.Backend.RemoveImage(builderOpts)
if err != nil {
Warning("Could not remove image ", builderOpts.ImageName)
// return nil, errors.Wrap(err, "Could not remove image")
}
err = cs.Backend.RemoveImage(runnerOpts)
if err != nil {
Warning("Could not remove image ", builderOpts.ImageName)
// return nil, errors.Wrap(err, "Could not remove image")
}
}
if !generateArtifact {
return &PackageArtifact{}, nil
}
if unpack {
if p.GetPackageDir() != "" {
Info(":tophat: Packing from output dir", p.GetPackageDir())
rootfs = filepath.Join(rootfs, p.GetPackageDir())
}
if len(p.GetIncludes()) > 0 {
// strip from includes
cs.stripIncludesFromRootfs(p.GetIncludes(), rootfs)
}
artifact = NewPackageArtifact(p.Rel(p.GetPackage().GetFingerPrint() + ".package.tar"))
artifact.SetCompressionType(cs.CompressionType)
err = artifact.Compress(rootfs, concurrency)
// Take content of container as a base for our package files
artifact, err = cs.unpackFs(rootfs, concurrency, p)
if err != nil {
return nil, errors.Wrap(err, "Error met while creating package archive")
}
artifact.SetCompileSpec(p)
} else {
Info(pkgTag, "Generating delta")
diffs, err := cs.Backend.Changes(p.Rel(p.GetPackage().GetFingerPrint()+"-builder.image.tar"), p.Rel(p.GetPackage().GetFingerPrint()+".image.tar"))
// Generate delta between the two images
artifact, err = cs.unpackDelta(rootfs, concurrency, keepPermissions, p, builderOpts, runnerOpts)
if err != nil {
return nil, errors.Wrap(err, "Could not generate changes from layers")
return nil, errors.Wrap(err, "Error met while creating package archive")
}
artifact, err = ExtractArtifactFromDelta(rootfs, p.Rel(p.GetPackage().GetFingerPrint()+".package.tar"), diffs, concurrency, keepPermissions, p.GetIncludes(), cs.CompressionType)
if err != nil {
return nil, errors.Wrap(err, "Could not generate deltas")
}
artifact.SetCompileSpec(p)
}
filelist, err := artifact.FileList()
@@ -450,7 +454,6 @@ func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage
}
artifact.SetFiles(filelist)
artifact.GetCompileSpec().GetPackage().SetBuildTimestamp(time.Now().String())
err = artifact.WriteYaml(p.GetOutputPath())
@@ -462,6 +465,43 @@ func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage
return artifact, nil
}
func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage string,
concurrency int,
keepPermissions, keepImg bool,
p CompilationSpec, generateArtifact bool) (Artifact, error) {
if !cs.Clean {
exists := cs.Backend.ImageExists(buildertaggedImage) && cs.Backend.ImageExists(packageImage)
if art, err := LoadArtifactFromYaml(p); err == nil && (cs.Options.SkipIfMetadataExists || exists) {
Debug("Artifact reloaded. Skipping build")
return art, err
}
}
builderOpts, runnerOpts, err := cs.buildPackageImage(image, buildertaggedImage, packageImage, concurrency, keepPermissions, p)
if err != nil {
return nil, errors.Wrap(err, "failed building package image")
}
if !keepImg {
defer func() {
// We keep them around so as not to reload them from the tar (which should be the "correct way"), and we automatically share the same layers
if err := cs.Backend.RemoveImage(builderOpts); err != nil {
Warning("Could not remove image ", builderOpts.ImageName)
}
if err := cs.Backend.RemoveImage(runnerOpts); err != nil {
Warning("Could not remove image ", runnerOpts.ImageName)
}
}()
}
if !generateArtifact {
return &PackageArtifact{}, nil
}
return cs.genArtifact(p, builderOpts, runnerOpts, concurrency, keepPermissions)
}
func (cs *LuetCompiler) FromDatabase(db pkg.PackageDatabase, minimum bool, dst string) ([]CompilationSpec, error) {
compilerSpecs := NewLuetCompilationspecs()
@@ -511,7 +551,7 @@ func (cs *LuetCompiler) ComputeMinimumCompilableSet(p ...CompilationSpec) ([]Com
func (cs *LuetCompiler) ComputeDepTree(p CompilationSpec) (solver.PackagesAssertions, error) {
s := solver.NewResolver(pkg.NewInMemoryDatabase(false), cs.Database, pkg.NewInMemoryDatabase(false), cs.Options.SolverOptions.Resolver())
s := solver.NewResolver(cs.SolverOptions, pkg.NewInMemoryDatabase(false), cs.Database, pkg.NewInMemoryDatabase(false), cs.Options.SolverOptions.Resolver())
solution, err := s.Install(pkg.Packages{p.GetPackage()})
if err != nil {
@@ -560,6 +600,14 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p Compila
targetAssertion := p.GetSourceAssertion().Search(p.GetPackage().GetFingerPrint())
targetPackageHash := cs.ImageRepository + ":" + targetAssertion.Hash.PackageHash
bus.Manager.Publish(bus.EventPackagePreBuild, struct {
CompileSpec CompilationSpec
Assert solver.PackageAssert
}{
CompileSpec: p,
Assert: *targetAssertion,
})
// - If image is set we just generate a plain dockerfile
// Treat last case (easier) first. The image is provided and we just compute a plain dockerfile with the images listed as above
if p.GetImage() != "" {
@@ -587,8 +635,8 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p Compila
for _, assertion := range dependencies { //highly dependent on the order
currentN++
pkgTag := fmt.Sprintf(":package: %d/%d %s ⤑ %s", currentN, depsN, p.GetPackage().HumanReadableString(), assertion.Package.HumanReadableString())
Info(pkgTag, " :zap: Building dependency")
pkgTag := fmt.Sprintf(":package: %d/%d %s ⤑ :hammer: build %s", currentN, depsN, p.GetPackage().HumanReadableString(), assertion.Package.HumanReadableString())
Info(pkgTag, " starts")
compileSpec, err := cs.FromPackage(assertion.Package)
if err != nil {
return nil, errors.Wrap(err, "Error while generating compilespec for "+assertion.Package.GetName())
@@ -600,6 +648,14 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p Compila
Debug(pkgTag, " :arrow_right_hook: :whale: Builder image from", buildImageHash)
Debug(pkgTag, " :arrow_right_hook: :whale: Package image name", currentPackageImageHash)
bus.Manager.Publish(bus.EventPackagePreBuild, struct {
CompileSpec CompilationSpec
Assert solver.PackageAssert
}{
CompileSpec: compileSpec,
Assert: assertion,
})
lastHash = currentPackageImageHash
if compileSpec.GetImage() != "" {
Debug(pkgTag, " :wrench: Compiling "+compileSpec.GetPackage().HumanReadableString()+" from image")
@@ -619,8 +675,17 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p Compila
// deperrs = append(deperrs, err)
// break // stop at first error
}
bus.Manager.Publish(bus.EventPackagePostBuild, struct {
CompileSpec CompilationSpec
Artifact Artifact
}{
CompileSpec: compileSpec,
Artifact: artifact,
})
departifacts = append(departifacts, artifact)
Info(pkgTag, ":collision: Done")
Info(pkgTag, ":white_check_mark: Done")
}
} else if len(dependencies) > 0 {
@@ -628,7 +693,8 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p Compila
}
if !cs.Options.OnlyDeps {
Info(":package:", p.GetPackage().HumanReadableString(), ":cyclone: Building package target from:", lastHash)
Info(":rocket: All dependencies are satisfied, building package requested by the user", p.GetPackage().HumanReadableString())
Info(":package:", p.GetPackage().HumanReadableString(), " Using image: ", lastHash)
artifact, err := cs.compileWithImage(lastHash, "", targetPackageHash, concurrency, keepPermissions, cs.KeepImg, p, true)
if err != nil {
return artifact, err
@@ -636,6 +702,14 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p Compila
artifact.SetDependencies(departifacts)
artifact.SetSourceAssertion(p.GetSourceAssertion())
bus.Manager.Publish(bus.EventPackagePostBuild, struct {
CompileSpec CompilationSpec
Artifact Artifact
}{
CompileSpec: p,
Artifact: artifact,
})
return artifact, err
} else {
return departifacts[len(departifacts)-1], nil
@@ -651,32 +725,51 @@ func (cs *LuetCompiler) FromPackage(p pkg.Package) (CompilationSpec, error) {
return nil, err
}
buildFile := pack.Rel(BuildFile)
if !helpers.Exists(buildFile) {
return nil, errors.New("No build file present for " + p.GetFingerPrint())
}
defFile := pack.Rel(DefinitionFile)
if !helpers.Exists(defFile) {
return nil, errors.New("No build file present for " + p.GetFingerPrint())
}
def, err := ioutil.ReadFile(defFile)
if err != nil {
return nil, err
var dataresult []byte
val := pack.Rel(DefinitionFile)
if _, err := os.Stat(pack.Rel(CollectionFile)); err == nil {
val = pack.Rel(CollectionFile)
data, err := ioutil.ReadFile(val)
if err != nil {
return nil, errors.Wrap(err, "rendering file "+val)
}
dataBuild, err := ioutil.ReadFile(pack.Rel(BuildFile))
if err != nil {
return nil, errors.Wrap(err, "rendering file "+val)
}
packsRaw, err := pkg.GetRawPackages(data)
if err != nil {
return nil, errors.Wrap(err, "reading raw packages from "+val)
}
raw := packsRaw.Find(pack.GetName(), pack.GetCategory(), pack.GetVersion())
d := map[string]interface{}{}
if len(cs.Options.BuildValuesFile) > 0 {
defBuild, err := ioutil.ReadFile(cs.Options.BuildValuesFile)
if err != nil {
return nil, errors.Wrap(err, "rendering file "+val)
}
err = yaml.Unmarshal(defBuild, &d)
if err != nil {
return nil, errors.Wrap(err, "rendering file "+val)
}
}
dat, err := helpers.RenderHelm(string(dataBuild), raw, d)
if err != nil {
return nil, errors.Wrap(err, "rendering file "+pack.Rel(BuildFile))
}
dataresult = []byte(dat)
} else {
out, err := helpers.RenderFiles(pack.Rel(BuildFile), val, cs.Options.BuildValuesFile)
if err != nil {
return nil, errors.Wrap(err, "rendering file "+pack.Rel(BuildFile))
}
dataresult = []byte(out)
}
build, err := ioutil.ReadFile(buildFile)
if err != nil {
return nil, err
}
var values templatedata
if err = yaml.Unmarshal(def, &values); err != nil {
return nil, err
}
out, err := helpers.RenderHelm(string(build), values)
if err != nil {
return nil, err
}
return NewLuetCompilationSpec([]byte(out), pack)
return NewLuetCompilationSpec(dataresult, pack)
}
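For context on the collection branch above: build.yaml is rendered through Helm with the per-package entry taken from collection.yaml plus the optional --values file passed to luet build, and the --values data wins on colliding keys (this matches the RenderHelm tests further down in this changeset). A minimal, self-contained sketch of that precedence, using only the three-argument RenderHelm signature introduced here; the keys "Test" and "Bar" are hypothetical:

package main

import (
    "fmt"

    "github.com/mudler/luet/pkg/helpers"
)

func main() {
    // Second argument: values coming from the package/collection definition.
    // Third argument: values coming from the --values override file.
    out, err := helpers.RenderHelm(
        "{{.Values.Test}}-{{.Values.Bar}}",
        map[string]interface{}{"Test": "foo", "Bar": "ignored"},
        map[string]interface{}{"Bar": "bar"}, // overrides win on key collisions
    )
    if err != nil {
        panic(err)
    }
    fmt.Println(out) // prints "foo-bar"
}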
func (cs *LuetCompiler) GetBackend() CompilerBackend {

View File

@@ -23,6 +23,7 @@ import (
sd "github.com/mudler/luet/pkg/compiler/backend"
helpers "github.com/mudler/luet/pkg/helpers"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/solver"
"github.com/mudler/luet/pkg/tree"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
@@ -38,7 +39,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -82,7 +83,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -108,7 +109,7 @@ var _ = Describe("Compiler", func() {
})
})
Context("Templated packages",func(){
Context("Templated packages", func() {
It("Renders", func() {
generalRecipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
tmpdir, err := ioutil.TempDir("", "package")
@@ -117,10 +118,10 @@ var _ = Describe("Compiler", func() {
err = generalRecipe.Load("../../tests/fixtures/templates")
Expect(err).ToNot(HaveOccurred())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
pkg ,err := generalRecipe.GetDatabase().FindPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
pkg, err := generalRecipe.GetDatabase().FindPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
spec, err := compiler.FromPackage(pkg)
@@ -141,7 +142,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(4))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "c", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -198,7 +199,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "extra", Category: "layer", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -240,7 +241,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -264,6 +265,146 @@ var _ = Describe("Compiler", func() {
Expect(helpers.Exists(spec.Rel("test6"))).ToNot(BeTrue())
})
It("Compiles and excludes files", func() {
generalRecipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
tmpdir, err := ioutil.TempDir("", "package")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tmpdir) // clean up
err = generalRecipe.Load("../../tests/fixtures/excludes")
Expect(err).ToNot(HaveOccurred())
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
// err = generalRecipe.Tree().ResolveDeps(3)
// Expect(err).ToNot(HaveOccurred())
spec.SetOutputPath(tmpdir)
compiler.SetConcurrency(1)
artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1))
for _, artifact := range artifacts {
Expect(helpers.Exists(artifact.GetPath())).To(BeTrue())
Expect(helpers.Untar(artifact.GetPath(), tmpdir, false)).ToNot(HaveOccurred())
}
Expect(helpers.Exists(spec.Rel("test5"))).To(BeTrue())
Expect(helpers.Exists(spec.Rel("marvin"))).To(BeTrue())
Expect(helpers.Exists(spec.Rel("marvot"))).ToNot(BeTrue())
Expect(helpers.Exists(spec.Rel("test6"))).To(BeTrue())
})
It("Compiles includes and excludes files", func() {
generalRecipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
tmpdir, err := ioutil.TempDir("", "package")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tmpdir) // clean up
err = generalRecipe.Load("../../tests/fixtures/excludesincludes")
Expect(err).ToNot(HaveOccurred())
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
// err = generalRecipe.Tree().ResolveDeps(3)
// Expect(err).ToNot(HaveOccurred())
spec.SetOutputPath(tmpdir)
compiler.SetConcurrency(1)
artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1))
for _, artifact := range artifacts {
Expect(helpers.Exists(artifact.GetPath())).To(BeTrue())
Expect(helpers.Untar(artifact.GetPath(), tmpdir, false)).ToNot(HaveOccurred())
}
Expect(helpers.Exists(spec.Rel("test5"))).To(BeTrue())
Expect(helpers.Exists(spec.Rel("marvin"))).To(BeTrue())
Expect(helpers.Exists(spec.Rel("marvot"))).ToNot(BeTrue())
Expect(helpers.Exists(spec.Rel("test6"))).ToNot(BeTrue())
})
It("Compiles and excludes ony wanted files also from unpacked packages", func() {
generalRecipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
tmpdir, err := ioutil.TempDir("", "package")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tmpdir) // clean up
err = generalRecipe.Load("../../tests/fixtures/excludeimage")
Expect(err).ToNot(HaveOccurred())
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
// err = generalRecipe.Tree().ResolveDeps(3)
// Expect(err).ToNot(HaveOccurred())
spec.SetOutputPath(tmpdir)
compiler.SetConcurrency(1)
artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1))
for _, artifact := range artifacts {
Expect(helpers.Exists(artifact.GetPath())).To(BeTrue())
Expect(helpers.Untar(artifact.GetPath(), tmpdir, false)).ToNot(HaveOccurred())
}
Expect(helpers.Exists(spec.Rel("marvin"))).ToNot(BeTrue())
Expect(helpers.Exists(spec.Rel("test5"))).To(BeTrue())
Expect(helpers.Exists(spec.Rel("test6"))).To(BeTrue())
})
It("Compiles includes and excludes ony wanted files also from unpacked packages", func() {
generalRecipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
tmpdir, err := ioutil.TempDir("", "package")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tmpdir) // clean up
err = generalRecipe.Load("../../tests/fixtures/excludeincludeimage")
Expect(err).ToNot(HaveOccurred())
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
// err = generalRecipe.Tree().ResolveDeps(3)
// Expect(err).ToNot(HaveOccurred())
spec.SetOutputPath(tmpdir)
compiler.SetConcurrency(1)
artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1))
for _, artifact := range artifacts {
Expect(helpers.Exists(artifact.GetPath())).To(BeTrue())
Expect(helpers.Untar(artifact.GetPath(), tmpdir, false)).ToNot(HaveOccurred())
}
Expect(helpers.Exists(spec.Rel("marvin"))).ToNot(BeTrue())
Expect(helpers.Exists(spec.Rel("test5"))).To(BeTrue())
Expect(helpers.Exists(spec.Rel("test6"))).To(BeTrue())
})
It("Compiles and includes ony wanted files also from unpacked packages", func() {
generalRecipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
tmpdir, err := ioutil.TempDir("", "package")
@@ -275,7 +416,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -313,7 +454,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "pkgs-checker", Category: "package", Version: "9999"})
Expect(err).ToNot(HaveOccurred())
@@ -354,7 +495,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "d", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -398,7 +539,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "d", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -440,7 +581,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "extra", Category: "layer", Version: "0.1"})
Expect(err).ToNot(HaveOccurred())
@@ -478,7 +619,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(10))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "vhba", Category: "sys-fs-5.4.2", Version: "20190410"})
Expect(err).ToNot(HaveOccurred())
@@ -517,7 +658,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(4))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
@@ -569,7 +710,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "c", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -612,7 +753,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "runtime", Category: "layer", Version: "0.1"})
Expect(err).ToNot(HaveOccurred())
@@ -645,7 +786,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{
Name: "dironly",
@@ -700,7 +841,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "runtime", Category: "layer", Version: "0.1"})
Expect(err).ToNot(HaveOccurred())
@@ -736,7 +877,7 @@ var _ = Describe("Compiler", func() {
err := generalRecipe.Load("../../tests/fixtures/includeimage")
Expect(err).ToNot(HaveOccurred())
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
specs, err := compiler.FromDatabase(generalRecipe.GetDatabase(), true, "")
Expect(err).ToNot(HaveOccurred())
@@ -755,7 +896,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "runtime", Category: "layer", Version: "0.1"})
Expect(err).ToNot(HaveOccurred())

View File

@@ -56,6 +56,7 @@ type CompilerOptions struct {
NoDeps bool
SolverOptions config.LuetSolverOptions
SkipIfMetadataExists bool
BuildValuesFile string
PackageTargetOnly bool
}
@@ -147,6 +148,7 @@ type ArtifactLayersSummary struct {
type CompilationSpec interface {
ImageUnpack() bool // tells if the definition is just an image
GetIncludes() []string
GetExcludes() []string
RenderBuildImage() (string, error)
WriteBuildImageDefinition(string) error

View File

@@ -102,6 +102,7 @@ type LuetCompilationSpec struct {
OutputPath string `json:"-"` // Where the build processfiles go
Unpack bool `json:"unpack"`
Includes []string `json:"includes"`
Excludes []string `json:"excludes"`
}
func NewLuetCompilationSpec(b []byte, p pkg.Package) (CompilationSpec, error) {
@@ -148,6 +149,10 @@ func (cs *LuetCompilationSpec) GetIncludes() []string {
return cs.Includes
}
func (cs *LuetCompilationSpec) GetExcludes() []string {
return cs.Excludes
}
func (cs *LuetCompilationSpec) GetRetrieve() []string {
return cs.Retrieve
}
@@ -237,6 +242,7 @@ RUN ` + s
func (cs *LuetCompilationSpec) RenderStepImage(image string) (string, error) {
spec := `
FROM ` + image + `
WORKDIR /luetbuild
ENV PACKAGE_NAME=` + cs.Package.GetName() + `
ENV PACKAGE_VERSION=` + cs.Package.GetVersion() + `
ENV PACKAGE_CATEGORY=` + cs.Package.GetCategory()

View File

@@ -23,6 +23,7 @@ import (
. "github.com/mudler/luet/pkg/compiler"
helpers "github.com/mudler/luet/pkg/helpers"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/solver"
"github.com/mudler/luet/pkg/tree"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
@@ -61,7 +62,7 @@ var _ = Describe("Spec", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "enman", Category: "app-admin", Version: "1.4.0"})
Expect(err).ToNot(HaveOccurred())
@@ -95,6 +96,7 @@ ENV test=1`))
Expect(err).ToNot(HaveOccurred())
Expect(dockerfile).To(Equal(`
FROM luet/base
WORKDIR /luetbuild
ENV PACKAGE_NAME=enman
ENV PACKAGE_VERSION=1.4.0
ENV PACKAGE_CATEGORY=app-admin
@@ -114,7 +116,7 @@ RUN echo bar > /test2`))
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "a", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -167,6 +169,7 @@ ENV test=1`))
Expect(dockerfile).To(Equal(`
FROM luet/base
WORKDIR /luetbuild
ENV PACKAGE_NAME=a
ENV PACKAGE_VERSION=1.0
ENV PACKAGE_CATEGORY=test

View File

@@ -64,10 +64,11 @@ type LuetGeneralConfig struct {
}
type LuetSolverOptions struct {
Type string `mapstructure:"type"`
LearnRate float32 `mapstructure:"rate"`
Discount float32 `mapstructure:"discount"`
MaxAttempts int `mapstructure:"max_attempts"`
Type string `mapstructure:"type"`
LearnRate float32 `mapstructure:"rate"`
Discount float32 `mapstructure:"discount"`
MaxAttempts int `mapstructure:"max_attempts"`
Implementation solver.SolverType `mapstructure:"implementation"`
}
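The new Implementation field is what ends up in the solver.Options value that the compiler and installer constructors now take (see the NewResolver and NewLuetCompiler call sites elsewhere in this diff). A minimal sketch of that wiring, reusing only calls visible in this changeset; the in-memory databases are placeholders:

package main

import (
    "github.com/mudler/luet/pkg/config"
    pkg "github.com/mudler/luet/pkg/package"
    "github.com/mudler/luet/pkg/solver"
)

func main() {
    // Hypothetical configuration; Implementation selects the solver type.
    opts := config.LuetSolverOptions{Implementation: solver.SingleCoreSimple}

    // Same call shape used by the installer in this changeset: the first
    // argument carries the solver implementation and the concurrency level.
    _ = solver.NewResolver(
        solver.Options{Type: opts.Implementation, Concurrency: 1},
        pkg.NewInMemoryDatabase(false), // installed packages (placeholder)
        pkg.NewInMemoryDatabase(false), // definitions / world (placeholder)
        pkg.NewInMemoryDatabase(false), // scratch database
        opts.Resolver(),
    )
}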
func (opts LuetSolverOptions) Resolver() solver.PackageResolver {
@@ -96,7 +97,7 @@ type LuetSystemConfig struct {
TmpDirBase string `yaml:"tmpdir_base" mapstructure:"tmpdir_base"`
}
func (sc LuetSystemConfig) GetRepoDatabaseDirPath(name string) string {
func (sc *LuetSystemConfig) GetRepoDatabaseDirPath(name string) string {
dbpath := filepath.Join(sc.Rootfs, sc.DatabasePath)
dbpath = filepath.Join(dbpath, "repos/"+name)
err := os.MkdirAll(dbpath, os.ModePerm)
@@ -106,7 +107,7 @@ func (sc LuetSystemConfig) GetRepoDatabaseDirPath(name string) string {
return dbpath
}
func (sc LuetSystemConfig) GetSystemRepoDatabaseDirPath() string {
func (sc *LuetSystemConfig) GetSystemRepoDatabaseDirPath() string {
dbpath := filepath.Join(sc.Rootfs,
sc.DatabasePath)
err := os.MkdirAll(dbpath, os.ModePerm)
@@ -116,7 +117,7 @@ func (sc LuetSystemConfig) GetSystemRepoDatabaseDirPath() string {
return dbpath
}
func (sc LuetSystemConfig) GetSystemPkgsCacheDirPath() (ans string) {
func (sc *LuetSystemConfig) GetSystemPkgsCacheDirPath() (ans string) {
var cachepath string
if sc.PkgsCachePath != "" {
cachepath = sc.PkgsCachePath
@@ -134,6 +135,10 @@ func (sc LuetSystemConfig) GetSystemPkgsCacheDirPath() (ans string) {
return
}
func (sc *LuetSystemConfig) GetRootFsAbs() (string, error) {
return filepath.Abs(sc.Rootfs)
}
type LuetRepository struct {
Name string `json:"name" yaml:"name" mapstructure:"name"`
Description string `json:"description,omitempty" yaml:"description,omitempty" mapstructure:"description"`
@@ -203,6 +208,7 @@ type LuetConfig struct {
RepositoriesConfDir []string `mapstructure:"repos_confdir"`
ConfigProtectConfDir []string `mapstructure:"config_protect_confdir"`
ConfigProtectSkip bool `mapstructure:"config_protect_skip"`
ConfigFromHost bool `mapstructure:"config_from_host"`
CacheRepositories []LuetRepository `mapstructure:"repetitors"`
SystemRepositories []LuetRepository `mapstructure:"repositories"`
@@ -250,6 +256,8 @@ func GenDefault(viper *v.Viper) {
viper.SetDefault("repos_confdir", []string{"/etc/luet/repos.conf.d"})
viper.SetDefault("config_protect_confdir", []string{"/etc/luet/config.protect.d"})
viper.SetDefault("config_protect_skip", false)
// TODO: Set default to false when we are ready for migration.
viper.SetDefault("config_from_host", true)
viper.SetDefault("cache_repositories", []string{})
viper.SetDefault("system_repositories", []string{})

View File

@@ -18,6 +18,8 @@ package config
import (
"fmt"
"path/filepath"
"strings"
)
type ConfigProtectConfFile struct {
@@ -39,3 +41,79 @@ func (c *ConfigProtectConfFile) String() string {
return fmt.Sprintf("[%s] filename: %s, dirs: %s", c.Name, c.Filename,
c.Directories)
}
type ConfigProtect struct {
AnnotationDir string
MapProtected map[string]bool
}
func NewConfigProtect(annotationDir string) *ConfigProtect {
if len(annotationDir) > 0 && annotationDir[0:1] != "/" {
annotationDir = "/" + annotationDir
}
return &ConfigProtect{
AnnotationDir: annotationDir,
MapProtected: make(map[string]bool, 0),
}
}
func (c *ConfigProtect) AddAnnotationDir(d string) {
c.AnnotationDir = d
}
func (c *ConfigProtect) GetAnnotationDir() string {
return c.AnnotationDir
}
func (c *ConfigProtect) Map(files []string) {
if LuetCfg.ConfigProtectSkip {
return
}
for _, file := range files {
if file[0:1] != "/" {
file = "/" + file
}
if len(LuetCfg.GetConfigProtectConfFiles()) > 0 {
for _, conf := range LuetCfg.GetConfigProtectConfFiles() {
for _, dir := range conf.Directories {
// Note: on unpack, file paths come without a leading /
if strings.HasPrefix(file, filepath.Clean(dir)) {
// The docker archive modifier works with paths that have no leading /.
c.MapProtected[file] = true
goto nextFile
}
}
}
}
if c.AnnotationDir != "" && strings.HasPrefix(file, filepath.Clean(c.AnnotationDir)) {
c.MapProtected[file] = true
}
nextFile:
}
}
func (c *ConfigProtect) Protected(file string) bool {
if file[0:1] != "/" {
file = "/" + file
}
_, ans := c.MapProtected[file]
return ans
}
func (c *ConfigProtect) GetProtectFiles(withSlash bool) []string {
ans := []string{}
for key := range c.MapProtected {
if withSlash {
ans = append(ans, key)
} else {
ans = append(ans, key[1:])
}
}
return ans
}

View File

@@ -0,0 +1,118 @@
// Copyright © 2019-2020 Ettore Di Giacinto <mudler@gentoo.org>
// Daniele Rondina <geaaru@sabayonlinux.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package config_test
import (
config "github.com/mudler/luet/pkg/config"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Config", func() {
Context("Test config protect", func() {
It("Protect1", func() {
files := []string{
"etc/foo/my.conf",
"usr/bin/foo",
"usr/share/doc/foo.md",
}
cp := config.NewConfigProtect("/etc")
cp.Map(files)
Expect(cp.Protected("etc/foo/my.conf")).To(BeTrue())
Expect(cp.Protected("/etc/foo/my.conf")).To(BeTrue())
Expect(cp.Protected("usr/bin/foo")).To(BeFalse())
Expect(cp.Protected("/usr/bin/foo")).To(BeFalse())
Expect(cp.Protected("/usr/share/doc/foo.md")).To(BeFalse())
Expect(cp.GetProtectFiles(false)).To(Equal(
[]string{
"etc/foo/my.conf",
},
))
Expect(cp.GetProtectFiles(true)).To(Equal(
[]string{
"/etc/foo/my.conf",
},
))
})
It("Protect2", func() {
files := []string{
"etc/foo/my.conf",
"usr/bin/foo",
"usr/share/doc/foo.md",
}
cp := config.NewConfigProtect("")
cp.Map(files)
Expect(cp.Protected("etc/foo/my.conf")).To(BeFalse())
Expect(cp.Protected("/etc/foo/my.conf")).To(BeFalse())
Expect(cp.Protected("usr/bin/foo")).To(BeFalse())
Expect(cp.Protected("/usr/bin/foo")).To(BeFalse())
Expect(cp.Protected("/usr/share/doc/foo.md")).To(BeFalse())
Expect(cp.GetProtectFiles(false)).To(Equal(
[]string{},
))
Expect(cp.GetProtectFiles(true)).To(Equal(
[]string{},
))
})
It("Protect3: Annotation dir without initial slash", func() {
files := []string{
"etc/foo/my.conf",
"usr/bin/foo",
"usr/share/doc/foo.md",
}
cp := config.NewConfigProtect("etc")
cp.Map(files)
Expect(cp.Protected("etc/foo/my.conf")).To(BeTrue())
Expect(cp.Protected("/etc/foo/my.conf")).To(BeTrue())
Expect(cp.Protected("usr/bin/foo")).To(BeFalse())
Expect(cp.Protected("/usr/bin/foo")).To(BeFalse())
Expect(cp.Protected("/usr/share/doc/foo.md")).To(BeFalse())
Expect(cp.GetProtectFiles(false)).To(Equal(
[]string{
"etc/foo/my.conf",
},
))
Expect(cp.GetProtectFiles(true)).To(Equal(
[]string{
"/etc/foo/my.conf",
},
))
})
})
})

View File

@@ -24,6 +24,37 @@ import (
copy "github.com/otiai10/copy"
)
func OrderFiles(target string, files []string) ([]string, []string) {
var newFiles []string
var notPresent []string
for _, f := range files {
target := filepath.Join(target, f)
fi, err := os.Lstat(target)
if err != nil {
notPresent = append(notPresent, f)
continue
}
if m := fi.Mode(); !m.IsDir() {
newFiles = append(newFiles, f)
}
}
for _, f := range files {
target := filepath.Join(target, f)
fi, err := os.Lstat(target)
if err != nil {
continue
}
if m := fi.Mode(); m.IsDir() {
newFiles = append(newFiles, f)
}
}
return newFiles, notPresent
}
func ListDir(dir string) ([]string, error) {
content := []string{}

View File

@@ -16,6 +16,10 @@
package helpers_test
import (
"io/ioutil"
"os"
"path/filepath"
. "github.com/mudler/luet/pkg/helpers"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
@@ -28,4 +32,33 @@ var _ = Describe("Helpers", func() {
Expect(Exists("../../tests/fixtures/buildtree/app-admin/enman/1.4.0/build.yaml.not.exists")).To(BeFalse())
})
})
Context("Orders dir and files correctly", func() {
It("puts files first and folders at end", func() {
testDir, err := ioutil.TempDir(os.TempDir(), "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(testDir)
err = ioutil.WriteFile(filepath.Join(testDir, "foo"), []byte("test\n"), 0644)
Expect(err).ToNot(HaveOccurred())
err = ioutil.WriteFile(filepath.Join(testDir, "baz"), []byte("test\n"), 0644)
Expect(err).ToNot(HaveOccurred())
err = os.MkdirAll(filepath.Join(testDir, "bar"), 0755)
Expect(err).ToNot(HaveOccurred())
err = ioutil.WriteFile(filepath.Join(testDir, "bar", "foo"), []byte("test\n"), 0644)
Expect(err).ToNot(HaveOccurred())
err = os.MkdirAll(filepath.Join(testDir, "baz2"), 0755)
Expect(err).ToNot(HaveOccurred())
err = ioutil.WriteFile(filepath.Join(testDir, "baz2", "foo"), []byte("test\n"), 0644)
Expect(err).ToNot(HaveOccurred())
ordered, notExisting := OrderFiles(testDir, []string{"bar", "baz", "bar/foo", "baz2", "foo", "baz2/foo", "notexisting"})
Expect(ordered).To(Equal([]string{"baz", "bar/foo", "foo", "baz2/foo", "bar", "baz2"}))
Expect(notExisting).To(Equal([]string{"notexisting"}))
})
})
})

View File

@@ -1,15 +1,17 @@
package helpers
import (
"io/ioutil"
"github.com/pkg/errors"
"gopkg.in/yaml.v2"
"helm.sh/helm/v3/pkg/chart"
"helm.sh/helm/v3/pkg/chartutil"
"helm.sh/helm/v3/pkg/engine"
"github.com/pkg/errors"
)
// RenderHelm renders the template string with helm
func RenderHelm(template string, values map[string]interface{}) (string,error) {
func RenderHelm(template string, values, d map[string]interface{}) (string, error) {
c := &chart.Chart{
Metadata: &chart.Metadata{
Name: "",
@@ -18,17 +20,52 @@ func RenderHelm(template string, values map[string]interface{}) (string,error) {
Templates: []*chart.File{
{Name: "templates", Data: []byte(template)},
},
Values: map[string]interface{}{"Values":values},
Values: map[string]interface{}{"Values": values},
}
v, err := chartutil.CoalesceValues(c, map[string]interface{}{})
v, err := chartutil.CoalesceValues(c, map[string]interface{}{"Values": d})
if err != nil {
return "",errors.Wrap(err,"while rendering template")
return "", errors.Wrap(err, "while rendering template")
}
out, err := engine.Render(c, v)
if err != nil {
return "",errors.Wrap(err,"while rendering template")
return "", errors.Wrap(err, "while rendering template")
}
return out["templates"],nil
return out["templates"], nil
}
type templatedata map[string]interface{}
func RenderFiles(toTemplate, valuesFile string, defaultFile string) (string, error) {
raw, err := ioutil.ReadFile(toTemplate)
if err != nil {
return "", errors.Wrap(err, "reading file "+toTemplate)
}
if !Exists(valuesFile) {
return "", errors.Wrap(err, "file not existing "+valuesFile)
}
val, err := ioutil.ReadFile(valuesFile)
if err != nil {
return "", errors.Wrap(err, "reading file "+valuesFile)
}
var values templatedata
d := templatedata{}
if len(defaultFile) > 0 {
def, err := ioutil.ReadFile(defaultFile)
if err != nil {
return "", errors.Wrap(err, "reading file "+valuesFile)
}
if err = yaml.Unmarshal(def, &d); err != nil {
return "", errors.Wrap(err, "unmarshalling file "+toTemplate)
}
}
if err = yaml.Unmarshal(val, &values); err != nil {
return "", errors.Wrap(err, "unmarshalling file "+toTemplate)
}
return RenderHelm(string(raw), values, d)
}

View File

@@ -16,17 +16,132 @@
package helpers_test
import (
"io/ioutil"
"os"
"path/filepath"
. "github.com/mudler/luet/pkg/helpers"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
func writeFile(path string, content string) {
err := ioutil.WriteFile(path, []byte(content), 0644)
Expect(err).ToNot(HaveOccurred())
}
var _ = Describe("Helpers", func() {
Context("RenderHelm", func() {
It("Renders templates", func() {
out, err := RenderHelm("{{.Values.Test}}",map[string]interface{}{"Test":"foo"})
out, err := RenderHelm("{{.Values.Test}}{{.Values.Bar}}", map[string]interface{}{"Test": "foo"}, map[string]interface{}{"Bar": "bar"})
Expect(err).ToNot(HaveOccurred())
Expect(out).To(Equal("foo"))
Expect(out).To(Equal("foobar"))
})
It("Renders templates with overrides", func() {
out, err := RenderHelm("{{.Values.Test}}{{.Values.Bar}}", map[string]interface{}{"Test": "foo", "Bar": "baz"}, map[string]interface{}{"Bar": "bar"})
Expect(err).ToNot(HaveOccurred())
Expect(out).To(Equal("foobar"))
})
It("Renders templates", func() {
out, err := RenderHelm("{{.Values.Test}}{{.Values.Bar}}", map[string]interface{}{"Test": "foo", "Bar": "bar"}, map[string]interface{}{})
Expect(err).ToNot(HaveOccurred())
Expect(out).To(Equal("foobar"))
})
It("Render files default overrides", func() {
testDir, err := ioutil.TempDir(os.TempDir(), "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(testDir)
toTemplate := filepath.Join(testDir, "totemplate.yaml")
values := filepath.Join(testDir, "values.yaml")
d := filepath.Join(testDir, "default.yaml")
writeFile(toTemplate, `{{.Values.foo}}`)
writeFile(values, `
foo: "bar"
`)
writeFile(d, `
foo: "baz"
`)
Expect(err).ToNot(HaveOccurred())
res, err := RenderFiles(toTemplate, values, d)
Expect(err).ToNot(HaveOccurred())
Expect(res).To(Equal("baz"))
})
It("Render files from values", func() {
testDir, err := ioutil.TempDir(os.TempDir(), "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(testDir)
toTemplate := filepath.Join(testDir, "totemplate.yaml")
values := filepath.Join(testDir, "values.yaml")
d := filepath.Join(testDir, "default.yaml")
writeFile(toTemplate, `{{.Values.foo}}`)
writeFile(values, `
foo: "bar"
`)
writeFile(d, `
faa: "baz"
`)
Expect(err).ToNot(HaveOccurred())
res, err := RenderFiles(toTemplate, values, d)
Expect(err).ToNot(HaveOccurred())
Expect(res).To(Equal("bar"))
})
It("Render files from values if no default", func() {
testDir, err := ioutil.TempDir(os.TempDir(), "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(testDir)
toTemplate := filepath.Join(testDir, "totemplate.yaml")
values := filepath.Join(testDir, "values.yaml")
writeFile(toTemplate, `{{.Values.foo}}`)
writeFile(values, `
foo: "bar"
`)
Expect(err).ToNot(HaveOccurred())
res, err := RenderFiles(toTemplate, values, "")
Expect(err).ToNot(HaveOccurred())
Expect(res).To(Equal("bar"))
})
It("doesn't interpolate if no one provides the values", func() {
testDir, err := ioutil.TempDir(os.TempDir(), "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(testDir)
toTemplate := filepath.Join(testDir, "totemplate.yaml")
values := filepath.Join(testDir, "values.yaml")
d := filepath.Join(testDir, "default.yaml")
writeFile(toTemplate, `{{.Values.foo}}`)
writeFile(values, `
foao: "bar"
`)
writeFile(d, `
faa: "baz"
`)
Expect(err).ToNot(HaveOccurred())
res, err := RenderFiles(toTemplate, values, d)
Expect(err).ToNot(HaveOccurred())
Expect(res).To(Equal(""))
})
})
})

View File

@@ -22,6 +22,7 @@ import (
"os"
"path"
"path/filepath"
"time"
. "github.com/mudler/luet/pkg/logger"
@@ -30,6 +31,8 @@ import (
"github.com/mudler/luet/pkg/helpers"
"github.com/cavaliercoder/grab"
"github.com/schollz/progressbar/v3"
)
type HttpClient struct {
@@ -101,20 +104,61 @@ func (c *HttpClient) DownloadArtifact(artifact compiler.Artifact) (compiler.Arti
}
resp := client.Do(req)
bar := progressbar.NewOptions64(
resp.Size(),
progressbar.OptionSetDescription(
fmt.Sprintf("[cyan] %s - [reset]",
filepath.Base(resp.Request.HTTPRequest.URL.RequestURI()))),
progressbar.OptionSetRenderBlankState(true),
progressbar.OptionEnableColorCodes(config.LuetCfg.GetLogging().Color),
progressbar.OptionClearOnFinish(),
progressbar.OptionShowBytes(true),
progressbar.OptionShowCount(),
progressbar.OptionSetPredictTime(true),
progressbar.OptionFullWidth(),
progressbar.OptionSetTheme(progressbar.Theme{
Saucer: "[white]=[reset]",
SaucerHead: "[white]>[reset]",
SaucerPadding: " ",
BarStart: "[",
BarEnd: "]",
}))
bar.Reset()
// start download loop
t := time.NewTicker(500 * time.Millisecond)
defer t.Stop()
download_loop:
for {
select {
case <-t.C:
bar.Set64(resp.BytesComplete())
case <-resp.Done:
// download is complete
break download_loop
}
}
if err = resp.Err(); err != nil {
continue
}
Info("Downloaded", artifactName, "of",
fmt.Sprintf("%.2f", (float64(resp.BytesComplete())/1000)/1000), "MB (",
fmt.Sprintf("%.2f", (float64(resp.BytesPerSecond())/1024)/1024), "MiB/s )")
Debug("Copying file ", filepath.Join(temp, artifactName), "to", cacheFile)
err = helpers.CopyFile(filepath.Join(temp, artifactName), cacheFile)
if err != nil {
continue
}
Info("\nDownloaded", artifactName, "of",
fmt.Sprintf("%.2f", (float64(resp.BytesComplete())/1000)/1000), "MB (",
fmt.Sprintf("%.2f", (float64(resp.BytesPerSecond())/1024)/1024), "MiB/s )")
Debug("\nCopying file ", filepath.Join(temp, artifactName), "to", cacheFile)
err = helpers.CopyFile(filepath.Join(temp, artifactName), cacheFile)
bar.Finish()
ok = true
break
}

View File

@@ -38,15 +38,26 @@ func NewLocalClient(r RepoData) *LocalClient {
func (c *LocalClient) DownloadArtifact(artifact compiler.Artifact) (compiler.Artifact, error) {
var err error
rootfs := ""
artifactName := path.Base(artifact.GetPath())
cacheFile := filepath.Join(config.LuetCfg.GetSystem().GetSystemPkgsCacheDirPath(), artifactName)
if !config.LuetCfg.ConfigFromHost {
rootfs, err = config.LuetCfg.GetSystem().GetRootFsAbs()
if err != nil {
return nil, err
}
}
// Check if file is already in cache
if helpers.Exists(cacheFile) {
Info("Use artifact", artifactName, "from cache.")
} else {
ok := false
for _, uri := range c.RepoData.Urls {
uri = filepath.Join(rootfs, uri)
Info("Downloading artifact", artifactName, "from", uri)
//defer os.Remove(file.Name())
@@ -72,8 +83,20 @@ func (c *LocalClient) DownloadFile(name string) (string, error) {
var err error
var file *os.File = nil
rootfs := ""
if !config.LuetCfg.ConfigFromHost {
rootfs, err = config.LuetCfg.GetSystem().GetRootFsAbs()
if err != nil {
return "", err
}
}
ok := false
for _, uri := range c.RepoData.Urls {
uri = filepath.Join(rootfs, uri)
Info("Downloading file", name, "from", uri)
file, err = config.LuetCfg.GetSystem().TempFile("localclient")
if err != nil {

View File

@@ -19,6 +19,7 @@ package installer
import (
"io/ioutil"
"path"
"path/filepath"
"regexp"
"github.com/ghodss/yaml"
@@ -29,8 +30,21 @@ import (
func LoadConfigProtectConfs(c *LuetConfig) error {
var regexConfs = regexp.MustCompile(`.yml$`)
var err error
rootfs := ""
// Respect the rootfs param on read repositories
if !c.ConfigFromHost {
rootfs, err = c.GetSystem().GetRootFsAbs()
if err != nil {
return err
}
}
for _, cdir := range c.ConfigProtectConfDir {
cdir = filepath.Join(rootfs, cdir)
Debug("Parsing Config Protect Directory", cdir, "...")
files, err := ioutil.ReadDir(cdir)

View File

@@ -47,7 +47,7 @@ func (f *LuetFinalizer) RunInstall(s *System) error {
for _, c := range f.Install {
toRun := append(args, c)
Info("Executing finalizer on ", s.Target, cmd, toRun)
Info(":shell: Executing finalizer on ", s.Target, cmd, toRun)
if s.Target == "/" {
cmd := exec.Command(cmd, toRun...)
stdoutStderr, err := cmd.CombinedOutput()

View File

@@ -16,7 +16,6 @@
package installer
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
@@ -24,13 +23,14 @@ import (
"strings"
"sync"
. "github.com/logrusorgru/aurora"
"github.com/mudler/luet/pkg/bus"
compiler "github.com/mudler/luet/pkg/compiler"
"github.com/mudler/luet/pkg/config"
"github.com/mudler/luet/pkg/helpers"
. "github.com/mudler/luet/pkg/logger"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/solver"
"github.com/mudler/luet/pkg/tree"
"github.com/pkg/errors"
)
@@ -45,6 +45,7 @@ type LuetInstallerOptions struct {
FullUninstall, FullCleanUninstall bool
CheckConflicts bool
SolverUpgrade, RemoveUnavailableOnUpgrade, UpgradeNewRevisions bool
Ask bool
}
type LuetInstaller struct {
@@ -63,85 +64,115 @@ func NewLuetInstaller(opts LuetInstallerOptions) Installer {
return &LuetInstaller{Options: opts}
}
func (l *LuetInstaller) Upgrade(s *System) error {
syncedRepos, err := l.SyncRepositories(true)
if err != nil {
return err
}
Info(":thinking: Computing upgrade, please hang tight")
// computeUpgrade returns the packages to be uninstalled and installed in a system to perform an upgrade
// based on the system repositories
func (l *LuetInstaller) computeUpgrade(syncedRepos Repositories, s *System) (pkg.Packages, pkg.Packages, error) {
toInstall := pkg.Packages{}
var uninstall pkg.Packages
var err error
// First match packages against repositories by priority
allRepos := pkg.NewInMemoryDatabase(false)
syncedRepos.SyncDatabase(allRepos)
// compute a "big" world
solv := solver.NewResolver(s.Database, allRepos, pkg.NewInMemoryDatabase(false), l.Options.SolverOptions.Resolver())
var uninstall pkg.Packages
solv := solver.NewResolver(solver.Options{Type: l.Options.SolverOptions.Implementation, Concurrency: l.Options.Concurrency}, s.Database, allRepos, pkg.NewInMemoryDatabase(false), l.Options.SolverOptions.Resolver())
var solution solver.PackagesAssertions
if l.Options.SolverUpgrade {
uninstall, solution, err = solv.UpgradeUniverse(l.Options.RemoveUnavailableOnUpgrade)
if err != nil {
return errors.Wrap(err, "Failed solving solution for upgrade")
return uninstall, toInstall, errors.Wrap(err, "Failed solving solution for upgrade")
}
} else {
uninstall, solution, err = solv.Upgrade(!l.Options.FullUninstall, l.Options.NoDeps)
if err != nil {
return errors.Wrap(err, "Failed solving solution for upgrade")
return uninstall, toInstall, errors.Wrap(err, "Failed solving solution for upgrade")
}
}
if len(uninstall) > 0 {
Info("Packages marked for uninstall:")
}
for _, p := range uninstall {
Info(fmt.Sprintf("- %s", p.HumanReadableString()))
}
if len(solution) > 0 {
Info("Packages marked for upgrade:")
}
toInstall := pkg.Packages{}
for _, assertion := range solution {
// Be sure to filter from solutions packages already installed in the system
if _, err := s.Database.FindPackage(assertion.Package); err != nil && assertion.Value {
Info(fmt.Sprintf("- %s", assertion.Package.HumanReadableString()))
toInstall = append(toInstall, assertion.Package)
}
}
if l.Options.UpgradeNewRevisions {
Info("Checking packages with new revisions available")
for _, p := range s.Database.World() {
matches := syncedRepos.PackageMatches(pkg.Packages{p})
if len(matches) == 0 {
// Package missing. The user should run luet upgrade --universe
Info("Installed packages seem to be missing from remote repositories.")
Info("It is suggested to run 'luet upgrade --universe'")
continue
}
for _, artefact := range matches[0].Repo.GetIndex() {
if artefact.GetCompileSpec().GetPackage() == nil {
return errors.New("Package in compilespec empty")
return uninstall, toInstall, errors.New("Package in compilespec empty")
}
if artefact.GetCompileSpec().GetPackage().Matches(p) && artefact.GetCompileSpec().GetPackage().GetBuildTimestamp() != p.GetBuildTimestamp() {
toInstall = append(toInstall, matches[0].Package).Unique()
uninstall = append(uninstall, p).Unique()
Info(
fmt.Sprintf("- %s ( %s vs %s ) repo: %s (date: %s)",
p.HumanReadableString(),
artefact.GetCompileSpec().GetPackage().GetBuildTimestamp(),
p.GetBuildTimestamp(),
matches[0].Repo.GetName(),
matches[0].Repo.GetLastUpdate(),
))
}
}
}
}
return uninstall, toInstall, nil
}
func packsToList(p pkg.Packages) string {
var packs []string
for _, pp := range p {
packs = append(packs, pp.HumanReadableString())
}
return strings.Join(packs, " ")
}
// Upgrade upgrades a System based on the Installer options. Returns error in case of failure
func (l *LuetInstaller) Upgrade(s *System) error {
syncedRepos, err := l.SyncRepositories(true)
if err != nil {
return err
}
Info(":thinking: Computing upgrade, please hang tight... :zzz:")
if l.Options.UpgradeNewRevisions {
Info(":memo: note: will consider new build revisions while upgrading")
}
Spinner(32)
uninstall, toInstall, err := l.computeUpgrade(syncedRepos, s)
if err != nil {
return errors.Wrap(err, "failed computing upgrade")
}
SpinnerStop()
if len(uninstall) > 0 {
Info(":recycle: Packages that are going to be removed from the system:\n ", Yellow(packsToList(uninstall)).BgBlack().String())
}
if len(toInstall) > 0 {
Info(":zap: Packages that are going to be installed in the system:\n ", Green(packsToList(toInstall)).BgBlack().String())
}
if len(toInstall) == 0 && len(uninstall) == 0 {
Info("Nothing to do")
return nil
}
if l.Options.Ask {
Info("By going forward, you are also accepting the licenses of the packages that you are going to install in your system.")
if Ask() {
l.Options.Ask = false // Don't prompt anymore
return l.swap(syncedRepos, uninstall, toInstall, s)
} else {
return errors.New("Aborted by user")
}
}
Spinner(32)
defer SpinnerStop()
return l.swap(syncedRepos, uninstall, toInstall, s)
}
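With the Ask option introduced here, Upgrade prints the computed removal and installation sets and waits for confirmation before swapping anything. A rough sketch of how a caller could enable it, assuming the installer package path, that Upgrade is exposed through the Installer interface, and that SolverOptions is a config.LuetSolverOptions (the System value is taken as already populated):

package main

import (
    "github.com/mudler/luet/pkg/config"
    "github.com/mudler/luet/pkg/installer"
    "github.com/mudler/luet/pkg/solver"
)

func upgradeSystem(s *installer.System) error {
    inst := installer.NewLuetInstaller(installer.LuetInstallerOptions{
        Concurrency:   2,
        Ask:           true, // prompt the user before removing/installing packages
        SolverOptions: config.LuetSolverOptions{Implementation: solver.SingleCoreSimple},
    })
    // Syncs repositories, computes the upgrade, prints both package lists and,
    // because Ask is set, waits for user confirmation before calling swap().
    return inst.Upgrade(s)
}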
@@ -197,18 +228,20 @@ func (l *LuetInstaller) swap(syncedRepos Repositories, toRemove pkg.Packages, to
l.Options.Force = true
for _, u := range toRemove {
Info(":package:", u.HumanReadableString(), "Marked for deletion")
err := l.Uninstall(u, s)
if err != nil && !l.Options.Force {
Error("Failed uninstall for ", u.HumanReadableString())
return errors.Wrap(err, "uninstalling "+u.HumanReadableString())
}
}
l.Options.Force = forced
return l.install(syncedRepos, toInstall, s)
match, packages, assertions, allRepos, err := l.computeInstall(syncedRepos, toInstall, s)
if err != nil {
return errors.Wrap(err, "computing installation")
}
return l.install(syncedRepos, match, packages, assertions, allRepos, s)
}
func (l *LuetInstaller) Install(cp pkg.Packages, s *System) error {
@@ -216,7 +249,29 @@ func (l *LuetInstaller) Install(cp pkg.Packages, s *System) error {
if err != nil {
return err
}
return l.install(syncedRepos, cp, s)
match, packages, assertions, allRepos, err := l.computeInstall(syncedRepos, cp, s)
if err != nil {
return err
}
if len(packages) > 0 {
Info("Packages that are going to be installed in the system: \n ", Green(packsToList(packages)).BgBlack().String())
} else {
Info("No packages to install")
return nil
}
if l.Options.Ask {
Info("By going forward, you are also accepting the licenses of the packages that you are going to install in your system.")
if Ask() {
l.Options.Ask = false // Don't prompt anymore
return l.install(syncedRepos, match, packages, assertions, allRepos, s)
} else {
return errors.New("Aborted by user")
}
}
return l.install(syncedRepos, match, packages, assertions, allRepos, s)
}
func (l *LuetInstaller) download(syncedRepos Repositories, cp pkg.Packages) error {
@@ -287,7 +342,7 @@ func (l *LuetInstaller) Reclaim(s *System) error {
if err != nil {
return err
}
Info("Found package:", p.HumanReadableString())
Info(":mag: Found package:", p.HumanReadableString())
toMerge = append(toMerge, ArtifactMatch{Artifact: artefact, Package: p})
break FILES
}
@@ -308,23 +363,25 @@ func (l *LuetInstaller) Reclaim(s *System) error {
return errors.Wrap(err, "Failed creating package")
}
s.Database.SetPackageFiles(&pkg.PackageFile{PackageFingerprint: pack.GetFingerPrint(), Files: match.Artifact.GetFiles()})
Info("Reclaimed package:", pack.HumanReadableString())
Info(":zap: Reclaimed package:", pack.HumanReadableString())
}
Info("Done!")
return nil
}
func (l *LuetInstaller) install(syncedRepos Repositories, cp pkg.Packages, s *System) error {
func (l *LuetInstaller) computeInstall(syncedRepos Repositories, cp pkg.Packages, s *System) (map[string]ArtifactMatch, pkg.Packages, solver.PackagesAssertions, pkg.PackageDatabase, error) {
var p pkg.Packages
toInstall := map[string]ArtifactMatch{}
allRepos := pkg.NewInMemoryDatabase(false)
var solution solver.PackagesAssertions
// Check if the package is installed first
for _, pi := range cp {
vers, _ := s.Database.FindPackageVersions(pi)
if len(vers) >= 1 {
Warning("Filtering out package " + pi.HumanReadableString() + ", it has other versions already installed. Uninstall one of them first ")
// Warning("Filtering out package " + pi.HumanReadableString() + ", it has other versions already installed. Uninstall one of them first ")
continue
//return errors.New("Package " + pi.GetFingerPrint() + " has other versions already installed. Uninstall one of them first: " + strings.Join(vers, " "))
@@ -333,8 +390,7 @@ func (l *LuetInstaller) install(syncedRepos Repositories, cp pkg.Packages, s *Sy
}
if len(p) == 0 {
Warning("No package to install, bailing out with no errors")
return nil
return toInstall, p, solution, allRepos, nil
}
// First get metas from all repos (and decodes trees)
@@ -342,60 +398,64 @@ func (l *LuetInstaller) install(syncedRepos Repositories, cp pkg.Packages, s *Sy
// matches := syncedRepos.PackageMatches(p)
// compute a "big" world
allRepos := pkg.NewInMemoryDatabase(false)
syncedRepos.SyncDatabase(allRepos)
p = syncedRepos.ResolveSelectors(p)
toInstall := map[string]ArtifactMatch{}
var packagesToInstall pkg.Packages
var err error
var solution solver.PackagesAssertions
if !l.Options.NoDeps {
solv := solver.NewResolver(s.Database, allRepos, pkg.NewInMemoryDatabase(false), l.Options.SolverOptions.Resolver())
solv := solver.NewResolver(solver.Options{Type: l.Options.SolverOptions.Implementation, Concurrency: l.Options.Concurrency}, s.Database, allRepos, pkg.NewInMemoryDatabase(false), l.Options.SolverOptions.Resolver())
solution, err = solv.Install(p)
/// TODO: PackageAssertions needs to be a map[fingerprint]pack so lookup is in O(1)
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Failed solving solution for package")
return toInstall, p, solution, allRepos, errors.Wrap(err, "Failed solving solution for package")
}
// Gathers things to install
for _, assertion := range solution {
if assertion.Value {
if _, err := s.Database.FindPackage(assertion.Package); err == nil {
// skip matching if it is installed already
continue
}
packagesToInstall = append(packagesToInstall, assertion.Package)
}
}
} else if !l.Options.OnlyDeps {
for _, currentPack := range p {
if _, err := s.Database.FindPackage(currentPack); err == nil {
// skip matching if it is installed already
continue
}
packagesToInstall = append(packagesToInstall, currentPack)
}
}
Info(":deciduous_tree: Finding packages to install")
// Gathers things to install
for _, currentPack := range packagesToInstall {
// Check if package is already installed.
if _, err := s.Database.FindPackage(currentPack); err == nil {
// skip matching if it is installed already
continue
}
matches := syncedRepos.PackageMatches(pkg.Packages{currentPack})
if len(matches) == 0 {
return errors.New("Failed matching solutions against repository for " + currentPack.HumanReadableString() + " where are definitions coming from?!")
return toInstall, p, solution, allRepos, errors.New("Failed matching solutions against repository for " + currentPack.HumanReadableString() + " where are definitions coming from?!")
}
A:
for _, artefact := range matches[0].Repo.GetIndex() {
if artefact.GetCompileSpec().GetPackage() == nil {
return errors.New("Package in compilespec empty")
return toInstall, p, solution, allRepos, errors.New("Package in compilespec empty")
}
if matches[0].Package.Matches(artefact.GetCompileSpec().GetPackage()) {
currentPack.SetBuildTimestamp(artefact.GetCompileSpec().GetPackage().GetBuildTimestamp())
// Filter out already installed
if _, err := s.Database.FindPackage(currentPack); err != nil {
toInstall[currentPack.GetFingerPrint()] = ArtifactMatch{Package: currentPack, Artifact: artefact, Repository: matches[0].Repo}
Info("\t:package:", currentPack.HumanReadableString(), "from repository", matches[0].Repo.GetName())
}
break A
}
}
}
return toInstall, p, solution, allRepos, nil
}
func (l *LuetInstaller) install(syncedRepos Repositories, toInstall map[string]ArtifactMatch, p pkg.Packages, solution solver.PackagesAssertions, allRepos pkg.PackageDatabase, s *System) error {
// Install packages into rootfs in parallel.
all := make(chan ArtifactMatch)
@@ -435,9 +495,9 @@ func (l *LuetInstaller) install(syncedRepos Repositories, cp pkg.Packages, s *Sy
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Failed creating package")
}
bus.Manager.Publish(bus.EventPackageInstall, c)
}
executedFinalizer := map[string]bool{}
var toFinalize []pkg.Package
if !l.Options.NoDeps {
// TODO: Lower those errors as warning
for _, w := range p {
@@ -449,36 +509,17 @@ func (l *LuetInstaller) install(syncedRepos Repositories, cp pkg.Packages, s *Sy
ORDER:
for _, ass := range ordered {
if ass.Value {
installed, ok := toInstall[ass.Package.GetFingerPrint()]
if !ok {
// It was a dep already installed in the system, so we can skip it safely
continue ORDER
}
treePackage, err := installed.Repository.GetTree().GetDatabase().FindPackage(ass.Package)
if err != nil {
return errors.Wrap(err, "Error getting package "+ass.Package.HumanReadableString())
}
if helpers.Exists(treePackage.Rel(tree.FinalizerFile)) {
Info("Executing finalizer for " + ass.Package.HumanReadableString())
finalizerRaw, err := ioutil.ReadFile(treePackage.Rel(tree.FinalizerFile))
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Error reading file "+treePackage.Rel(tree.FinalizerFile))
}
if _, exists := executedFinalizer[ass.Package.GetFingerPrint()]; !exists {
finalizer, err := NewLuetFinalizerFromYaml(finalizerRaw)
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Error reading finalizer "+treePackage.Rel(tree.FinalizerFile))
}
err = finalizer.RunInstall(s)
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Error executing install finalizer "+treePackage.Rel(tree.FinalizerFile))
}
executedFinalizer[ass.Package.GetFingerPrint()] = true
}
}
toFinalize = append(toFinalize, treePackage)
}
}
@@ -489,29 +530,11 @@ func (l *LuetInstaller) install(syncedRepos Repositories, cp pkg.Packages, s *Sy
if err != nil {
return errors.Wrap(err, "Error getting package "+c.Package.HumanReadableString())
}
if helpers.Exists(treePackage.Rel(tree.FinalizerFile)) {
Info("Executing finalizer for " + c.Package.HumanReadableString())
finalizerRaw, err := ioutil.ReadFile(treePackage.Rel(tree.FinalizerFile))
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Error reading file "+treePackage.Rel(tree.FinalizerFile))
}
if _, exists := executedFinalizer[c.Package.GetFingerPrint()]; !exists {
finalizer, err := NewLuetFinalizerFromYaml(finalizerRaw)
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Error reading finalizer "+treePackage.Rel(tree.FinalizerFile))
}
err = finalizer.RunInstall(s)
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Error executing install finalizer "+treePackage.Rel(tree.FinalizerFile))
}
executedFinalizer[c.Package.GetFingerPrint()] = true
}
}
toFinalize = append(toFinalize, treePackage)
}
}
return nil
return s.ExecuteFinalizers(toFinalize, l.Options.Force)
}
func (l *LuetInstaller) downloadPackage(a ArtifactMatch) (compiler.Artifact, error) {
@@ -562,9 +585,9 @@ func (l *LuetInstaller) downloadWorker(i int, wg *sync.WaitGroup, c <-chan Artif
return errors.Wrap(err, "Failed installing package "+p.Package.GetName())
}
if err == nil {
Info(":package: ", p.Package.HumanReadableString(), "downloaded")
Info("\n:package: Package ", p.Package.HumanReadableString(), "downloaded")
} else if err != nil && l.Options.Force {
Info(":package: ", p.Package.HumanReadableString(), "downloaded with failures (force download)")
Info("\n:package: ", p.Package.HumanReadableString(), "downloaded with failures (force download)")
}
}
@@ -583,9 +606,9 @@ func (l *LuetInstaller) installerWorker(i int, wg *sync.WaitGroup, c <-chan Arti
return errors.Wrap(err, "Failed installing package "+p.Package.GetName())
}
if err == nil {
Info(":package: ", p.Package.HumanReadableString(), "installed")
Info(":package: Package ", p.Package.HumanReadableString(), "installed")
} else if err != nil && l.Options.Force {
Info(":package: ", p.Package.HumanReadableString(), "installed with failures (force install)")
Info(":package: Package ", p.Package.HumanReadableString(), "installed with failures (forced install)")
}
}
@@ -593,16 +616,39 @@ func (l *LuetInstaller) installerWorker(i int, wg *sync.WaitGroup, c <-chan Arti
}
func (l *LuetInstaller) uninstall(p pkg.Package, s *System) error {
var cp *config.ConfigProtect
annotationDir := ""
files, err := s.Database.GetPackageFiles(p)
if err != nil {
return errors.Wrap(err, "Failed getting installed files")
}
// Remove from target
for _, f := range files {
target := filepath.Join(s.Target, f)
Debug("Removing", target)
if !config.LuetCfg.ConfigProtectSkip {
if p.HasAnnotation(string(pkg.ConfigProtectAnnnotation)) {
dir, ok := p.GetAnnotations()[string(pkg.ConfigProtectAnnnotation)]
if ok {
annotationDir = dir
}
}
cp = config.NewConfigProtect(annotationDir)
cp.Map(files)
}
toRemove, notPresent := helpers.OrderFiles(s.Target, files)
// Remove from target
for _, f := range toRemove {
target := filepath.Join(s.Target, f)
if !config.LuetCfg.ConfigProtectSkip && cp.Protected(f) {
Debug("Preserving protected file:", f)
continue
}
Debug("Removing", target)
if l.Options.PreserveSystemEssentialData &&
strings.HasPrefix(f, config.LuetCfg.GetSystem().GetSystemPkgsCacheDirPath()) ||
strings.HasPrefix(f, config.LuetCfg.GetSystem().GetSystemRepoDatabaseDirPath()) {
@@ -610,11 +656,41 @@ func (l *LuetInstaller) uninstall(p pkg.Package, s *System) error {
continue
}
err := os.Remove(target)
fi, err := os.Lstat(target)
if err != nil {
Warning("Failed removing file (not present in the system target ?)", target)
Warning("File not found (it was before?) ", err.Error())
continue
}
switch mode := fi.Mode(); {
case mode.IsDir():
files, err := ioutil.ReadDir(target)
if err != nil {
Warning("Failed reading folder", target, err.Error())
}
if len(files) != 0 {
Debug("Preserving not-empty folder", target)
continue
}
}
if err = os.Remove(target); err != nil {
Warning("Failed removing file (maybe not present in the system target anymore ?)", target, err.Error())
}
}
for _, f := range notPresent {
target := filepath.Join(s.Target, f)
if !config.LuetCfg.ConfigProtectSkip && cp.Protected(f) {
Debug("Preserving protected file:", f)
continue
}
if err = os.Remove(target); err != nil {
Debug("Failed removing file (not present in the system target)", target, err.Error())
}
}
err = s.Database.RemovePackageFiles(p)
if err != nil {
return errors.Wrap(err, "Failed removing package files from database")
@@ -624,16 +700,15 @@ func (l *LuetInstaller) uninstall(p pkg.Package, s *System) error {
return errors.Wrap(err, "Failed removing package from database")
}
Info(p.GetFingerPrint(), "Removed")
bus.Manager.Publish(bus.EventPackageUnInstall, p)
Info(":recycle:", p.GetFingerPrint(), "Removed :heavy_check_mark:")
return nil
}
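The removal loop above preserves any file that the ConfigProtect mapping marks as protected, unless protection is globally skipped. As a minimal sketch of that kind of prefix check, with hypothetical names rather than the config package's API:
package main

import (
	"fmt"
	"strings"
)

// protected reports whether file sits under one of the protected directories.
func protected(file string, protectedDirs []string) bool {
	for _, d := range protectedDirs {
		if file == d || strings.HasPrefix(file, strings.TrimSuffix(d, "/")+"/") {
			return true
		}
	}
	return false
}

func main() {
	dirs := []string{"/etc/foo"}
	for _, f := range []string{"/etc/foo/conf.yaml", "/usr/bin/foo"} {
		fmt.Println(f, "protected:", protected(f, dirs))
	}
}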
func (l *LuetInstaller) Uninstall(p pkg.Package, s *System) error {
Spinner(32)
defer SpinnerStop()
Info("Uninstalling :package:", p.HumanReadableString(), "hang tight")
func (l *LuetInstaller) computeUninstall(p pkg.Package, s *System) (pkg.Packages, error) {
var toUninstall pkg.Packages
// Compute the uninstall set against the whole world - remove packages in parallel - run the uninstall finalizers (in order). TODO: mark the uninstallation in the db
// Get installed definition
checkConflicts := l.Options.CheckConflicts
@@ -650,44 +725,71 @@ func (l *LuetInstaller) Uninstall(p pkg.Package, s *System) error {
for _, i := range s.Database.World() {
_, err := installedtmp.CreatePackage(i)
if err != nil {
return errors.Wrap(err, "Failed create temporary in-memory db")
return toUninstall, errors.Wrap(err, "Failed create temporary in-memory db")
}
}
if !l.Options.NoDeps {
Info("Finding :package:", p.HumanReadableString(), "dependency graph :deciduous_tree:")
solv := solver.NewResolver(installedtmp, installedtmp, pkg.NewInMemoryDatabase(false), l.Options.SolverOptions.Resolver())
solv := solver.NewResolver(solver.Options{Type: l.Options.SolverOptions.Implementation, Concurrency: l.Options.Concurrency}, installedtmp, installedtmp, pkg.NewInMemoryDatabase(false), l.Options.SolverOptions.Resolver())
var solution pkg.Packages
var err error
if l.Options.FullCleanUninstall {
solution, err = solv.UninstallUniverse(pkg.Packages{p})
if err != nil {
return errors.Wrap(err, "Could not solve the uninstall constraints. Tip: try with --solver-type qlearning or with --force, or by removing packages excluding their dependencies with --nodeps")
return toUninstall, errors.Wrap(err, "Could not solve the uninstall constraints. Tip: try with --solver-type qlearning or with --force, or by removing packages excluding their dependencies with --nodeps")
}
} else {
solution, err = solv.Uninstall(p, checkConflicts, full)
solution, err = solv.Uninstall(checkConflicts, full, p)
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Could not solve the uninstall constraints. Tip: try with --solver-type qlearning or with --force, or by removing packages excluding their dependencies with --nodeps")
return toUninstall, errors.Wrap(err, "Could not solve the uninstall constraints. Tip: try with --solver-type qlearning or with --force, or by removing packages excluding their dependencies with --nodeps")
}
}
for _, p := range solution {
Info("Uninstalling", p.HumanReadableString())
toUninstall = append(toUninstall, p)
}
} else {
toUninstall = append(toUninstall, p)
}
return toUninstall, nil
}
func (l *LuetInstaller) Uninstall(p pkg.Package, s *System) error {
Spinner(32)
toUninstall, err := l.computeUninstall(p, s)
if err != nil {
return errors.Wrap(err, "while computing uninstall")
}
SpinnerStop()
uninstall := func() error {
for _, p := range toUninstall {
err := l.uninstall(p, s)
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Uninstall failed")
}
}
} else {
Info("Uninstalling", p.HumanReadableString(), "without deps")
err := l.uninstall(p, s)
if err != nil && !l.Options.Force {
return errors.Wrap(err, "Uninstall failed")
}
Info(":package:", p.HumanReadableString(), "uninstalled")
return nil
}
return nil
if len(toUninstall) == 0 {
Info("Nothing to do")
return nil
}
Info(":recycle: Packages that are going to be removed from the system:\n ", Yellow(packsToList(toUninstall)).BgBlack().String())
if l.Options.Ask {
Info("By going forward, you are also accepting the licenses of the packages that you are going to install in your system.")
if Ask() {
l.Options.Ask = false // Don't prompt anymore
return uninstall()
} else {
return errors.New("Aborted by user")
}
}
return uninstall()
}
func (l *LuetInstaller) Repositories(r []Repository) { l.PackageRepositories = r }

View File

@@ -24,6 +24,8 @@ import (
compiler "github.com/mudler/luet/pkg/compiler"
backend "github.com/mudler/luet/pkg/compiler/backend"
"github.com/mudler/luet/pkg/helpers"
solver "github.com/mudler/luet/pkg/solver"
. "github.com/mudler/luet/pkg/installer"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/tree"
@@ -47,7 +49,7 @@ var _ = Describe("Installer", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -163,7 +165,7 @@ urls:
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(),
generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -281,7 +283,7 @@ urls:
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -399,7 +401,7 @@ urls:
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -486,7 +488,7 @@ urls:
Expect(len(generalRecipe2.GetDatabase().GetPackages())).To(Equal(1))
c = compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe2.GetDatabase(), compiler.NewDefaultCompilerOptions())
c = compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe2.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err = c.FromPackage(&pkg.DefaultPackage{Name: "alpine", Category: "seed", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -545,7 +547,7 @@ urls:
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(4))
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -663,8 +665,8 @@ urls:
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
Expect(len(generalRecipeNewRepo.GetDatabase().GetPackages())).To(Equal(3))
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
c2 := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipeNewRepo.GetDatabase(), compiler.NewDefaultCompilerOptions())
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
c2 := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipeNewRepo.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -795,7 +797,7 @@ urls:
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(4))
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -912,7 +914,7 @@ urls:
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(4))
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -1017,7 +1019,7 @@ urls:
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -1109,7 +1111,7 @@ urls:
Expect(len(generalRecipe2.GetDatabase().GetPackages())).To(Equal(3))
c = compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe2.GetDatabase(), compiler.NewDefaultCompilerOptions())
c = compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe2.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err = c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.1"})
Expect(err).ToNot(HaveOccurred())

View File

@@ -26,6 +26,7 @@ import (
"strings"
"time"
"github.com/mudler/luet/pkg/bus"
"github.com/mudler/luet/pkg/compiler"
"github.com/mudler/luet/pkg/config"
"github.com/mudler/luet/pkg/helpers"
@@ -426,6 +427,14 @@ func (r *LuetSystemRepository) Write(dst string, resetRevision bool) error {
r.Name, r.Revision, r.LastUpdate,
))
bus.Manager.Publish(bus.EventRepositoryPreBuild, struct {
Repo LuetSystemRepository
Path string
}{
Repo: *r,
Path: dst,
})
// Create tree and repository file
archive, err := config.LuetCfg.GetSystem().TempDir("archive")
if err != nil {
@@ -506,6 +515,14 @@ func (r *LuetSystemRepository) Write(dst string, resetRevision bool) error {
return err
}
bus.Manager.Publish(bus.EventRepositoryPostBuild, struct {
Repo LuetSystemRepository
Path string
}{
Repo: *r,
Path: dst,
})
return nil
}

View File

@@ -28,6 +28,7 @@ import (
"github.com/mudler/luet/pkg/helpers"
. "github.com/mudler/luet/pkg/installer"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/solver"
"github.com/mudler/luet/pkg/tree"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
@@ -48,7 +49,7 @@ var _ = Describe("Repository", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
compiler := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -116,11 +117,11 @@ var _ = Describe("Repository", func() {
Expect(len(generalRecipe2.GetDatabase().GetPackages())).To(Equal(1))
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler2 := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe2.GetDatabase(), compiler.NewDefaultCompilerOptions())
compiler2 := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe2.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec2, err := compiler2.FromPackage(&pkg.DefaultPackage{Name: "alpine", Category: "seed", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
compiler := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
compiler := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions(), solver.Options{Type: solver.SingleCoreSimple})
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())

View File

@@ -1,7 +1,12 @@
package installer
import (
. "github.com/mudler/luet/pkg/logger"
"github.com/mudler/luet/pkg/helpers"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/tree"
"github.com/pkg/errors"
)
type System struct {
@@ -12,3 +17,31 @@ type System struct {
func (s *System) World() (pkg.Packages, error) {
return s.Database.World(), nil
}
type templatedata map[string]interface{}
func (s *System) ExecuteFinalizers(packs []pkg.Package, force bool) error {
executedFinalizer := map[string]bool{}
for _, p := range packs {
if helpers.Exists(p.Rel(tree.FinalizerFile)) {
out, err := helpers.RenderFiles(p.Rel(tree.FinalizerFile), p.Rel(tree.DefinitionFile), "")
if err != nil && !force {
return errors.Wrap(err, "reading file "+p.Rel(tree.FinalizerFile))
}
if _, exists := executedFinalizer[p.GetFingerPrint()]; !exists {
Info("Executing finalizer for " + p.HumanReadableString())
finalizer, err := NewLuetFinalizerFromYaml([]byte(out))
if err != nil && !force {
return errors.Wrap(err, "Error reading finalizer "+p.Rel(tree.FinalizerFile))
}
err = finalizer.RunInstall(s)
if err != nil && !force {
return errors.Wrap(err, "Error executing install finalizer "+p.Rel(tree.FinalizerFile))
}
executedFinalizer[p.GetFingerPrint()] = true
}
}
}
return nil
}
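ExecuteFinalizers keys execution on the package fingerprint, so a finalizer runs at most once even when the same package appears several times in the list. A standalone sketch of that dedup pattern, with a hypothetical callback instead of the real finalizer type:
package main

import "fmt"

// runOnce executes fn for each key, skipping keys that were already handled,
// mirroring the fingerprint-based dedup used above.
func runOnce(keys []string, fn func(string) error) error {
	executed := map[string]bool{}
	for _, k := range keys {
		if executed[k] {
			continue // finalizer for this fingerprint already ran
		}
		if err := fn(k); err != nil {
			return err
		}
		executed[k] = true
	}
	return nil
}

func main() {
	// "cat/foo-1.0" appears twice but its finalizer runs only once.
	_ = runOnce([]string{"cat/foo-1.0", "cat/bar-2.0", "cat/foo-1.0"}, func(fp string) error {
		fmt.Println("executing finalizer for", fp)
		return nil
	})
}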

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"os"
"regexp"
"strings"
. "github.com/mudler/luet/pkg/config"
@@ -36,6 +37,22 @@ func GetAurora() Aurora {
return aurora
}
func Ask() bool {
var input string
Info("Do you want to continue with this operation? [y/N]: ")
_, err := fmt.Scanln(&input)
if err != nil {
return false
}
input = strings.ToLower(input)
if input == "y" || input == "yes" {
return true
}
return false
}
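The prompt above answers true only for a lowercase-normalised "y" or "yes"; a read error or anything else falls through to false. A small self-contained sketch of the same accept/deny decision, so it can be exercised without touching stdin (hypothetical helper, not part of the package):
package main

import (
	"fmt"
	"strings"
)

// accepted reproduces the decision logic of the prompt: only "y"/"yes"
// (case-insensitively) confirm the operation.
func accepted(input string) bool {
	switch strings.ToLower(strings.TrimSpace(input)) {
	case "y", "yes":
		return true
	default:
		return false
	}
}

func main() {
	for _, in := range []string{"y", "YES", "n", ""} {
		fmt.Printf("%q -> %v\n", in, accepted(in))
	}
}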
func ZapLogger() error {
var err error
if z == nil {
@@ -183,7 +200,7 @@ func msg(level string, withoutColor bool, msg ...interface{}) {
case "debug":
levelMsg = White(message).BgBlack().String()
case "info":
levelMsg = Bold(White(message)).BgBlack().String()
levelMsg = message
case "error":
levelMsg = Bold(Red(":bomb: " + message + ":fire:")).BgBlack().String()
}

View File

@@ -28,6 +28,7 @@ type PackageDatabase interface {
}
type PackageSet interface {
GetRevdeps(p Package) (Packages, error)
GetPackages() []string //Ids
CreatePackage(pkg Package) (string, error)
GetPackage(ID string) (Package, error)

View File

@@ -0,0 +1,116 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package pkg_test
import (
"io/ioutil"
"os"
"strconv"
. "github.com/mudler/luet/pkg/package"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Database benchmark", func() {
Context("BoltDB", func() {
a := NewPackage("A", ">=1.0", []*DefaultPackage{}, []*DefaultPackage{})
tmpfile, _ := ioutil.TempFile(os.TempDir(), "tests")
defer os.Remove(tmpfile.Name()) // clean up
var db PackageSet
BeforeEach(func() {
tmpfile, _ = ioutil.TempFile(os.TempDir(), "tests")
defer os.Remove(tmpfile.Name()) // clean up
db = NewBoltDatabase(tmpfile.Name())
if os.Getenv("BENCHMARK_TESTS") != "true" {
Skip("BENCHMARK_TESTS not enabled")
}
})
Measure("it should be fast in computing world from a 50000 dataset", func(b Benchmarker) {
for i := 0; i < 50000; i++ {
a = NewPackage("A"+strconv.Itoa(i), ">=1.0", []*DefaultPackage{}, []*DefaultPackage{})
_, err := db.CreatePackage(a)
Expect(err).ToNot(HaveOccurred())
}
runtime := b.Time("runtime", func() {
packs := db.World()
Expect(len(packs)).To(Equal(50000))
})
Ω(runtime.Seconds()).Should(BeNumerically("<", 30), "World() shouldn't take too long.")
}, 1)
Measure("it should be fast in computing world from a 100000 dataset", func(b Benchmarker) {
for i := 0; i < 100000; i++ {
a = NewPackage("A"+strconv.Itoa(i), ">=1.0", []*DefaultPackage{}, []*DefaultPackage{})
_, err := db.CreatePackage(a)
Expect(err).ToNot(HaveOccurred())
}
runtime := b.Time("runtime", func() {
packs := db.World()
Expect(len(packs)).To(Equal(100000))
})
Ω(runtime.Seconds()).Should(BeNumerically("<", 30), "World() shouldn't take too long.")
}, 1)
})
Context("InMemory", func() {
a := NewPackage("A", ">=1.0", []*DefaultPackage{}, []*DefaultPackage{})
tmpfile, _ := ioutil.TempFile(os.TempDir(), "tests")
defer os.Remove(tmpfile.Name()) // clean up
var db PackageSet
BeforeEach(func() {
tmpfile, _ = ioutil.TempFile(os.TempDir(), "tests")
defer os.Remove(tmpfile.Name()) // clean up
db = NewInMemoryDatabase(false)
if os.Getenv("BENCHMARK_TESTS") != "true" {
Skip("BENCHMARK_TESTS not enabled")
}
})
Measure("it should be fast in computing world from a 100000 dataset", func(b Benchmarker) {
runtime := b.Time("runtime", func() {
for i := 0; i < 100000; i++ {
a = NewPackage("A"+strconv.Itoa(i), ">=1.0", []*DefaultPackage{}, []*DefaultPackage{})
_, err := db.CreatePackage(a)
Expect(err).ToNot(HaveOccurred())
}
packs := db.World()
Expect(len(packs)).To(Equal(100000))
})
Ω(runtime.Seconds()).Should(BeNumerically("<", 10), "World() shouldn't take too long.")
}, 2)
})
})

View File

@@ -86,6 +86,17 @@ func (db *BoltDatabase) Retrieve(ID string) ([]byte, error) {
return enc, nil
}
// GetRevdeps uses a new in-memory db to calculate revdeps
// TODO: Have a memory instance for boltdb, so we don't recompute on each call,
// as this is REALLY expensive. But we don't usually perform those operations on a file db.
func (db *BoltDatabase) GetRevdeps(p Package) (Packages, error) {
memory := NewInMemoryDatabase(false)
for _, p := range db.World() {
memory.CreatePackage(p)
}
return memory.GetRevdeps(p)
}
func (db *BoltDatabase) FindPackage(tofind Package) (Package, error) {
// Provides: Return the replaced package here
if provided, err := db.getProvide(tofind); err == nil {
@@ -162,26 +173,17 @@ func (db *BoltDatabase) GetAllPackages(packages chan Package) error {
return err
}
defer bolt.Close()
// Fetching records one by one (useful when the bucket contains a lot of records)
//query := bolt.Select()
var packs []Package
var packs []DefaultPackage
err = bolt.All(&packs)
if err != nil {
return err
}
for _, r := range packs {
packages <- r
packages <- &r
}
return nil
// return query.Each(new(DefaultPackage), func(record interface{}) error {
// u := record.(*DefaultPackage)
// packages <- u
// return err
// })
}
// Encode encodes the package to string.
@@ -316,16 +318,23 @@ func (db *BoltDatabase) RemovePackage(p Package) error {
}
func (db *BoltDatabase) World() Packages {
var packs []DefaultPackage
var all []Package
// FIXME: This should all be locked in the db - for now forbid running the solver in threads.
for _, k := range db.GetPackages() {
pack, err := db.GetPackage(k)
if err == nil {
all = append(all, pack)
}
bolt, err := storm.Open(db.Path, storm.BoltOptions(0600, &bbolt.Options{Timeout: 30 * time.Second}))
if err != nil {
return Packages([]Package{})
}
return Packages(all)
defer bolt.Close()
err = bolt.All(&packs)
if err != nil {
return Packages([]Package{})
}
models := make([]Package, len(packs))
for i, _ := range packs {
models[i] = &packs[i]
}
return Packages(models)
}
func (db *BoltDatabase) FindPackageCandidate(p Package) (Package, error) {
@@ -376,6 +385,11 @@ func (db *BoltDatabase) FindPackages(p Package) (Packages, error) {
// FindPackageVersions returns the list of the packages belonging to cat/name
func (db *BoltDatabase) FindPackageVersions(p Package) (Packages, error) {
// Provides: Treat as the replaced package here
if provided, err := db.getProvide(p); err == nil {
p = provided
}
var versionsInWorld []Package
for _, w := range db.World() {
if w.GetName() != p.GetName() || w.GetCategory() != p.GetCategory() {
@@ -390,11 +404,7 @@ func (db *BoltDatabase) FindPackageVersions(p Package) (Packages, error) {
func (db *BoltDatabase) FindPackageLabel(labelKey string) (Packages, error) {
var ans []Package
for _, k := range db.GetPackages() {
pack, err := db.GetPackage(k)
if err != nil {
return ans, err
}
for _, pack := range db.World() {
if pack.HasLabel(labelKey) {
ans = append(ans, pack)
}
@@ -410,11 +420,7 @@ func (db *BoltDatabase) FindPackageLabelMatch(pattern string) (Packages, error)
return nil, errors.New("Invalid regex " + pattern + "!")
}
for _, k := range db.GetPackages() {
pack, err := db.GetPackage(k)
if err != nil {
return ans, err
}
for _, pack := range db.World() {
if pack.MatchLabel(re) {
ans = append(ans, pack)
}
@@ -431,12 +437,7 @@ func (db *BoltDatabase) FindPackageMatch(pattern string) (Packages, error) {
return nil, errors.New("Invalid regex " + pattern + "!")
}
for _, k := range db.GetPackages() {
pack, err := db.GetPackage(k)
if err != nil {
return ans, err
}
for _, pack := range db.World() {
if re.MatchString(pack.HumanReadableString()) {
ans = append(ans, pack)
}

View File

@@ -0,0 +1,152 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package pkg_test
import (
"io/ioutil"
"os"
"regexp"
. "github.com/mudler/luet/pkg/package"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("BoltDB Database", func() {
tmpfile, _ := ioutil.TempFile(os.TempDir(), "tests")
defer os.Remove(tmpfile.Name()) // clean up
var db PackageDatabase
BeforeEach(func() {
tmpfile, _ = ioutil.TempFile(os.TempDir(), "tests")
defer os.Remove(tmpfile.Name()) // clean up
db = NewBoltDatabase(tmpfile.Name())
})
Context("Simple package", func() {
a := NewPackage("A", ">=1.0", []*DefaultPackage{}, []*DefaultPackage{})
It("Find packages", func() {
ID, err := db.CreatePackage(a)
Expect(err).ToNot(HaveOccurred())
pack, err := db.GetPackage(ID)
Expect(err).ToNot(HaveOccurred())
Expect(pack).To(Equal(a))
ids := db.GetPackages()
Expect(ids).To(Equal([]string{"1"}))
pack, err = db.FindPackage(a)
Expect(err).ToNot(HaveOccurred())
Expect(pack).To(Equal(a))
})
It("Expands correctly", func() {
a := NewPackage("A", ">=1.0", []*DefaultPackage{}, []*DefaultPackage{})
a1 := NewPackage("A", "1.0", []*DefaultPackage{}, []*DefaultPackage{})
a11 := NewPackage("A", "1.1", []*DefaultPackage{}, []*DefaultPackage{})
a01 := NewPackage("A", "0.1", []*DefaultPackage{}, []*DefaultPackage{})
re := regexp.MustCompile("project[0-9][=].*")
for _, p := range []Package{a1, a11, a01} {
_, err := db.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
lst, err := a.Expand(db)
Expect(err).ToNot(HaveOccurred())
Expect(lst).To(ContainElement(a11))
Expect(lst).To(ContainElement(a1))
Expect(lst).ToNot(ContainElement(a01))
Expect(len(lst)).To(Equal(2))
p := lst.Best(nil)
Expect(p).To(Equal(a11))
// Test annotation with null map
Expect(a.MatchAnnotation(re)).To(Equal(false))
})
It("Find best package candidate", func() {
db := NewInMemoryDatabase(false)
a := NewPackage("A", "1.0", []*DefaultPackage{}, []*DefaultPackage{})
a1 := NewPackage("A", "1.1", []*DefaultPackage{}, []*DefaultPackage{})
a3 := NewPackage("A", "1.3", []*DefaultPackage{}, []*DefaultPackage{})
_, err := db.CreatePackage(a)
Expect(err).ToNot(HaveOccurred())
_, err = db.CreatePackage(a1)
Expect(err).ToNot(HaveOccurred())
_, err = db.CreatePackage(a3)
Expect(err).ToNot(HaveOccurred())
s := NewPackage("A", ">=1.0", []*DefaultPackage{}, []*DefaultPackage{})
pack, err := db.FindPackageCandidate(s)
Expect(err).ToNot(HaveOccurred())
Expect(pack).To(Equal(a3))
})
It("Find specific package candidate", func() {
db := NewInMemoryDatabase(false)
a := NewPackage("A", "1.0", []*DefaultPackage{}, []*DefaultPackage{})
a1 := NewPackage("A", "1.1", []*DefaultPackage{}, []*DefaultPackage{})
a3 := NewPackage("A", "1.3", []*DefaultPackage{}, []*DefaultPackage{})
_, err := db.CreatePackage(a)
Expect(err).ToNot(HaveOccurred())
_, err = db.CreatePackage(a1)
Expect(err).ToNot(HaveOccurred())
_, err = db.CreatePackage(a3)
Expect(err).ToNot(HaveOccurred())
s := NewPackage("A", "=1.0", []*DefaultPackage{}, []*DefaultPackage{})
pack, err := db.FindPackageCandidate(s)
Expect(err).ToNot(HaveOccurred())
Expect(pack).To(Equal(a))
})
It("Provides replaces definitions", func() {
db := NewInMemoryDatabase(false)
a := NewPackage("A", "1.0", []*DefaultPackage{}, []*DefaultPackage{})
a1 := NewPackage("A", "1.1", []*DefaultPackage{}, []*DefaultPackage{})
a3 := NewPackage("A", "1.3", []*DefaultPackage{}, []*DefaultPackage{})
a3.SetProvides([]*DefaultPackage{{Name: "A", Category: "", Version: "1.0"}})
Expect(a3.GetProvides()).To(Equal([]*DefaultPackage{{Name: "A", Category: "", Version: "1.0"}}))
_, err := db.CreatePackage(a)
Expect(err).ToNot(HaveOccurred())
_, err = db.CreatePackage(a1)
Expect(err).ToNot(HaveOccurred())
_, err = db.CreatePackage(a3)
Expect(err).ToNot(HaveOccurred())
s := NewPackage("A", "1.0", []*DefaultPackage{}, []*DefaultPackage{})
pack, err := db.FindPackage(s)
Expect(err).ToNot(HaveOccurred())
Expect(pack).To(Equal(a3))
})
})
})

View File

@@ -31,6 +31,7 @@ var DBInMemoryInstance = &InMemoryDatabase{
Database: map[string]string{},
CacheNoVersion: map[string]map[string]interface{}{},
ProvidesDatabase: map[string]map[string]Package{},
RevDepsDatabase: map[string]map[string]Package{},
}
type InMemoryDatabase struct {
@@ -39,6 +40,7 @@ type InMemoryDatabase struct {
FileDatabase map[string][]string
CacheNoVersion map[string]map[string]interface{}
ProvidesDatabase map[string]map[string]Package
RevDepsDatabase map[string]map[string]Package
}
func NewInMemoryDatabase(singleton bool) PackageDatabase {
@@ -50,6 +52,7 @@ func NewInMemoryDatabase(singleton bool) PackageDatabase {
Database: map[string]string{},
CacheNoVersion: map[string]map[string]interface{}{},
ProvidesDatabase: map[string]map[string]Package{},
RevDepsDatabase: map[string]map[string]Package{},
}
}
return DBInMemoryInstance
@@ -125,6 +128,47 @@ func (db *InMemoryDatabase) GetAllPackages(packages chan Package) error {
return nil
}
func (db *InMemoryDatabase) getRevdeps(p Package, visited map[string]interface{}) (Packages, error) {
var versionsInWorld Packages
if _, ok := visited[p.HumanReadableString()]; ok {
return versionsInWorld, nil
}
visited[p.HumanReadableString()] = true
var res Packages
packs, err := db.FindPackages(p)
if err != nil {
return res, err
}
for _, pp := range packs {
// db.Lock()
list := db.RevDepsDatabase[pp.GetFingerPrint()]
// db.Unlock()
for _, revdep := range list {
dep, err := db.FindPackage(revdep)
if err != nil {
return res, err
}
res = append(res, dep)
packs, err := db.getRevdeps(dep, visited)
if err != nil {
return res, err
}
res = append(res, packs...)
}
}
return res.Unique(), nil
}
// GetRevdeps returns the package reverse dependencies,
// also matching selectors in versions (>, <, >=, <=)
// TODO: Code should use the db explicitly
func (db *InMemoryDatabase) GetRevdeps(p Package) (Packages, error) {
return db.getRevdeps(p, make(map[string]interface{}))
}
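getRevdeps walks the RevDepsDatabase with a visited set so cycles and repeated fingerprints don't loop forever, and the result is deduplicated at the end. A simplified, self-contained illustration of the same visited-set walk over a plain reverse-dependency map (a sketch of the idea, not the package database API):
package main

import "fmt"

// revdeps collects every package that (transitively) depends on p,
// using a visited set to stay safe on cycles.
func revdeps(p string, graph map[string][]string, visited map[string]bool) []string {
	if visited[p] {
		return nil
	}
	visited[p] = true
	var out []string
	for _, dep := range graph[p] { // direct reverse dependencies of p
		out = append(out, dep)
		out = append(out, revdeps(dep, graph, visited)...)
	}
	return out
}

func main() {
	// B requires A, C requires B: the reverse-dependency map of A is {B}, of B is {C}.
	graph := map[string][]string{"A": {"B"}, "B": {"C"}}
	fmt.Println(revdeps("A", graph, map[string]bool{})) // [B C]
}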
// Encode encodes the package to string.
// It returns an ID which can be used to retrieve the package later on.
func (db *InMemoryDatabase) CreatePackage(p Package) (string, error) {
@@ -143,9 +187,16 @@ func (db *InMemoryDatabase) CreatePackage(p Package) (string, error) {
return "", err
}
db.populateCaches(pd)
return ID, nil
}
func (db *InMemoryDatabase) populateCaches(p Package) {
pd, _ := p.(*DefaultPackage)
// Create extra cache between package -> []versions
db.Lock()
defer db.Unlock()
// Provides: Store package provides, we will reuse this when walking deps
for _, provide := range pd.Provides {
@@ -157,21 +208,41 @@ func (db *InMemoryDatabase) CreatePackage(p Package) (string, error) {
db.ProvidesDatabase[provide.GetPackageName()][provide.GetVersion()] = p
}
_, ok = db.CacheNoVersion[p.GetPackageName()]
_, ok := db.CacheNoVersion[p.GetPackageName()]
if !ok {
db.CacheNoVersion[p.GetPackageName()] = make(map[string]interface{})
}
db.CacheNoVersion[p.GetPackageName()][p.GetVersion()] = nil
db.Unlock()
return ID, nil
for _, re := range pd.GetRequires() {
packages, _ := db.FindPackages(re)
db.Lock()
for _, pa := range packages {
_, ok := db.RevDepsDatabase[pa.GetFingerPrint()]
if !ok {
db.RevDepsDatabase[pa.GetFingerPrint()] = make(map[string]Package)
}
db.RevDepsDatabase[pa.GetFingerPrint()][pd.GetFingerPrint()] = pd
}
_, ok := db.RevDepsDatabase[re.GetFingerPrint()]
if !ok {
db.RevDepsDatabase[re.GetFingerPrint()] = make(map[string]Package)
}
db.RevDepsDatabase[re.GetFingerPrint()][pd.GetFingerPrint()] = pd
db.Unlock()
}
}
func (db *InMemoryDatabase) getProvide(p Package) (Package, error) {
db.Lock()
pa, ok := db.ProvidesDatabase[p.GetPackageName()][p.GetVersion()]
if !ok {
versions, ok := db.ProvidesDatabase[p.GetPackageName()]
db.Unlock()
defer db.Unlock()
if !ok {
return nil, errors.New("No versions found for package")
@@ -195,6 +266,7 @@ func (db *InMemoryDatabase) getProvide(p Package) (Package, error) {
return nil, errors.New("No package provides this")
}
db.Unlock()
return db.FindPackage(pa)
}
@@ -225,7 +297,13 @@ func (db *InMemoryDatabase) FindPackage(p Package) (Package, error) {
// FindPackageVersions returns the list of the packages belonging to cat/name
func (db *InMemoryDatabase) FindPackageVersions(p Package) (Packages, error) {
// Provides: Treat as the replaced package here
if provided, err := db.getProvide(p); err == nil {
p = provided
}
db.Lock()
versions, ok := db.CacheNoVersion[p.GetPackageName()]
db.Unlock()
if !ok {
return nil, errors.New("No versions found for package")
}
@@ -242,29 +320,38 @@ func (db *InMemoryDatabase) FindPackageVersions(p Package) (Packages, error) {
// FindPackages returns the list of the packages belonging to cat/name (any version in the requested range)
func (db *InMemoryDatabase) FindPackages(p Package) (Packages, error) {
if !p.IsSelector() {
pack, err := db.FindPackage(p)
if err != nil {
return []Package{}, err
}
return []Package{pack}, nil
}
// Provides: Treat as the replaced package here
if provided, err := db.getProvide(p); err == nil {
p = provided
}
db.Lock()
var matches []*DefaultPackage
versions, ok := db.CacheNoVersion[p.GetPackageName()]
for ve := range versions {
match, _ := p.SelectorMatchVersion(ve, nil)
if match {
matches = append(matches, &DefaultPackage{Name: p.GetName(), Category: p.GetCategory(), Version: ve})
}
}
db.Unlock()
if !ok {
return nil, errors.New(fmt.Sprintf("No versions found for: %s", p.HumanReadableString()))
}
var versionsInWorld []Package
for ve, _ := range versions {
match, err := p.SelectorMatchVersion(ve, nil)
for _, p := range matches {
w, err := db.FindPackage(p)
if err != nil {
return nil, errors.Wrap(err, "Error on match selector")
}
if match {
w, err := db.FindPackage(&DefaultPackage{Name: p.GetName(), Category: p.GetCategory(), Version: ve})
if err != nil {
return nil, errors.Wrap(err, "Cache mismatch - this shouldn't happen")
}
versionsInWorld = append(versionsInWorld, w)
return nil, errors.Wrap(err, "Cache mismatch - this shouldn't happen")
}
versionsInWorld = append(versionsInWorld, w)
}
return Packages(versionsInWorld), nil
}

View File

@@ -40,6 +40,7 @@ import (
// FIXME: Currently some of the methods are returning DefaultPackages due to JSON serialization of the package
type Package interface {
Encode(PackageDatabase) (string, error)
Related(definitiondb PackageDatabase) Packages
BuildFormula(PackageDatabase, PackageDatabase) ([]bf.Formula, error)
@@ -48,7 +49,6 @@ type Package interface {
Requires([]*DefaultPackage) Package
Conflicts([]*DefaultPackage) Package
Revdeps(PackageDatabase) Packages
ExpandedRevdeps(definitiondb PackageDatabase, visited map[string]interface{}) Packages
LabelDeps(PackageDatabase, string) Packages
GetProvides() []*DefaultPackage
@@ -146,6 +146,60 @@ func DefaultPackageFromYaml(yml []byte) (DefaultPackage, error) {
return unescaped, nil
}
type rawPackages []map[string]interface{}
func (r rawPackages) Find(name, category, version string) map[string]interface{} {
for _, v := range r {
if v["name"] == name && v["category"] == category && v["version"] == version {
return v
}
}
return map[string]interface{}{}
}
func GetRawPackages(yml []byte) (rawPackages, error) {
var rawPackages struct {
Packages []map[string]interface{} `yaml:"packages"`
}
source, err := yaml.YAMLToJSON(yml)
if err != nil {
return []map[string]interface{}{}, err
}
rawIn := json.RawMessage(source)
bytes, err := rawIn.MarshalJSON()
if err != nil {
return []map[string]interface{}{}, err
}
err = json.Unmarshal(bytes, &rawPackages)
if err != nil {
return []map[string]interface{}{}, err
}
return rawPackages.Packages, nil
}
func DefaultPackagesFromYaml(yml []byte) ([]DefaultPackage, error) {
var unescaped struct {
Packages []DefaultPackage `json:"packages"`
}
source, err := yaml.YAMLToJSON(yml)
if err != nil {
return []DefaultPackage{}, err
}
rawIn := json.RawMessage(source)
bytes, err := rawIn.MarshalJSON()
if err != nil {
return []DefaultPackage{}, err
}
err = json.Unmarshal(bytes, &unescaped)
if err != nil {
return []DefaultPackage{}, err
}
return unescaped.Packages, nil
}
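GetRawPackages and DefaultPackagesFromYaml both read a top-level packages: list, the former into raw maps and the latter into typed DefaultPackage values. A hedged usage sketch, assuming the signature introduced above, the import path used elsewhere in this diff, and the usual lowercase spec keys:
package main

import (
	"fmt"

	pkg "github.com/mudler/luet/pkg/package"
)

func main() {
	// A collection-style document with a top-level "packages" list.
	yml := []byte(`
packages:
- name: "foo"
  category: "cat"
  version: "1.0"
- name: "bar"
  category: "cat"
  version: "2.0"
`)
	packs, err := pkg.DefaultPackagesFromYaml(yml)
	if err != nil {
		panic(err)
	}
	for _, p := range packs {
		fmt.Println(p.Name, p.Category, p.Version)
	}
}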
// Major and minor get escaped when marshalling to JSON, making the compiler fail to recognize selectors for expansion
func (t *DefaultPackage) JSON() ([]byte, error) {
buffer := &bytes.Buffer{}
@@ -451,52 +505,47 @@ func (p *DefaultPackage) Revdeps(definitiondb PackageDatabase) Packages {
return versionsInWorld
}
// ExpandedRevdeps returns the package reverse dependencies,
// matching also selectors in versions (>, <, >=, <=)
func (p *DefaultPackage) ExpandedRevdeps(definitiondb PackageDatabase, visited map[string]interface{}) Packages {
func walkPackage(p Package, definitiondb PackageDatabase, visited map[string]interface{}) Packages {
var versionsInWorld Packages
if _, ok := visited[p.HumanReadableString()]; ok {
return versionsInWorld
}
visited[p.HumanReadableString()] = true
for _, w := range definitiondb.World() {
if w.Matches(p) {
continue
revdeps, _ := definitiondb.GetRevdeps(p)
for _, r := range revdeps {
versionsInWorld = append(versionsInWorld, r)
}
if !p.IsSelector() {
versionsInWorld = append(versionsInWorld, p)
}
for _, re := range p.GetRequires() {
versions, _ := re.Expand(definitiondb)
for _, r := range versions {
versionsInWorld = append(versionsInWorld, r)
versionsInWorld = append(versionsInWorld, walkPackage(r, definitiondb, visited)...)
}
match := false
for _, re := range w.GetRequires() {
if re.Matches(p) {
match = true
}
if !match {
packages, _ := re.Expand(definitiondb)
for _, pa := range packages {
if pa.Matches(p) {
match = true
}
}
}
// if ok, _ := w.RequiresContains(definitiondb, p); ok {
}
if match {
versionsInWorld = append(versionsInWorld, w)
versionsInWorld = append(versionsInWorld, w.ExpandedRevdeps(definitiondb, visited).Unique()...)
}
// }
}
//visited[p.HumanReadableString()] = true
for _, re := range p.GetConflicts() {
versions, _ := re.Expand(definitiondb)
for _, r := range versions {
versionsInWorld = append(versionsInWorld, r)
versionsInWorld = append(versionsInWorld, walkPackage(r, definitiondb, visited)...)
}
}
return versionsInWorld.Unique()
}
func (p *DefaultPackage) Related(definitiondb PackageDatabase) Packages {
return walkPackage(p, definitiondb, map[string]interface{}{})
}
func (p *DefaultPackage) LabelDeps(definitiondb PackageDatabase, labelKey string) Packages {
var pkgsWithLabelInWorld Packages
// TODO: check if integrate some index to improve
@@ -656,7 +705,7 @@ func (pack *DefaultPackage) buildFormula(definitiondb PackageDatabase, db Packag
C = bf.Var(encodedC)
// Either the Candidate is true, or all the others might not be true
// This forces the CDCL SAT implementation to look first at a solution with C=true
formulas = append(formulas, bf.Or(bf.Not(A), bf.Or(bf.Or(C, bf.Or(priorityConstraints...)), bf.Or(bf.Not(C), bf.Or(priorityALO...)))))
formulas = append(formulas, bf.Or(bf.Not(A), bf.Or(bf.And(C, bf.Or(priorityConstraints...)), bf.And(bf.Not(C), bf.Or(priorityALO...)))))
}
// AMO - At most one
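The changed constraint above swaps the two inner bf.Or calls for bf.And: the old shape reduced to a tautology (C or not-C always holds, so the clause never constrained anything), while the new shape actually encodes A implies ((C and P) or (not C and Q)). A plain-Go truth-table check of the two shapes, as an illustration of the boolean algebra rather than the solver code:
package main

import "fmt"

func main() {
	bools := []bool{false, true}
	for _, a := range bools {
		for _, c := range bools {
			for _, p := range bools { // p stands for Or(priorityConstraints...)
				for _, q := range bools { // q stands for Or(priorityALO...)
					oldF := !a || ((c || p) || (!c || q)) // always true once a holds: no constraint
					newF := !a || ((c && p) || (!c && q)) // actually restricts the models
					if oldF != newF {
						fmt.Printf("A=%v C=%v P=%v Q=%v  old=%v new=%v\n", a, c, p, q, oldF, newF)
					}
				}
			}
		}
	}
}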

View File

@@ -220,8 +220,8 @@ var _ = Describe("Package", func() {
_, err := definitions.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
visited := make(map[string]interface{})
lst := a.ExpandedRevdeps(definitions, visited)
lst, err := definitions.GetRevdeps(a)
Expect(err).ToNot(HaveOccurred())
Expect(lst).To(ContainElement(c))
Expect(lst).To(ContainElement(d))
Expect(lst).To(ContainElement(e))
@@ -242,9 +242,9 @@ var _ = Describe("Package", func() {
_, err := definitions.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
visited := make(map[string]interface{})
lst := a.ExpandedRevdeps(definitions, visited)
lst, err := definitions.GetRevdeps(a)
Expect(err).ToNot(HaveOccurred())
Expect(lst).To(ContainElement(b))
Expect(lst).To(ContainElement(c))
Expect(lst).To(ContainElement(d))
@@ -266,9 +266,8 @@ var _ = Describe("Package", func() {
_, err := definitions.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
visited := make(map[string]interface{})
lst := a.ExpandedRevdeps(definitions, visited)
lst, err := definitions.GetRevdeps(a)
Expect(err).ToNot(HaveOccurred())
Expect(lst).To(ContainElement(b))
Expect(lst).To(ContainElement(c))
Expect(lst).To(ContainElement(d))

View File

@@ -19,6 +19,7 @@ package repository
import (
"io/ioutil"
"path"
"path/filepath"
"regexp"
"github.com/ghodss/yaml"
@@ -29,8 +30,21 @@ import (
func LoadRepositories(c *LuetConfig) error {
var regexRepo = regexp.MustCompile(`.yml$|.yaml$`)
var err error
rootfs := ""
// Respect the rootfs param when reading repositories
if !c.ConfigFromHost {
rootfs, err = c.GetSystem().GetRootFsAbs()
if err != nil {
return err
}
}
for _, rdir := range c.RepositoriesConfDir {
rdir = filepath.Join(rootfs, rdir)
Debug("Parsing Repository Directory", rdir, "...")
files, err := ioutil.ReadDir(rdir)
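When ConfigFromHost is disabled, every configured repository directory is re-rooted under the target rootfs before being scanned. filepath.Join handles the absolute paths that typically appear in the config; a plain illustration of the call used above, with example paths:
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	rootfs := "/mnt/target"
	for _, rdir := range []string{"/etc/luet/repos.conf.d", "repos.conf.d"} {
		// Absolute or relative, the configured dir ends up under the rootfs.
		fmt.Println(filepath.Join(rootfs, rdir))
	}
}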

View File

@@ -0,0 +1,298 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package solver_test
import (
"fmt"
"io/ioutil"
"os"
"strconv"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/tests/helpers"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
. "github.com/mudler/luet/pkg/solver"
)
var _ = Describe("Solver Benchmarks", func() {
db := pkg.NewInMemoryDatabase(false)
dbInstalled := pkg.NewInMemoryDatabase(false)
dbDefinitions := pkg.NewInMemoryDatabase(false)
var s PackageSolver
Context("Complex data sets", func() {
BeforeEach(func() {
db = pkg.NewInMemoryDatabase(false)
dbInstalled = pkg.NewInMemoryDatabase(false)
dbDefinitions = pkg.NewInMemoryDatabase(false)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
if os.Getenv("BENCHMARK_TESTS") != "true" {
Skip("BENCHMARK_TESTS not enabled")
}
})
Measure("it should be fast in resolution from a 50000 dataset", func(b Benchmarker) {
runtime := b.Time("runtime", func() {
for i := 0; i < 50000; i++ {
C := pkg.NewPackage("C"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
E := pkg.NewPackage("E"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
F := pkg.NewPackage("F"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
G := pkg.NewPackage("G"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H := pkg.NewPackage("H"+strconv.Itoa(i), "", []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D := pkg.NewPackage("D"+strconv.Itoa(i), "", []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B"+strconv.Itoa(i), "", []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A := pkg.NewPackage("A"+strconv.Itoa(i), "", []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
for _, p := range []pkg.Package{A, B, C, D, E, F, G} {
_, err := dbDefinitions.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
_, err := dbInstalled.CreatePackage(C)
Expect(err).ToNot(HaveOccurred())
}
for i := 0; i < 1; i++ {
C := pkg.NewPackage("C"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
G := pkg.NewPackage("G"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H := pkg.NewPackage("H"+strconv.Itoa(i), "", []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D := pkg.NewPackage("D"+strconv.Itoa(i), "", []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B"+strconv.Itoa(i), "", []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A := pkg.NewPackage("A"+strconv.Itoa(i), "", []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
solution, err := s.Install([]pkg.Package{A})
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: B, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: D, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: C, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: H, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: G, Value: true}))
}
})
Ω(runtime.Seconds()).Should(BeNumerically("<", 120), "Install() shouldn't take too long.")
}, 1)
})
Context("Complex data sets - Parallel", func() {
BeforeEach(func() {
db = pkg.NewInMemoryDatabase(false)
dbInstalled = pkg.NewInMemoryDatabase(false)
dbDefinitions = pkg.NewInMemoryDatabase(false)
s = NewSolver(Options{Type: ParallelSimple, Concurrency: 10}, dbInstalled, dbDefinitions, db)
if os.Getenv("BENCHMARK_TESTS") != "true" {
Skip("BENCHMARK_TESTS not enabled")
}
})
Measure("it should be fast in resolution from a 50000 dataset", func(b Benchmarker) {
runtime := b.Time("runtime", func() {
for i := 0; i < 50000; i++ {
C := pkg.NewPackage("C"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
E := pkg.NewPackage("E"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
F := pkg.NewPackage("F"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
G := pkg.NewPackage("G"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H := pkg.NewPackage("H"+strconv.Itoa(i), "", []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D := pkg.NewPackage("D"+strconv.Itoa(i), "", []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B"+strconv.Itoa(i), "", []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A := pkg.NewPackage("A"+strconv.Itoa(i), "", []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
for _, p := range []pkg.Package{A, B, C, D, E, F, G} {
_, err := dbDefinitions.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
_, err := dbInstalled.CreatePackage(C)
Expect(err).ToNot(HaveOccurred())
}
for i := 0; i < 1; i++ {
C := pkg.NewPackage("C"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
G := pkg.NewPackage("G"+strconv.Itoa(i), "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H := pkg.NewPackage("H"+strconv.Itoa(i), "", []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D := pkg.NewPackage("D"+strconv.Itoa(i), "", []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B"+strconv.Itoa(i), "", []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A := pkg.NewPackage("A"+strconv.Itoa(i), "", []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
solution, err := s.Install([]pkg.Package{A})
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: B, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: D, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: C, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: H, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: G, Value: true}))
// Expect(len(solution)).To(Equal(6))
}
})
Ω(runtime.Seconds()).Should(BeNumerically("<", 70), "Install() shouldn't take too long.")
}, 1)
})
Context("Complex data sets - Parallel Upgrades", func() {
BeforeEach(func() {
db = pkg.NewInMemoryDatabase(false)
// dbInstalled = pkg.NewInMemoryDatabase(false)
dbDefinitions = pkg.NewInMemoryDatabase(false)
s = NewSolver(Options{Type: ParallelSimple, Concurrency: 100}, dbInstalled, dbDefinitions, db)
if os.Getenv("BENCHMARK_TESTS") != "true" {
Skip("BENCHMARK_TESTS not enabled")
}
tmpfile, _ := ioutil.TempFile(os.TempDir(), "tests")
defer os.Remove(tmpfile.Name()) // clean up
dbInstalled = pkg.NewBoltDatabase(tmpfile.Name())
})
Measure("it should be fast in resolution from a 10000*8 dataset", func(b Benchmarker) {
runtime := b.Time("runtime", func() {
for i := 2; i < 10000; i++ {
C := pkg.NewPackage("C", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
E := pkg.NewPackage("E", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
F := pkg.NewPackage("F", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
G := pkg.NewPackage("G", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H := pkg.NewPackage("H", strconv.Itoa(i), []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D := pkg.NewPackage("D", strconv.Itoa(i), []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B", strconv.Itoa(i), []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A := pkg.NewPackage("A", strconv.Itoa(i), []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
for _, p := range []pkg.Package{A, B, C, D, E, F, G, H} {
_, err := dbDefinitions.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
}
//C := pkg.NewPackage("C", "1", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
G := pkg.NewPackage("G", "1", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H := pkg.NewPackage("H", "1", []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D := pkg.NewPackage("D", "1", []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B", "1", []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A := pkg.NewPackage("A", "1", []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
_, err := dbInstalled.CreatePackage(A)
Expect(err).ToNot(HaveOccurred())
_, err = dbInstalled.CreatePackage(B)
Expect(err).ToNot(HaveOccurred())
_, err = dbInstalled.CreatePackage(D)
Expect(err).ToNot(HaveOccurred())
_, err = dbInstalled.CreatePackage(H)
Expect(err).ToNot(HaveOccurred())
_, err = dbInstalled.CreatePackage(G)
Expect(err).ToNot(HaveOccurred())
fmt.Println("Upgrade starts")
packages, ass, err := s.Upgrade(false, true)
Expect(err).ToNot(HaveOccurred())
Expect(packages).To(ContainElement(A))
G = pkg.NewPackage("G", "9999", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H = pkg.NewPackage("H", "9999", []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D = pkg.NewPackage("D", "9999", []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B = pkg.NewPackage("B", "9999", []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A = pkg.NewPackage("A", "9999", []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
Expect(ass).To(ContainElement(PackageAssert{Package: A, Value: true}))
Expect(len(packages)).To(Equal(5))
// Expect(len(solution)).To(Equal(6))
})
Ω(runtime.Seconds()).Should(BeNumerically("<", 70), "Install() shouldn't take too long.")
}, 1)
Measure("it should be fast in installation with 12000 packages installed and 2000*8 available", func(b Benchmarker) {
runtime := b.Time("runtime", func() {
for i := 0; i < 2000; i++ {
C := pkg.NewPackage("C", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
E := pkg.NewPackage("E", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
F := pkg.NewPackage("F", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
G := pkg.NewPackage("G", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H := pkg.NewPackage("H", strconv.Itoa(i), []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D := pkg.NewPackage("D", strconv.Itoa(i), []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B", strconv.Itoa(i), []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A := pkg.NewPackage("A", strconv.Itoa(i), []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
for _, p := range []pkg.Package{A, B, C, D, E, F, G} {
_, err := dbDefinitions.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
fmt.Println("Creating package, run", i)
}
for i := 0; i < 12000; i++ {
x := helpers.RandomPackage()
_, err := dbInstalled.CreatePackage(x)
Expect(err).ToNot(HaveOccurred())
}
G := pkg.NewPackage("G", strconv.Itoa(50000), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H := pkg.NewPackage("H", strconv.Itoa(50000), []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D := pkg.NewPackage("D", strconv.Itoa(50000), []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B", strconv.Itoa(50000), []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A := pkg.NewPackage("A", strconv.Itoa(50000), []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
ass, err := s.Install([]pkg.Package{A})
Expect(err).ToNot(HaveOccurred())
Expect(ass).To(ContainElement(PackageAssert{Package: pkg.NewPackage("A", "50000", []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{}), Value: true}))
//Expect(ass).To(Equal(5))
// Expect(len(solution)).To(Equal(6))
})
Ω(runtime.Seconds()).Should(BeNumerically("<", 70), "Install() shouldn't take too long.")
}, 1)
PMeasure("it should be fast in resolution from a 50000 dataset with upgrade universe", func(b Benchmarker) {
runtime := b.Time("runtime", func() {
for i := 0; i < 2; i++ {
C := pkg.NewPackage("C", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
E := pkg.NewPackage("E", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
F := pkg.NewPackage("F", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
G := pkg.NewPackage("G", strconv.Itoa(i), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H := pkg.NewPackage("H", strconv.Itoa(i), []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D := pkg.NewPackage("D", strconv.Itoa(i), []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B", strconv.Itoa(i), []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A := pkg.NewPackage("A", strconv.Itoa(i), []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
for _, p := range []pkg.Package{A, B, C, D, E, F, G} {
_, err := dbDefinitions.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
fmt.Println("Creating package, run", i)
}
G := pkg.NewPackage("G", "1", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
H := pkg.NewPackage("H", "1", []*pkg.DefaultPackage{G}, []*pkg.DefaultPackage{})
D := pkg.NewPackage("D", "1", []*pkg.DefaultPackage{H}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B", "1", []*pkg.DefaultPackage{D}, []*pkg.DefaultPackage{})
A := pkg.NewPackage("A", "1", []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})
_, err := dbInstalled.CreatePackage(A)
Expect(err).ToNot(HaveOccurred())
fmt.Println("Upgrade starts")
packages, ass, err := s.UpgradeUniverse(true)
Expect(err).ToNot(HaveOccurred())
Expect(ass).To(ContainElement(PackageAssert{Package: pkg.NewPackage("A", "50000", []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{}), Value: true}))
Expect(packages).To(ContainElement(pkg.NewPackage("A", "50000", []*pkg.DefaultPackage{B}, []*pkg.DefaultPackage{})))
Expect(packages).To(Equal(5))
// Expect(len(solution)).To(Equal(6))
})
Ω(runtime.Seconds()).Should(BeNumerically("<", 70), "Install() shouldn't take too long.")
}, 1)
})
})

View File

@@ -22,9 +22,9 @@ import (
"unicode"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/topsort"
toposort "github.com/philopon/go-toposort"
"github.com/pkg/errors"
"github.com/stevenle/topsort"
)
type PackagesAssertions []PackageAssert
@@ -150,22 +150,13 @@ func (assertions PackagesAssertions) Order(definitiondb pkg.PackageDatabase, fin
orderedAssertions := PackagesAssertions{}
unorderedAssertions := PackagesAssertions{}
fingerprints := []string{}
tmpMap := map[string]PackageAssert{}
graph := topsort.NewGraph()
for _, a := range assertions {
graph.AddNode(a.Package.GetFingerPrint())
tmpMap[a.Package.GetFingerPrint()] = a
fingerprints = append(fingerprints, a.Package.GetFingerPrint())
unorderedAssertions = append(unorderedAssertions, a) // Build a list of the ones that must be ordered
if a.Value {
unorderedAssertions = append(unorderedAssertions, a) // Build a list of the ones that must be ordered
} else {
orderedAssertions = append(orderedAssertions, a) // Keep last the ones which are not meant to be installed
}
}
sort.Sort(unorderedAssertions)
@@ -190,7 +181,7 @@ func (assertions PackagesAssertions) Order(definitiondb pkg.PackageDatabase, fin
}
// Expand also here, as we need to order them (or instead the solver should give back the dep correctly?)
graph.AddEdge(currentPkg.GetFingerPrint(), requiredDef.GetFingerPrint())
added[requiredDef.GetFingerPrint()] = nil
added[requiredDef.GetFingerPrint()] = true
}
}
result, err := graph.TopSort(fingerprint)
@@ -200,8 +191,11 @@ func (assertions PackagesAssertions) Order(definitiondb pkg.PackageDatabase, fin
for _, res := range result {
a, ok := tmpMap[res]
if !ok {
return nil, errors.New("fail looking for " + res)
// continue
//return nil, errors.New("fail looking for " + res)
// Since now we don't return the entire world as part of the assertions,
// if we don't find any reference it must be because the fingerprint we are analyzing (which is the one we are ordering against)
// is not part of the assertions, thus we can omit it from the result
continue
}
orderedAssertions = append(orderedAssertions, a)
// orderedAssertions = append(PackagesAssertions{a}, orderedAssertions...) // push upfront
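For reference, a minimal standalone sketch of the topological ordering idea used by Order() above, assuming the NewGraph/AddNode/AddEdge/TopSort API visible in this diff (shown here with github.com/stevenle/topsort; the mudler fork exposes the same calls). The fingerprints below are illustrative placeholders, not real package fingerprints.

package main

import (
	"fmt"

	"github.com/stevenle/topsort"
)

func main() {
	graph := topsort.NewGraph()
	for _, fp := range []string{"A", "B", "D"} {
		graph.AddNode(fp)
	}
	// Edges point from a package to its dependency: A requires B, B requires D.
	graph.AddEdge("A", "B")
	graph.AddEdge("B", "D")

	// TopSort walks from the fingerprint we are ordering against ("A" here),
	// returning dependencies first. Fingerprints not reachable from it never
	// appear in the result, which is why Order() above can simply skip
	// entries missing from its assertion map.
	ordered, err := graph.TopSort("A")
	if err != nil {
		panic(err)
	}
	fmt.Println(ordered) // dependencies first, e.g. [D B A]
}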

View File

@@ -30,13 +30,13 @@ var _ = Describe("Decoder", func() {
db := pkg.NewInMemoryDatabase(false)
dbInstalled := pkg.NewInMemoryDatabase(false)
dbDefinitions := pkg.NewInMemoryDatabase(false)
s := NewSolver(dbInstalled, dbDefinitions, db)
s := NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
BeforeEach(func() {
db = pkg.NewInMemoryDatabase(false)
dbInstalled = pkg.NewInMemoryDatabase(false)
dbDefinitions = pkg.NewInMemoryDatabase(false)
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
})
Context("Assertion ordering", func() {
@@ -214,12 +214,10 @@ var _ = Describe("Decoder", func() {
hash2 := solution.AssertionHash()
// Expect(len(solution)).To(Equal(6))
Expect(solution[0].Package.GetName()).To(Equal("A"))
Expect(solution[1].Package.GetName()).To(Equal("G"))
Expect(solution[2].Package.GetName()).To(Equal("H"))
Expect(solution[3].Package.GetName()).To(Equal("D"))
Expect(solution[4].Package.GetName()).To(Equal("B"))
Expect(solution[0].Value).ToNot(BeTrue())
Expect(solution[0].Package.GetName()).To(Equal("G"))
Expect(solution[1].Package.GetName()).To(Equal("H"))
Expect(solution[2].Package.GetName()).To(Equal("D"))
Expect(solution[3].Package.GetName()).To(Equal("B"))
Expect(hash).ToNot(Equal(""))
Expect(hash2).ToNot(Equal(""))
@@ -390,5 +388,37 @@ var _ = Describe("Decoder", func() {
Expect(solution4.Drop(Y).AssertionHash()).To(Equal(solution4.HashFrom(Y)))
})
for index := 0; index < 300; index++ { // Just to make sure we don't have false positives
It("Always same solution", func() {
X := pkg.NewPackage("X", "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
Y := pkg.NewPackage("Y", "", []*pkg.DefaultPackage{X}, []*pkg.DefaultPackage{})
Z := pkg.NewPackage("Z", "", []*pkg.DefaultPackage{X}, []*pkg.DefaultPackage{})
W := pkg.NewPackage("W", "", []*pkg.DefaultPackage{Z, Y}, []*pkg.DefaultPackage{})
for _, p := range []pkg.Package{X, Y, Z} {
_, err := dbDefinitions.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
for _, p := range []pkg.Package{} {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
solution, err := s.Install([]pkg.Package{W})
Expect(err).ToNot(HaveOccurred())
orderW, err := solution.Order(dbDefinitions, W.GetFingerPrint())
Expect(err).ToNot(HaveOccurred())
Expect(orderW[0].Package.GetName()).To(Equal("X"))
Expect(orderW[1].Package.GetName()).To(Equal("Y"))
Expect(orderW[2].Package.GetName()).To(Equal("Z"))
Expect(orderW[3].Package.GetName()).To(Equal("W"))
})
}
})
})

863
pkg/solver/parallel.go Normal file
View File

@@ -0,0 +1,863 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package solver
import (
//. "github.com/mudler/luet/pkg/logger"
"fmt"
"sync"
"github.com/pkg/errors"
"github.com/crillab/gophersat/bf"
pkg "github.com/mudler/luet/pkg/package"
)
// Parallel is the parallel implementation of the luet solver
type Parallel struct {
Concurrency int
DefinitionDatabase pkg.PackageDatabase
ParallelDatabase pkg.PackageDatabase
Wanted pkg.Packages
InstalledDatabase pkg.PackageDatabase
Resolver PackageResolver
}
func (s *Parallel) SetDefinitionDatabase(db pkg.PackageDatabase) {
s.DefinitionDatabase = db
}
// SetResolver is a setter for the unsat Resolver backend
func (s *Parallel) SetResolver(r PackageResolver) {
s.Resolver = r
}
func (s *Parallel) World() pkg.Packages {
return s.DefinitionDatabase.World()
}
func (s *Parallel) Installed() pkg.Packages {
return s.InstalledDatabase.World()
}
func (s *Parallel) noRulesWorld() bool {
for _, p := range s.World() {
if len(p.GetConflicts()) != 0 || len(p.GetRequires()) != 0 {
return false
}
}
return true
}
func (s *Parallel) noRulesInstalled() bool {
for _, p := range s.Installed() {
if len(p.GetConflicts()) != 0 || len(p.GetRequires()) != 0 {
return false
}
}
return true
}
func (s *Parallel) buildParallelFormula(formulas []bf.Formula, packages pkg.Packages) (bf.Formula, error) {
var wg = new(sync.WaitGroup)
var wg2 = new(sync.WaitGroup)
all := make(chan pkg.Package)
results := make(chan bf.Formula, 1)
for i := 0; i < s.Concurrency; i++ {
wg.Add(1)
go func(wg *sync.WaitGroup, c <-chan pkg.Package) {
defer wg.Done()
for p := range c {
solvable, err := p.BuildFormula(s.DefinitionDatabase, s.ParallelDatabase)
if err != nil {
panic(err)
}
for _, s := range solvable {
results <- s
}
}
}(wg, all)
}
wg2.Add(1)
go func() {
defer wg2.Done()
for t := range results {
formulas = append(formulas, t)
}
}()
for _, p := range packages {
all <- p
}
close(all)
wg.Wait()
close(results)
wg2.Wait()
if len(formulas) != 0 {
return bf.And(formulas...), nil
}
return bf.True, nil
}
func (s *Parallel) BuildInstalled() (bf.Formula, error) {
var formulas []bf.Formula
var packages pkg.Packages
for _, p := range s.Installed() {
packages = append(packages, p)
for _, dep := range p.Related(s.DefinitionDatabase) {
packages = append(packages, dep)
}
}
return s.buildParallelFormula(formulas, packages)
}
// BuildWorld builds the formula which holds the requirements from the package definitions
// which are available (global state)
func (s *Parallel) BuildWorld(includeInstalled bool) (bf.Formula, error) {
var formulas []bf.Formula
// NOTE: This block should be enabled in case of very old systems with outdated world sets
if includeInstalled {
solvable, err := s.BuildInstalled()
if err != nil {
return nil, err
}
//f = bf.And(f, solvable)
formulas = append(formulas, solvable)
}
return s.buildParallelFormula(formulas, s.World())
}
// BuildPartialWorld builds the formula which holds the requirements from the wanted packages
// and their related dependencies (partial world)
func (s *Parallel) BuildPartialWorld(includeInstalled bool) (bf.Formula, error) {
var formulas []bf.Formula
// NOTE: This block should be enabled in case of very old systems with outdated world sets
if includeInstalled {
solvable, err := s.BuildInstalled()
if err != nil {
return nil, err
}
//f = bf.And(f, solvable)
formulas = append(formulas, solvable)
}
var wg = new(sync.WaitGroup)
var wg2 = new(sync.WaitGroup)
var packages pkg.Packages
all := make(chan pkg.Package)
results := make(chan pkg.Package, 1)
for i := 0; i < s.Concurrency; i++ {
wg.Add(1)
go func(wg *sync.WaitGroup, c <-chan pkg.Package) {
defer wg.Done()
for p := range c {
for _, dep := range p.Related(s.DefinitionDatabase) {
results <- dep
}
}
}(wg, all)
}
wg2.Add(1)
go func() {
defer wg2.Done()
for t := range results {
packages = append(packages, t)
}
}()
for _, p := range s.Wanted {
all <- p
}
close(all)
wg.Wait()
close(results)
wg2.Wait()
return s.buildParallelFormula(formulas, packages)
//return s.buildParallelFormula(formulas, s.World())
}
func (s *Parallel) getList(db pkg.PackageDatabase, lsp pkg.Packages) (pkg.Packages, error) {
var ls pkg.Packages
var wg = new(sync.WaitGroup)
var wg2 = new(sync.WaitGroup)
all := make(chan pkg.Package)
results := make(chan pkg.Package, 1)
for i := 0; i < s.Concurrency; i++ {
wg.Add(1)
go func(wg *sync.WaitGroup, c <-chan pkg.Package) {
defer wg.Done()
for p := range c {
cp, err := db.FindPackage(p)
if err != nil {
packages, err := p.Expand(db)
// Expand, and relax search - if not found pick the same one
if err != nil || len(packages) == 0 {
cp = p
} else {
cp = packages.Best(nil)
}
}
results <- cp
}
}(wg, all)
}
wg2.Add(1)
go func(wg *sync.WaitGroup) {
defer wg2.Done()
for t := range results {
ls = append(ls, t)
}
}(wg)
for _, pp := range lsp {
all <- pp
}
close(all)
wg.Wait()
close(results)
wg2.Wait()
return ls, nil
}
// Conflicts acts like ConflictsWith, but uses package's reverse dependencies to
// determine if it conflicts with the given set
func (s *Parallel) Conflicts(pack pkg.Package, lsp pkg.Packages) (bool, error) {
p, err := s.DefinitionDatabase.FindPackage(pack)
if err != nil {
p = pack
}
ls, err := s.getList(s.DefinitionDatabase, lsp)
if err != nil {
return false, errors.Wrap(err, "Package not found in definition db")
}
if s.noRulesWorld() {
return false, nil
}
temporarySet := pkg.NewInMemoryDatabase(false)
for _, p := range ls {
temporarySet.CreatePackage(p)
}
revdeps, err := temporarySet.GetRevdeps(p)
if err != nil {
return false, errors.Wrap(err, "error scanning revdeps")
}
var revdepsErr error
for _, r := range revdeps {
if revdepsErr == nil {
revdepsErr = errors.New("")
}
revdepsErr = errors.New(fmt.Sprintf("%s\n%s", revdepsErr.Error(), r.HumanReadableString()))
}
return len(revdeps) != 0, revdepsErr
}
// ConflictsWith returns true if a package is part of the requirement set of a list of packages,
// and false otherwise (and thus it is NOT relevant to the given list)
func (s *Parallel) ConflictsWith(pack pkg.Package, lsp pkg.Packages) (bool, error) {
p, err := s.DefinitionDatabase.FindPackage(pack)
if err != nil {
p = pack //Relax search, otherwise we cannot compute solutions for packages not in definitions
}
ls, err := s.getList(s.DefinitionDatabase, lsp)
if err != nil {
return false, errors.Wrap(err, "Package not found in definition db")
}
var formulas []bf.Formula
if s.noRulesWorld() {
return false, nil
}
encodedP, err := p.Encode(s.ParallelDatabase)
if err != nil {
return false, err
}
P := bf.Var(encodedP)
r, err := s.BuildWorld(false)
if err != nil {
return false, err
}
formulas = append(formulas, bf.And(bf.Not(P), r))
var wg = new(sync.WaitGroup)
var wg2 = new(sync.WaitGroup)
all := make(chan pkg.Package)
results := make(chan bf.Formula, 1)
for i := 0; i < s.Concurrency; i++ {
wg.Add(1)
go func(wg *sync.WaitGroup, c <-chan pkg.Package) {
defer wg.Done()
for i := range c {
if i.Matches(p) {
continue
}
// XXX: Skip check on any of its requires ? ( Drop to avoid removing system packages when selecting an uninstall)
// if i.RequiresContains(p) {
// fmt.Println("Requires found")
// continue
// }
encodedI, err := i.Encode(s.ParallelDatabase)
if err != nil {
panic(err)
}
I := bf.Var(encodedI)
results <- bf.And(I, r)
}
}(wg, all)
}
wg2.Add(1)
go func() {
defer wg2.Done()
for t := range results {
formulas = append(formulas, t)
}
}()
for _, p := range ls {
all <- p
}
close(all)
wg.Wait()
close(results)
wg2.Wait()
model := bf.Solve(bf.And(formulas...))
if model == nil {
return true, nil
}
return false, nil
}
func (s *Parallel) ConflictsWithInstalled(p pkg.Package) (bool, error) {
return s.ConflictsWith(p, s.Installed())
}
// UninstallUniverse takes a list of candidate packages and returns a list of packages that would be removed
// in order to purge the candidates. Uses the Parallel solver to check constraints and nothing else
//
// It can be compared to the counterpart Uninstall as this method acts like an uninstall --full:
// it removes all the packages and their deps, also taking into consideration other packages that might have
// revdeps
func (s *Parallel) UninstallUniverse(toremove pkg.Packages) (pkg.Packages, error) {
if s.noRulesInstalled() {
return s.getList(s.InstalledDatabase, toremove)
}
// resolve to packages from the db
toRemove, err := s.getList(s.InstalledDatabase, toremove)
if err != nil {
return nil, errors.Wrap(err, "Package not found in definition db")
}
var formulas []bf.Formula
r, err := s.BuildInstalled()
if err != nil {
return nil, errors.Wrap(err, "Package not found in definition db")
}
// SAT encode the clauses against the world
for _, p := range toRemove.Unique() {
encodedP, err := p.Encode(s.InstalledDatabase)
if err != nil {
return nil, errors.Wrap(err, "Package not found in definition db")
}
P := bf.Var(encodedP)
formulas = append(formulas, bf.And(bf.Not(P), r))
}
markedForRemoval := pkg.Packages{}
model := bf.Solve(bf.And(formulas...))
if model == nil {
return nil, errors.New("Failed finding a solution")
}
assertion, err := DecodeModel(model, s.InstalledDatabase)
if err != nil {
return nil, errors.Wrap(err, "while decoding model from solution")
}
for _, a := range assertion {
if !a.Value {
if p, err := s.InstalledDatabase.FindPackage(a.Package); err == nil {
markedForRemoval = append(markedForRemoval, p)
}
}
}
return markedForRemoval, nil
}
// UpgradeUniverse marks packages for removal and returns a solution. It considers
// the Universe db as authoritative
// See also on the subject: https://arxiv.org/pdf/1007.1021.pdf
func (s *Parallel) UpgradeUniverse(dropremoved bool) (pkg.Packages, PackagesAssertions, error) {
var formulas []bf.Formula
// we first figure out which aren't up-to-date,
// which have to be removed,
// and which need to be upgraded
removed := pkg.Packages{}
// TODO: this is memory expensive, we need to optimize this
universe := pkg.NewInMemoryDatabase(false)
for _, p := range s.DefinitionDatabase.World() {
universe.CreatePackage(p)
}
for _, p := range s.Installed() {
universe.CreatePackage(p)
}
// Build constraints for the whole defdb
r, err := s.BuildWorld(true)
if err != nil {
return nil, nil, errors.Wrap(err, "couldn't build world constraints")
}
var wg = new(sync.WaitGroup)
var wg2 = new(sync.WaitGroup)
all := make(chan pkg.Package)
results := make(chan bf.Formula, 1)
for i := 0; i < s.Concurrency; i++ {
wg.Add(1)
go func(wg *sync.WaitGroup, c <-chan pkg.Package) {
defer wg.Done()
for p := range c {
available, err := universe.FindPackageVersions(p)
if err != nil {
removed = append(removed, p) /// FIXME: Racy
}
if len(available) == 0 {
continue
}
bestmatch := available.Best(nil)
// Found a better version available
if !bestmatch.Matches(p) {
encodedP, _ := p.Encode(universe)
P := bf.Var(encodedP)
results <- bf.And(bf.Not(P), r)
encodedP, _ = bestmatch.Encode(universe)
P = bf.Var(encodedP)
results <- bf.And(P, r)
}
}
}(wg, all)
}
wg2.Add(1)
go func() {
defer wg2.Done()
for t := range results {
formulas = append(formulas, t)
}
}()
// Grab all the installed ones, see if they are eligible for update
for _, p := range s.Installed() {
all <- p
}
close(all)
wg.Wait()
close(results)
wg2.Wait()
// Treat removed packages from universe as marked for deletion
if dropremoved {
// SAT encode the clauses against the world
for _, p := range removed {
encodedP, err := p.Encode(universe)
if err != nil {
return nil, nil, errors.Wrap(err, "couldn't encode package")
}
P := bf.Var(encodedP)
formulas = append(formulas, bf.And(bf.Not(P), r))
}
}
markedForRemoval := pkg.Packages{}
if len(formulas) == 0 {
return pkg.Packages{}, PackagesAssertions{}, nil
}
model := bf.Solve(bf.And(formulas...))
if model == nil {
return nil, nil, errors.New("Failed finding a solution")
}
assertion, err := DecodeModel(model, universe)
if err != nil {
return nil, nil, errors.Wrap(err, "while decoding model from solution")
}
for _, a := range assertion {
if !a.Value {
if p, err := s.InstalledDatabase.FindPackage(a.Package); err == nil {
markedForRemoval = append(markedForRemoval, p)
}
}
}
return markedForRemoval, assertion, nil
}
// Upgrade computes upgrades of the package against the world definition.
// It accepts two booleans indicating whether it has to check for conflicts or attempt a full upgrade
func (s *Parallel) Upgrade(checkconflicts, full bool) (pkg.Packages, PackagesAssertions, error) {
// First get candidates that needs to be upgraded..
toUninstall := pkg.Packages{}
toInstall := pkg.Packages{}
// we do this in memory so we take into account of provides
universe := pkg.NewInMemoryDatabase(false)
for _, p := range s.DefinitionDatabase.World() {
universe.CreatePackage(p)
}
installedcopy := pkg.NewInMemoryDatabase(false)
var wg = new(sync.WaitGroup)
var wg2 = new(sync.WaitGroup)
all := make(chan pkg.Package)
results := make(chan []pkg.Package, 1)
for i := 0; i < s.Concurrency; i++ {
wg.Add(1)
go func(wg *sync.WaitGroup, c <-chan pkg.Package) {
defer wg.Done()
for p := range c {
installedcopy.CreatePackage(p)
packages, err := universe.FindPackageVersions(p)
if err == nil && len(packages) != 0 {
best := packages.Best(nil)
if !best.Matches(p) {
results <- []pkg.Package{p, best}
}
}
}
}(wg, all)
}
wg2.Add(1)
go func() {
defer wg2.Done()
for t := range results {
toUninstall = append(toUninstall, t[0])
toInstall = append(toInstall, t[1])
}
}()
for _, p := range s.InstalledDatabase.World() {
all <- p
}
close(all)
wg.Wait()
close(results)
wg2.Wait()
s2 := &Parallel{Concurrency: s.Concurrency, InstalledDatabase: installedcopy, DefinitionDatabase: s.DefinitionDatabase, ParallelDatabase: pkg.NewInMemoryDatabase(false)}
s2.SetResolver(s.Resolver)
if !full {
ass := PackagesAssertions{}
for _, i := range toInstall {
ass = append(ass, PackageAssert{Package: i.(*pkg.DefaultPackage), Value: true})
}
}
// Then try to uninstall the versions in the system, and store that tree
r, err := s.Uninstall(checkconflicts, false, toUninstall...)
if err != nil {
return nil, nil, errors.Wrap(err, "Could not compute upgrade - couldn't uninstall candidates ")
}
for _, z := range r {
err = installedcopy.RemovePackage(z)
if err != nil {
return nil, nil, errors.Wrap(err, "Could not compute upgrade - couldn't remove copy of package targetted for removal")
}
}
if len(toInstall) == 0 {
return toUninstall, PackagesAssertions{}, nil
}
assertions, e := s2.Install(toInstall)
return toUninstall, assertions, e
// To that tree, ask to install the versions that should be upgraded, and try to solve
// Return the solution
}
// Uninstall takes one or more candidate packages and returns a list of packages that would be removed
// in order to purge the candidates. Returns error if unsat.
func (s *Parallel) Uninstall(checkconflicts, full bool, packs ...pkg.Package) (pkg.Packages, error) {
if len(packs) == 0 {
return pkg.Packages{}, nil
}
var res pkg.Packages
toRemove := pkg.Packages{}
for _, c := range packs {
candidate, err := s.InstalledDatabase.FindPackage(c)
if err != nil {
// return nil, errors.Wrap(err, "Couldn't find required package in db definition")
packages, err := c.Expand(s.InstalledDatabase)
// Info("Expanded", packages, err)
if err != nil || len(packages) == 0 {
candidate = c
} else {
candidate = packages.Best(nil)
}
//Relax search, otherwise we cannot compute solutions for packages not in definitions
// return nil, errors.Wrap(err, "Package not found between installed")
}
toRemove = append(toRemove, candidate)
}
// Build a fake "Installed" - Candidate and its requires tree
var InstalledMinusCandidate pkg.Packages
// We are asked to not perform a full uninstall (checking all the possible requires that could
// be removed). Let's only check if we can remove the selected package
if !full && checkconflicts {
for _, candidate := range toRemove {
if conflicts, err := s.Conflicts(candidate, s.Installed()); conflicts {
return nil, err
}
}
return toRemove, nil
}
// TODO: Can be optimized
for _, i := range s.Installed() {
matched := false
for _, candidate := range toRemove {
if !i.Matches(candidate) {
contains, err := candidate.RequiresContains(s.ParallelDatabase, i)
if err != nil {
return nil, errors.Wrap(err, "Failed getting installed list")
}
if !contains {
matched = true
}
}
}
if matched {
InstalledMinusCandidate = append(InstalledMinusCandidate, i)
}
}
s2 := &Parallel{Concurrency: s.Concurrency, InstalledDatabase: pkg.NewInMemoryDatabase(false), DefinitionDatabase: s.DefinitionDatabase, ParallelDatabase: pkg.NewInMemoryDatabase(false)}
s2.SetResolver(s.Resolver)
// Get the requirements to install the candidate
asserts, err := s2.Install(toRemove)
if err != nil {
return nil, err
}
for _, a := range asserts {
if a.Value {
if !checkconflicts {
res = append(res, a.Package)
continue
}
c, err := s.ConflictsWithInstalled(a.Package)
if err != nil {
return nil, err
}
// If it doesn't conflict with the installed set we just consider it for removal and move to the next one
if !c {
res = append(res, a.Package)
continue
}
// If it does conflict, give it another chance by checking conflicts as if our candidate and all the required packages were not installed in the system
c, err = s.ConflictsWith(a.Package, InstalledMinusCandidate)
if err != nil {
return nil, err
}
if !c {
res = append(res, a.Package)
}
}
}
return res, nil
}
// BuildFormula builds the main solving formula that is evaluated by the SAT solver.
func (s *Parallel) BuildFormula() (bf.Formula, error) {
var formulas []bf.Formula
r, err := s.BuildPartialWorld(false)
if err != nil {
return nil, err
}
var wg = new(sync.WaitGroup)
var wg2 = new(sync.WaitGroup)
all := make(chan pkg.Package)
results := make(chan bf.Formula, 1)
for i := 0; i < s.Concurrency; i++ {
wg.Add(1)
go func(wg *sync.WaitGroup, c <-chan pkg.Package) {
defer wg.Done()
for wanted := range c {
encodedW, err := wanted.Encode(s.ParallelDatabase)
if err != nil {
panic(err)
}
W := bf.Var(encodedW)
installedWorld := s.Installed()
//TODO:Optimize
if len(installedWorld) == 0 {
results <- W
continue
}
for _, installed := range installedWorld {
encodedI, err := installed.Encode(s.ParallelDatabase)
if err != nil {
panic(err)
}
I := bf.Var(encodedI)
results <- bf.And(W, I)
}
}
}(wg, all)
}
wg2.Add(1)
go func() {
defer wg2.Done()
for t := range results {
formulas = append(formulas, t)
}
}()
for _, wanted := range s.Wanted {
all <- wanted
}
close(all)
wg.Wait()
close(results)
wg2.Wait()
formulas = append(formulas, r)
return bf.And(formulas...), nil
}
func (s *Parallel) solve(f bf.Formula) (map[string]bool, bf.Formula, error) {
model := bf.Solve(f)
if model == nil {
return model, f, errors.New("Unsolvable")
}
return model, f, nil
}
// Solve builds the formula given the current state and returns package assertions
func (s *Parallel) Solve() (PackagesAssertions, error) {
var model map[string]bool
var err error
f, err := s.BuildFormula()
if err != nil {
return nil, err
}
model, _, err = s.solve(f)
if err != nil && s.Resolver != nil {
return s.Resolver.Solve(f, s)
}
if err != nil {
return nil, err
}
return DecodeModel(model, s.ParallelDatabase)
}
// Install, given a list of packages, returns package assertions to indicate the packages that must be installed in the system in order
// to satisfy all the constraints
func (s *Parallel) Install(c pkg.Packages) (PackagesAssertions, error) {
coll, err := s.getList(s.DefinitionDatabase, c)
if err != nil {
return nil, errors.Wrap(err, "Packages not found in definition db")
}
s.Wanted = coll
if s.noRulesWorld() {
var ass PackagesAssertions
for _, p := range s.Installed() {
ass = append(ass, PackageAssert{Package: p.(*pkg.DefaultPackage), Value: true})
}
for _, p := range s.Wanted {
ass = append(ass, PackageAssert{Package: p.(*pkg.DefaultPackage), Value: true})
}
return ass, nil
}
return s.Solve()
}
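The new Parallel solver repeats the same fan-out/fan-in concurrency pattern in nearly every method above: a pool of workers fed by one channel, a single collector goroutine, and two WaitGroups fencing the two phases. Below is a minimal standalone sketch of that pattern with purely illustrative names and work; having only the collector append to the shared slice is what avoids the kind of race flagged by the FIXME in UpgradeUniverse.

package main

import (
	"fmt"
	"sync"
)

func main() {
	concurrency := 4
	items := []int{1, 2, 3, 4, 5}

	var results []int
	wg := new(sync.WaitGroup)  // fences the worker pool
	wg2 := new(sync.WaitGroup) // fences the single collector

	all := make(chan int)
	out := make(chan int, 1)

	for i := 0; i < concurrency; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for n := range all {
				out <- n * n // stand-in for the per-package formula/encode work
			}
		}()
	}

	wg2.Add(1)
	go func() {
		defer wg2.Done()
		for r := range out {
			results = append(results, r) // only the collector touches the slice
		}
	}()

	for _, n := range items {
		all <- n
	}
	close(all)  // no more work for the pool
	wg.Wait()   // workers done, safe to close the results channel
	close(out)
	wg2.Wait()  // collector has drained everything

	fmt.Println(results)
}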

1296
pkg/solver/parallel_test.go Normal file

File diff suppressed because it is too large

View File

@@ -28,13 +28,13 @@ var _ = Describe("Resolver", func() {
db := pkg.NewInMemoryDatabase(false)
dbInstalled := pkg.NewInMemoryDatabase(false)
dbDefinitions := pkg.NewInMemoryDatabase(false)
s := NewSolver(dbInstalled, dbDefinitions, db)
s := NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
BeforeEach(func() {
db = pkg.NewInMemoryDatabase(false)
dbInstalled = pkg.NewInMemoryDatabase(false)
dbDefinitions = pkg.NewInMemoryDatabase(false)
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
})
Context("Conflict set", func() {
@@ -79,13 +79,13 @@ var _ = Describe("Resolver", func() {
solution, err := s.Install([]pkg.Package{D, F}) // D and F should go as they have no deps. A/E should be filtered by QLearn
Expect(err).ToNot(HaveOccurred())
Expect(len(solution)).To(Equal(6))
Expect(len(solution)).To(Equal(3))
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: false}))
Expect(solution).To(ContainElement(PackageAssert{Package: B, Value: false}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: A, Value: true}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: B, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: C, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: D, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: E, Value: false}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: E, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: F, Value: true}))
})
@@ -112,12 +112,12 @@ var _ = Describe("Resolver", func() {
solution, err := s.Install([]pkg.Package{A, D})
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: false}))
Expect(solution).To(ContainElement(PackageAssert{Package: B, Value: false}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: A, Value: true}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: B, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: C, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: D, Value: true}))
Expect(len(solution)).To(Equal(4))
Expect(len(solution)).To(Equal(2))
})
It("will find out that we can install D and F by ignoring E and A", func() {
@@ -142,13 +142,13 @@ var _ = Describe("Resolver", func() {
solution, err := s.Install([]pkg.Package{A, D, E, F}) // D and F should go as they have no deps. A/E should be filtered by QLearn
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: false}))
Expect(solution).To(ContainElement(PackageAssert{Package: B, Value: false}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: A, Value: true}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: B, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: C, Value: true})) // Was already installed
Expect(solution).To(ContainElement(PackageAssert{Package: D, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: E, Value: false}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: E, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: F, Value: true}))
Expect(len(solution)).To(Equal(6))
Expect(len(solution)).To(Equal(3))
})
})

View File

@@ -26,11 +26,18 @@ import (
pkg "github.com/mudler/luet/pkg/package"
)
type SolverType int
const (
SingleCoreSimple = 0
ParallelSimple = iota
)
// PackageSolver is an interface to a generic package solving algorithm
type PackageSolver interface {
SetDefinitionDatabase(pkg.PackageDatabase)
Install(p pkg.Packages) (PackagesAssertions, error)
Uninstall(candidate pkg.Package, checkconflicts, full bool) (pkg.Packages, error)
Uninstall(checkconflicts, full bool, candidate ...pkg.Package) (pkg.Packages, error)
ConflictsWithInstalled(p pkg.Package) (bool, error)
ConflictsWith(p pkg.Package, ls pkg.Packages) (bool, error)
Conflicts(pack pkg.Package, lsp pkg.Packages) (bool, error)
@@ -56,16 +63,30 @@ type Solver struct {
Resolver PackageResolver
}
// NewSolver accepts as argument two lists of packages, the first is the initial set,
// the second represent all the known packages.
func NewSolver(installed pkg.PackageDatabase, definitiondb pkg.PackageDatabase, solverdb pkg.PackageDatabase) PackageSolver {
return NewResolver(installed, definitiondb, solverdb, &DummyPackageResolver{})
type Options struct {
Type SolverType
Concurrency int
}
// NewReSolver accepts as argument two lists of packages, the first is the initial set,
// NewSolver accepts as argument two lists of packages, the first is the initial set,
// the second represent all the known packages.
func NewResolver(installed pkg.PackageDatabase, definitiondb pkg.PackageDatabase, solverdb pkg.PackageDatabase, re PackageResolver) PackageSolver {
return &Solver{InstalledDatabase: installed, DefinitionDatabase: definitiondb, SolverDatabase: solverdb, Resolver: re}
func NewSolver(t Options, installed pkg.PackageDatabase, definitiondb pkg.PackageDatabase, solverdb pkg.PackageDatabase) PackageSolver {
return NewResolver(t, installed, definitiondb, solverdb, &DummyPackageResolver{})
}
// NewResolver accepts as argument two lists of packages, the first is the initial set,
// the second represent all the known packages.
// Using constructors as, in the future, we foresee warmups for the hot-restore solver cache
func NewResolver(t Options, installed pkg.PackageDatabase, definitiondb pkg.PackageDatabase, solverdb pkg.PackageDatabase, re PackageResolver) PackageSolver {
var s PackageSolver
switch t.Type {
case SingleCoreSimple:
s = &Solver{InstalledDatabase: installed, DefinitionDatabase: definitiondb, SolverDatabase: solverdb, Resolver: re}
case ParallelSimple:
s = &Parallel{InstalledDatabase: installed, DefinitionDatabase: definitiondb, ParallelDatabase: solverdb, Resolver: re, Concurrency: t.Concurrency}
}
return s
}
// SetDefinitionDatabase is a setter for the definition Database
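A hedged usage sketch of the changed constructor and the now-variadic Uninstall, mirroring how the updated test files in this diff call them. The import path for the solver package and the runtime.NumCPU() concurrency value are assumptions for illustration only.

package main

import (
	"runtime"

	pkg "github.com/mudler/luet/pkg/package"
	"github.com/mudler/luet/pkg/solver"
)

func main() {
	dbInstalled := pkg.NewInMemoryDatabase(false)
	dbDefinitions := pkg.NewInMemoryDatabase(false)
	solverDB := pkg.NewInMemoryDatabase(false)

	// Single-core solver, as constructed throughout the updated tests.
	s := solver.NewSolver(solver.Options{Type: solver.SingleCoreSimple}, dbInstalled, dbDefinitions, solverDB)

	// Parallel solver: same PackageSolver interface, backed by pkg/solver/parallel.go.
	p := solver.NewSolver(solver.Options{Type: solver.ParallelSimple, Concurrency: runtime.NumCPU()},
		dbInstalled, dbDefinitions, solverDB)

	A := pkg.NewPackage("A", "1.0", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})

	// Install still takes a slice; Uninstall now takes the flags first and
	// then one or more candidate packages.
	_, _ = s.Install([]pkg.Package{A})
	_, _ = p.Uninstall(true, true, A)
}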
@@ -110,7 +131,15 @@ func (s *Solver) noRulesInstalled() bool {
func (s *Solver) BuildInstalled() (bf.Formula, error) {
var formulas []bf.Formula
var packages pkg.Packages
for _, p := range s.Installed() {
packages = append(packages, p)
for _, dep := range p.Related(s.DefinitionDatabase) {
packages = append(packages, dep)
}
}
for _, p := range packages {
solvable, err := p.BuildFormula(s.DefinitionDatabase, s.SolverDatabase)
if err != nil {
return nil, err
@@ -138,6 +167,7 @@ func (s *Solver) BuildWorld(includeInstalled bool) (bf.Formula, error) {
}
for _, p := range s.World() {
solvable, err := p.BuildFormula(s.DefinitionDatabase, s.SolverDatabase)
if err != nil {
return nil, err
@@ -147,6 +177,44 @@ func (s *Solver) BuildWorld(includeInstalled bool) (bf.Formula, error) {
return bf.And(formulas...), nil
}
// BuildPartialWorld builds the formula which holds the requirements from the wanted packages
// and their related dependencies (partial world)
func (s *Solver) BuildPartialWorld(includeInstalled bool) (bf.Formula, error) {
var formulas []bf.Formula
// NOTE: This block should be enabled in case of very old systems with outdated world sets
if includeInstalled {
solvable, err := s.BuildInstalled()
if err != nil {
return nil, err
}
//f = bf.And(f, solvable)
formulas = append(formulas, solvable)
}
var packages pkg.Packages
for _, p := range s.Wanted {
// packages = append(packages, p)
for _, dep := range p.Related(s.DefinitionDatabase) {
packages = append(packages, dep)
}
}
for _, p := range packages {
solvable, err := p.BuildFormula(s.DefinitionDatabase, s.SolverDatabase)
if err != nil {
return nil, err
}
formulas = append(formulas, solvable...)
}
if len(formulas) != 0 {
return bf.And(formulas...), nil
}
return bf.True, nil
}
func (s *Solver) getList(db pkg.PackageDatabase, lsp pkg.Packages) (pkg.Packages, error) {
var ls pkg.Packages
@@ -187,8 +255,11 @@ func (s *Solver) Conflicts(pack pkg.Package, lsp pkg.Packages) (bool, error) {
for _, p := range ls {
temporarySet.CreatePackage(p)
}
visited := make(map[string]interface{})
revdeps := p.ExpandedRevdeps(temporarySet, visited)
revdeps, err := temporarySet.GetRevdeps(p)
if err != nil {
return false, errors.Wrap(err, "error scanning revdeps")
}
var revdepsErr error
for _, r := range revdeps {
@@ -356,17 +427,6 @@ func (s *Solver) UpgradeUniverse(dropremoved bool) (pkg.Packages, PackagesAssert
}
}
// resolve to packages from the db to be able to encode correctly
oldPackages, err := s.getList(universe, notUptodate)
if err != nil {
return nil, nil, errors.Wrap(err, "couldn't get package marked for removal from universe")
}
updates, err := s.getList(universe, toUpgrade)
if err != nil {
return nil, nil, errors.Wrap(err, "couldn't get package marked for update from universe")
}
var formulas []bf.Formula
// Build constraints for the whole defdb
@@ -377,11 +437,11 @@ func (s *Solver) UpgradeUniverse(dropremoved bool) (pkg.Packages, PackagesAssert
// Treat removed packages from universe as marked for deletion
if dropremoved {
oldPackages = append(oldPackages, removed...)
notUptodate = append(notUptodate, removed...)
}
// SAT encode the clauses against the world
for _, p := range oldPackages.Unique() {
for _, p := range notUptodate.Unique() {
encodedP, err := p.Encode(universe)
if err != nil {
return nil, nil, errors.Wrap(err, "couldn't encode package")
@@ -390,7 +450,7 @@ func (s *Solver) UpgradeUniverse(dropremoved bool) (pkg.Packages, PackagesAssert
formulas = append(formulas, bf.And(bf.Not(P), r))
}
for _, p := range updates {
for _, p := range toUpgrade {
encodedP, err := p.Encode(universe)
if err != nil {
return nil, nil, errors.Wrap(err, "couldn't encode package")
@@ -400,6 +460,10 @@ func (s *Solver) UpgradeUniverse(dropremoved bool) (pkg.Packages, PackagesAssert
}
markedForRemoval := pkg.Packages{}
if len(formulas) == 0 {
return pkg.Packages{}, PackagesAssertions{}, nil
}
model := bf.Solve(bf.And(formulas...))
if model == nil {
return nil, nil, errors.New("Failed finding a solution")
@@ -428,27 +492,27 @@ func (s *Solver) Upgrade(checkconflicts, full bool) (pkg.Packages, PackagesAsser
toUninstall := pkg.Packages{}
toInstall := pkg.Packages{}
availableCache := map[string]pkg.Packages{}
// we do this in memory so we take into account of provides
universe := pkg.NewInMemoryDatabase(false)
for _, p := range s.DefinitionDatabase.World() {
// Each one, should be expanded
availableCache[p.GetName()+p.GetCategory()] = append(availableCache[p.GetName()+p.GetCategory()], p)
universe.CreatePackage(p)
}
installedcopy := pkg.NewInMemoryDatabase(false)
for _, p := range s.InstalledDatabase.World() {
installedcopy.CreatePackage(p)
packages, ok := availableCache[p.GetName()+p.GetCategory()]
if ok && len(packages) != 0 {
packages, err := universe.FindPackageVersions(p)
if err == nil && len(packages) != 0 {
best := packages.Best(nil)
if best.GetVersion() != p.GetVersion() {
if !best.Matches(p) {
toUninstall = append(toUninstall, p)
toInstall = append(toInstall, best)
}
}
}
s2 := NewSolver(installedcopy, s.DefinitionDatabase, pkg.NewInMemoryDatabase(false))
s2 := NewSolver(Options{Type: SingleCoreSimple}, installedcopy, s.DefinitionDatabase, pkg.NewInMemoryDatabase(false))
s2.SetResolver(s.Resolver)
if !full {
ass := PackagesAssertions{}
@@ -456,23 +520,25 @@ func (s *Solver) Upgrade(checkconflicts, full bool) (pkg.Packages, PackagesAsser
ass = append(ass, PackageAssert{Package: i.(*pkg.DefaultPackage), Value: true})
}
}
// Then try to uninstall the versions in the system, and store that tree
for _, p := range toUninstall {
r, err := s.Uninstall(p, checkconflicts, false)
r, err := s.Uninstall(checkconflicts, false, toUninstall.Unique()...)
if err != nil {
return nil, nil, errors.Wrap(err, "Could not compute upgrade - couldn't uninstall candidates ")
}
for _, z := range r {
err = installedcopy.RemovePackage(z)
if err != nil {
return nil, nil, errors.Wrap(err, "Could not compute upgrade - couldn't uninstall selected candidate "+p.GetFingerPrint())
}
for _, z := range r {
err = installedcopy.RemovePackage(z)
if err != nil {
return nil, nil, errors.Wrap(err, "Could not compute upgrade - couldn't remove copy of package targetted for removal")
}
return nil, nil, errors.Wrap(err, "Could not compute upgrade - couldn't remove copy of package targetted for removal")
}
}
r, e := s2.Install(toInstall)
return toUninstall, r, e
if len(toInstall) == 0 {
return toUninstall, PackagesAssertions{}, nil
}
assertions, err := s2.Install(toInstall.Unique())
return toUninstall, assertions, err
// To that tree, ask to install the versions that should be upgraded, and try to solve
// Return the solution
@@ -480,52 +546,72 @@ func (s *Solver) Upgrade(checkconflicts, full bool) (pkg.Packages, PackagesAsser
// Uninstall takes a candidate package and returns a list of packages that would be removed
// in order to purge the candidate. Returns error if unsat.
func (s *Solver) Uninstall(c pkg.Package, checkconflicts, full bool) (pkg.Packages, error) {
var res pkg.Packages
candidate, err := s.InstalledDatabase.FindPackage(c)
if err != nil {
// return nil, errors.Wrap(err, "Couldn't find required package in db definition")
packages, err := c.Expand(s.InstalledDatabase)
// Info("Expanded", packages, err)
if err != nil || len(packages) == 0 {
candidate = c
} else {
candidate = packages.Best(nil)
}
//Relax search, otherwise we cannot compute solutions for packages not in definitions
// return nil, errors.Wrap(err, "Package not found between installed")
func (s *Solver) Uninstall(checkconflicts, full bool, packs ...pkg.Package) (pkg.Packages, error) {
if len(packs) == 0 {
return pkg.Packages{}, nil
}
var res pkg.Packages
toRemove := pkg.Packages{}
for _, c := range packs {
candidate, err := s.InstalledDatabase.FindPackage(c)
if err != nil {
// return nil, errors.Wrap(err, "Couldn't find required package in db definition")
packages, err := c.Expand(s.InstalledDatabase)
// Info("Expanded", packages, err)
if err != nil || len(packages) == 0 {
candidate = c
} else {
candidate = packages.Best(nil)
}
//Relax search, otherwise we cannot compute solutions for packages not in definitions
// return nil, errors.Wrap(err, "Package not found between installed")
}
toRemove = append(toRemove, candidate)
}
// Build a fake "Installed" - Candidate and its requires tree
var InstalledMinusCandidate pkg.Packages
// We are asked to not perform a full uninstall (checking all the possible requires that could
// be removed). Let's only check if we can remove the selected package
if !full && checkconflicts {
if conflicts, err := s.Conflicts(candidate, s.Installed()); conflicts {
return nil, err
} else {
return pkg.Packages{candidate}, nil
for _, candidate := range toRemove {
if conflicts, err := s.Conflicts(candidate, s.Installed()); conflicts {
return nil, err
}
}
return toRemove, nil
}
// TODO: Can be optimized
for _, i := range s.Installed() {
if !i.Matches(candidate) {
contains, err := candidate.RequiresContains(s.SolverDatabase, i)
if err != nil {
return nil, errors.Wrap(err, "Failed getting installed list")
}
if !contains {
InstalledMinusCandidate = append(InstalledMinusCandidate, i)
matched := false
for _, candidate := range toRemove {
if !i.Matches(candidate) {
contains, err := candidate.RequiresContains(s.SolverDatabase, i)
if err != nil {
return nil, errors.Wrap(err, "Failed getting installed list")
}
if !contains {
matched = true
}
}
}
if matched {
InstalledMinusCandidate = append(InstalledMinusCandidate, i)
}
}
s2 := NewSolver(pkg.NewInMemoryDatabase(false), s.DefinitionDatabase, pkg.NewInMemoryDatabase(false))
s2 := NewSolver(Options{Type: SingleCoreSimple}, pkg.NewInMemoryDatabase(false), s.DefinitionDatabase, pkg.NewInMemoryDatabase(false))
s2.SetResolver(s.Resolver)
// Get the requirements to install the candidate
asserts, err := s2.Install(pkg.Packages{candidate})
asserts, err := s2.Install(toRemove)
if err != nil {
return nil, err
}
@@ -566,16 +652,19 @@ func (s *Solver) Uninstall(c pkg.Package, checkconflicts, full bool) (pkg.Packag
// BuildFormula builds the main solving formula that is evaluated by the sat solver.
func (s *Solver) BuildFormula() (bf.Formula, error) {
var formulas []bf.Formula
r, err := s.BuildWorld(false)
r, err := s.BuildPartialWorld(false)
if err != nil {
return nil, err
}
for _, wanted := range s.Wanted {
encodedW, err := wanted.Encode(s.SolverDatabase)
if err != nil {
return nil, err
}
W := bf.Var(encodedW)
// allW = append(allW, W)
installedWorld := s.Installed()
//TODO:Optimize
if len(installedWorld) == 0 {
@@ -593,8 +682,8 @@ func (s *Solver) BuildFormula() (bf.Formula, error) {
}
}
formulas = append(formulas, r)
return bf.And(formulas...), nil
}

View File

@@ -28,13 +28,13 @@ var _ = Describe("Solver", func() {
db := pkg.NewInMemoryDatabase(false)
dbInstalled := pkg.NewInMemoryDatabase(false)
dbDefinitions := pkg.NewInMemoryDatabase(false)
s := NewSolver(dbInstalled, dbDefinitions, db)
s := NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
BeforeEach(func() {
db = pkg.NewInMemoryDatabase(false)
dbInstalled = pkg.NewInMemoryDatabase(false)
dbDefinitions = pkg.NewInMemoryDatabase(false)
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
})
Context("Simple set", func() {
It("Solves correctly if the selected package has no requirements or conflicts and we have nothing installed yet", func() {
@@ -52,7 +52,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A})
Expect(err).ToNot(HaveOccurred())
@@ -75,7 +75,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{B})
Expect(err).ToNot(HaveOccurred())
@@ -101,17 +101,17 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A})
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: C, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: E, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: B, Value: false}))
Expect(solution).To(ContainElement(PackageAssert{Package: D, Value: false}))
// Expect(solution).To(ContainElement(PackageAssert{Package: B, Value: false}))
//Expect(solution).To(ContainElement(PackageAssert{Package: D, Value: false}))
Expect(len(solution)).To(Equal(5))
Expect(len(solution)).To(Equal(3))
})
It("Solves correctly if the selected package to install has requirements", func() {
@@ -130,7 +130,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A})
Expect(err).ToNot(HaveOccurred())
@@ -156,7 +156,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A})
Expect(err).ToNot(HaveOccurred())
@@ -181,7 +181,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A})
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
@@ -209,7 +209,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A})
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
@@ -236,7 +236,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A})
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
@@ -263,7 +263,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{C})
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
@@ -291,7 +291,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{&pkg.DefaultPackage{Name: "c", Version: ">1.0", Category: "test"}})
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
@@ -317,7 +317,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A})
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
@@ -345,7 +345,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A, B})
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
@@ -391,7 +391,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{C})
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
@@ -401,7 +401,7 @@ var _ = Describe("Solver", func() {
Expect(solution).ToNot(ContainElement(PackageAssert{Package: D, Value: false}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: E, Value: true}))
Expect(len(solution)).To(Equal(4))
Expect(len(solution)).To(Equal(3))
Expect(err).ToNot(HaveOccurred())
})
@@ -431,7 +431,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A, B})
Expect(solution).To(ContainElement(PackageAssert{Package: A, Value: true}))
@@ -476,7 +476,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A2, B})
Expect(solution).To(ContainElement(PackageAssert{Package: A2, Value: true}))
@@ -514,7 +514,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A2})
Expect(solution).To(ContainElement(PackageAssert{Package: A2, Value: true}))
@@ -529,7 +529,7 @@ var _ = Describe("Solver", func() {
Expect(solution).To(ContainElement(PackageAssert{Package: D1, Value: false}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: E, Value: true}))
Expect(len(solution)).To(Equal(6))
Expect(len(solution)).To(Equal(5))
Expect(err).ToNot(HaveOccurred())
})
@@ -555,7 +555,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Install([]pkg.Package{A2})
Expect(solution).To(ContainElement(PackageAssert{Package: A2, Value: true}))
@@ -570,7 +570,7 @@ var _ = Describe("Solver", func() {
Expect(solution).To(ContainElement(PackageAssert{Package: D1, Value: false}))
Expect(solution).ToNot(ContainElement(PackageAssert{Package: E, Value: true}))
Expect(len(solution)).To(Equal(6))
Expect(len(solution)).To(Equal(5))
Expect(err).ToNot(HaveOccurred())
})
@@ -591,9 +591,9 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Uninstall(A, true, true)
solution, err := s.Uninstall(true, true, A)
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(A))
@@ -617,9 +617,9 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.Uninstall(&pkg.DefaultPackage{Name: "A", Version: ">1.0"}, true, true)
solution, err := s.Uninstall(true, true, &pkg.DefaultPackage{Name: "A", Version: ">1.0"})
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(A))
@@ -643,7 +643,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
solution, err := s.Uninstall(A, true, true)
solution, err := s.Uninstall(true, true, A)
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(A))
@@ -667,7 +667,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
solution, err := s.Uninstall(A, true, true)
solution, err := s.Uninstall(true, true, A)
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(A))
@@ -690,7 +690,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
solution, err := s.Uninstall(A, true, true)
solution, err := s.Uninstall(true, true, A)
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(A))
@@ -715,7 +715,7 @@ var _ = Describe("Solver", func() {
Expect(err).ToNot(HaveOccurred())
}
solution, err := s.Uninstall(A, true, true)
solution, err := s.Uninstall(true, true, A)
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(A))
@@ -741,7 +741,7 @@ var _ = Describe("Solver", func() {
Expect(err).ToNot(HaveOccurred())
}
solution, err := s.Uninstall(A, true, true)
solution, err := s.Uninstall(true, true, A)
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(A))
@@ -768,7 +768,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.UninstallUniverse(pkg.Packages{A})
Expect(err).ToNot(HaveOccurred())
@@ -794,7 +794,7 @@ var _ = Describe("Solver", func() {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
s = NewSolver(dbInstalled, dbDefinitions, db)
s = NewSolver(Options{Type: SingleCoreSimple}, dbInstalled, dbDefinitions, db)
solution, err := s.UninstallUniverse(pkg.Packages{
&pkg.DefaultPackage{Name: "A", Version: ">1.0"}})
@@ -1070,7 +1070,7 @@ var _ = Describe("Solver", func() {
}
val, err := s.Conflicts(D, dbInstalled.World())
Expect(err.Error()).To(Equal("\n/A-\n/B-"))
Expect(err.Error()).To(Or(Equal("\n/A-\n/B-"), Equal("\n/B-\n/A-")))
Expect(val).To(BeTrue())
})
@@ -1209,7 +1209,6 @@ var _ = Describe("Solver", func() {
})
})
Context("Upgrades", func() {
C := pkg.NewPackage("c", "1.5", []*pkg.DefaultPackage{&pkg.DefaultPackage{Name: "a", Version: ">=1.0", Category: "test"}}, []*pkg.DefaultPackage{})
C.SetCategory("test")
B := pkg.NewPackage("b", "1.0", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
@@ -1219,6 +1218,17 @@ var _ = Describe("Solver", func() {
A1 := pkg.NewPackage("a", "1.2", []*pkg.DefaultPackage{&pkg.DefaultPackage{Name: "b", Version: "1.0", Category: "test"}}, []*pkg.DefaultPackage{})
A1.SetCategory("test")
BeforeEach(func() {
C = pkg.NewPackage("c", "1.5", []*pkg.DefaultPackage{&pkg.DefaultPackage{Name: "a", Version: ">=1.0", Category: "test"}}, []*pkg.DefaultPackage{})
C.SetCategory("test")
B = pkg.NewPackage("b", "1.0", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
B.SetCategory("test")
A = pkg.NewPackage("a", "1.1", []*pkg.DefaultPackage{&pkg.DefaultPackage{Name: "b", Version: "1.0", Category: "test"}}, []*pkg.DefaultPackage{})
A.SetCategory("test")
A1 = pkg.NewPackage("a", "1.2", []*pkg.DefaultPackage{&pkg.DefaultPackage{Name: "b", Version: "1.0", Category: "test"}}, []*pkg.DefaultPackage{})
A1.SetCategory("test")
})
It("upgrades correctly", func() {
for _, p := range []pkg.Package{A1, B, C} {
_, err := dbDefinitions.CreatePackage(p)
@@ -1240,7 +1250,31 @@ var _ = Describe("Solver", func() {
Expect(solution).To(ContainElement(PackageAssert{Package: B, Value: true}))
Expect(solution).To(ContainElement(PackageAssert{Package: C, Value: false}))
Expect(len(solution)).To(Equal(3))
})
It("upgrades correctly with provides", func() {
B.SetProvides([]*pkg.DefaultPackage{&pkg.DefaultPackage{Name: "a", Version: ">=0", Category: "test"}, &pkg.DefaultPackage{Name: "c", Version: ">=0", Category: "test"}})
for _, p := range []pkg.Package{A1, B} {
_, err := dbDefinitions.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
for _, p := range []pkg.Package{A, C} {
_, err := dbInstalled.CreatePackage(p)
Expect(err).ToNot(HaveOccurred())
}
uninstall, solution, err := s.Upgrade(true, true)
Expect(err).ToNot(HaveOccurred())
Expect(len(uninstall)).To(Equal(2))
Expect(uninstall[1].GetName()).To(Equal("c"))
Expect(uninstall[1].GetVersion()).To(Equal("1.5"))
Expect(uninstall[0].GetName()).To(Equal("a"))
Expect(uninstall[0].GetVersion()).To(Equal("1.1"))
Expect(solution).To(ContainElement(PackageAssert{Package: B, Value: true}))
Expect(len(solution)).To(Equal(1))
})
It("UpgradeUniverse upgrades correctly", func() {

View File

@@ -44,6 +44,14 @@ type DefaultPackageSanitized struct {
Labels map[string]string `json:"labels,omitempty" yaml:"labels,omitempty"`
}
func NewDefaultPackageSanitizedFromYaml(data []byte) (*DefaultPackageSanitized, error) {
ans := &DefaultPackageSanitized{}
if err := yaml.Unmarshal(data, ans); err != nil {
return nil, err
}
return ans, nil
}
func NewDefaultPackageSanitized(p pkg.Package) *DefaultPackageSanitized {
ans := &DefaultPackageSanitized{
Name: p.GetName(),
@@ -110,3 +118,12 @@ func NewDefaultPackageSanitized(p pkg.Package) *DefaultPackageSanitized {
func (p *DefaultPackageSanitized) Yaml() ([]byte, error) {
return yaml.Marshal(p)
}
func (p *DefaultPackageSanitized) Clone() (*DefaultPackageSanitized, error) {
data, err := p.Yaml()
if err != nil {
return nil, err
}
return NewDefaultPackageSanitizedFromYaml(data)
}

View File

@@ -1,32 +0,0 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package gentoo_test
import (
"testing"
. "github.com/mudler/luet/cmd"
config "github.com/mudler/luet/pkg/config"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
func TestGentooBuilder(t *testing.T) {
RegisterFailHandler(Fail)
LoadConfig(config.LuetCfg)
RunSpecs(t, "Gentoo Suite")
}


@@ -1,142 +0,0 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package gentoo
// NOTE: Look here as an example of the builder definition executor
// https://gist.github.com/adnaan/6ca68c7985c6f851def3
import (
"io/ioutil"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
. "github.com/mudler/luet/pkg/logger"
tree "github.com/mudler/luet/pkg/tree"
pkg "github.com/mudler/luet/pkg/package"
)
type MemoryDB int
const (
InMemory MemoryDB = iota
BoltDB MemoryDB = iota
)
func NewGentooBuilder(e EbuildParser, concurrency int, db MemoryDB) tree.Parser {
return &GentooBuilder{EbuildParser: e, Concurrency: concurrency}
}
type GentooBuilder struct {
EbuildParser EbuildParser
Concurrency int
DBType MemoryDB
}
type EbuildParser interface {
ScanEbuild(string) (pkg.Packages, error)
}
func (gb *GentooBuilder) scanEbuild(path string, db pkg.PackageDatabase) error {
defer func() {
if r := recover(); r != nil {
Error(r)
}
}()
pkgs, err := gb.EbuildParser.ScanEbuild(path)
if err != nil {
return err
}
for _, p := range pkgs {
_, err := db.FindPackage(p)
if err != nil {
_, err := db.CreatePackage(p)
if err != nil {
return err
}
}
}
return nil
}
func (gb *GentooBuilder) worker(i int, wg *sync.WaitGroup, s <-chan string, db pkg.PackageDatabase) {
defer wg.Done()
for path := range s {
Info("#"+strconv.Itoa(i), "parsing", path)
err := gb.scanEbuild(path, db)
if err != nil {
Error(path, ":", err.Error())
}
}
}
func (gb *GentooBuilder) Generate(dir string) (pkg.PackageDatabase, error) {
var toScan = make(chan string)
Spinner(27)
defer SpinnerStop()
var db pkg.PackageDatabase
// Support for multiple database backends
switch gb.DBType {
case InMemory:
db = pkg.NewInMemoryDatabase(false)
case BoltDB:
tmpfile, err := ioutil.TempFile("", "boltdb")
if err != nil {
return nil, err
}
db = pkg.NewBoltDatabase(tmpfile.Name())
default:
db = pkg.NewInMemoryDatabase(false)
}
Debug("Concurrency", gb.Concurrency)
// the waitgroup will allow us to wait for all the goroutines to finish at the end
var wg = new(sync.WaitGroup)
for i := 0; i < gb.Concurrency; i++ {
wg.Add(1)
go gb.worker(i, wg, toScan, db)
}
// TODO: Handle cleaning after? Cleanup implemented in GetPackageSet().Clean()
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
// Ensure that only files with the .ebuild suffix are processed,
// ignoring .swp files or files that merely contain the string "ebuild" in their name.
if strings.HasSuffix(info.Name(), ".ebuild") {
toScan <- path
}
return nil
})
close(toScan)
wg.Wait()
if err != nil {
return db, err
}
return db, nil
}


@@ -1,177 +0,0 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package gentoo_test
import (
"fmt"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
pkg "github.com/mudler/luet/pkg/package"
. "github.com/mudler/luet/pkg/tree/builder/gentoo"
)
type FakeParser struct {
}
func (f *FakeParser) ScanEbuild(path string) (pkg.Packages, error) {
return pkg.Packages{&pkg.DefaultPackage{Name: path}}, nil
}
var _ = Describe("GentooBuilder", func() {
Context("Simple test", func() {
for _, dbType := range []MemoryDB{InMemory, BoltDB} {
It("parses correctly deps", func() {
gb := NewGentooBuilder(&FakeParser{}, 20, dbType)
tree, err := gb.Generate("../../../../tests/fixtures/overlay")
defer func() {
Expect(tree.Clean()).ToNot(HaveOccurred())
}()
Expect(err).ToNot(HaveOccurred())
Expect(len(tree.GetPackages())).To(Equal(10))
})
}
})
Context("Parse ebuild1", func() {
parser := &SimpleEbuildParser{}
pkgs, err := parser.ScanEbuild("../../../../tests/fixtures/overlay/app-crypt/pinentry-gnome/pinentry-gnome-1.0.0-r2.ebuild")
It("parses correctly deps", func() {
Expect(err).ToNot(HaveOccurred())
fmt.Println("PKG ", pkgs[0])
Expect(pkgs[0].GetLicense()).To(Equal("GPL-2"))
Expect(pkgs[0].GetDescription()).To(Equal("GNOME 3 frontend for pinentry"))
})
})
Context("Parse ebuild2", func() {
parser := &SimpleEbuildParser{}
pkgs, err := parser.ScanEbuild("../../../../tests/fixtures/parser/mod_dav_svn-1.12.2.ebuild")
It("Parsing ebuild2", func() {
Expect(err).ToNot(HaveOccurred())
fmt.Println("PKG ", pkgs[0])
Expect(pkgs[0].GetLicense()).To(Equal("Subversion"))
Expect(pkgs[0].GetDescription()).To(Equal("Subversion WebDAV support"))
})
})
Context("Parse ebuild3", func() {
parser := &SimpleEbuildParser{}
pkgs, err := parser.ScanEbuild("../../../../tests/fixtures/parser/linux-sources-1.ebuild")
It("Check parsing of the ebuild3", func() {
Expect(err).ToNot(HaveOccurred())
fmt.Println("PKG ", pkgs[0])
Expect(len(pkgs[0].GetRequires())).To(Equal(0))
Expect(pkgs[0].GetLicense()).To(Equal(""))
Expect(pkgs[0].GetDescription()).To(Equal("Virtual for Linux kernel sources"))
})
})
Context("Parse ebuild4", func() {
parser := &SimpleEbuildParser{}
pkgs, err := parser.ScanEbuild("../../../../tests/fixtures/parser/sabayon-mce-1.1-r5.ebuild")
It("Check parsing of the ebuild4", func() {
Expect(err).ToNot(HaveOccurred())
fmt.Println("PKG ", pkgs[0])
Expect(len(pkgs[0].GetRequires())).To(Equal(2))
Expect(pkgs[0].GetLicense()).To(Equal("GPL-2"))
Expect(pkgs[0].GetDescription()).To(Equal("Sabayon Linux Media Center Infrastructure"))
})
})
Context("Parse ebuild5", func() {
parser := &SimpleEbuildParser{}
pkgs, err := parser.ScanEbuild("../../../../tests/fixtures/parser/libreoffice-l10n-meta-6.2.8.2.ebuild")
It("Check parsing of the ebuild5", func() {
Expect(err).ToNot(HaveOccurred())
fmt.Println("PKG ", pkgs[0])
Expect(len(pkgs[0].GetRequires())).To(Equal(146))
Expect(pkgs[0].GetLicense()).To(Equal("LGPL-2"))
Expect(pkgs[0].GetDescription()).To(Equal("LibreOffice.org localisation meta-package"))
})
})
Context("Parse ebuild6", func() {
parser := &SimpleEbuildParser{}
pkgs, err := parser.ScanEbuild("../../../../tests/fixtures/parser/pkgs-checker-0.2.0.ebuild")
It("Check parsing of the ebuild6", func() {
Expect(err).ToNot(HaveOccurred())
fmt.Println("PKG ", pkgs[0])
Expect(len(pkgs[0].GetRequires())).To(Equal(0))
Expect(pkgs[0].GetLicense()).To(Equal("GPL-3"))
Expect(pkgs[0].GetDescription()).To(Equal("Sabayon Packages Checker"))
})
})
Context("Parse ebuild7", func() {
parser := &SimpleEbuildParser{}
pkgs, err := parser.ScanEbuild("../../../../tests/fixtures/parser/calamares-sabayon-base-modules-1.15.ebuild")
It("Check parsing of the ebuild7", func() {
Expect(err).ToNot(HaveOccurred())
fmt.Println("PKG ", pkgs[0])
Expect(len(pkgs[0].GetRequires())).To(Equal(2))
Expect(pkgs[0].GetLicense()).To(Equal("CC-BY-SA-4.0"))
Expect(pkgs[0].GetDescription()).To(Equal("Sabayon Official Calamares base modules"))
})
})
Context("Parse ebuild8", func() {
parser := &SimpleEbuildParser{}
pkgs, err := parser.ScanEbuild("../../../../tests/fixtures/parser/subversion-1.12.0.ebuild")
It("Check parsing of the ebuild8", func() {
Expect(err).ToNot(HaveOccurred())
fmt.Println("PKG ", pkgs[0])
Expect(len(pkgs[0].GetRequires())).To(Equal(25))
Expect(pkgs[0].GetLicense()).To(Equal("Subversion GPL-2"))
Expect(pkgs[0].GetDescription()).To(Equal("Advanced version control system"))
})
})
Context("Parse ebuild9", func() {
parser := &SimpleEbuildParser{}
pkgs, err := parser.ScanEbuild("../../../../tests/fixtures/parser/kodi-raspberrypi-16.0.ebuild")
PIt("Check parsing of the ebuild9", func() {
Expect(err).ToNot(HaveOccurred())
fmt.Println("PKG ", pkgs[0])
Expect(len(pkgs[0].GetRequires())).To(Equal(66))
Expect(pkgs[0].GetLicense()).To(Equal("GPL-2"))
Expect(pkgs[0].GetDescription()).To(Equal("Kodi is a free and open source media-player and entertainment hub"))
})
})
Context("Parse ebuild10", func() {
parser := &SimpleEbuildParser{}
pkgs, err := parser.ScanEbuild("../../../../tests/fixtures/parser/tango-icon-theme-0.8.90-r1.ebuild")
It("Check parsing of the ebuild10", func() {
Expect(err).ToNot(HaveOccurred())
fmt.Println("PKG ", pkgs[0])
Expect(len(pkgs[0].GetRequires())).To(Equal(2))
Expect(pkgs[0].GetLicense()).To(Equal("public-domain"))
Expect(pkgs[0].GetDescription()).To(Equal("SVG and PNG icon theme from the Tango project"))
})
})
})


@@ -1,447 +0,0 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package gentoo
// NOTE: Look here as an example of the builder definition executor
// https://gist.github.com/adnaan/6ca68c7985c6f851def3
import (
"context"
"errors"
"fmt"
"io/ioutil"
"path/filepath"
"regexp"
"strings"
"time"
. "github.com/mudler/luet/pkg/logger"
_gentoo "github.com/Sabayon/pkgs-checker/pkg/gentoo"
pkg "github.com/mudler/luet/pkg/package"
"mvdan.cc/sh/v3/expand"
"mvdan.cc/sh/v3/shell"
"mvdan.cc/sh/v3/syntax"
)
const (
uriRegex = "(.*[.]tar[.].*|.*[.]zip|.*[.]run|.*[.]png|.*[.]rpm|.*[.]gz)"
)
// SimpleEbuildParser ignores USE flags and generates exactly one package per ebuild
type SimpleEbuildParser struct {
World pkg.PackageDatabase
}
type GentooDependency struct {
Use string
UseCondition _gentoo.PackageCond
SubDeps []*GentooDependency
Dep *_gentoo.GentooPackage
}
type GentooRDEPEND struct {
Dependencies []*GentooDependency
}
func NewGentooDependency(pkg, use string) (*GentooDependency, error) {
var err error
ans := &GentooDependency{
Use: use,
SubDeps: make([]*GentooDependency, 0),
}
if strings.HasPrefix(use, "!") {
ans.Use = ans.Use[1:]
ans.UseCondition = _gentoo.PkgCondNot
}
if pkg != "" {
ans.Dep, err = _gentoo.ParsePackageStr(pkg)
if err != nil {
return nil, err
}
// TODO: Fix this in the parsing phase to handle ${PV} correctly
if strings.HasSuffix(ans.Dep.Name, "-") {
ans.Dep.Name = ans.Dep.Name[:len(ans.Dep.Name)-1]
}
}
return ans, nil
}
func (d *GentooDependency) String() string {
if d.Dep != nil {
return fmt.Sprintf("%s", d.Dep)
} else {
return fmt.Sprintf("%s %d %s", d.Use, d.UseCondition, d.SubDeps)
}
}
func (d *GentooDependency) GetDepsList() []*GentooDependency {
ans := make([]*GentooDependency, 0)
if len(d.SubDeps) > 0 {
for _, d2 := range d.SubDeps {
list := d2.GetDepsList()
ans = append(ans, list...)
}
}
if d.Dep != nil {
ans = append(ans, d)
}
return ans
}
func (d *GentooDependency) AddSubDependency(pkg, use string) (*GentooDependency, error) {
ans, err := NewGentooDependency(pkg, use)
if err != nil {
return nil, err
}
d.SubDeps = append(d.SubDeps, ans)
return ans, nil
}
func (r *GentooRDEPEND) GetDependencies() []*GentooDependency {
ans := make([]*GentooDependency, 0)
for _, d := range r.Dependencies {
list := d.GetDepsList()
ans = append(ans, list...)
}
// The same dependency could appear under multiple USE flags,
// so duplicates need to be filtered out.
m := make(map[string]*GentooDependency, 0)
for _, p := range ans {
m[p.String()] = p
}
ans = make([]*GentooDependency, 0)
for _, p := range m {
ans = append(ans, p)
}
return ans
}
func ParseRDEPEND(rdepend string) (*GentooRDEPEND, error) {
var lastdep []*GentooDependency = make([]*GentooDependency, 0)
var pendingDep = false
var orDep = false
var dep *GentooDependency
var err error
ans := &GentooRDEPEND{
Dependencies: make([]*GentooDependency, 0),
}
if rdepend != "" {
rdepends := strings.Split(rdepend, "\n")
for _, rr := range rdepends {
rr = strings.TrimSpace(rr)
if rr == "" {
continue
}
if strings.HasPrefix(rr, "|| (") {
orDep = true
continue
}
if orDep {
rr = strings.TrimSpace(rr)
if rr == ")" {
orDep = false
}
continue
}
if strings.Index(rr, "?") > 0 {
// use flag present
if pendingDep {
dep, err = lastdep[len(lastdep)-1].AddSubDependency("", rr[:strings.Index(rr, "?")])
if err != nil {
Debug("Ignoring subdependency ", rr[:strings.Index(rr, "?")])
}
} else {
dep, err = NewGentooDependency("", rr[:strings.Index(rr, "?")])
if err != nil {
Debug("Ignoring dep", rr)
} else {
ans.Dependencies = append(ans.Dependencies, dep)
}
}
if strings.Index(rr, ")") < 0 {
pendingDep = true
lastdep = append(lastdep, dep)
}
if strings.Index(rr, "|| (") >= 0 {
// Ignore dep in or
continue
}
fields := strings.Split(rr[strings.Index(rr, "?")+1:], " ")
for _, f := range fields {
f = strings.TrimSpace(f)
if f == ")" || f == "(" || f == "" {
continue
}
_, err = dep.AddSubDependency(f, "")
if err != nil {
Debug("Ignoring subdependency ", f)
}
}
} else if pendingDep {
fields := strings.Split(rr, " ")
for _, f := range fields {
f = strings.TrimSpace(f)
if f == ")" || f == "(" || f == "" {
continue
}
_, err = lastdep[len(lastdep)-1].AddSubDependency(f, "")
if err != nil {
return nil, err
}
}
if strings.Index(rr, ")") >= 0 {
lastdep = lastdep[:len(lastdep)-1]
if len(lastdep) == 0 {
pendingDep = false
}
}
} else {
rr = strings.TrimSpace(rr)
// Check if there are multiple deps in a single row
fields := strings.Split(rr, " ")
if len(fields) > 1 {
for _, rrr := range fields {
rrr = strings.TrimSpace(rrr)
if rrr == "" {
continue
}
dep, err := NewGentooDependency(rrr, "")
if err != nil {
Debug("Ignoring dep", rr)
} else {
ans.Dependencies = append(ans.Dependencies, dep)
}
}
} else {
dep, err := NewGentooDependency(rr, "")
if err != nil {
Debug("Ignoring dep", rr)
} else {
ans.Dependencies = append(ans.Dependencies, dep)
}
}
}
}
}
return ans, nil
}
func SourceFile(ctx context.Context, path string, pkg *_gentoo.GentooPackage) (map[string]expand.Variable, error) {
content, err := ioutil.ReadFile(path)
if err != nil {
return nil, fmt.Errorf("could not open: %v", err)
}
scontent := string(content)
// Add default Gentoo variables
ebuild := fmt.Sprintf("P=%s\n", pkg.GetP()) +
fmt.Sprintf("PN=%s\n", pkg.GetPN()) +
fmt.Sprintf("PV=%s\n", pkg.GetPV()) +
fmt.Sprintf("PVR=%s\n", pkg.GetPVR())
// Disable inherit
scontent = strings.ReplaceAll(scontent, "inherit", "#inherit")
// Disable functions coming from eclasses (TODO: check how to handle this better)
scontent = strings.ReplaceAll(scontent, "need_apache", "#need_apache")
scontent = strings.ReplaceAll(scontent, "want_apache", "#want_apache")
regexFuncs := regexp.MustCompile(
"[a-zA-Z]+.*[_][a-z]+[(][)][\\s]{",
)
matches := regexFuncs.FindAllIndex([]byte(scontent), -1)
// Drop section after functions (src_*, *() {)
if len(matches) > 0 {
ebuild = ebuild + scontent[:matches[0][0]]
} else {
ebuild = ebuild + scontent
}
// [[ ${PV} == "9999" ]] is not supported. This is a workaround; we need a better solution.
regexDoubleBrakets := regexp.MustCompile(
//"[[][[].*",
"^[[][[].*",
//"^.*\[\[.*\]\]",
)
matchDB := regexDoubleBrakets.FindAllIndex([]byte(ebuild), -1)
if len(matchDB) > 0 {
ebuild = ebuild[:matchDB[0][0]] + "#" + ebuild[matchDB[0][0]:]
}
//fmt.Println("EBUILD ", ebuild)
file, err := syntax.NewParser().Parse(strings.NewReader(ebuild), path)
if err != nil {
return nil, fmt.Errorf("could not parse: %v", err)
}
return shell.SourceNode(ctx, file)
}
// ScanEbuild returns a list of packages (always one with SimpleEbuildParser) decoded from an ebuild.
func (ep *SimpleEbuildParser) ScanEbuild(path string) (pkg.Packages, error) {
Debug("Starting parsing of ebuild", path)
pkgstr := filepath.Base(path)
paths := strings.Split(filepath.Dir(path), "/")
pkgstr = paths[len(paths)-2] + "/" + strings.Replace(pkgstr, ".ebuild", "", -1)
gp, err := _gentoo.ParsePackageStr(pkgstr)
if err != nil {
return pkg.Packages{}, errors.New("Error on parsing package string")
}
pack := &pkg.DefaultPackage{
Name: gp.Name,
Version: fmt.Sprintf("%s%s", gp.Version, gp.VersionSuffix),
Category: gp.Category,
Uri: make([]string, 0),
}
Debug("Prepare package ", pack.Category+"/"+pack.Name+"-"+pack.Version)
// Adding a timeout of 60 seconds, as parsing some bash files can hang indefinitely
timeout, cancel := context.WithTimeout(context.Background(), 60*time.Second)
defer cancel()
vars, err := SourceFile(timeout, path, gp)
if err != nil {
Error("Error on source file ", pack.Name, ": ", err)
return pkg.Packages{}, err
}
// Retrieve slot
slot, ok := vars["SLOT"]
if ok && slot.String() != "0" {
pack.SetCategory(fmt.Sprintf("%s-%s", gp.Category, slot.String()))
}
// TODO: Handle this a bit better
iuse, ok := vars["IUSE"]
if ok {
uses := strings.Split(strings.TrimSpace(iuse.String()), " ")
for _, u := range uses {
pack.AddUse(u)
}
}
// Retrieve package description
descr, ok := vars["DESCRIPTION"]
if ok {
pack.SetDescription(descr.String())
}
// Retrieve package license
license, ok := vars["LICENSE"]
if ok {
pack.SetLicense(license.String())
}
uri, ok := vars["SRC_URI"]
if ok {
// TODO: handle mirror:
uris := strings.Split(uri.String(), "\n")
for _, u := range uris {
u = strings.TrimSpace(u)
if u == "" {
continue
}
if match, _ := regexp.Match(uriRegex, []byte(u)); match {
if strings.Index(u, "(") >= 0 {
regexUri := regexp.MustCompile("(http|ftp|mirror).*[ ]")
matches := regexUri.FindAllIndex([]byte(u), -1)
if len(matches) > 0 {
u = u[matches[0][0]:matches[0][1]]
} else {
continue
}
}
pack.AddURI(u)
Debug("Add uri ", u)
} else {
Debug("Skip uri ", u)
}
}
}
rdepend, ok := vars["RDEPEND"]
if ok {
gRDEPEND, err := ParseRDEPEND(rdepend.String())
if err != nil {
Warning("Error on parsing RDEPEND for package ", pack.Category+"/"+pack.Name, err)
return pkg.Packages{pack}, nil
// return pkg.Packages{}, err
}
pack.PackageConflicts = []*pkg.DefaultPackage{}
pack.PackageRequires = []*pkg.DefaultPackage{}
// TODO: Decide how to handle enabled USE flags,
// and whether it is correct to take the dependency list directly.
for _, d := range gRDEPEND.GetDependencies() {
//TODO: Resolve to db or create a new one.
//TODO: handle SLOT too.
dep := &pkg.DefaultPackage{
Name: d.Dep.Name,
Version: d.Dep.Version + d.Dep.VersionSuffix,
Category: d.Dep.Category,
}
Debug(fmt.Sprintf("For package %s found dep: %s/%s %s",
gp, dep.Category, dep.Name, dep.Version))
if d.Dep.Condition == _gentoo.PkgCondNot {
pack.PackageConflicts = append(pack.PackageConflicts, dep)
} else {
pack.PackageRequires = append(pack.PackageRequires, dep)
}
}
}
Debug("Finished processing ebuild", path, "deps ", len(pack.PackageRequires))
//TODO: Deps and conflicts
return pkg.Packages{pack}, nil
}


@@ -1,669 +0,0 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package gentoo_test
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
_gentoo "github.com/Sabayon/pkgs-checker/pkg/gentoo"
. "github.com/mudler/luet/pkg/tree/builder/gentoo"
)
var _ = Describe("GentooBuilder", func() {
Context("Parse RDEPEND1", func() {
rdepend := `
app-crypt/sbsigntools
x11-themes/sabayon-artwork-grub
sys-boot/os-prober
app-arch/xz-utils
>=sys-libs/ncurses-5.2-r5:0=
`
gr, err := ParseRDEPEND(rdepend)
It("Check error", func() {
Expect(err).Should(BeNil())
})
It("Check gr", func() {
Expect(gr).ShouldNot(BeNil())
})
It("Check deps #", func() {
Expect(len(gr.Dependencies)).Should(Equal(5))
})
It("Check dep1", func() {
Expect(*gr.Dependencies[0]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sbsigntools",
Category: "app-crypt",
Slot: "0",
},
},
))
})
It("Check dep2", func() {
Expect(*gr.Dependencies[1]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sabayon-artwork-grub",
Category: "x11-themes",
Slot: "0",
},
},
))
})
It("Check dep5", func() {
Expect(*gr.Dependencies[4]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "ncurses",
Category: "sys-libs",
Slot: "0=",
Version: "5.2",
VersionSuffix: "-r5",
Condition: _gentoo.PkgCondGreaterEqual,
},
},
))
})
})
Context("Parse RDEPEND2", func() {
rdepend := `
app-crypt/sbsigntools
x11-themes/sabayon-artwork-grub
sys-boot/os-prober
app-arch/xz-utils
>=sys-libs/ncurses-5.2-r5:0=
mount? ( sys-fs/fuse )
`
gr, err := ParseRDEPEND(rdepend)
It("Check error", func() {
Expect(err).Should(BeNil())
})
It("Check gr", func() {
Expect(gr).ShouldNot(BeNil())
})
It("Check deps #", func() {
Expect(len(gr.Dependencies)).Should(Equal(6))
})
It("Check dep1", func() {
Expect(*gr.Dependencies[0]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sbsigntools",
Category: "app-crypt",
Slot: "0",
},
},
))
})
It("Check dep2", func() {
Expect(*gr.Dependencies[1]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sabayon-artwork-grub",
Category: "x11-themes",
Slot: "0",
},
},
))
})
It("Check dep5", func() {
Expect(*gr.Dependencies[4]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "ncurses",
Category: "sys-libs",
Slot: "0=",
Version: "5.2",
VersionSuffix: "-r5",
Condition: _gentoo.PkgCondGreaterEqual,
},
},
))
})
It("Check dep6", func() {
Expect(*gr.Dependencies[5]).Should(Equal(
GentooDependency{
Use: "mount",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: []*GentooDependency{
&GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "fuse",
Category: "sys-fs",
Slot: "0",
},
},
},
Dep: nil,
},
))
})
})
Context("Parse RDEPEND3", func() {
rdepend := `
app-crypt/sbsigntools
x11-themes/sabayon-artwork-grub
sys-boot/os-prober
app-arch/xz-utils
>=sys-libs/ncurses-5.2-r5:0=
mount? ( sys-fs/fuse =sys-apps/pmount-0.9.99_alpha-r5:= )
`
gr, err := ParseRDEPEND(rdepend)
It("Check error", func() {
Expect(err).Should(BeNil())
})
It("Check gr", func() {
Expect(gr).ShouldNot(BeNil())
})
It("Check deps #", func() {
Expect(len(gr.Dependencies)).Should(Equal(6))
})
It("Check dep1", func() {
Expect(*gr.Dependencies[0]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sbsigntools",
Category: "app-crypt",
Slot: "0",
},
},
))
})
It("Check dep2", func() {
Expect(*gr.Dependencies[1]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sabayon-artwork-grub",
Category: "x11-themes",
Slot: "0",
},
},
))
})
It("Check dep5", func() {
Expect(*gr.Dependencies[4]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "ncurses",
Category: "sys-libs",
Slot: "0=",
Version: "5.2",
VersionSuffix: "-r5",
Condition: _gentoo.PkgCondGreaterEqual,
},
},
))
})
It("Check dep6", func() {
Expect(*gr.Dependencies[5]).Should(Equal(
GentooDependency{
Use: "mount",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: []*GentooDependency{
&GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "fuse",
Category: "sys-fs",
Slot: "0",
},
},
&GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "pmount",
Category: "sys-apps",
Condition: _gentoo.PkgCondEqual,
Version: "0.9.99",
VersionSuffix: "_alpha-r5",
Slot: "=",
},
},
},
Dep: nil,
},
))
})
})
Context("Parse RDEPEND4", func() {
rdepend := `
app-crypt/sbsigntools
x11-themes/sabayon-artwork-grub
sys-boot/os-prober
app-arch/xz-utils
>=sys-libs/ncurses-5.2-r5:0=
!mount? ( sys-fs/fuse =sys-apps/pmount-0.9.99_alpha-r5:= )
`
gr, err := ParseRDEPEND(rdepend)
It("Check error", func() {
Expect(err).Should(BeNil())
})
It("Check gr", func() {
Expect(gr).ShouldNot(BeNil())
})
It("Check deps #", func() {
Expect(len(gr.Dependencies)).Should(Equal(6))
})
It("Check dep1", func() {
Expect(*gr.Dependencies[0]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sbsigntools",
Category: "app-crypt",
Slot: "0",
},
},
))
})
It("Check dep2", func() {
Expect(*gr.Dependencies[1]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sabayon-artwork-grub",
Category: "x11-themes",
Slot: "0",
},
},
))
})
It("Check dep5", func() {
Expect(*gr.Dependencies[4]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "ncurses",
Category: "sys-libs",
Slot: "0=",
Version: "5.2",
VersionSuffix: "-r5",
Condition: _gentoo.PkgCondGreaterEqual,
},
},
))
})
It("Check dep6", func() {
Expect(*gr.Dependencies[5]).Should(Equal(
GentooDependency{
Use: "mount",
UseCondition: _gentoo.PkgCondNot,
SubDeps: []*GentooDependency{
&GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "fuse",
Category: "sys-fs",
Slot: "0",
},
},
&GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "pmount",
Category: "sys-apps",
Condition: _gentoo.PkgCondEqual,
Version: "0.9.99",
VersionSuffix: "_alpha-r5",
Slot: "=",
},
},
},
Dep: nil,
},
))
})
})
Context("Parse RDEPEND5", func() {
rdepend := `
app-crypt/sbsigntools
>=sys-libs/ncurses-5.2-r5:0=
mount? (
sys-fs/fuse
=sys-apps/pmount-0.9.99_alpha-r5:=
)
`
gr, err := ParseRDEPEND(rdepend)
It("Check error", func() {
Expect(err).Should(BeNil())
})
It("Check gr", func() {
Expect(gr).ShouldNot(BeNil())
})
It("Check deps #", func() {
Expect(len(gr.Dependencies)).Should(Equal(3))
})
It("Check dep1", func() {
Expect(*gr.Dependencies[0]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sbsigntools",
Category: "app-crypt",
Slot: "0",
},
},
))
})
It("Check dep2", func() {
Expect(*gr.Dependencies[1]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "ncurses",
Category: "sys-libs",
Slot: "0=",
Version: "5.2",
VersionSuffix: "-r5",
Condition: _gentoo.PkgCondGreaterEqual,
},
},
))
})
It("Check dep3", func() {
Expect(*gr.Dependencies[2]).Should(Equal(
GentooDependency{
Use: "mount",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: []*GentooDependency{
&GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "fuse",
Category: "sys-fs",
Slot: "0",
},
},
&GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "pmount",
Category: "sys-apps",
Condition: _gentoo.PkgCondEqual,
Version: "0.9.99",
VersionSuffix: "_alpha-r5",
Slot: "=",
},
},
},
Dep: nil,
},
))
})
})
Context("Parse RDEPEND6", func() {
rdepend := `
app-crypt/sbsigntools
>=sys-libs/ncurses-5.2-r5:0=
mount? (
sys-fs/fuse
=sys-apps/pmount-0.9.99_alpha-r5:= )
`
gr, err := ParseRDEPEND(rdepend)
It("Check error", func() {
Expect(err).Should(BeNil())
})
It("Check gr", func() {
Expect(gr).ShouldNot(BeNil())
})
It("Check deps #", func() {
Expect(len(gr.Dependencies)).Should(Equal(3))
})
It("Check dep1", func() {
Expect(*gr.Dependencies[0]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sbsigntools",
Category: "app-crypt",
Slot: "0",
},
},
))
})
})
Context("Parse RDEPEND7", func() {
rdepend := `
app-crypt/sbsigntools
>=sys-libs/ncurses-5.2-r5:0=
mount? (
sys-fs/fuse
=sys-apps/pmount-0.9.99_alpha-r5:=
ext2? (
sys-fs/genext2fs
)
)
`
gr, err := ParseRDEPEND(rdepend)
It("Check error", func() {
Expect(err).Should(BeNil())
})
It("Check gr", func() {
Expect(gr).ShouldNot(BeNil())
})
It("Check deps #", func() {
Expect(len(gr.Dependencies)).Should(Equal(3))
})
It("Check dep1", func() {
Expect(*gr.Dependencies[0]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "sbsigntools",
Category: "app-crypt",
Slot: "0",
},
},
))
})
It("Check dep2", func() {
Expect(*gr.Dependencies[1]).Should(Equal(
GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "ncurses",
Category: "sys-libs",
Slot: "0=",
Version: "5.2",
VersionSuffix: "-r5",
Condition: _gentoo.PkgCondGreaterEqual,
},
},
))
})
It("Check dep3", func() {
Expect(*gr.Dependencies[2]).Should(Equal(
GentooDependency{
Use: "mount",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: []*GentooDependency{
&GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "fuse",
Category: "sys-fs",
Slot: "0",
},
},
&GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "pmount",
Category: "sys-apps",
Condition: _gentoo.PkgCondEqual,
Version: "0.9.99",
VersionSuffix: "_alpha-r5",
Slot: "=",
},
},
&GentooDependency{
Use: "ext2",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: []*GentooDependency{
&GentooDependency{
Use: "",
UseCondition: _gentoo.PkgCondInvalid,
SubDeps: make([]*GentooDependency, 0),
Dep: &_gentoo.GentooPackage{
Name: "genext2fs",
Category: "sys-fs",
Slot: "0",
},
},
},
Dep: nil,
},
},
},
))
})
})
Context("Simple test", func() {
for _, dbType := range []MemoryDB{InMemory, BoltDB} {
It("parses correctly deps", func() {
gb := NewGentooBuilder(&SimpleEbuildParser{}, 20, dbType)
tree, err := gb.Generate("../../../../tests/fixtures/overlay")
Expect(err).ToNot(HaveOccurred())
defer func() {
Expect(tree.Clean()).ToNot(HaveOccurred())
}()
Expect(len(tree.GetPackages())).To(Equal(10))
for _, p := range tree.World() {
Expect(p.GetName()).To(ContainSubstring("pinentry"))
Expect(p.GetVersion()).To(ContainSubstring("1."))
}
})
}
})
})


@@ -74,39 +74,92 @@ func (r *CompilerRecipe) Load(path string) error {
return errors.Wrap(err, "Error on walk path "+currentpath)
}
if info.Name() != DefinitionFile {
if info.Name() != DefinitionFile && info.Name() != CollectionFile {
return nil // Skip with no errors
}
pack, err := ReadDefinitionFile(currentpath)
if err != nil {
return err
}
// Path is set only internally when tree is loaded from disk
pack.SetPath(filepath.Dir(currentpath))
switch info.Name() {
case DefinitionFile:
// Instead of rdeps, have a different tree for build deps.
compileDefPath := pack.Rel(CompilerDefinitionFile)
if helpers.Exists(compileDefPath) {
dat, err := ioutil.ReadFile(compileDefPath)
pack, err := ReadDefinitionFile(currentpath)
if err != nil {
return errors.Wrap(err,
"Error reading file "+CompilerDefinitionFile+" from "+
filepath.Dir(currentpath))
return err
}
// Path is set only internally when tree is loaded from disk
pack.SetPath(filepath.Dir(currentpath))
// Instead of rdeps, have a different tree for build deps.
compileDefPath := pack.Rel(CompilerDefinitionFile)
if helpers.Exists(compileDefPath) {
dat, err := helpers.RenderFiles(compileDefPath, currentpath, "")
if err != nil {
return errors.Wrap(err,
"Error templating file "+CompilerDefinitionFile+" from "+
filepath.Dir(currentpath))
}
packbuild, err := pkg.DefaultPackageFromYaml([]byte(dat))
if err != nil {
return errors.Wrap(err,
"Error reading yaml "+CompilerDefinitionFile+" from "+
filepath.Dir(currentpath))
}
pack.Requires(packbuild.GetRequires())
pack.Conflicts(packbuild.GetConflicts())
}
_, err = r.Database.CreatePackage(&pack)
if err != nil {
return errors.Wrap(err, "Error creating package "+pack.GetName())
}
case CollectionFile:
dat, err := ioutil.ReadFile(currentpath)
if err != nil {
return errors.Wrap(err, "Error reading file "+currentpath)
}
packs, err := pkg.DefaultPackagesFromYaml(dat)
if err != nil {
return errors.Wrap(err, "Error reading yaml "+currentpath)
}
packsRaw, err := pkg.GetRawPackages(dat)
for _, pack := range packs {
pack.SetPath(filepath.Dir(currentpath))
// Instead of rdeps, have a different tree for build deps.
compileDefPath := pack.Rel(CompilerDefinitionFile)
if helpers.Exists(compileDefPath) {
raw := packsRaw.Find(pack.GetName(), pack.GetCategory(), pack.GetVersion())
buildyaml, err := ioutil.ReadFile(compileDefPath)
if err != nil {
return errors.Wrap(err, "Error reading file "+currentpath)
}
dat, err := helpers.RenderHelm(string(buildyaml), raw, map[string]interface{}{})
if err != nil {
return errors.Wrap(err,
"Error templating file "+CompilerDefinitionFile+" from "+
filepath.Dir(currentpath))
}
packbuild, err := pkg.DefaultPackageFromYaml([]byte(dat))
if err != nil {
return errors.Wrap(err,
"Error reading yaml "+CompilerDefinitionFile+" from "+
filepath.Dir(currentpath))
}
pack.Requires(packbuild.GetRequires())
pack.Conflicts(packbuild.GetConflicts())
}
_, err = r.Database.CreatePackage(&pack)
if err != nil {
return errors.Wrap(err, "Error creating package "+pack.GetName())
}
}
packbuild, err := pkg.DefaultPackageFromYaml(dat)
if err != nil {
return errors.Wrap(err,
"Error reading yaml "+CompilerDefinitionFile+" from "+
filepath.Dir(currentpath))
}
pack.Requires(packbuild.GetRequires())
pack.Conflicts(packbuild.GetConflicts())
}
_, err = r.Database.CreatePackage(&pack)
if err != nil {
return errors.Wrap(err, "Error creating package "+pack.GetName())
}
return nil
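To make the collection flow above concrete, here is a rough, self-contained sketch of the per-entry templating step. It substitutes Go's text/template for the helpers.RenderHelm call used by the real code, and the values map stands in for what packsRaw.Find returns for a single collection entry, so treat it as an illustration rather than the actual implementation.

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

func main() {
	// One entry of a collection.yaml, as raw values for templating.
	values := map[string]interface{}{
		"name":     "a",
		"category": "distro",
		"version":  "0.1",
		"foo":      "baz",
	}

	// A line from that package's build.yaml, as in the fixtures added below.
	const buildLine = "- touch /build-extra-{{.Values.foo}}"

	tmpl := template.Must(template.New("build").Parse(buildLine))
	var out bytes.Buffer
	if err := tmpl.Execute(&out, map[string]interface{}{"Values": values}); err != nil {
		panic(err)
	}
	fmt.Println(out.String()) // prints: - touch /build-extra-baz
}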


@@ -85,7 +85,7 @@ func (r *InstallerRecipe) Load(path string) error {
// the function that handles each file or dir
var ff = func(currentpath string, info os.FileInfo, err error) error {
if info.Name() != DefinitionFile {
if info.Name() != DefinitionFile && info.Name() != CollectionFile {
return nil // Skip with no errors
}
@@ -93,16 +93,35 @@ func (r *InstallerRecipe) Load(path string) error {
if err != nil {
return errors.Wrap(err, "Error reading file "+currentpath)
}
pack, err := pkg.DefaultPackageFromYaml(dat)
if err != nil {
return errors.Wrap(err, "Error reading yaml "+currentpath)
}
// Path is set only internally when tree is loaded from disk
pack.SetPath(filepath.Dir(currentpath))
_, err = r.Database.CreatePackage(&pack)
if err != nil {
return errors.Wrap(err, "Error creating package "+pack.GetName())
switch info.Name() {
case DefinitionFile:
pack, err := pkg.DefaultPackageFromYaml(dat)
if err != nil {
return errors.Wrap(err, "Error reading yaml "+currentpath)
}
// Path is set only internally when tree is loaded from disk
pack.SetPath(filepath.Dir(currentpath))
_, err = r.Database.CreatePackage(&pack)
if err != nil {
return errors.Wrap(err, "Error creating package "+pack.GetName())
}
case CollectionFile:
packs, err := pkg.DefaultPackagesFromYaml(dat)
if err != nil {
return errors.Wrap(err, "Error reading yaml "+currentpath)
}
for _, p := range packs {
// Path is set only internally when tree is loaded from disk
p.SetPath(filepath.Dir(currentpath))
_, err = r.Database.CreatePackage(&p)
if err != nil {
return errors.Wrap(err, "Error creating package "+p.GetName())
}
}
}
return nil


@@ -34,6 +34,7 @@ import (
const (
DefinitionFile = "definition.yaml"
CollectionFile = "collection.yaml"
)
func NewGeneralRecipe(db pkg.PackageDatabase) Builder { return &Recipe{Database: db} }
@@ -94,7 +95,7 @@ func (r *Recipe) Load(path string) error {
// the function that handles each file or dir
var ff = func(currentpath string, info os.FileInfo, err error) error {
if info.Name() != DefinitionFile {
if info.Name() != DefinitionFile && info.Name() != CollectionFile {
return nil // Skip with no errors
}
@@ -102,16 +103,34 @@ func (r *Recipe) Load(path string) error {
if err != nil {
return errors.Wrap(err, "Error reading file "+currentpath)
}
pack, err := pkg.DefaultPackageFromYaml(dat)
if err != nil {
return errors.Wrap(err, "Error reading yaml "+currentpath)
}
// Path is set only internally when tree is loaded from disk
pack.SetPath(filepath.Dir(currentpath))
_, err = r.Database.CreatePackage(&pack)
if err != nil {
return errors.Wrap(err, "Error creating package "+pack.GetName())
switch info.Name() {
case DefinitionFile:
pack, err := pkg.DefaultPackageFromYaml(dat)
if err != nil {
return errors.Wrap(err, "Error reading yaml "+currentpath)
}
// Path is set only internally when tree is loaded from disk
pack.SetPath(filepath.Dir(currentpath))
_, err = r.Database.CreatePackage(&pack)
if err != nil {
return errors.Wrap(err, "Error creating package "+pack.GetName())
}
case CollectionFile:
packs, err := pkg.DefaultPackagesFromYaml(dat)
if err != nil {
return errors.Wrap(err, "Error reading yaml "+currentpath)
}
for _, p := range packs {
// Path is set only internally when tree is loaded from disk
p.SetPath(filepath.Dir(currentpath))
_, err = r.Database.CreatePackage(&p)
if err != nil {
return errors.Wrap(err, "Error creating package "+p.GetName())
}
}
}
return nil
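Condensed, the CollectionFile branch shared by the loaders boils down to the sketch below. It reuses only calls that already appear in this diff (error wrapping and recipe-specific details omitted), so read it as a summary rather than a literal copy of any one loader.

// Assumes the usual imports: io/ioutil, path/filepath and the luet pkg package.
func loadCollection(currentpath string, db pkg.PackageDatabase) error {
	dat, err := ioutil.ReadFile(currentpath)
	if err != nil {
		return err
	}
	// One collection.yaml declares several packages.
	packs, err := pkg.DefaultPackagesFromYaml(dat)
	if err != nil {
		return err
	}
	for _, p := range packs {
		// Path is set only internally when the tree is loaded from disk.
		p.SetPath(filepath.Dir(currentpath))
		if _, err := db.CreatePackage(&p); err != nil {
			return err
		}
	}
	return nil
}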


@@ -1,136 +0,0 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
// Recipe is a builder implementation.
// It reads a Tree and spits it out in human-readable form (YAML), called a recipe.
// It also loads a tree (recipe) from YAML (into a db, e.g. BoltDB), allowing it to be queried
// with the solver, using the package object.
package tree_test
import (
"io/ioutil"
"os"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/solver"
gentoo "github.com/mudler/luet/pkg/tree/builder/gentoo"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
. "github.com/mudler/luet/pkg/tree"
)
type FakeParser struct {
}
var _ = Describe("Recipe", func() {
for _, dbType := range []gentoo.MemoryDB{gentoo.InMemory, gentoo.BoltDB} {
Context("Tree generation and storing", func() {
It("parses and writes a tree", func() {
tmpdir, err := ioutil.TempDir("", "tree")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tmpdir) // clean up
gb := gentoo.NewGentooBuilder(&gentoo.SimpleEbuildParser{}, 20, dbType)
tree, err := gb.Generate("../../tests/fixtures/overlay")
Expect(err).ToNot(HaveOccurred())
defer func() {
Expect(tree.Clean()).ToNot(HaveOccurred())
}()
Expect(len(tree.GetPackages())).To(Equal(10))
generalRecipe := NewGeneralRecipe(tree)
err = generalRecipe.Save(tmpdir)
Expect(err).ToNot(HaveOccurred())
})
})
Context("Reloading trees", func() {
It("writes and reads back the same tree", func() {
tmpdir, err := ioutil.TempDir("", "tree")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tmpdir) // clean up
gb := gentoo.NewGentooBuilder(&gentoo.SimpleEbuildParser{}, 20, dbType)
tree, err := gb.Generate("../../tests/fixtures/overlay")
Expect(err).ToNot(HaveOccurred())
defer func() {
Expect(tree.Clean()).ToNot(HaveOccurred())
}()
Expect(len(tree.GetPackages())).To(Equal(10))
generalRecipe := NewGeneralRecipe(tree)
err = generalRecipe.Save(tmpdir)
Expect(err).ToNot(HaveOccurred())
db := pkg.NewInMemoryDatabase(false)
generalRecipe = NewGeneralRecipe(db)
generalRecipe.WithDatabase(nil)
Expect(generalRecipe.GetDatabase()).To(BeNil())
err = generalRecipe.Load(tmpdir)
Expect(err).ToNot(HaveOccurred())
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(10))
for _, p := range tree.World() {
Expect(p.GetName()).To(ContainSubstring("pinentry"))
}
})
})
Context("Simple solving with the fixture tree", func() {
It("writes and reads back the same tree", func() {
tmpdir, err := ioutil.TempDir("", "tree")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tmpdir) // clean up
gb := gentoo.NewGentooBuilder(&gentoo.SimpleEbuildParser{}, 20, dbType)
tree, err := gb.Generate("../../tests/fixtures/overlay")
Expect(err).ToNot(HaveOccurred())
defer func() {
Expect(tree.Clean()).ToNot(HaveOccurred())
}()
Expect(len(tree.GetPackages())).To(Equal(10))
pack, err := tree.FindPackage(&pkg.DefaultPackage{
Name: "pinentry",
Version: "1.0.0-r2",
Category: "app-crypt",
}) // Note: the definition depends on pinentry-base without an explicit version
Expect(err).ToNot(HaveOccurred())
s := solver.NewSolver(pkg.NewInMemoryDatabase(false), tree, tree)
solution, err := s.Install([]pkg.Package{pack})
Expect(err).ToNot(HaveOccurred())
Expect(len(solution)).To(Equal(33))
var allSol string
for _, sol := range solution {
allSol = allSol + "\n" + sol.ToString()
}
Expect(allSol).To(ContainSubstring("app-crypt/pinentry-base 1.0.0 installed"))
Expect(allSol).To(ContainSubstring("app-crypt/pinentry 1.1.0-r2 not installed"))
Expect(allSol).To(ContainSubstring("app-crypt/pinentry 1.0.0-r2 installed"))
})
})
}
})


@@ -59,7 +59,7 @@ var _ = Describe("Tree", func() {
Expect(len(CfromD.GetRequires()) != 0).To(BeTrue())
Expect(CfromD.GetRequires()[0].GetName()).To(Equal("b"))
s := solver.NewSolver(pkg.NewInMemoryDatabase(false), generalRecipe.GetDatabase(), db)
s := solver.NewSolver(solver.Options{Type: solver.SingleCoreSimple}, pkg.NewInMemoryDatabase(false), generalRecipe.GetDatabase(), db)
pack, err := generalRecipe.GetDatabase().FindPackage(&pkg.DefaultPackage{Name: "d", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -69,31 +69,25 @@ var _ = Describe("Tree", func() {
solution, err = solution.Order(generalRecipe.GetDatabase(), pack.GetFingerPrint())
Expect(err).ToNot(HaveOccurred())
Expect(solution[0].Package.GetName()).To(Equal("a"))
Expect(solution[0].Value).To(BeFalse())
Expect(solution[0].Package.GetName()).To(Equal("b"))
Expect(solution[0].Value).To(BeTrue())
Expect(solution[1].Package.GetName()).To(Equal("b"))
Expect(solution[1].Package.GetName()).To(Equal("c"))
Expect(solution[1].Value).To(BeTrue())
Expect(solution[2].Package.GetName()).To(Equal("c"))
Expect(solution[2].Package.GetName()).To(Equal("d"))
Expect(solution[2].Value).To(BeTrue())
Expect(solution[3].Package.GetName()).To(Equal("d"))
Expect(solution[3].Value).To(BeTrue())
Expect(len(solution)).To(Equal(4))
Expect(len(solution)).To(Equal(3))
newsolution := solution.Drop(&pkg.DefaultPackage{Name: "d", Category: "test", Version: "1.0"})
Expect(len(newsolution)).To(Equal(3))
Expect(len(newsolution)).To(Equal(2))
Expect(newsolution[0].Package.GetName()).To(Equal("a"))
Expect(newsolution[0].Value).To(BeFalse())
Expect(newsolution[0].Package.GetName()).To(Equal("b"))
Expect(newsolution[0].Value).To(BeTrue())
Expect(newsolution[1].Package.GetName()).To(Equal("b"))
Expect(newsolution[1].Package.GetName()).To(Equal("c"))
Expect(newsolution[1].Value).To(BeTrue())
Expect(newsolution[2].Package.GetName()).To(Equal("c"))
Expect(newsolution[2].Value).To(BeTrue())
}
})
})
@@ -131,7 +125,7 @@ var _ = Describe("Tree", func() {
Expect(len(CfromD.GetRequires()) != 0).To(BeTrue())
Expect(CfromD.GetRequires()[0].GetName()).To(Equal("b"))
s := solver.NewSolver(pkg.NewInMemoryDatabase(false), generalRecipe.GetDatabase(), db)
s := solver.NewSolver(solver.Options{Type: solver.SingleCoreSimple}, pkg.NewInMemoryDatabase(false), generalRecipe.GetDatabase(), db)
Dd, err := generalRecipe.GetDatabase().FindPackage(&pkg.DefaultPackage{Name: "d", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -146,11 +140,11 @@ var _ = Describe("Tree", func() {
base, err := generalRecipe.GetDatabase().FindPackage(&pkg.DefaultPackage{Name: "base", Category: "layer", Version: "0.2"})
Expect(err).ToNot(HaveOccurred())
Expect(solution).To(ContainElement(solver.PackageAssert{Package: pack.(*pkg.DefaultPackage), Value: false}))
Expect(solution).ToNot(ContainElement(solver.PackageAssert{Package: pack.(*pkg.DefaultPackage), Value: true}))
Expect(solution).To(ContainElement(solver.PackageAssert{Package: D.(*pkg.DefaultPackage), Value: true}))
Expect(solution).To(ContainElement(solver.PackageAssert{Package: extra.(*pkg.DefaultPackage), Value: false}))
Expect(solution).To(ContainElement(solver.PackageAssert{Package: base.(*pkg.DefaultPackage), Value: false}))
Expect(len(solution)).To(Equal(6))
Expect(solution).ToNot(ContainElement(solver.PackageAssert{Package: extra.(*pkg.DefaultPackage), Value: true}))
Expect(solution).ToNot(ContainElement(solver.PackageAssert{Package: base.(*pkg.DefaultPackage), Value: true}))
Expect(len(solution)).To(Equal(3))
}
})
})
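The only API change exercised by these tests is that NewSolver now takes an options value as its first argument. A hedged fragment mirroring the updated call sites (generalRecipe, db and pack come from the surrounding test context):

// Fragment only; the three database arguments keep the same order as before.
opts := solver.Options{Type: solver.SingleCoreSimple}
s := solver.NewSolver(opts, pkg.NewInMemoryDatabase(false), generalRecipe.GetDatabase(), db)
solution, err := s.Install([]pkg.Package{pack})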


@@ -0,0 +1,7 @@
image: quay.io/mocaccino/extra
steps:
- touch /{{.Values.name}}
- touch /build-extra-{{.Values.foo}}
- touch /{{.Values.name}}-{{.Values.bb}}
unpack: true


@@ -0,0 +1,13 @@
packages:
- name: "a"
category: "distro"
version: "0.1"
foo: "baz"
- name: "b"
category: "distro"
version: "0.3"
foo: "f"
- name: "c"
category: "distro"
version: "0.3"
foo: "bar"


@@ -0,0 +1,2 @@
install:
- touch /finalize-{{.Values.name}}


@@ -0,0 +1,4 @@
image: quay.io/mocaccino/extra
steps:
- touch /{{.Values.name}}
- touch /{{.Values.name}}-{{.Values.bb}}


@@ -0,0 +1,3 @@
name: foo
category: test
version: "1.1"

tests/fixtures/collections/build.yaml

@@ -0,0 +1,6 @@
image: quay.io/mocaccino/extra
steps:
- touch /{{.Values.name}}
- touch /build-extra-{{.Values.foo}}
unpack: true


@@ -0,0 +1,13 @@
packages:
- name: "a"
category: "distro"
version: "0.1"
foo: "baz"
- name: "b"
category: "distro"
version: "0.3"
foo: "f"
- name: "c"
category: "distro"
version: "0.3"
foo: "bar"


@@ -0,0 +1,2 @@
install:
- touch /finalize-{{.Values.name}}

tests/fixtures/excludeimage/build.yaml

@@ -0,0 +1,14 @@
requires:
- category: "layer"
name: "seed"
version: "1.0"
prelude:
- echo foo > /test
- echo bar > /test2
steps:
- echo artifact5 > /test5
- echo artifact6 > /test6
- echo artifact43 > /marvin
unpack: true
excludes:
- marvin


@@ -0,0 +1,3 @@
category: "test"
name: "b"
version: "1.0"


@@ -0,0 +1,2 @@
image: alpine
unpack: true


@@ -0,0 +1,3 @@
category: "layer"
name: "seed"
version: "1.0"


@@ -0,0 +1,17 @@
requires:
- category: "layer"
name: "seed"
version: "1.0"
prelude:
- echo foo > /test
- echo bar > /test2
steps:
- echo artifact5 > /test5
- echo artifact6 > /test6
- echo artifact43 > /marvin
unpack: true
excludes:
- marvin
includes:
- test.*
- mar.*


@@ -0,0 +1,3 @@
category: "test"
name: "b"
version: "1.0"


@@ -0,0 +1,2 @@
image: alpine
unpack: true


@@ -0,0 +1,3 @@
category: "layer"
name: "seed"
version: "1.0"

tests/fixtures/excludes/build.yaml

@@ -0,0 +1,11 @@
image: "alpine"
prelude:
- echo foo > /test
- echo bar > /test2
steps:
- echo artifact5 > /test5
- echo artifact6 > /test6
- echo artifact43 > /marvin
- echo "foo" > /marvot
excludes:
- marvot


@@ -0,0 +1,3 @@
category: "test"
name: "b"
version: "1.0"


@@ -0,0 +1,14 @@
image: "alpine"
prelude:
- echo foo > /test
- echo bar > /test2
steps:
- echo artifact5 > /test5
- echo artifact6 > /test6
- echo artifact43 > /marvin
- echo "foo" > /marvot
excludes:
- marvot
includes:
- /test5
- mar.*


@@ -0,0 +1,3 @@
category: "test"
name: "b"
version: "1.0"

tests/fixtures/plugin/test-foo

@@ -0,0 +1,3 @@
#!/bin/bash
echo "$1" >> $EVENT_FILE
echo "$2" >> $PAYLOAD_FILE


@@ -0,0 +1,2 @@
image: "alpine"
unpack: true


@@ -0,0 +1,3 @@
category: "seed"
name: "alpine"
version: "1.0"


@@ -0,0 +1,2 @@
install:
- echo "{{.Values.name}}" > /tmp/foo

tests/helpers/package.go

@@ -0,0 +1,31 @@
package helpers
import (
"math/rand"
"strconv"
"time"
pkg "github.com/mudler/luet/pkg/package"
)
const charset = "abcdefghijklmnopqrstuvwxyz" +
"ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
var seededRand *rand.Rand = rand.New(
rand.NewSource(time.Now().UnixNano()))
func StringWithCharset(length int, charset string) string {
b := make([]byte, length)
for i := range b {
b[i] = charset[seededRand.Intn(len(charset))]
}
return string(b)
}
func String(length int) string {
return StringWithCharset(length, charset)
}
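// RandomPackage returns a package with a random five-letter name, a random
// numeric version and no requires or conflicts, useful for seeding test databases.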
func RandomPackage() pkg.Package {
return pkg.NewPackage(String(5), strconv.Itoa(rand.Intn(100)), []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
}
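A short usage sketch for the new helper (hypothetical test fragment; NewInMemoryDatabase and CreatePackage are the calls used throughout this diff):

db := pkg.NewInMemoryDatabase(false)
for i := 0; i < 10; i++ {
	// Collisions between random names are unlikely but possible with only five letters.
	if _, err := db.CreatePackage(helpers.RandomPackage()); err != nil {
		panic(err)
	}
}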


@@ -43,6 +43,7 @@ system:
rootfs: $tmpdir/testrootfs
database_path: "/"
database_engine: "boltdb"
config_from_host: true
repositories:
- name: "main"
type: "disk"
@@ -56,22 +57,22 @@ EOF
}
testInstall() {
luet install --config $tmpdir/luet.yaml test/c
#luet install --config $tmpdir/luet.yaml test/c-1.0 > /dev/null
luet install -y --config $tmpdir/luet.yaml test/c
#luet install -y --config $tmpdir/luet.yaml test/c-1.0 > /dev/null
installst=$?
assertEquals 'install test successfully' "$installst" "0"
assertTrue 'package installed' "[ -e '$tmpdir/testrootfs/c' ]"
}
testReInstall() {
output=$(luet install --config $tmpdir/luet.yaml test/c-1.0)
output=$(luet install -y --config $tmpdir/luet.yaml test/c-1.0)
installst=$?
assertEquals 'install test successfully' "$installst" "0"
assertContains 'contains warning' "$output" 'Filtering out'
assertContains 'contains warning' "$output" 'No packages to install'
}
testUnInstall() {
luet uninstall --config $tmpdir/luet.yaml test/c
luet uninstall -y --config $tmpdir/luet.yaml test/c
installst=$?
assertEquals 'uninstall test successfully' "$installst" "0"
assertTrue 'package uninstalled' "[ ! -e '$tmpdir/testrootfs/c' ]"
@@ -79,10 +80,10 @@ testUnInstall() {
testInstallAgain() {
assertTrue 'package uninstalled' "[ ! -e '$tmpdir/testrootfs/c' ]"
output=$(luet install --config $tmpdir/luet.yaml test/c-1.0)
output=$(luet install -y --config $tmpdir/luet.yaml test/c-1.0)
installst=$?
assertEquals 'install test successfully' "$installst" "0"
assertNotContains 'contains warning' "$output" 'Filtering out'
assertNotContains 'contains warning' "$output" 'No packages to install'
assertTrue 'package installed' "[ -e '$tmpdir/testrootfs/c' ]"
assertTrue 'package in cache' "[ -e '$tmpdir/testrootfs/packages/c-test-1.0.package.tar.gz' ]"
}


@@ -48,6 +48,7 @@ system:
rootfs: $tmpdir/testrootfs
database_path: "/"
database_engine: "boltdb"
config_from_host: true
repositories:
- name: "main"
type: "disk"
@@ -61,22 +62,22 @@ EOF
}
testInstall() {
luet install --config $tmpdir/luet.yaml test/c-1.0
#luet install --config $tmpdir/luet.yaml test/c-1.0 > /dev/null
luet install -y --config $tmpdir/luet.yaml test/c-1.0
#luet install -y --config $tmpdir/luet.yaml test/c-1.0 > /dev/null
installst=$?
assertEquals 'install test successfully' "$installst" "0"
assertTrue 'package installed' "[ -e '$tmpdir/testrootfs/c' ]"
}
testReInstall() {
output=$(luet install --config $tmpdir/luet.yaml test/c-1.0)
output=$(luet install -y --config $tmpdir/luet.yaml test/c-1.0)
installst=$?
assertEquals 'install test successfully' "$installst" "0"
assertContains 'contains warning' "$output" 'Filtering out'
assertContains 'contains warning' "$output" 'No packages to install'
}
testUnInstall() {
luet uninstall --config $tmpdir/luet.yaml test/c-1.0
luet uninstall -y --config $tmpdir/luet.yaml test/c-1.0
installst=$?
assertEquals 'uninstall test successfully' "$installst" "0"
assertTrue 'package uninstalled' "[ ! -e '$tmpdir/testrootfs/c' ]"
@@ -84,10 +85,10 @@ testUnInstall() {
testInstallAgain() {
assertTrue 'package uninstalled' "[ ! -e '$tmpdir/testrootfs/c' ]"
output=$(luet install --config $tmpdir/luet.yaml test/c-1.0)
output=$(luet install -y --config $tmpdir/luet.yaml test/c-1.0)
installst=$?
assertEquals 'install test successfully' "$installst" "0"
assertNotContains 'contains warning' "$output" 'Filtering out'
assertNotContains 'contains warning' "$output" 'No packages to install'
assertTrue 'package installed' "[ -e '$tmpdir/testrootfs/c' ]"
assertTrue 'package in cache' "[ -e '$tmpdir/testrootfs/packages/c-test-1.0.package.tar.gz' ]"
}

Some files were not shown because too many files have changed in this diff.