Mirror of https://github.com/mudler/luet.git (synced 2025-09-03 08:14:46 +00:00)
Compare commits

13 commits:

0a4fe57f33
ccf83b0d5f
57cefc7d6b
97ff647f07
b7ac1e03d5
ff092db97d
2789f59f53
10ae872a3e
65a55e242e
77c4bf1fd1
4d6cccb2fa
ec7be63418
33b1c63815
cmd/build.go (16 changed lines)
@@ -75,7 +75,7 @@ Build packages specifying multiple definition trees:
viper.BindPFlag("compression", cmd.Flags().Lookup("compression"))
viper.BindPFlag("nodeps", cmd.Flags().Lookup("nodeps"))
viper.BindPFlag("onlydeps", cmd.Flags().Lookup("onlydeps"))
viper.BindPFlag("values", cmd.Flags().Lookup("values"))
util.BindValuesFlags(cmd)
viper.BindPFlag("backend-args", cmd.Flags().Lookup("backend-args"))

viper.BindPFlag("image-repository", cmd.Flags().Lookup("image-repository"))
@@ -101,7 +101,7 @@ Build packages specifying multiple definition trees:
all := viper.GetBool("all")
compressionType := viper.GetString("compression")
imageRepository := viper.GetString("image-repository")
values := viper.GetStringSlice("values")
values := util.ValuesFlags()
wait := viper.GetBool("wait")
push := viper.GetBool("push")
pull := viper.GetBool("pull")
@@ -160,6 +160,17 @@ Build packages specifying multiple definition trees:
opts.Options = solver.Options{Type: solver.SingleCoreSimple, Concurrency: concurrency}
}

templateFolders := []string{}
if !fromRepo {
for _, t := range treePaths {
templateFolders = append(templateFolders, filepath.Join(t, "templates"))
}
} else {
for _, s := range installer.SystemRepositories(LuetCfg) {
templateFolders = append(templateFolders, filepath.Join(s.TreePath, "templates"))
}
}

luetCompiler := compiler.NewLuetCompiler(compilerBackend, generalRecipe.GetDatabase(),
options.NoDeps(nodeps),
options.WithBackendType(backendType),
@@ -168,6 +179,7 @@ Build packages specifying multiple definition trees:
options.WithPullRepositories(pullRepo),
options.WithPushRepository(imageRepository),
options.Rebuild(rebuild),
options.WithTemplateFolder(templateFolders),
options.WithSolverOptions(*opts),
options.Wait(wait),
options.OnlyTarget(onlyTarget),
@@ -41,7 +41,7 @@ var Verbose bool
var LockedCommands = []string{"install", "uninstall", "upgrade"}

const (
LuetCLIVersion = "0.17.4"
LuetCLIVersion = "0.17.7"
LuetEnvPrefix = "LUET"
license = `
Luet Copyright (C) 2019-2021 Ettore Di Giacinto
@@ -23,6 +23,7 @@ import (
//. "github.com/mudler/luet/pkg/config"
"github.com/ghodss/yaml"
helpers "github.com/mudler/luet/cmd/helpers"
"github.com/mudler/luet/cmd/util"
"github.com/mudler/luet/pkg/compiler"
"github.com/mudler/luet/pkg/compiler/backend"
"github.com/mudler/luet/pkg/compiler/types/options"
@@ -50,6 +51,7 @@ func NewTreeImageCommand() *cobra.Command {
if len(args) != 1 {
Fatal("Expects one package as parameter")
}
util.BindValuesFlags(cmd)
viper.BindPFlag("image-repository", cmd.Flags().Lookup("image-repository"))

},
@@ -59,6 +61,7 @@ func NewTreeImageCommand() *cobra.Command {
treePath, _ := cmd.Flags().GetStringArray("tree")
imageRepository := viper.GetString("image-repository")
pullRepo, _ := cmd.Flags().GetStringArray("pull-repository")
values := util.ValuesFlags()

out, _ := cmd.Flags().GetString("output")
if out != "terminal" {
@@ -80,6 +83,7 @@ func NewTreeImageCommand() *cobra.Command {
luetCompiler := compiler.NewLuetCompiler(
compilerBackend,
reciper.GetDatabase(),
options.WithBuildValues(values),
options.WithPushRepository(imageRepository),
options.WithPullRepositories(pullRepo),
options.WithSolverOptions(opts),
@@ -17,6 +17,7 @@ package util

import (
"github.com/spf13/cobra"
"github.com/spf13/viper"

"github.com/mudler/luet/pkg/config"
. "github.com/mudler/luet/pkg/config"
@@ -35,6 +36,14 @@ func BindSolverFlags(cmd *cobra.Command) {
LuetCfg.Viper.BindPFlag("solver.max_attempts", cmd.Flags().Lookup("solver-attempts"))
}

func BindValuesFlags(cmd *cobra.Command) {
viper.BindPFlag("values", cmd.Flags().Lookup("values"))
}

func ValuesFlags() []string {
return viper.GetStringSlice("values")
}

func SetSystemConfig() {
dbpath := LuetCfg.Viper.GetString("system.database_path")
rootfs := LuetCfg.Viper.GetString("system.rootfs")
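The BindValuesFlags and ValuesFlags helpers above give commands a single place to bind and read the repeated --values flag. A minimal sketch of how a cobra command could wire them; the command name and the flag registration below are illustrative assumptions, not part of this change:

package cmdexample

import (
	"fmt"

	"github.com/mudler/luet/cmd/util"
	"github.com/spf13/cobra"
)

func newExampleCommand() *cobra.Command {
	cmd := &cobra.Command{
		Use: "example",
		PreRun: func(cmd *cobra.Command, args []string) {
			// Bind --values into viper, as cmd/build.go now does via util.BindValuesFlags.
			util.BindValuesFlags(cmd)
		},
		Run: func(cmd *cobra.Command, args []string) {
			// Read the bound slice back through the shared helper.
			fmt.Println(util.ValuesFlags())
		},
	}
	// Hypothetical flag definition; the real commands register --values themselves.
	cmd.Flags().StringSlice("values", []string{}, "templated values files")
	return cmd
}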
go.mod (2 changed lines)
@@ -10,7 +10,7 @@ require (
github.com/briandowns/spinner v1.12.1-0.20201220203425-e201aaea0a31
github.com/cavaliercoder/grab v1.0.1-0.20201108051000-98a5bfe305ec
github.com/containerd/containerd v1.4.1-0.20201117152358-0edc412565dc
github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765
github.com/crillab/gophersat v1.3.2-0.20210701121804-72b19f5b6b38
github.com/docker/cli v20.10.0-beta1.0.20201029214301-1d20b15adc38+incompatible
github.com/docker/distribution v2.7.1+incompatible
github.com/docker/docker v20.10.0-beta1.0.20201110211921-af34b94a78a1+incompatible
go.sum (2 changed lines)
@@ -339,6 +339,8 @@ github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7Do
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765 h1:EO5Dm7O50aaEwv1GTFWNLAj7vNQ1OjW3+DeJCy1vTMk=
github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765/go.mod h1:S91tHga1PCZzYhCkStwZAhvp1rCc+zqtSi55I+vDWGc=
github.com/crillab/gophersat v1.3.2-0.20210701121804-72b19f5b6b38 h1:sOsE90v/iPmouqh4d3cKQl8JuqTOEXg+fP+7YuY7R6U=
github.com/crillab/gophersat v1.3.2-0.20210701121804-72b19f5b6b38/go.mod h1:S91tHga1PCZzYhCkStwZAhvp1rCc+zqtSi55I+vDWGc=
github.com/cyphar/filepath-securejoin v0.2.2 h1:jCwT2GTP+PY5nBz3c/YL5PAIbusElVrPujOBSCj8xRg=
github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4=
github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -42,6 +42,7 @@ import (
"github.com/mudler/luet/pkg/solver"
"github.com/pkg/errors"
"gopkg.in/yaml.v2"
"helm.sh/helm/v3/pkg/chart"
)

const BuildFile = "build.yaml"
@@ -750,13 +751,13 @@ func (cs *LuetCompiler) inheritSpecBuildOptions(p *compilerspec.LuetCompilationS
p.BuildOptions.PullImageRepository = append(p.BuildOptions.PullImageRepository, cs.Options.PullImageRepository...)
Debug("Inheriting pull repository from PullImageRepository buildoptions", p.BuildOptions.PullImageRepository)
}

Debug(p.GetPackage().HumanReadableString(), "Build options after inherit", p.BuildOptions)
}

func (cs *LuetCompiler) getSpecHash(pkgs pkg.DefaultPackages, salt string) (string, error) {
ht := NewHashTree(cs.Database)
overallFp := ""

for _, p := range pkgs {
compileSpec, err := cs.FromPackage(p)
if err != nil {
@@ -1128,6 +1129,14 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, generateF
type templatedata map[string]interface{}

func (cs *LuetCompiler) templatePackage(vals []map[string]interface{}, pack pkg.Package, dst templatedata) ([]byte, error) {
// Grab shared templates first
var chartFiles []*chart.File
if len(cs.Options.TemplatesFolder) != 0 {
c, err := helpers.ChartFiles(cs.Options.TemplatesFolder)
if err == nil {
chartFiles = c
}
}

var dataresult []byte
val := pack.Rel(DefinitionFile)
@@ -1165,7 +1174,7 @@ func (cs *LuetCompiler) templatePackage(vals []map[string]interface{}, pack pkg.
return nil, errors.Wrap(err, "merging values maps")
}

dat, err := helpers.RenderHelm(string(dataBuild), td, dst)
dat, err := helpers.RenderHelm(append(chartFiles, helpers.ChartFileB(dataBuild)...), td, dst)
if err != nil {
return nil, errors.Wrap(err, "rendering file "+pack.Rel(BuildFile))
}
@@ -1190,7 +1199,13 @@ func (cs *LuetCompiler) templatePackage(vals []map[string]interface{}, pack pkg.
bv = append([]string{f}, bv...)
}
}
out, err := helpers.RenderFiles(pack.Rel(BuildFile), val, bv...)

raw, err := ioutil.ReadFile(pack.Rel(BuildFile))
if err != nil {
return nil, err
}

out, err := helpers.RenderFiles(append(chartFiles, helpers.ChartFileB(raw)...), val, bv...)
if err != nil {
return nil, errors.Wrap(err, "rendering file "+pack.Rel(BuildFile))
}
@@ -43,6 +43,9 @@ type Compiler struct {
BackendArgs []string

BackendType string

// TemplatesFolder. should default to tree/templates
TemplatesFolder []string
}

func NewDefaultCompiler() *Compiler {
@@ -87,6 +90,13 @@ func WithBackendType(r string) func(cfg *Compiler) error {
}
}

func WithTemplateFolder(r []string) func(cfg *Compiler) error {
return func(cfg *Compiler) error {
cfg.TemplatesFolder = r
return nil
}
}

func WithBuildValues(r []string) func(cfg *Compiler) error {
return func(cfg *Compiler) error {
cfg.BuildValuesFile = r
@@ -17,7 +17,6 @@
package config

import (
"errors"
"fmt"
"io/ioutil"
"os"
@@ -27,6 +26,10 @@ import (
"strings"
"time"

"github.com/pkg/errors"

fileHelper "github.com/mudler/luet/pkg/helpers/file"

pkg "github.com/mudler/luet/pkg/package"
solver "github.com/mudler/luet/pkg/solver"

@@ -91,7 +94,7 @@ func (opts LuetSolverOptions) Resolver() solver.PackageResolver {
return solver.SimpleQLearningSolver()
}

return &solver.DummyPackageResolver{}
return &solver.Explainer{}
}

func (opts *LuetSolverOptions) CompactString() string {
@@ -108,15 +111,12 @@ type LuetSystemConfig struct {
}

func (s *LuetSystemConfig) SetRootFS(path string) error {
pathToSet := path
if !filepath.IsAbs(path) {
abs, err := filepath.Abs(path)
if err != nil {
return err
}
pathToSet = abs
p, err := fileHelper.Rel2Abs(path)
if err != nil {
return err
}
s.Rootfs = pathToSet

s.Rootfs = p
return nil
}

@@ -418,6 +418,14 @@ system:
}

func (c *LuetSystemConfig) InitTmpDir() error {
if !filepath.IsAbs(c.TmpDirBase) {
abs, err := fileHelper.Rel2Abs(c.TmpDirBase)
if err != nil {
return errors.Wrap(err, "while converting relative path to absolute path")
}
c.TmpDirBase = abs
}

if _, err := os.Stat(c.TmpDirBase); err != nil {
if os.IsNotExist(err) {
err = os.MkdirAll(c.TmpDirBase, os.ModePerm)
@@ -287,3 +287,15 @@ func CopyDir(src string, dst string) (err error) {
Sync: true,
OnSymlink: func(string) copy.SymlinkAction { return copy.Shallow }})
}

func Rel2Abs(s string) (string, error) {
pathToSet := s
if !filepath.IsAbs(s) {
abs, err := filepath.Abs(s)
if err != nil {
return "", err
}
pathToSet = abs
}
return pathToSet, nil
}
@@ -2,6 +2,8 @@ package helpers

import (
"io/ioutil"
"path/filepath"
"strings"

fileHelper "github.com/mudler/luet/pkg/helpers/file"

@@ -13,17 +15,86 @@ import (
"helm.sh/helm/v3/pkg/engine"
)

// ChartFileB is a helper that takes a slice of bytes and constructs a chart.File slice from it
func ChartFileB(s []byte) []*chart.File {
return []*chart.File{
{Name: "templates", Data: s},
}
}

// ChartFileS is a helper that takes a string and constructs a chart.File slice from it
func ChartFileS(s string) []*chart.File {
return []*chart.File{
{Name: "templates", Data: []byte(s)},
}
}

// ChartFile reads all the given files and returns a slice of []*chart.File
// containing the raw content and the file name for each file
func ChartFile(s ...string) []*chart.File {
files := []*chart.File{}
for _, c := range s {
raw, err := ioutil.ReadFile(c)
if err != nil {
return files
}
files = append(files, &chart.File{Name: c, Data: raw})
}

return files
}

// ChartFiles reads a list of paths and reads all YAML files inside. It returns a
// slice of pointers to chart.File(s) with the raw content of the YAML files
func ChartFiles(path []string) ([]*chart.File, error) {
var chartFiles []*chart.File
for _, t := range path {
rel, err := fileHelper.Rel2Abs(t)
if err != nil {
return nil, err
}

if !fileHelper.Exists(rel) {
continue
}
files, err := fileHelper.ListDir(rel)
if err != nil {
return nil, err
}

for _, f := range files {
if strings.ToLower(filepath.Ext(f)) == ".yaml" {
raw, err := ioutil.ReadFile(f)
if err != nil {
return nil, err
}
chartFiles = append(chartFiles, &chart.File{Name: f, Data: raw})
}
}
}
return chartFiles, nil
}

// RenderHelm renders the given chart files with helm
func RenderHelm(template string, values, d map[string]interface{}) (string, error) {
func RenderHelm(files []*chart.File, values, d map[string]interface{}) (string, error) {

// We slurp all the files into one here. This is not elegant, but it still works.
// As a reminder, the files passed here have the templates from the 'templates/' folder
// of each luet tree at the head, and the package buildspec to be templated at the bottom.
// TODO: Replace by correctly populating the files so that the helm render engine templates them
// correctly
toTemplate := ""
for _, f := range files {
toTemplate += string(f.Data)
}

c := &chart.Chart{
Metadata: &chart.Metadata{
Name: "",
Version: "",
},
Templates: []*chart.File{
{Name: "templates", Data: []byte(template)},
},
Values: map[string]interface{}{"Values": values},
Templates: ChartFileS(toTemplate),
Values: map[string]interface{}{"Values": values},
}

v, err := chartutil.CoalesceValues(c, map[string]interface{}{"Values": d})
@@ -71,23 +142,18 @@ func reverse(s []string) []string {
return s
}

func RenderFiles(toTemplate, valuesFile string, defaultFile ...string) (string, error) {
raw, err := ioutil.ReadFile(toTemplate)
if err != nil {
return "", errors.Wrap(err, "reading file "+toTemplate)
}

func RenderFiles(files []*chart.File, valuesFile string, defaultFile ...string) (string, error) {
if !fileHelper.Exists(valuesFile) {
return "", errors.Wrap(err, "file not existing "+valuesFile)
return "", errors.New("file does not exist: " + valuesFile)
}
val, err := ioutil.ReadFile(valuesFile)
if err != nil {
return "", errors.Wrap(err, "reading file "+valuesFile)
return "", errors.Wrap(err, "reading file: "+valuesFile)
}

var values templatedata
if err = yaml.Unmarshal(val, &values); err != nil {
return "", errors.Wrap(err, "unmarshalling file "+toTemplate)
return "", errors.Wrap(err, "unmarshalling values")
}

dst, err := UnMarshalValues(defaultFile)
@@ -95,5 +161,5 @@ func RenderFiles(toTemplate, valuesFile string, defaultFile ...string) (string,
return "", errors.Wrap(err, "unmarshalling values")
}

return RenderHelm(string(raw), values, dst)
return RenderHelm(files, values, dst)
}
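With the new signature, callers wrap template bytes or strings into chart.File slices before rendering. A minimal usage sketch of the updated API, mirroring the test assertions below; calling it from a standalone package outside luet is an assumption:

package helmexample

import (
	"fmt"

	helpers "github.com/mudler/luet/pkg/helpers"
)

func Example() {
	// ChartFileS wraps an inline template into the []*chart.File slice
	// that RenderHelm now expects instead of a plain string.
	files := helpers.ChartFileS("{{.Values.Test}}{{.Values.Bar}}")

	// Second argument: values; third: defaults coalesced underneath them.
	out, err := helpers.RenderHelm(files,
		map[string]interface{}{"Test": "foo"},
		map[string]interface{}{"Bar": "bar"})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // prints "foobar", as the tests below assert
}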
@@ -30,21 +30,21 @@ func writeFile(path string, content string) {
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
}
|
||||
|
||||
var _ = Describe("Helpers", func() {
|
||||
var _ = Describe("Helm", func() {
|
||||
Context("RenderHelm", func() {
|
||||
It("Renders templates", func() {
|
||||
out, err := RenderHelm("{{.Values.Test}}{{.Values.Bar}}", map[string]interface{}{"Test": "foo"}, map[string]interface{}{"Bar": "bar"})
|
||||
out, err := RenderHelm(ChartFileS("{{.Values.Test}}{{.Values.Bar}}"), map[string]interface{}{"Test": "foo"}, map[string]interface{}{"Bar": "bar"})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(out).To(Equal("foobar"))
|
||||
})
|
||||
It("Renders templates with overrides", func() {
|
||||
out, err := RenderHelm("{{.Values.Test}}{{.Values.Bar}}", map[string]interface{}{"Test": "foo", "Bar": "baz"}, map[string]interface{}{"Bar": "bar"})
|
||||
out, err := RenderHelm(ChartFileS("{{.Values.Test}}{{.Values.Bar}}"), map[string]interface{}{"Test": "foo", "Bar": "baz"}, map[string]interface{}{"Bar": "bar"})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(out).To(Equal("foobar"))
|
||||
})
|
||||
|
||||
It("Renders templates", func() {
|
||||
out, err := RenderHelm("{{.Values.Test}}{{.Values.Bar}}", map[string]interface{}{"Test": "foo", "Bar": "bar"}, map[string]interface{}{})
|
||||
out, err := RenderHelm(ChartFileS("{{.Values.Test}}{{.Values.Bar}}"), map[string]interface{}{"Test": "foo", "Bar": "bar"}, map[string]interface{}{})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(out).To(Equal("foobar"))
|
||||
})
|
||||
@@ -68,7 +68,7 @@ foo: "baz"
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
res, err := RenderFiles(toTemplate, values, d)
|
||||
res, err := RenderFiles(ChartFile(toTemplate), values, d)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(res).To(Equal("baz"))
|
||||
|
||||
@@ -93,7 +93,7 @@ faa: "baz"
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
res, err := RenderFiles(toTemplate, values, d)
|
||||
res, err := RenderFiles(ChartFile(toTemplate), values, d)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(res).To(Equal("bar"))
|
||||
|
||||
@@ -114,7 +114,7 @@ foo: "bar"
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
res, err := RenderFiles(toTemplate, values)
|
||||
res, err := RenderFiles(ChartFile(toTemplate), values)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(res).To(Equal("bar"))
|
||||
})
|
||||
@@ -145,11 +145,11 @@ bar: "nei"
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
res, err := RenderFiles(toTemplate, values, d2, d)
|
||||
res, err := RenderFiles(ChartFile(toTemplate), values, d2, d)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(res).To(Equal("bazneif"))
|
||||
|
||||
res, err = RenderFiles(toTemplate, values, d, d2)
|
||||
res, err = RenderFiles(ChartFile(toTemplate), values, d, d2)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(res).To(Equal("doneif"))
|
||||
})
|
||||
@@ -173,7 +173,7 @@ faa: "baz"
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
res, err := RenderFiles(toTemplate, values, d)
|
||||
res, err := RenderFiles(ChartFile(toTemplate), values, d)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(res).To(Equal(""))
|
||||
|
||||
|
@@ -45,7 +45,7 @@ func NewHttpClient(r RepoData) *HttpClient {
}

func NewGrabClient() *grab.Client {
httpTimeout := 30
httpTimeout := 120
timeout := os.Getenv("HTTP_TIMEOUT")
if timeout != "" {
timeoutI, err := strconv.Atoi(timeout)
@@ -29,7 +29,7 @@ func (s *System) ExecuteFinalizers(packs []pkg.Package) error {
executedFinalizer := map[string]bool{}
for _, p := range packs {
if fileHelper.Exists(p.Rel(tree.FinalizerFile)) {
out, err := helpers.RenderFiles(p.Rel(tree.FinalizerFile), p.Rel(tree.DefinitionFile))
out, err := helpers.RenderFiles(helpers.ChartFile(p.Rel(tree.FinalizerFile)), p.Rel(tree.DefinitionFile))
if err != nil {
Warning("Failed rendering finalizer for ", p.HumanReadableString(), err.Error())
errs = multierror.Append(errs, err)
@@ -143,6 +143,7 @@ func (db *InMemoryDatabase) getRevdeps(p Package, visited map[string]interface{}
|
||||
if err != nil {
|
||||
return res, err
|
||||
}
|
||||
|
||||
for _, pp := range packs {
|
||||
// db.Lock()
|
||||
list := db.RevDepsDatabase[pp.GetFingerPrint()]
|
||||
@@ -195,6 +196,14 @@ func (db *InMemoryDatabase) CreatePackage(p Package) (string, error) {
|
||||
return ID, nil
|
||||
}
|
||||
|
||||
func (db *InMemoryDatabase) updateRevDep(k, v string, b Package) {
|
||||
_, ok := db.RevDepsDatabase[k]
|
||||
if !ok {
|
||||
db.RevDepsDatabase[k] = make(map[string]Package)
|
||||
}
|
||||
db.RevDepsDatabase[k][v] = b.Clone()
|
||||
}
|
||||
|
||||
func (db *InMemoryDatabase) populateCaches(p Package) {
|
||||
pd, _ := p.(*DefaultPackage)
|
||||
|
||||
@@ -226,20 +235,19 @@ func (db *InMemoryDatabase) populateCaches(p Package) {
|
||||
}
|
||||
db.CacheNoVersion[p.GetPackageName()][p.GetVersion()] = nil
|
||||
|
||||
db.Unlock()
|
||||
|
||||
// Updating Revdeps
|
||||
// Given that when we populate the cache we don't have the full db at hand
|
||||
// We cycle over reverse dependency of a package to update their entry if they are matching
|
||||
// the version selector
|
||||
db.Lock()
|
||||
toUpdate, ok := db.RevDepsDatabase[pd.GetPackageName()]
|
||||
if ok {
|
||||
for _, pp := range toUpdate {
|
||||
for _, re := range pp.GetRequires() {
|
||||
if match, _ := pd.VersionMatchSelector(re.GetVersion(), nil); match {
|
||||
_, ok = db.RevDepsDatabase[pd.GetFingerPrint()]
|
||||
if !ok {
|
||||
db.RevDepsDatabase[pd.GetFingerPrint()] = make(map[string]Package)
|
||||
}
|
||||
db.RevDepsDatabase[pd.GetFingerPrint()][pp.GetFingerPrint()] = pp
|
||||
db.updateRevDep(pd.GetFingerPrint(), pp.GetFingerPrint(), pp)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -249,30 +257,12 @@ func (db *InMemoryDatabase) populateCaches(p Package) {
|
||||
for _, re := range pd.GetRequires() {
|
||||
packages, _ := db.FindPackages(re)
|
||||
db.Lock()
|
||||
|
||||
for _, pa := range packages {
|
||||
_, ok := db.RevDepsDatabase[pa.GetFingerPrint()]
|
||||
if !ok {
|
||||
db.RevDepsDatabase[pa.GetFingerPrint()] = make(map[string]Package)
|
||||
}
|
||||
db.RevDepsDatabase[pa.GetFingerPrint()][pd.GetFingerPrint()] = pd
|
||||
_, ok = db.RevDepsDatabase[pa.GetPackageName()]
|
||||
if !ok {
|
||||
db.RevDepsDatabase[pa.GetPackageName()] = make(map[string]Package)
|
||||
}
|
||||
db.RevDepsDatabase[pa.GetPackageName()][pd.GetPackageName()] = pd
|
||||
db.updateRevDep(pa.GetFingerPrint(), pd.GetFingerPrint(), pd)
|
||||
db.updateRevDep(pa.GetPackageName(), pd.GetPackageName(), pd)
|
||||
}
|
||||
_, ok := db.RevDepsDatabase[re.GetFingerPrint()]
|
||||
if !ok {
|
||||
db.RevDepsDatabase[re.GetFingerPrint()] = make(map[string]Package)
|
||||
}
|
||||
db.RevDepsDatabase[re.GetFingerPrint()][pd.GetFingerPrint()] = pd
|
||||
_, ok = db.RevDepsDatabase[re.GetPackageName()]
|
||||
if !ok {
|
||||
db.RevDepsDatabase[re.GetPackageName()] = make(map[string]Package)
|
||||
}
|
||||
db.RevDepsDatabase[re.GetPackageName()][pd.GetPackageName()] = pd
|
||||
|
||||
db.updateRevDep(re.GetFingerPrint(), pd.GetFingerPrint(), pd)
|
||||
db.updateRevDep(re.GetPackageName(), pd.GetPackageName(), pd)
|
||||
db.Unlock()
|
||||
}
|
||||
}
|
||||
|
@@ -136,6 +136,18 @@ type Packages []Package

type DefaultPackages []*DefaultPackage

type PackageMap map[string]Package

func (pm PackageMap) String() string {
rr := []string{}
for _, r := range pm {
rr = append(rr, r.HumanReadableString())
}
return fmt.Sprint(rr)
}

func (d DefaultPackages) Hash(salt string) string {

overallFp := ""
@@ -1,4 +1,4 @@
|
||||
// Copyright © 2020 Ettore Di Giacinto <mudler@gentoo.org>
|
||||
// Copyright © 2020-2021 Ettore Di Giacinto <mudler@mocaccino.org>
|
||||
//
|
||||
// This program is free software; you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
@@ -16,11 +16,16 @@
|
||||
package solver
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/crillab/gophersat/bf"
|
||||
"github.com/crillab/gophersat/explain"
|
||||
"github.com/mudler/luet/pkg/helpers"
|
||||
"gopkg.in/yaml.v2"
|
||||
|
||||
@@ -51,18 +56,115 @@ const (
|
||||
QLearningResolverType = "qlearning"
|
||||
)
|
||||
|
||||
//. "github.com/mudler/luet/pkg/logger"
|
||||
|
||||
// PackageResolver assists PackageSolver on unsat cases
|
||||
type PackageResolver interface {
|
||||
Solve(bf.Formula, PackageSolver) (PackagesAssertions, error)
|
||||
}
|
||||
|
||||
type DummyPackageResolver struct {
|
||||
type Explainer struct{}
|
||||
|
||||
func decodeDimacs(vars map[string]string, dimacs string) (string, error) {
|
||||
res := ""
|
||||
sc := bufio.NewScanner(bytes.NewBufferString(dimacs))
|
||||
lines := strings.Split(dimacs, "\n")
|
||||
linenum := 1
|
||||
SCAN:
|
||||
for sc.Scan() {
|
||||
|
||||
line := sc.Text()
|
||||
fields := strings.Fields(line)
|
||||
if len(fields) == 0 {
|
||||
continue
|
||||
}
|
||||
switch fields[0] {
|
||||
case "p":
|
||||
continue SCAN
|
||||
default:
|
||||
for i := 0; i < len(fields)-1; i++ {
|
||||
v := fields[i]
|
||||
negative := false
|
||||
if strings.HasPrefix(fields[i], "-") {
|
||||
v = strings.TrimLeft(fields[i], "-")
|
||||
negative = true
|
||||
}
|
||||
variable := vars[v]
|
||||
if negative {
|
||||
res += fmt.Sprintf("!(%s)", variable)
|
||||
} else {
|
||||
res += variable
|
||||
}
|
||||
|
||||
if i != len(fields)-2 {
|
||||
res += fmt.Sprintf(" or ")
|
||||
}
|
||||
}
|
||||
if linenum != len(lines)-1 {
|
||||
res += fmt.Sprintf(" and \n")
|
||||
}
|
||||
}
|
||||
linenum++
|
||||
}
|
||||
if err := sc.Err(); err != nil {
|
||||
return res, fmt.Errorf("could not parse problem: %v", err)
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (*DummyPackageResolver) Solve(bf.Formula, PackageSolver) (PackagesAssertions, error) {
|
||||
return nil, errors.New("Could not satisfy the constraints. Try again by removing deps ")
|
||||
func parseVars(r io.Reader) (map[string]string, error) {
|
||||
sc := bufio.NewScanner(r)
|
||||
res := map[string]string{}
|
||||
for sc.Scan() {
|
||||
line := sc.Text()
|
||||
fields := strings.Fields(line)
|
||||
if len(fields) == 0 {
|
||||
continue
|
||||
}
|
||||
switch fields[0] {
|
||||
case "c":
|
||||
data := strings.Split(fields[1], "=")
|
||||
res[data[1]] = data[0]
|
||||
|
||||
default:
|
||||
continue
|
||||
|
||||
}
|
||||
}
|
||||
if err := sc.Err(); err != nil {
|
||||
return nil, fmt.Errorf("could not parse problem: %v", err)
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// Solve tries to find the MUS (minimum unsat) formula from the original problem.
|
||||
// it returns an error with the decoded dimacs
|
||||
func (*Explainer) Solve(f bf.Formula, s PackageSolver) (PackagesAssertions, error) {
|
||||
buf := bytes.NewBufferString("")
|
||||
if err := bf.Dimacs(f, buf); err != nil {
|
||||
return nil, errors.Wrap(err, "cannot extract dimacs from formula")
|
||||
}
|
||||
|
||||
copy := *buf
|
||||
|
||||
pb, err := explain.ParseCNF(©)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not parse problem")
|
||||
}
|
||||
pb2, err := pb.MUS()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not extract subset")
|
||||
}
|
||||
|
||||
variables, err := parseVars(buf)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not parse variables")
|
||||
}
|
||||
|
||||
res, err := decodeDimacs(variables, pb2.CNF())
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not parse dimacs")
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("could not satisfy the constraints: \n%s", res)
|
||||
}
|
||||
|
||||
type QLearningResolver struct {
|
||||
@@ -103,8 +205,8 @@ func (resolver *QLearningResolver) Solve(f bf.Formula, s PackageSolver) (Package
|
||||
// Info("Using QLearning solver to resolve conflicts. Please be patient.")
|
||||
resolver.Solver = s
|
||||
|
||||
s.SetResolver(&DummyPackageResolver{}) // Set dummy. Otherwise the attempts will run again a QLearning instance.
|
||||
defer s.SetResolver(resolver) // Set back ourselves as resolver
|
||||
s.SetResolver(&Explainer{}) // Set dummy. Otherwise the attempts will run again a QLearning instance.
|
||||
defer s.SetResolver(resolver) // Set back ourselves as resolver
|
||||
|
||||
resolver.Formula = f
|
||||
|
||||
|
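The solver changes above swap the old DummyPackageResolver for an Explainer that extracts a minimum unsatisfiable subset and decodes it back into package terms. A hedged sketch of calling it directly; passing nil for the PackageSolver argument is an assumption based on the Solve body shown above, which does not use it:

package solverexample

import (
	"fmt"

	"github.com/crillab/gophersat/bf"
	"github.com/mudler/luet/pkg/solver"
)

func ExplainUnsat() {
	// A deliberately unsatisfiable formula: A and not A.
	f := bf.And(bf.Var("A"), bf.Not(bf.Var("A")))

	// Explainer.Solve never returns assertions; it returns an error
	// whose message contains the decoded minimal unsat core.
	_, err := (&solver.Explainer{}).Solve(f, nil)
	fmt.Println(err)
}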
@@ -38,7 +38,7 @@ var _ = Describe("Resolver", func() {
|
||||
})
|
||||
|
||||
Context("Conflict set", func() {
|
||||
Context("DummyPackageResolver", func() {
|
||||
Context("Explainer", func() {
|
||||
It("is unsolvable - as we something we ask to install conflict with system stuff", func() {
|
||||
C := pkg.NewPackage("C", "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
|
||||
B := pkg.NewPackage("B", "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{C})
|
||||
@@ -152,7 +152,7 @@ var _ = Describe("Resolver", func() {
|
||||
})
|
||||
})
|
||||
|
||||
Context("DummyPackageResolver", func() {
|
||||
Context("Explainer", func() {
|
||||
It("cannot find a solution", func() {
|
||||
C := pkg.NewPackage("C", "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
|
||||
B := pkg.NewPackage("B", "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{C})
|
||||
@@ -171,6 +171,11 @@ var _ = Describe("Resolver", func() {
|
||||
|
||||
solution, err := s.Install([]pkg.Package{A, D})
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(Equal(`could not satisfy the constraints:
|
||||
A-- and
|
||||
C-- and
|
||||
!(A--) or B-- and
|
||||
!(B--) or !(C--)`))
|
||||
|
||||
Expect(len(solution)).To(Equal(0))
|
||||
})
|
||||
|
@@ -71,7 +71,7 @@ type Options struct {
// NewSolver accepts as argument two lists of packages, the first is the initial set,
// the second represent all the known packages.
func NewSolver(t Options, installed pkg.PackageDatabase, definitiondb pkg.PackageDatabase, solverdb pkg.PackageDatabase) PackageSolver {
return NewResolver(t, installed, definitiondb, solverdb, &DummyPackageResolver{})
return NewResolver(t, installed, definitiondb, solverdb, &Explainer{})
}

// NewResolver accepts as argument two lists of packages, the first is the initial set,
@@ -76,6 +76,10 @@ func (r *CompilerRecipe) Load(path string) error {
//if err != nil {
// return err
//}
c, err := helpers.ChartFiles([]string{filepath.Join(path, "templates")})
if err != nil {
return err
}

//r.Tree().SetPackageSet(pkg.NewBoltDatabase(tmpfile.Name()))
// TODO: Handle cleaning after? Cleanup implemented in GetPackageSet().Clean()
@@ -104,8 +108,7 @@ func (r *CompilerRecipe) Load(path string) error {
// Instead of rdeps, have a different tree for build deps.
compileDefPath := pack.Rel(CompilerDefinitionFile)
if fileHelper.Exists(compileDefPath) {

dat, err := helpers.RenderFiles(compileDefPath, currentpath)
dat, err := helpers.RenderFiles(append(c, helpers.ChartFile(compileDefPath)...), currentpath)
if err != nil {
return errors.Wrap(err,
"Error templating file "+CompilerDefinitionFile+" from "+
@@ -157,7 +160,7 @@ func (r *CompilerRecipe) Load(path string) error {
if err != nil {
return errors.Wrap(err, "Error reading file "+currentpath)
}
dat, err := helpers.RenderHelm(string(buildyaml), raw, map[string]interface{}{})
dat, err := helpers.RenderHelm(append(c, helpers.ChartFileB(buildyaml)...), raw, map[string]interface{}{})
if err != nil {
return errors.Wrap(err,
"Error templating file "+CompilerDefinitionFile+" from "+
@@ -183,7 +186,7 @@ func (r *CompilerRecipe) Load(path string) error {
return nil
}

err := filepath.Walk(path, ff)
err = filepath.Walk(path, ff)
if err != nil {
return err
}
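The recipe loader above now prepends any charts found under the tree's templates folder to each package buildspec before rendering. A condensed sketch of that flow under assumed paths; treePath, pkgDir, values, and defaults are illustrative names, not identifiers from the diff:

package recipeexample

import (
	"io/ioutil"
	"path/filepath"

	helpers "github.com/mudler/luet/pkg/helpers"
)

// renderWithSharedTemplates mirrors what CompilerRecipe.Load does after this
// change: shared templates first, the package buildspec last, so RenderHelm
// concatenates them in that order before rendering.
func renderWithSharedTemplates(treePath, pkgDir string, values, defaults map[string]interface{}) (string, error) {
	shared, err := helpers.ChartFiles([]string{filepath.Join(treePath, "templates")})
	if err != nil {
		return "", err
	}
	buildyaml, err := ioutil.ReadFile(filepath.Join(pkgDir, "build.yaml"))
	if err != nil {
		return "", err
	}
	return helpers.RenderHelm(append(shared, helpers.ChartFileB(buildyaml)...), values, defaults)
}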
tests/fixtures/nofileconflicts_upgrade/noconflict/build.yaml (new file, 6 lines, vendored)
@@ -0,0 +1,6 @@
image: "alpine"
prelude:
- mkdir /foo
steps:
- echo conflict > /foo/test1
package_dir: /foo
tests/fixtures/nofileconflicts_upgrade/noconflict/collection.yaml (new file, 7 lines, vendored)
@@ -0,0 +1,7 @@
packages:
- category: "test1"
  name: "noconflict"
  version: "1.0"
- category: "test1"
  name: "noconflict"
  version: "1.1"
tests/fixtures/shared_templates/templates/writefile.yaml (new file, 3 lines, vendored)
@@ -0,0 +1,3 @@
{{ define "writefile" }}
- echo conflict > /foo/{{.}}
{{end}}
tests/fixtures/shared_templates/test/build.yaml (new file, 6 lines, vendored)
@@ -0,0 +1,6 @@
image: "alpine"
prelude:
- mkdir /foo
steps:
{{ template "writefile" .Values.name }}
package_dir: /foo
tests/fixtures/shared_templates/test/collection.yaml (new file, 7 lines, vendored)
@@ -0,0 +1,7 @@
packages:
- category: "test"
  name: "foo"
  version: "1.0"
- category: "test"
  name: "bar"
  version: "1.0"
tests/integration/32_nofileconflicts_upgrade.sh (new executable file, 78 lines)
@@ -0,0 +1,78 @@
|
||||
#!/bin/bash
|
||||
|
||||
export LUET_NOLOCK=true
|
||||
|
||||
oneTimeSetUp() {
|
||||
export tmpdir="$(mktemp -d)"
|
||||
}
|
||||
|
||||
oneTimeTearDown() {
|
||||
rm -rf "$tmpdir"
|
||||
}
|
||||
|
||||
testBuild() {
|
||||
mkdir $tmpdir/testbuild
|
||||
luet build --tree "$ROOT_DIR/tests/fixtures/nofileconflicts_upgrade" --destination $tmpdir/testbuild --compression gzip --all
|
||||
buildst=$?
|
||||
assertEquals 'builds successfully' "$buildst" "0"
|
||||
assertTrue 'create packages' "[ -e '$tmpdir/testbuild/noconflict-test1-1.0.package.tar.gz' ]"
|
||||
assertTrue 'create packages' "[ -e '$tmpdir/testbuild/noconflict-test1-1.1.package.tar.gz' ]"
|
||||
}
|
||||
|
||||
testRepo() {
|
||||
assertTrue 'no repository' "[ ! -e '$tmpdir/testbuild/repository.yaml' ]"
|
||||
luet create-repo --tree "$ROOT_DIR/tests/fixtures/nofileconflicts_upgrade" \
|
||||
--output $tmpdir/testbuild \
|
||||
--packages $tmpdir/testbuild \
|
||||
--name "test" \
|
||||
--descr "Test Repo" \
|
||||
--urls $tmpdir/testrootfs \
|
||||
--type disk > /dev/null
|
||||
|
||||
createst=$?
|
||||
assertEquals 'create repo successfully' "$createst" "0"
|
||||
assertTrue 'create repository' "[ -e '$tmpdir/testbuild/repository.yaml' ]"
|
||||
}
|
||||
|
||||
testConfig() {
|
||||
mkdir $tmpdir/testrootfs
|
||||
cat <<EOF > $tmpdir/luet.yaml
|
||||
general:
|
||||
debug: true
|
||||
system:
|
||||
rootfs: $tmpdir/testrootfs
|
||||
database_path: "/"
|
||||
database_engine: "boltdb"
|
||||
config_from_host: true
|
||||
repositories:
|
||||
- name: "main"
|
||||
type: "disk"
|
||||
enable: true
|
||||
urls:
|
||||
- "$tmpdir/testbuild"
|
||||
EOF
|
||||
luet config --config $tmpdir/luet.yaml
|
||||
res=$?
|
||||
assertEquals 'config test successfully' "$res" "0"
|
||||
}
|
||||
|
||||
testInstall() {
|
||||
luet install -y --config $tmpdir/luet.yaml test1/noconflict@1.0
|
||||
installst=$?
|
||||
#assertTrue 'package installed' "[ -e '$tmpdir/testrootfs/c' ]"
|
||||
}
|
||||
|
||||
testUpgrade() {
|
||||
out=$(luet upgrade -y --config $tmpdir/luet.yaml)
|
||||
installst=$?
|
||||
assertEquals 'install test succeeded' "$installst" "0"
|
||||
assertNotContains 'does find conflicts' "$out" "Error: file conflict found: file conflict between packages to be installed"
|
||||
|
||||
installed=$(luet --config $tmpdir/luet.yaml search --installed)
|
||||
assertContains 'does upgrade' "$installed" "test1/noconflict-1.1"
|
||||
|
||||
}
|
||||
|
||||
# Load shUnit2.
|
||||
. "$ROOT_DIR/tests/integration/shunit2"/shunit2
|
||||
|
tests/integration/34_templates.sh (new executable file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
#!/bin/bash
|
||||
|
||||
export LUET_NOLOCK=true
|
||||
|
||||
oneTimeSetUp() {
|
||||
export tmpdir="$(mktemp -d)"
|
||||
}
|
||||
|
||||
oneTimeTearDown() {
|
||||
rm -rf "$tmpdir"
|
||||
}
|
||||
|
||||
testBuild() {
|
||||
mkdir $tmpdir/testbuild
|
||||
luet build --tree "$ROOT_DIR/tests/fixtures/shared_templates" --destination $tmpdir/testbuild --compression gzip --all
|
||||
buildst=$?
|
||||
assertEquals 'builds successfully' "$buildst" "0"
|
||||
assertTrue 'create packages' "[ -e '$tmpdir/testbuild/foo-test-1.0.package.tar.gz' ]"
|
||||
assertTrue 'create packages' "[ -e '$tmpdir/testbuild/bar-test-1.0.package.tar.gz' ]"
|
||||
}
|
||||
|
||||
testRepo() {
|
||||
assertTrue 'no repository' "[ ! -e '$tmpdir/testbuild/repository.yaml' ]"
|
||||
luet create-repo --tree "$ROOT_DIR/tests/fixtures/shared_templates" \
|
||||
--output $tmpdir/testbuild \
|
||||
--packages $tmpdir/testbuild \
|
||||
--name "test" \
|
||||
--descr "Test Repo" \
|
||||
--urls $tmpdir/testrootfs \
|
||||
--type disk > /dev/null
|
||||
|
||||
createst=$?
|
||||
assertEquals 'create repo successfully' "$createst" "0"
|
||||
assertTrue 'create repository' "[ -e '$tmpdir/testbuild/repository.yaml' ]"
|
||||
}
|
||||
|
||||
testConfig() {
|
||||
mkdir $tmpdir/testrootfs
|
||||
cat <<EOF > $tmpdir/luet.yaml
|
||||
general:
|
||||
debug: true
|
||||
system:
|
||||
rootfs: $tmpdir/testrootfs
|
||||
database_path: "/"
|
||||
database_engine: "boltdb"
|
||||
config_from_host: true
|
||||
repositories:
|
||||
- name: "main"
|
||||
type: "disk"
|
||||
enable: true
|
||||
urls:
|
||||
- "$tmpdir/testbuild"
|
||||
EOF
|
||||
luet config --config $tmpdir/luet.yaml
|
||||
res=$?
|
||||
assertEquals 'config test successfully' "$res" "0"
|
||||
}
|
||||
|
||||
testInstall() {
|
||||
luet install -y --config $tmpdir/luet.yaml test/foo test/bar
|
||||
installst=$?
|
||||
assertEquals 'install test failed' "$installst" "0"
|
||||
assertTrue 'package bar installed' "[ -e '$tmpdir/testrootfs/bar' ]"
|
||||
assertTrue 'package foo installed' "[ -e '$tmpdir/testrootfs/foo' ]"
|
||||
}
|
||||
|
||||
# Load shUnit2.
|
||||
. "$ROOT_DIR/tests/integration/shunit2"/shunit2
|
||||
|
vendor/github.com/crillab/gophersat/explain/check.go (new file, 149 lines, generated, vendored)
@@ -0,0 +1,149 @@
|
||||
// Package explain provides facilities to check and understand UNSAT instances.
|
||||
package explain
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/crillab/gophersat/solver"
|
||||
)
|
||||
|
||||
// Options is a set of options that can be set to true during the checking process.
|
||||
type Options struct {
|
||||
// If Verbose is true, information about resolution will be written on stdout.
|
||||
Verbose bool
|
||||
}
|
||||
|
||||
// Checks whether the clause satisfies the problem or not.
|
||||
// Will return true if the problem is UNSAT, false if it is SAT or indeterminate.
|
||||
func unsat(pb *Problem, clause []int) bool {
|
||||
oldUnits := make([]int, len(pb.units))
|
||||
copy(oldUnits, pb.units)
|
||||
// lits is supposed to be implied by the problem.
|
||||
// We add the negation of each lit as a unit clause to see if this is true.
|
||||
for _, lit := range clause {
|
||||
if lit > 0 {
|
||||
pb.units[lit-1] = -1
|
||||
} else {
|
||||
pb.units[-lit-1] = 1
|
||||
}
|
||||
}
|
||||
res := pb.unsat()
|
||||
pb.units = oldUnits // We must restore the previous state
|
||||
return res
|
||||
}
|
||||
|
||||
// UnsatChan will wait RUP clauses from ch and use them as a certificate.
|
||||
// It will return true iff the certificate is valid, i.e iff it makes the problem UNSAT
|
||||
// through unit propagation.
|
||||
// If pb.Options.ExtractSubset is true, a subset will also be extracted for that problem.
|
||||
func (pb *Problem) UnsatChan(ch chan string) (valid bool, err error) {
|
||||
defer pb.restore()
|
||||
pb.initTagged()
|
||||
for line := range ch {
|
||||
fields := strings.Fields(line)
|
||||
if len(fields) == 0 {
|
||||
continue
|
||||
}
|
||||
if _, err := strconv.Atoi(fields[0]); err != nil {
|
||||
// This is not a clause: ignore the line
|
||||
continue
|
||||
}
|
||||
clause, err := parseClause(fields)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if !unsat(pb, clause) {
|
||||
return false, nil
|
||||
}
|
||||
if len(clause) == 0 {
|
||||
// This is the empty and unit propagation made the problem UNSAT: we're done.
|
||||
return true, nil
|
||||
}
|
||||
// Since clause is a logical consequence, append it to the problem
|
||||
pb.Clauses = append(pb.Clauses, clause)
|
||||
}
|
||||
|
||||
// If we did not send any information through the channel
|
||||
// It implies that the problem is trivially unsatisfiable
|
||||
// Since we had only unit clauses inside the channel.
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// Unsat will parse a certificate, and return true iff the certificate is valid, i.e iff it makes the problem UNSAT
|
||||
// through unit propagation.
|
||||
// If pb.Options.ExtractSubset is true, a subset will also be extracted for that problem.
|
||||
func (pb *Problem) Unsat(cert io.Reader) (valid bool, err error) {
|
||||
defer pb.restore()
|
||||
pb.initTagged()
|
||||
sc := bufio.NewScanner(cert)
|
||||
for sc.Scan() {
|
||||
line := sc.Text()
|
||||
fields := strings.Fields(line)
|
||||
if len(fields) == 0 {
|
||||
continue
|
||||
}
|
||||
if _, err := strconv.Atoi(fields[0]); err != nil {
|
||||
// This is not a clause: ignore the line
|
||||
continue
|
||||
}
|
||||
clause, err := parseClause(fields)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if !unsat(pb, clause) {
|
||||
return false, nil
|
||||
}
|
||||
// Since clause is a logical consequence, append it to the problem
|
||||
pb.Clauses = append(pb.Clauses, clause)
|
||||
}
|
||||
if err := sc.Err(); err != nil {
|
||||
return false, fmt.Errorf("could not parse certificate: %v", err)
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// ErrNotUnsat is the error returned when trying to get the MUS or UnsatSubset of a satisfiable problem.
|
||||
var ErrNotUnsat = fmt.Errorf("problem is not UNSAT")
|
||||
|
||||
// UnsatSubset returns an unsatisfiable subset of the problem.
|
||||
// The subset is not guaranteed to be a MUS, meaning some clauses of the resulting
|
||||
// problem might be removed while still keeping the unsatisfiability of the problem.
|
||||
// However, this method is much more efficient than extracting a MUS, as it only calls
|
||||
// the SAT solver once.
|
||||
func (pb *Problem) UnsatSubset() (subset *Problem, err error) {
|
||||
solverPb := solver.ParseSlice(pb.Clauses)
|
||||
if solverPb.Status == solver.Unsat {
|
||||
// Problem is trivially UNSAT
|
||||
// Make a copy so that pb and pb2 are not the same value.
|
||||
pb2 := *pb
|
||||
return &pb2, nil
|
||||
}
|
||||
s := solver.New(solver.ParseSlice(pb.Clauses))
|
||||
s.Certified = true
|
||||
s.CertChan = make(chan string)
|
||||
status := solver.Unsat
|
||||
go func() {
|
||||
status = s.Solve()
|
||||
close(s.CertChan)
|
||||
}()
|
||||
if valid, err := pb.UnsatChan(s.CertChan); !valid || status == solver.Sat {
|
||||
return nil, ErrNotUnsat
|
||||
} else if err != nil {
|
||||
return nil, fmt.Errorf("could not solve problem: %v", err)
|
||||
}
|
||||
subset = &Problem{
|
||||
NbVars: pb.NbVars,
|
||||
}
|
||||
for i, clause := range pb.Clauses {
|
||||
if pb.tagged[i] {
|
||||
// clause was used to prove pb is UNSAT: it's part of the subset
|
||||
subset.Clauses = append(subset.Clauses, clause)
|
||||
subset.NbClauses++
|
||||
}
|
||||
}
|
||||
return subset, nil
|
||||
}
|
vendor/github.com/crillab/gophersat/explain/mus.go (new file, 213 lines, generated, vendored)
@@ -0,0 +1,213 @@
|
||||
package explain
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/crillab/gophersat/solver"
|
||||
)
|
||||
|
||||
// MUSMaxSat returns a Minimal Unsatisfiable Subset for the problem using the MaxSat strategy.
|
||||
// A MUS is an unsatisfiable subset such that, if any of its clause is removed,
|
||||
// the problem becomes satisfiable.
|
||||
// A MUS can be useful to understand why a problem is UNSAT, but MUSes are expensive to compute since
|
||||
// a SAT solver must be called several times on parts of the original problem to find them.
|
||||
// With the MaxSat strategy, the function computes the MUS through several calls to MaxSat.
|
||||
func (pb *Problem) MUSMaxSat() (mus *Problem, err error) {
|
||||
pb2 := pb.clone()
|
||||
nbVars := pb2.NbVars
|
||||
NbClauses := pb2.NbClauses
|
||||
weights := make([]int, NbClauses) // Weights of each clause
|
||||
relaxLits := make([]solver.Lit, NbClauses) // Set of all relax lits
|
||||
relaxLit := nbVars + 1 // Index of last used relax lit
|
||||
for i, clause := range pb2.Clauses {
|
||||
pb2.Clauses[i] = append(clause, relaxLit)
|
||||
relaxLits[i] = solver.IntToLit(int32(relaxLit))
|
||||
weights[i] = 1
|
||||
relaxLit++
|
||||
}
|
||||
prob := solver.ParseSlice(pb2.Clauses)
|
||||
prob.SetCostFunc(relaxLits, weights)
|
||||
s := solver.New(prob)
|
||||
s.Verbose = pb.Options.Verbose
|
||||
var musClauses [][]int
|
||||
done := make([]bool, NbClauses) // Indicates whether a clause is already part of MUS or not yet
|
||||
for {
|
||||
cost := s.Minimize()
|
||||
if cost == -1 {
|
||||
return makeMus(nbVars, musClauses), nil
|
||||
}
|
||||
if cost == 0 {
|
||||
return nil, fmt.Errorf("cannot extract MUS from satisfiable problem")
|
||||
}
|
||||
model := s.Model()
|
||||
for i, clause := range pb.Clauses {
|
||||
if !done[i] && !satClause(clause, model) {
|
||||
// The clause is part of the MUS
|
||||
pb2.Clauses = append(pb2.Clauses, []int{-(nbVars + i + 1)}) // Now, relax lit has to be false
|
||||
pb2.NbClauses++
|
||||
musClauses = append(musClauses, clause)
|
||||
done[i] = true
|
||||
// Make it a hard clause before restarting solver
|
||||
lits := make([]solver.Lit, len(clause))
|
||||
for j, lit := range clause {
|
||||
lits[j] = solver.IntToLit(int32(lit))
|
||||
}
|
||||
s.AppendClause(solver.NewClause(lits))
|
||||
}
|
||||
}
|
||||
if pb.Options.Verbose {
|
||||
fmt.Printf("c Currently %d/%d clauses in MUS\n", len(musClauses), NbClauses)
|
||||
}
|
||||
prob = solver.ParseSlice(pb2.Clauses)
|
||||
prob.SetCostFunc(relaxLits, weights)
|
||||
s = solver.New(prob)
|
||||
s.Verbose = pb.Options.Verbose
|
||||
}
|
||||
}
|
||||
|
||||
// true iff the clause is satisfied by the model
|
||||
func satClause(clause []int, model []bool) bool {
|
||||
for _, lit := range clause {
|
||||
if (lit > 0 && model[lit-1]) || (lit < 0 && !model[-lit-1]) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func makeMus(nbVars int, clauses [][]int) *Problem {
|
||||
mus := &Problem{
|
||||
Clauses: clauses,
|
||||
NbVars: nbVars,
|
||||
NbClauses: len(clauses),
|
||||
units: make([]int, nbVars),
|
||||
}
|
||||
for _, clause := range clauses {
|
||||
if len(clause) == 1 {
|
||||
lit := clause[0]
|
||||
if lit > 0 {
|
||||
mus.units[lit-1] = 1
|
||||
} else {
|
||||
mus.units[-lit-1] = -1
|
||||
}
|
||||
}
|
||||
}
|
||||
return mus
|
||||
}
|
||||
|
||||
// MUSInsertion returns a Minimal Unsatisfiable Subset for the problem using the insertion method.
|
||||
// A MUS is an unsatisfiable subset such that, if any of its clause is removed,
|
||||
// the problem becomes satisfiable.
|
||||
// A MUS can be useful to understand why a problem is UNSAT, but MUSes are expensive to compute since
|
||||
// a SAT solver must be called several times on parts of the original problem to find them.
|
||||
// The insertion algorithm is efficient is many cases, as it calls the same solver several times in a row.
|
||||
// However, in some cases, the number of calls will be higher than using other methods.
|
||||
// For instance, if called on a formula that is already a MUS, it will perform n*(n-1) calls to SAT, where
|
||||
// n is the number of clauses of the problem.
|
||||
func (pb *Problem) MUSInsertion() (mus *Problem, err error) {
|
||||
pb2, err := pb.UnsatSubset()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("could not extract MUS: %v", err)
|
||||
}
|
||||
mus = &Problem{NbVars: pb2.NbVars}
|
||||
clauses := pb2.Clauses
|
||||
for {
|
||||
if pb.Options.Verbose {
|
||||
fmt.Printf("c mus currently contains %d clauses\n", mus.NbClauses)
|
||||
}
|
||||
s := solver.New(solver.ParseSliceNb(mus.Clauses, mus.NbVars))
|
||||
s.Verbose = pb.Options.Verbose
|
||||
st := s.Solve()
|
||||
if st == solver.Unsat { // Found the MUS
|
||||
return mus, nil
|
||||
}
|
||||
// Add clauses until the problem becomes UNSAT
|
||||
idx := 0
|
||||
for st == solver.Sat {
|
||||
clause := clauses[idx]
|
||||
lits := make([]solver.Lit, len(clause))
|
||||
for i, lit := range clause {
|
||||
lits[i] = solver.IntToLit(int32(lit))
|
||||
}
|
||||
cl := solver.NewClause(lits)
|
||||
s.AppendClause(cl)
|
||||
idx++
|
||||
st = s.Solve()
|
||||
}
|
||||
idx-- // We went one step too far, go back
|
||||
mus.Clauses = append(mus.Clauses, clauses[idx]) // Last clause is part of the MUS
|
||||
mus.NbClauses++
|
||||
if pb.Options.Verbose {
|
||||
fmt.Printf("c removing %d/%d clause(s)\n", len(clauses)-idx, len(clauses))
|
||||
}
|
||||
clauses = clauses[:idx] // Remaining clauses are not part of the MUS
|
||||
}
|
||||
}
|
||||
|
||||
// MUSDeletion returns a Minimal Unsatisfiable Subset for the problem using the insertion method.
|
||||
// A MUS is an unsatisfiable subset such that, if any of its clause is removed,
|
||||
// the problem becomes satisfiable.
|
||||
// A MUS can be useful to understand why a problem is UNSAT, but MUSes are expensive to compute since
|
||||
// a SAT solver must be called several times on parts of the original problem to find them.
|
||||
// The deletion algorithm is guaranteed to call exactly n SAT solvers, where n is the number of clauses in the problem.
|
||||
// It can be quite efficient, but each time the solver is called, it is starting from scratch.
|
||||
// Other methods keep the solver "hot", so despite requiring more calls, these methods can be more efficient in practice.
|
||||
func (pb *Problem) MUSDeletion() (mus *Problem, err error) {
|
||||
pb2, err := pb.UnsatSubset()
|
||||
if err != nil {
|
||||
if err == ErrNotUnsat {
|
||||
return nil, err
|
||||
}
|
||||
return nil, fmt.Errorf("could not extract MUS: %v", err)
|
||||
}
|
||||
pb2.NbVars += pb2.NbClauses // Add one relax var for each clause
|
||||
for i, clause := range pb2.Clauses { // Add relax lit to each clause
|
||||
newClause := make([]int, len(clause)+1)
|
||||
copy(newClause, clause)
|
||||
newClause[len(clause)] = pb.NbVars + i + 1 // Add relax lit to the clause
|
||||
pb2.Clauses[i] = newClause
|
||||
}
|
||||
s := solver.New(solver.ParseSlice(pb2.Clauses))
|
||||
asumptions := make([]solver.Lit, pb2.NbClauses)
|
||||
for i := 0; i < pb2.NbClauses; i++ {
|
||||
asumptions[i] = solver.IntToLit(int32(-(pb.NbVars + i + 1))) // At first, all asumptions are false
|
||||
}
|
||||
for i := range pb2.Clauses {
|
||||
// Relax current clause
|
||||
asumptions[i] = asumptions[i].Negation()
|
||||
s.Assume(asumptions)
|
||||
if s.Solve() == solver.Sat {
|
||||
// It is now sat; reinsert the clause, i.e re-falsify the relax lit
|
||||
asumptions[i] = asumptions[i].Negation()
|
||||
if pb.Options.Verbose {
|
||||
fmt.Printf("c clause %d/%d: kept\n", i+1, pb2.NbClauses)
|
||||
}
|
||||
} else if pb.Options.Verbose {
|
||||
fmt.Printf("c clause %d/%d: removed\n", i+1, pb2.NbClauses)
|
||||
}
|
||||
}
|
||||
mus = &Problem{
|
||||
NbVars: pb.NbVars,
|
||||
}
|
||||
for i, val := range asumptions {
|
||||
if !val.IsPositive() {
|
||||
// Lit is not relaxed, meaning the clause is part of the MUS
|
||||
clause := pb2.Clauses[i]
|
||||
clause = clause[:len(clause)-1] // Remove relax lit
|
||||
mus.Clauses = append(mus.Clauses, clause)
|
||||
}
|
||||
mus.NbClauses = len(mus.Clauses)
|
||||
}
|
||||
return mus, nil
|
||||
}
|
||||
|
||||
// MUS returns a Minimal Unsatisfiable Subset for the problem.
|
||||
// A MUS is an unsatisfiable subset such that, if any of its clause is removed,
|
||||
// the problem becomes satisfiable.
|
||||
// A MUS can be useful to understand why a problem is UNSAT, but MUSes are expensive to compute since
|
||||
// a SAT solver must be called several times on parts of the original problem to find them.
|
||||
// The exact algorithm used to compute the MUS is not guaranteed. If you want to use a given algorithm,
|
||||
// use the relevant functions.
|
||||
func (pb *Problem) MUS() (mus *Problem, err error) {
|
||||
return pb.MUSDeletion()
|
||||
}
|
vendor/github.com/crillab/gophersat/explain/parser.go (new file, 104 lines, generated, vendored)
@@ -0,0 +1,104 @@
package explain

import (
	"bufio"
	"fmt"
	"io"
	"strconv"
	"strings"
)

// parseClause parses a line representing a clause in the DIMACS CNF syntax.
func parseClause(fields []string) ([]int, error) {
	clause := make([]int, 0, len(fields)-1)
	for _, rawLit := range fields {
		lit, err := strconv.Atoi(rawLit)
		if err != nil {
			return nil, fmt.Errorf("could not parse clause %v: %v", fields, err)
		}
		if lit != 0 {
			clause = append(clause, lit)
		}
	}
	return clause, nil
}

// ParseCNF parses a CNF and returns the associated problem.
func ParseCNF(r io.Reader) (*Problem, error) {
	sc := bufio.NewScanner(r)
	var pb Problem
	for sc.Scan() {
		line := sc.Text()
		fields := strings.Fields(line)
		if len(fields) == 0 {
			continue
		}
		switch fields[0] {
		case "c":
			continue
		case "p":
			if err := pb.parseHeader(fields); err != nil {
				return nil, fmt.Errorf("could not parse header %q: %v", line, err)
			}
		default:
			if err := pb.parseClause(fields); err != nil {
				return nil, fmt.Errorf("could not parse clause %q: %v", line, err)
			}
		}
	}
	if err := sc.Err(); err != nil {
		return nil, fmt.Errorf("could not parse problem: %v", err)
	}
	return &pb, nil
}

func (pb *Problem) parseHeader(fields []string) error {
	if len(fields) != 4 {
		return fmt.Errorf("expected 4 fields, got %d", len(fields))
	}
	strVars := fields[2]
	strClauses := fields[3]
	var err error
	pb.NbVars, err = strconv.Atoi(fields[2])
	if err != nil {
		return fmt.Errorf("invalid number of vars %q: %v", strVars, err)
	}
	if pb.NbVars < 0 {
		return fmt.Errorf("negative number of vars %d", pb.NbVars)
	}
	pb.units = make([]int, pb.NbVars)
	pb.NbClauses, err = strconv.Atoi(fields[3])
	if err != nil {
		return fmt.Errorf("invalid number of clauses %s: %v", strClauses, err)
	}
	if pb.NbClauses < 0 {
		return fmt.Errorf("negative number of clauses %d", pb.NbClauses)
	}
	pb.Clauses = make([][]int, 0, pb.NbClauses)
	return nil
}

func (pb *Problem) parseClause(fields []string) error {
	clause, err := parseClause(fields)
	if err != nil {
		return err
	}
	pb.Clauses = append(pb.Clauses, clause)
	if len(clause) == 1 {
		lit := clause[0]
		v := lit
		if lit < 0 {
			v = -v
		}
		if v > pb.NbVars {
			// There was an error in the header
			return fmt.Errorf("found lit %d but problem was supposed to hold only %d vars", lit, pb.NbVars)
		}
		if lit > 0 {
			pb.units[v-1] = 1
		} else {
			pb.units[v-1] = -1
		}
	}
	return nil
}
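For reference, a small hand-written DIMACS file of the kind ParseCNF and parseHeader above accept: a "p cnf <vars> <clauses>" header, "c" comment lines that are skipped, and clauses written as space-separated literals terminated by 0. The formula itself is made up for illustration (and happens to be unsatisfiable by unit propagation alone):

c three variables, four clauses; comment lines start with "c"
p cnf 3 4
1 -2 0
2 3 0
-1 0
-3 0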
125
vendor/github.com/crillab/gophersat/explain/problem.go
generated
vendored
Normal file
@@ -0,0 +1,125 @@
package explain

import (
	"fmt"
	"strings"
)

// A Problem is a conjunction of Clauses.
// This package does not use solver's representation.
// We want this code to be as simple as possible to be easy to audit.
// On the other hand, solver's code must be as efficient as possible.
type Problem struct {
	Clauses   [][]int
	NbVars    int
	NbClauses int
	units     []int // For each var, 0 if the var is unbound, 1 if true, -1 if false
	Options   Options
	tagged    []bool // List of clauses used while proving the problem is unsat. Initialized lazily
}

func (pb *Problem) initTagged() {
	pb.tagged = make([]bool, pb.NbClauses)
	for i, clause := range pb.Clauses {
		// Unit clauses are tagged as they will probably be used during resolution
		pb.tagged[i] = len(clause) == 1
	}
}

func (pb *Problem) clone() *Problem {
	pb2 := &Problem{
		Clauses:   make([][]int, pb.NbClauses),
		NbVars:    pb.NbVars,
		NbClauses: pb.NbClauses,
		units:     make([]int, pb.NbVars),
	}
	copy(pb2.units, pb.units)
	for i, clause := range pb.Clauses {
		pb2.Clauses[i] = make([]int, len(clause))
		copy(pb2.Clauses[i], clause)
	}
	return pb2
}

// restore removes all learned clauses, if any.
func (pb *Problem) restore() {
	pb.Clauses = pb.Clauses[:pb.NbClauses]
}

// unsat returns true iff the problem can be proven unsat through unit propagation.
// This method modifies pb.units.
func (pb *Problem) unsat() bool {
	done := make([]bool, len(pb.Clauses)) // clauses that were deemed sat during propagation
	modified := true
	for modified {
		modified = false
		for i, clause := range pb.Clauses {
			if done[i] { // That clause was already proved true
				continue
			}
			unbound := 0
			var unit int // An unbound literal, if any
			sat := false
			for _, lit := range clause {
				v := lit
				if v < 0 {
					v = -v
				}
				binding := pb.units[v-1]
				if binding == 0 {
					unbound++
					if unbound == 1 {
						unit = lit
					} else {
						break
					}
				} else if binding*lit == v { // (binding == -1 && lit < 0) || (binding == 1 && lit > 0)
					sat = true
					break
				}
			}
			if sat {
				done[i] = true
				continue
			}
			if unbound == 0 {
				// All lits are false: problem is UNSAT
				if i < pb.NbClauses {
					pb.tagged[i] = true
				}
				return true
			}
			if unbound == 1 {
				if unit < 0 {
					pb.units[-unit-1] = -1
				} else {
					pb.units[unit-1] = 1
				}
				done[i] = true
				if i < pb.NbClauses {
					pb.tagged[i] = true
				}
				modified = true
			}
		}
	}
	// Problem is either sat or could not be proven unsat through unit propagation
	return false
}

// CNF returns a representation of the problem using the Dimacs syntax.
func (pb *Problem) CNF() string {
	lines := make([]string, 1, pb.NbClauses+1)
	lines[0] = fmt.Sprintf("p cnf %d %d", pb.NbVars, pb.NbClauses)
	for i := 0; i < pb.NbClauses; i++ {
		clause := pb.Clauses[i]
		strClause := make([]string, len(clause)+1)
		for i, lit := range clause {
			strClause[i] = fmt.Sprintf("%d", lit)
		}
		strClause[len(clause)] = "0"
		line := strings.Join(strClause, " ")
		lines = append(lines, line)
	}
	return strings.Join(lines, "\n")
}
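The unsat method above is plain unit propagation: keep scanning the clauses, declare UNSAT when every literal of some clause is false, and force the value of any clause that has exactly one unbound literal left. A self-contained re-implementation of that loop, written only to illustrate the algorithm (it is not the vendored code), run on the same formula as the DIMACS sample shown earlier:

package main

import "fmt"

// unitPropagate returns true when the clause set is proven UNSAT by unit
// propagation alone. units holds 0 for unbound, 1 for true, -1 for false.
func unitPropagate(nbVars int, clauses [][]int) bool {
	units := make([]int, nbVars)
	for changed := true; changed; {
		changed = false
		for _, clause := range clauses {
			unbound, unit, sat := 0, 0, false
			for _, lit := range clause {
				v := lit
				if v < 0 {
					v = -v
				}
				switch b := units[v-1]; {
				case b == 0:
					unbound++
					unit = lit
				case b*lit > 0: // literal already satisfied
					sat = true
				}
			}
			if sat {
				continue
			}
			if unbound == 0 {
				return true // every literal is false: UNSAT
			}
			if unbound == 1 {
				if unit < 0 {
					units[-unit-1] = -1
				} else {
					units[unit-1] = 1
				}
				changed = true
			}
		}
	}
	return false // not proven UNSAT by propagation alone
}

func main() {
	// Same formula as the DIMACS sample: the units -1 and -3 force x2 false,
	// which leaves clause (2 3) with no true literal, so this prints true.
	fmt.Println(unitPropagate(3, [][]int{{1, -2}, {2, 3}, {-1}, {-3}}))
}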
20
vendor/github.com/crillab/gophersat/solver/solver.go
generated
vendored
@@ -127,6 +127,25 @@ func New(problem *Problem) *Solver {
	return s
}

// newVar is used to indicate a new variable must be added to the solver.
// This can be used when new clauses are appended and these clauses contain vars that were unseen so far.
// If the var already existed, nothing will happen.
func (s *Solver) newVar(v Var) {
	if cnfVar := int(v.Int()); cnfVar > s.nbVars {
		// If the var already existed, do nothing
		for i := s.nbVars; i < cnfVar; i++ {
			s.model = append(s.model, 0)
			s.activity = append(s.activity, 0.)
			s.polarity = append(s.polarity, false)
			s.reason = append(s.reason, nil)
			s.trailBuf = append(s.trailBuf, 0)
		}
		s.varQueue = newQueue(s.activity)
		s.addVarWatcherList(v)
		s.nbVars = cnfVar
	}
}

// sets initial activity for optimization variables, if any.
func (s *Solver) initOptimActivity() {
	for i, lit := range s.minLits {
@@ -682,6 +701,7 @@ func (s *Solver) AppendClause(clause *Clause) {
	i := 0
	for i < clause.Len() {
		lit := clause.Get(i)
		s.newVar(lit.Var())
		switch s.litStatus(lit) {
		case Sat:
			w := clause.Weight(i)
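newVar above grows every per-variable slice of the solver when an appended clause mentions a variable the solver has not seen yet. A standalone sketch of that lazy-growth pattern; the miniSolver type and its fields are hypothetical and only mirror the bookkeeping idea, not gophersat's actual internals:

package main

import "fmt"

type miniSolver struct {
	nbVars   int
	model    []int
	activity []float64
	polarity []bool
}

// ensureVar grows every per-variable slice up to cnfVar so later code can
// index them safely; calling it for an already-known variable is a no-op.
func (s *miniSolver) ensureVar(cnfVar int) {
	if cnfVar <= s.nbVars {
		return
	}
	for i := s.nbVars; i < cnfVar; i++ {
		s.model = append(s.model, 0)
		s.activity = append(s.activity, 0)
		s.polarity = append(s.polarity, false)
	}
	s.nbVars = cnfVar
}

func main() {
	s := &miniSolver{}
	s.ensureVar(3) // a clause mentioning variable 3 arrives first
	s.ensureVar(2) // no-op: variable 2 already exists
	fmt.Println(s.nbVars, len(s.model), len(s.activity), len(s.polarity))
}

The real newVar additionally rebuilds the variable queue and the watcher lists (see addVarWatcherList below), which this sketch deliberately leaves out.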
5
vendor/github.com/crillab/gophersat/solver/types.go
generated
vendored
@@ -71,6 +71,11 @@ func (v Var) Lit() Lit {
	return Lit(v * 2)
}

// Int converts a Var to a CNF variable.
func (v Var) Int() int32 {
	return int32(v + 1)
}

// SignedLit returns the Lit associated to v, negated if 'signed', positive otherwise.
func (v Var) SignedLit(signed bool) Lit {
	if signed {
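Var.Int above exposes the solver's 0-based variables as 1-based CNF variables, while Var.Lit encodes the positive literal of variable v as 2*v (SignedLit presumably yields 2*v+1 for the negated form, though its body is not shown in this hunk). A throwaway sketch of that numbering, independent of the library types:

package main

import "fmt"

func main() {
	for v := 0; v < 3; v++ {
		cnfVar := v + 1   // what Var.Int() returns for this Var
		posLit := 2 * v   // what Var.Lit() returns (the positive literal)
		negLit := 2*v + 1 // presumably the negated literal from SignedLit(true)
		fmt.Println(v, cnfVar, posLit, negLit)
	}
}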
10
vendor/github.com/crillab/gophersat/solver/watcher.go
generated
vendored
@@ -39,6 +39,16 @@ func (s *Solver) initWatcherList(clauses []*Clause) {
	}
}

// Should be called when new vars are added to the problem (see Solver.newVar)
func (s *Solver) addVarWatcherList(v Var) {
	cnfVar := int(v.Int())
	for i := s.nbVars; i < cnfVar; i++ {
		s.wl.wlistBin = append(s.wl.wlistBin, nil, nil)
		s.wl.wlist = append(s.wl.wlist, nil, nil)
		s.wl.wlistPb = append(s.wl.wlistPb, nil, nil)
	}
}

// appendClause appends the clause without checking whether the clause is already satisfiable, unit, or unsatisfiable.
// To perform those checks, call s.AppendClause.
// clause is supposed to be a problem clause, not a learned one.
3
vendor/modules.txt
vendored
@@ -115,9 +115,10 @@ github.com/containerd/ttrpc
github.com/containerd/typeurl
# github.com/cpuguy83/go-md2man/v2 v2.0.0
github.com/cpuguy83/go-md2man/v2/md2man
# github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765
# github.com/crillab/gophersat v1.3.2-0.20210701121804-72b19f5b6b38
## explicit
github.com/crillab/gophersat/bf
github.com/crillab/gophersat/explain
github.com/crillab/gophersat/solver
# github.com/cyphar/filepath-securejoin v0.2.2
github.com/cyphar/filepath-securejoin