Compare commits

...

7 Commits

Author SHA1 Message Date
Ettore Di Giacinto
65a55e242e Tag 0.17.6 2021-08-07 18:04:48 +02:00
Ettore Di Giacinto
77c4bf1fd1 update vendor 2021-08-07 14:46:05 +02:00
Ettore Di Giacinto
4d6cccb2fa Give explanation when formulas are unsat
Fixes #168

Signed-off-by: Ettore Di Giacinto <mudler@sabayon.org>
2021-08-07 14:46:05 +02:00
Ettore Di Giacinto
ec7be63418 Tag 0.17.5 2021-08-05 09:02:35 +02:00
Ettore Di Giacinto
33b1c63815 Allow shared templates across packages
This changeset allows shared templates to be kept in a static folder
"templates" present in each luet tree. If the directory is present, it
gets scanned and its templates are rendered on top of each package. This
makes it possible to use that folder to store custom blocks shared
between packages.

This is still experimental and subject to change; it is a first-pass
version of the feature. It still needs refinement: it would be more
elegant to use the Helm engine properly and map our structure onto the
engine instead of adapting it roughly.

Fixes #224
2021-08-04 16:35:28 +02:00
Ettore Di Giacinto
86bd6c5fc0 Tag 0.17.4 2021-08-04 11:55:23 +02:00
Ettore Di Giacinto
658612fcf3 Also check file conflicts on packages that are going to be swapped out
Fixes #237
2021-08-04 10:43:12 +02:00
31 changed files with 1118 additions and 70 deletions

View File

@@ -160,6 +160,17 @@ Build packages specifying multiple definition trees:
opts.Options = solver.Options{Type: solver.SingleCoreSimple, Concurrency: concurrency}
}
templateFolders := []string{}
if !fromRepo {
for _, t := range treePaths {
templateFolders = append(templateFolders, filepath.Join(t, "templates"))
}
} else {
for _, s := range installer.SystemRepositories(LuetCfg) {
templateFolders = append(templateFolders, filepath.Join(s.TreePath, "templates"))
}
}
luetCompiler := compiler.NewLuetCompiler(compilerBackend, generalRecipe.GetDatabase(),
options.NoDeps(nodeps),
options.WithBackendType(backendType),
@@ -168,6 +179,7 @@ Build packages specifying multiple definition trees:
options.WithPullRepositories(pullRepo),
options.WithPushRepository(imageRepository),
options.Rebuild(rebuild),
options.WithTemplateFolder(templateFolders),
options.WithSolverOptions(*opts),
options.Wait(wait),
options.OnlyTarget(onlyTarget),

View File

@@ -41,7 +41,7 @@ var Verbose bool
var LockedCommands = []string{"install", "uninstall", "upgrade"}
const (
LuetCLIVersion = "0.17.3"
LuetCLIVersion = "0.17.6"
LuetEnvPrefix = "LUET"
license = `
Luet Copyright (C) 2019-2021 Ettore Di Giacinto

2
go.mod
View File

@@ -10,7 +10,7 @@ require (
github.com/briandowns/spinner v1.12.1-0.20201220203425-e201aaea0a31
github.com/cavaliercoder/grab v1.0.1-0.20201108051000-98a5bfe305ec
github.com/containerd/containerd v1.4.1-0.20201117152358-0edc412565dc
github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765
github.com/crillab/gophersat v1.3.2-0.20210701121804-72b19f5b6b38
github.com/docker/cli v20.10.0-beta1.0.20201029214301-1d20b15adc38+incompatible
github.com/docker/distribution v2.7.1+incompatible
github.com/docker/docker v20.10.0-beta1.0.20201110211921-af34b94a78a1+incompatible

2
go.sum
View File

@@ -339,6 +339,8 @@ github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7Do
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765 h1:EO5Dm7O50aaEwv1GTFWNLAj7vNQ1OjW3+DeJCy1vTMk=
github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765/go.mod h1:S91tHga1PCZzYhCkStwZAhvp1rCc+zqtSi55I+vDWGc=
github.com/crillab/gophersat v1.3.2-0.20210701121804-72b19f5b6b38 h1:sOsE90v/iPmouqh4d3cKQl8JuqTOEXg+fP+7YuY7R6U=
github.com/crillab/gophersat v1.3.2-0.20210701121804-72b19f5b6b38/go.mod h1:S91tHga1PCZzYhCkStwZAhvp1rCc+zqtSi55I+vDWGc=
github.com/cyphar/filepath-securejoin v0.2.2 h1:jCwT2GTP+PY5nBz3c/YL5PAIbusElVrPujOBSCj8xRg=
github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4=
github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=

View File

@@ -42,6 +42,7 @@ import (
"github.com/mudler/luet/pkg/solver"
"github.com/pkg/errors"
"gopkg.in/yaml.v2"
"helm.sh/helm/v3/pkg/chart"
)
const BuildFile = "build.yaml"
@@ -750,13 +751,13 @@ func (cs *LuetCompiler) inheritSpecBuildOptions(p *compilerspec.LuetCompilationS
p.BuildOptions.PullImageRepository = append(p.BuildOptions.PullImageRepository, cs.Options.PullImageRepository...)
Debug("Inheriting pull repository from PullImageRepository buildoptions", p.BuildOptions.PullImageRepository)
}
Debug(p.GetPackage().HumanReadableString(), "Build options after inherit", p.BuildOptions)
}
func (cs *LuetCompiler) getSpecHash(pkgs pkg.DefaultPackages, salt string) (string, error) {
ht := NewHashTree(cs.Database)
overallFp := ""
for _, p := range pkgs {
compileSpec, err := cs.FromPackage(p)
if err != nil {
@@ -1128,6 +1129,14 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, generateF
type templatedata map[string]interface{}
func (cs *LuetCompiler) templatePackage(vals []map[string]interface{}, pack pkg.Package, dst templatedata) ([]byte, error) {
// Grab shared templates first
var chartFiles []*chart.File
if len(cs.Options.TemplatesFolder) != 0 {
c, err := helpers.ChartFiles(cs.Options.TemplatesFolder)
if err == nil {
chartFiles = c
}
}
var dataresult []byte
val := pack.Rel(DefinitionFile)
@@ -1165,7 +1174,7 @@ func (cs *LuetCompiler) templatePackage(vals []map[string]interface{}, pack pkg.
return nil, errors.Wrap(err, "merging values maps")
}
dat, err := helpers.RenderHelm(string(dataBuild), td, dst)
dat, err := helpers.RenderHelm(append(chartFiles, helpers.ChartFileB(dataBuild)...), td, dst)
if err != nil {
return nil, errors.Wrap(err, "rendering file "+pack.Rel(BuildFile))
}
@@ -1190,7 +1199,13 @@ func (cs *LuetCompiler) templatePackage(vals []map[string]interface{}, pack pkg.
bv = append([]string{f}, bv...)
}
}
out, err := helpers.RenderFiles(pack.Rel(BuildFile), val, bv...)
raw, err := ioutil.ReadFile(pack.Rel(BuildFile))
if err != nil {
return nil, err
}
out, err := helpers.RenderFiles(append(chartFiles, helpers.ChartFileB(raw)...), val, bv...)
if err != nil {
return nil, errors.Wrap(err, "rendering file "+pack.Rel(BuildFile))
}

View File

@@ -43,6 +43,9 @@ type Compiler struct {
BackendArgs []string
BackendType string
// TemplatesFolder should default to tree/templates
TemplatesFolder []string
}
func NewDefaultCompiler() *Compiler {
@@ -87,6 +90,13 @@ func WithBackendType(r string) func(cfg *Compiler) error {
}
}
func WithTemplateFolder(r []string) func(cfg *Compiler) error {
return func(cfg *Compiler) error {
cfg.TemplatesFolder = r
return nil
}
}
func WithBuildValues(r []string) func(cfg *Compiler) error {
return func(cfg *Compiler) error {
cfg.BuildValuesFile = r
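
For illustration, a minimal, hedged sketch of how the new functional option is applied (the folder paths here are hypothetical, not part of this changeset):

cfg := NewDefaultCompiler()
opt := WithTemplateFolder([]string{"tree/templates", "overlay/templates"})
if err := opt(cfg); err != nil {
    // the option itself never fails, but options return an error by convention
}
// cfg.TemplatesFolder == []string{"tree/templates", "overlay/templates"}

In cmd/build.go above, the same option is simply passed to compiler.NewLuetCompiler along with the other options.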

View File

@@ -27,6 +27,8 @@ import (
"strings"
"time"
fileHelper "github.com/mudler/luet/pkg/helpers/file"
pkg "github.com/mudler/luet/pkg/package"
solver "github.com/mudler/luet/pkg/solver"
@@ -91,7 +93,7 @@ func (opts LuetSolverOptions) Resolver() solver.PackageResolver {
return solver.SimpleQLearningSolver()
}
return &solver.DummyPackageResolver{}
return &solver.Explainer{}
}
func (opts *LuetSolverOptions) CompactString() string {
@@ -108,15 +110,12 @@ type LuetSystemConfig struct {
}
func (s *LuetSystemConfig) SetRootFS(path string) error {
pathToSet := path
if !filepath.IsAbs(path) {
abs, err := filepath.Abs(path)
if err != nil {
return err
}
pathToSet = abs
p, err := fileHelper.Rel2Abs(path)
if err != nil {
return err
}
s.Rootfs = pathToSet
s.Rootfs = p
return nil
}

View File

@@ -287,3 +287,15 @@ func CopyDir(src string, dst string) (err error) {
Sync: true,
OnSymlink: func(string) copy.SymlinkAction { return copy.Shallow }})
}
func Rel2Abs(s string) (string, error) {
pathToSet := s
if !filepath.IsAbs(s) {
abs, err := filepath.Abs(s)
if err != nil {
return "", err
}
pathToSet = abs
}
return pathToSet, nil
}
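
A hedged usage note for the new helper: Rel2Abs normalizes a possibly relative path against the current working directory, which is why both SetRootFS above and ChartFiles below can reuse it. A small sketch (paths are hypothetical):

abs, err := Rel2Abs("tree/templates") // e.g. "/home/user/tree/templates", depending on the CWD
if err != nil {
    // filepath.Abs failed, e.g. the working directory could not be determined
}
same, _ := Rel2Abs("/usr/share/luet") // absolute inputs are returned unchanged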

View File

@@ -2,6 +2,8 @@ package helpers
import (
"io/ioutil"
"path/filepath"
"strings"
fileHelper "github.com/mudler/luet/pkg/helpers/file"
@@ -13,17 +15,86 @@ import (
"helm.sh/helm/v3/pkg/engine"
)
// ChartFileB is a helper that takes a slice of bytes and constructs a chart.File slice from it
func ChartFileB(s []byte) []*chart.File {
return []*chart.File{
{Name: "templates", Data: s},
}
}
// ChartFileS is a helper that takes a string and constructs a chart.File slice from it
func ChartFileS(s string) []*chart.File {
return []*chart.File{
{Name: "templates", Data: []byte(s)},
}
}
// ChartFile reads all the given files and returns a []*chart.File slice
// containing the raw content and the file name of each file
func ChartFile(s ...string) []*chart.File {
files := []*chart.File{}
for _, c := range s {
raw, err := ioutil.ReadFile(c)
if err != nil {
return files
}
files = append(files, &chart.File{Name: c, Data: raw})
}
return files
}
// ChartFiles reads a list of paths and all the YAML files inside each of them.
// It returns a slice of *chart.File with the raw content of each YAML file
func ChartFiles(path []string) ([]*chart.File, error) {
var chartFiles []*chart.File
for _, t := range path {
rel, err := fileHelper.Rel2Abs(t)
if err != nil {
return nil, err
}
if !fileHelper.Exists(rel) {
continue
}
files, err := fileHelper.ListDir(rel)
if err != nil {
return nil, err
}
for _, f := range files {
if strings.ToLower(filepath.Ext(f)) == ".yaml" {
raw, err := ioutil.ReadFile(f)
if err != nil {
return nil, err
}
chartFiles = append(chartFiles, &chart.File{Name: f, Data: raw})
}
}
}
return chartFiles, nil
}
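
A hedged usage sketch of ChartFiles (the folder list is hypothetical): folders that do not exist are skipped silently, and every .yaml file found becomes one chart.File carrying its raw content:

shared, err := ChartFiles([]string{"tree/templates", "other-tree/templates"})
if err != nil {
    // only path resolution and read errors on folders/files that do exist end up here
}
// each entry in shared has Name set to the file path and Data to the raw YAML bytes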
// RenderHelm renders the template string with helm
func RenderHelm(template string, values, d map[string]interface{}) (string, error) {
func RenderHelm(files []*chart.File, values, d map[string]interface{}) (string, error) {
// We slurp all the files into one here. This is not elegant, but it works.
// As a reminder, the files passed here start with the templates from the 'templates/' folder
// of each luet tree and end with the package buildspec to be templated.
// TODO: Replace this by populating the files properly so that the helm render engine
// templates them correctly
toTemplate := ""
for _, f := range files {
toTemplate += string(f.Data)
}
c := &chart.Chart{
Metadata: &chart.Metadata{
Name: "",
Version: "",
},
Templates: []*chart.File{
{Name: "templates", Data: []byte(template)},
},
Values: map[string]interface{}{"Values": values},
Templates: ChartFileS(toTemplate),
Values: map[string]interface{}{"Values": values},
}
v, err := chartutil.CoalesceValues(c, map[string]interface{}{"Values": d})
@@ -71,23 +142,18 @@ func reverse(s []string) []string {
return s
}
func RenderFiles(toTemplate, valuesFile string, defaultFile ...string) (string, error) {
raw, err := ioutil.ReadFile(toTemplate)
if err != nil {
return "", errors.Wrap(err, "reading file "+toTemplate)
}
func RenderFiles(files []*chart.File, valuesFile string, defaultFile ...string) (string, error) {
if !fileHelper.Exists(valuesFile) {
return "", errors.Wrap(err, "file not existing "+valuesFile)
return "", errors.New("file does not exist: " + valuesFile)
}
val, err := ioutil.ReadFile(valuesFile)
if err != nil {
return "", errors.Wrap(err, "reading file "+valuesFile)
return "", errors.Wrap(err, "reading file: "+valuesFile)
}
var values templatedata
if err = yaml.Unmarshal(val, &values); err != nil {
return "", errors.Wrap(err, "unmarshalling file "+toTemplate)
return "", errors.Wrap(err, "unmarshalling values")
}
dst, err := UnMarshalValues(defaultFile)
@@ -95,5 +161,5 @@ func RenderFiles(toTemplate, valuesFile string, defaultFile ...string) (string,
return "", errors.Wrap(err, "unmarshalling values")
}
return RenderHelm(string(raw), values, dst)
return RenderHelm(files, values, dst)
}
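
Putting the new helpers together, this is roughly how the compiler feeds shared templates plus a package buildspec to Helm; a sketch only, where the buildspec body and values are hypothetical and mirror the shared_templates fixture further below:

shared, _ := ChartFiles([]string{"tree/templates"}) // shared blocks, e.g. a "writefile" define
raw := []byte("steps:\n{{ template \"writefile\" .Values.name }}")
out, err := RenderHelm(append(shared, ChartFileB(raw)...), map[string]interface{}{"name": "foo"}, map[string]interface{}{})
if err != nil {
    // rendering failed
}
_ = out // rendered buildspec; shared files come first, so the buildspec can call blocks defined under templates/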

View File

@@ -30,21 +30,21 @@ func writeFile(path string, content string) {
Expect(err).ToNot(HaveOccurred())
}
var _ = Describe("Helpers", func() {
var _ = Describe("Helm", func() {
Context("RenderHelm", func() {
It("Renders templates", func() {
out, err := RenderHelm("{{.Values.Test}}{{.Values.Bar}}", map[string]interface{}{"Test": "foo"}, map[string]interface{}{"Bar": "bar"})
out, err := RenderHelm(ChartFileS("{{.Values.Test}}{{.Values.Bar}}"), map[string]interface{}{"Test": "foo"}, map[string]interface{}{"Bar": "bar"})
Expect(err).ToNot(HaveOccurred())
Expect(out).To(Equal("foobar"))
})
It("Renders templates with overrides", func() {
out, err := RenderHelm("{{.Values.Test}}{{.Values.Bar}}", map[string]interface{}{"Test": "foo", "Bar": "baz"}, map[string]interface{}{"Bar": "bar"})
out, err := RenderHelm(ChartFileS("{{.Values.Test}}{{.Values.Bar}}"), map[string]interface{}{"Test": "foo", "Bar": "baz"}, map[string]interface{}{"Bar": "bar"})
Expect(err).ToNot(HaveOccurred())
Expect(out).To(Equal("foobar"))
})
It("Renders templates", func() {
out, err := RenderHelm("{{.Values.Test}}{{.Values.Bar}}", map[string]interface{}{"Test": "foo", "Bar": "bar"}, map[string]interface{}{})
out, err := RenderHelm(ChartFileS("{{.Values.Test}}{{.Values.Bar}}"), map[string]interface{}{"Test": "foo", "Bar": "bar"}, map[string]interface{}{})
Expect(err).ToNot(HaveOccurred())
Expect(out).To(Equal("foobar"))
})
@@ -68,7 +68,7 @@ foo: "baz"
Expect(err).ToNot(HaveOccurred())
res, err := RenderFiles(toTemplate, values, d)
res, err := RenderFiles(ChartFile(toTemplate), values, d)
Expect(err).ToNot(HaveOccurred())
Expect(res).To(Equal("baz"))
@@ -93,7 +93,7 @@ faa: "baz"
Expect(err).ToNot(HaveOccurred())
res, err := RenderFiles(toTemplate, values, d)
res, err := RenderFiles(ChartFile(toTemplate), values, d)
Expect(err).ToNot(HaveOccurred())
Expect(res).To(Equal("bar"))
@@ -114,7 +114,7 @@ foo: "bar"
Expect(err).ToNot(HaveOccurred())
res, err := RenderFiles(toTemplate, values)
res, err := RenderFiles(ChartFile(toTemplate), values)
Expect(err).ToNot(HaveOccurred())
Expect(res).To(Equal("bar"))
})
@@ -145,11 +145,11 @@ bar: "nei"
Expect(err).ToNot(HaveOccurred())
res, err := RenderFiles(toTemplate, values, d2, d)
res, err := RenderFiles(ChartFile(toTemplate), values, d2, d)
Expect(err).ToNot(HaveOccurred())
Expect(res).To(Equal("bazneif"))
res, err = RenderFiles(toTemplate, values, d, d2)
res, err = RenderFiles(ChartFile(toTemplate), values, d, d2)
Expect(err).ToNot(HaveOccurred())
Expect(res).To(Equal("doneif"))
})
@@ -173,7 +173,7 @@ faa: "baz"
Expect(err).ToNot(HaveOccurred())
res, err := RenderFiles(toTemplate, values, d)
res, err := RenderFiles(ChartFile(toTemplate), values, d)
Expect(err).ToNot(HaveOccurred())
Expect(res).To(Equal(""))

View File

@@ -272,6 +272,15 @@ func (l *LuetInstaller) swap(o Option, syncedRepos Repositories, toRemove pkg.Pa
if err := l.download(syncedRepos, match); err != nil {
return errors.Wrap(err, "Pre-downloading packages")
}
if err := l.checkFileconflicts(match, false, s); err != nil {
if !l.Options.Force {
return errors.Wrap(err, "file conflict found")
} else {
Warning("file conflict found", err.Error())
}
}
if l.Options.DownloadOnly {
return nil
}
@@ -756,7 +765,7 @@ func (l *LuetInstaller) getFinalizers(allRepos pkg.PackageDatabase, solution sol
return toFinalize, nil
}
func (l *LuetInstaller) checkFileconflicts(toInstall map[string]ArtifactMatch, s *System) error {
func (l *LuetInstaller) checkFileconflicts(toInstall map[string]ArtifactMatch, checkSystem bool, s *System) error {
Info("Checking for file conflicts..")
defer s.Clean() // Release memory
@@ -777,18 +786,19 @@ func (l *LuetInstaller) checkFileconflicts(toInstall map[string]ArtifactMatch, s
"file conflict between packages to be installed",
)
}
exists, p, err := s.ExistsPackageFile(f)
if err != nil {
return errors.Wrap(err, "failed checking into system db")
}
if exists {
return fmt.Errorf(
"file conflict between '%s' and '%s' ( file: %s )",
p.HumanReadableString(),
m.Package.HumanReadableString(),
f,
)
if checkSystem {
exists, p, err := s.ExistsPackageFile(f)
if err != nil {
return errors.Wrap(err, "failed checking into system db")
}
if exists {
return fmt.Errorf(
"file conflict between '%s' and '%s' ( file: %s )",
p.HumanReadableString(),
m.Package.HumanReadableString(),
f,
)
}
}
}
filesToInstall = append(filesToInstall, files...)
@@ -805,7 +815,7 @@ func (l *LuetInstaller) install(o Option, syncedRepos Repositories, toInstall ma
}
// Check file conflicts
if err := l.checkFileconflicts(toInstall, s); err != nil {
if err := l.checkFileconflicts(toInstall, true, s); err != nil {
if !l.Options.Force {
return errors.Wrap(err, "file conflict found")
} else {

View File

@@ -29,7 +29,7 @@ func (s *System) ExecuteFinalizers(packs []pkg.Package) error {
executedFinalizer := map[string]bool{}
for _, p := range packs {
if fileHelper.Exists(p.Rel(tree.FinalizerFile)) {
out, err := helpers.RenderFiles(p.Rel(tree.FinalizerFile), p.Rel(tree.DefinitionFile))
out, err := helpers.RenderFiles(helpers.ChartFile(p.Rel(tree.FinalizerFile)), p.Rel(tree.DefinitionFile))
if err != nil {
Warning("Failed rendering finalizer for ", p.HumanReadableString(), err.Error())
errs = multierror.Append(errs, err)

View File

@@ -1,4 +1,4 @@
// Copyright © 2020 Ettore Di Giacinto <mudler@gentoo.org>
// Copyright © 2020-2021 Ettore Di Giacinto <mudler@mocaccino.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@@ -16,11 +16,16 @@
package solver
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"io"
"strconv"
"strings"
"github.com/crillab/gophersat/bf"
"github.com/crillab/gophersat/explain"
"github.com/mudler/luet/pkg/helpers"
"gopkg.in/yaml.v2"
@@ -51,18 +56,115 @@ const (
QLearningResolverType = "qlearning"
)
//. "github.com/mudler/luet/pkg/logger"
// PackageResolver assists PackageSolver on unsat cases
type PackageResolver interface {
Solve(bf.Formula, PackageSolver) (PackagesAssertions, error)
}
type DummyPackageResolver struct {
type Explainer struct{}
func decodeDimacs(vars map[string]string, dimacs string) (string, error) {
res := ""
sc := bufio.NewScanner(bytes.NewBufferString(dimacs))
lines := strings.Split(dimacs, "\n")
linenum := 1
SCAN:
for sc.Scan() {
line := sc.Text()
fields := strings.Fields(line)
if len(fields) == 0 {
continue
}
switch fields[0] {
case "p":
continue SCAN
default:
for i := 0; i < len(fields)-1; i++ {
v := fields[i]
negative := false
if strings.HasPrefix(fields[i], "-") {
v = strings.TrimLeft(fields[i], "-")
negative = true
}
variable := vars[v]
if negative {
res += fmt.Sprintf("!(%s)", variable)
} else {
res += variable
}
if i != len(fields)-2 {
res += fmt.Sprintf(" or ")
}
}
if linenum != len(lines)-1 {
res += fmt.Sprintf(" and \n")
}
}
linenum++
}
if err := sc.Err(); err != nil {
return res, fmt.Errorf("could not parse problem: %v", err)
}
return res, nil
}
func (*DummyPackageResolver) Solve(bf.Formula, PackageSolver) (PackagesAssertions, error) {
return nil, errors.New("Could not satisfy the constraints. Try again by removing deps ")
func parseVars(r io.Reader) (map[string]string, error) {
sc := bufio.NewScanner(r)
res := map[string]string{}
for sc.Scan() {
line := sc.Text()
fields := strings.Fields(line)
if len(fields) == 0 {
continue
}
switch fields[0] {
case "c":
data := strings.Split(fields[1], "=")
res[data[1]] = data[0]
default:
continue
}
}
if err := sc.Err(); err != nil {
return nil, fmt.Errorf("could not parse problem: %v", err)
}
return res, nil
}
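
To make the two decoding helpers concrete, a small hedged sketch (the variable mapping mirrors the resolver test further below):

// parseVars reads the DIMACS comment lines produced when dumping the formula,
// e.g. "c A--=1", and maps each variable number back to its package string:
vars, _ := parseVars(strings.NewReader("c A--=1\nc B--=2\nc C--=3\n"))
// vars == map[string]string{"1": "A--", "2": "B--", "3": "C--"}

// decodeDimacs then turns a (MUS) CNF back into a readable formula:
out, _ := decodeDimacs(vars, "p cnf 3 4\n1 0\n3 0\n-1 2 0\n-2 -3 0")
// out reads, modulo trailing spaces:
//   A-- and
//   C-- and
//   !(A--) or B-- and
//   !(B--) or !(C--)
_ = out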
// Solve tries to find the MUS (minimal unsatisfiable subset) of the original problem.
// It returns an error carrying the decoded DIMACS formula
func (*Explainer) Solve(f bf.Formula, s PackageSolver) (PackagesAssertions, error) {
buf := bytes.NewBufferString("")
if err := bf.Dimacs(f, buf); err != nil {
return nil, errors.Wrap(err, "cannot extract dimacs from formula")
}
copy := *buf
pb, err := explain.ParseCNF(&copy)
if err != nil {
return nil, errors.Wrap(err, "could not parse problem")
}
pb2, err := pb.MUS()
if err != nil {
return nil, errors.Wrap(err, "could not extract subset")
}
variables, err := parseVars(buf)
if err != nil {
return nil, errors.Wrap(err, "could not parse variables")
}
res, err := decodeDimacs(variables, pb2.CNF())
if err != nil {
return nil, errors.Wrap(err, "could not parse dimacs")
}
return nil, fmt.Errorf("could not satisfy the constraints: \n%s", res)
}
type QLearningResolver struct {
@@ -103,8 +205,8 @@ func (resolver *QLearningResolver) Solve(f bf.Formula, s PackageSolver) (Package
// Info("Using QLearning solver to resolve conflicts. Please be patient.")
resolver.Solver = s
s.SetResolver(&DummyPackageResolver{}) // Set dummy. Otherwise the attempts will run again a QLearning instance.
defer s.SetResolver(resolver) // Set back ourselves as resolver
s.SetResolver(&Explainer{}) // Set dummy. Otherwise the attempts will run again a QLearning instance.
defer s.SetResolver(resolver) // Set back ourselves as resolver
resolver.Formula = f

View File

@@ -38,7 +38,7 @@ var _ = Describe("Resolver", func() {
})
Context("Conflict set", func() {
Context("DummyPackageResolver", func() {
Context("Explainer", func() {
It("is unsolvable - as we something we ask to install conflict with system stuff", func() {
C := pkg.NewPackage("C", "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B", "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{C})
@@ -152,7 +152,7 @@ var _ = Describe("Resolver", func() {
})
})
Context("DummyPackageResolver", func() {
Context("Explainer", func() {
It("cannot find a solution", func() {
C := pkg.NewPackage("C", "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{})
B := pkg.NewPackage("B", "", []*pkg.DefaultPackage{}, []*pkg.DefaultPackage{C})
@@ -171,6 +171,11 @@ var _ = Describe("Resolver", func() {
solution, err := s.Install([]pkg.Package{A, D})
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(Equal(`could not satisfy the constraints:
A-- and
C-- and
!(A--) or B-- and
!(B--) or !(C--)`))
Expect(len(solution)).To(Equal(0))
})

View File

@@ -71,7 +71,7 @@ type Options struct {
// NewSolver accepts as argument two lists of packages, the first is the initial set,
// the second represent all the known packages.
func NewSolver(t Options, installed pkg.PackageDatabase, definitiondb pkg.PackageDatabase, solverdb pkg.PackageDatabase) PackageSolver {
return NewResolver(t, installed, definitiondb, solverdb, &DummyPackageResolver{})
return NewResolver(t, installed, definitiondb, solverdb, &Explainer{})
}
// NewResolver accepts as argument two lists of packages, the first is the initial set,

View File

@@ -76,6 +76,10 @@ func (r *CompilerRecipe) Load(path string) error {
//if err != nil {
// return err
//}
c, err := helpers.ChartFiles([]string{filepath.Join(path, "templates")})
if err != nil {
return err
}
//r.Tree().SetPackageSet(pkg.NewBoltDatabase(tmpfile.Name()))
// TODO: Handle cleaning after? Cleanup implemented in GetPackageSet().Clean()
@@ -104,8 +108,7 @@ func (r *CompilerRecipe) Load(path string) error {
// Instead of rdeps, have a different tree for build deps.
compileDefPath := pack.Rel(CompilerDefinitionFile)
if fileHelper.Exists(compileDefPath) {
dat, err := helpers.RenderFiles(compileDefPath, currentpath)
dat, err := helpers.RenderFiles(append(c, helpers.ChartFile(compileDefPath)...), currentpath)
if err != nil {
return errors.Wrap(err,
"Error templating file "+CompilerDefinitionFile+" from "+
@@ -157,7 +160,7 @@ func (r *CompilerRecipe) Load(path string) error {
if err != nil {
return errors.Wrap(err, "Error reading file "+currentpath)
}
dat, err := helpers.RenderHelm(string(buildyaml), raw, map[string]interface{}{})
dat, err := helpers.RenderHelm(append(c, helpers.ChartFileB(buildyaml)...), raw, map[string]interface{}{})
if err != nil {
return errors.Wrap(err,
"Error templating file "+CompilerDefinitionFile+" from "+
@@ -183,7 +186,7 @@ func (r *CompilerRecipe) Load(path string) error {
return nil
}
err := filepath.Walk(path, ff)
err = filepath.Walk(path, ff)
if err != nil {
return err
}

View File

@@ -0,0 +1,6 @@
image: "alpine"
prelude:
- mkdir /foo
steps:
- echo conflict > /foo/test1
package_dir: /foo

View File

@@ -0,0 +1,13 @@
packages:
- category: "test1"
name: "conflict"
version: "1.0"
- category: "test2"
name: "conflict"
version: "1.0"
- category: "test1"
name: "conflict"
version: "1.1"
- category: "test2"
name: "conflict"
version: "1.1"

View File

@@ -0,0 +1,3 @@
{{ define "writefile" }}
- echo conflict > /foo/{{.}}
{{end}}

View File

@@ -0,0 +1,6 @@
image: "alpine"
prelude:
- mkdir /foo
steps:
{{ template "writefile" .Values.name }}
package_dir: /foo

View File

@@ -0,0 +1,7 @@
packages:
- category: "test"
name: "foo"
version: "1.0"
- category: "test"
name: "bar"
version: "1.0"

View File

@@ -0,0 +1,81 @@
#!/bin/bash
export LUET_NOLOCK=true
oneTimeSetUp() {
export tmpdir="$(mktemp -d)"
}
oneTimeTearDown() {
rm -rf "$tmpdir"
}
testBuild() {
mkdir $tmpdir/testbuild
luet build --tree "$ROOT_DIR/tests/fixtures/fileconflicts_upgrade" --destination $tmpdir/testbuild --compression gzip --all
buildst=$?
assertEquals 'builds successfully' "$buildst" "0"
assertTrue 'create packages' "[ -e '$tmpdir/testbuild/conflict-test1-1.0.package.tar.gz' ]"
assertTrue 'create packages' "[ -e '$tmpdir/testbuild/conflict-test2-1.0.package.tar.gz' ]"
}
testRepo() {
assertTrue 'no repository' "[ ! -e '$tmpdir/testbuild/repository.yaml' ]"
luet create-repo --tree "$ROOT_DIR/tests/fixtures/fileconflicts_upgrade" \
--output $tmpdir/testbuild \
--packages $tmpdir/testbuild \
--name "test" \
--descr "Test Repo" \
--urls $tmpdir/testrootfs \
--type disk > /dev/null
createst=$?
assertEquals 'create repo successfully' "$createst" "0"
assertTrue 'create repository' "[ -e '$tmpdir/testbuild/repository.yaml' ]"
}
testConfig() {
mkdir $tmpdir/testrootfs
cat <<EOF > $tmpdir/luet.yaml
general:
debug: true
system:
rootfs: $tmpdir/testrootfs
database_path: "/"
database_engine: "boltdb"
config_from_host: true
repositories:
- name: "main"
type: "disk"
enable: true
urls:
- "$tmpdir/testbuild"
EOF
luet config --config $tmpdir/luet.yaml
res=$?
assertEquals 'config test successfully' "$res" "0"
}
testInstall() {
luet install -y --force --config $tmpdir/luet.yaml test1/conflict@1.0 test2/conflict@1.0
#luet install -y --config $tmpdir/luet.yaml test/c@1.0 > /dev/null
installst=$?
assertEquals 'install test succeeded' "$installst" "0"
#assertTrue 'package installed' "[ -e '$tmpdir/testrootfs/c' ]"
}
testUpgrade() {
out=$(luet upgrade -y --config $tmpdir/luet.yaml)
installst=$?
assertEquals 'upgrade fails on file conflict' "$installst" "1"
assertContains 'does find conflicts' "$out" "Error: file conflict found: file conflict between packages to be installed"
luet upgrade -y --config $tmpdir/luet.yaml --force
#luet install -y --config $tmpdir/luet.yaml test/c@1.0 > /dev/null
installst=$?
assertEquals 'install test succeeded' "$installst" "0"
}
# Load shUnit2.
. "$ROOT_DIR/tests/integration/shunit2"/shunit2

View File

@@ -0,0 +1,69 @@
#!/bin/bash
export LUET_NOLOCK=true
oneTimeSetUp() {
export tmpdir="$(mktemp -d)"
}
oneTimeTearDown() {
rm -rf "$tmpdir"
}
testBuild() {
mkdir $tmpdir/testbuild
luet build --tree "$ROOT_DIR/tests/fixtures/shared_templates" --destination $tmpdir/testbuild --compression gzip --all
buildst=$?
assertEquals 'builds successfully' "$buildst" "0"
assertTrue 'create packages' "[ -e '$tmpdir/testbuild/foo-test-1.0.package.tar.gz' ]"
assertTrue 'create packages' "[ -e '$tmpdir/testbuild/bar-test-1.0.package.tar.gz' ]"
}
testRepo() {
assertTrue 'no repository' "[ ! -e '$tmpdir/testbuild/repository.yaml' ]"
luet create-repo --tree "$ROOT_DIR/tests/fixtures/shared_templates" \
--output $tmpdir/testbuild \
--packages $tmpdir/testbuild \
--name "test" \
--descr "Test Repo" \
--urls $tmpdir/testrootfs \
--type disk > /dev/null
createst=$?
assertEquals 'create repo successfully' "$createst" "0"
assertTrue 'create repository' "[ -e '$tmpdir/testbuild/repository.yaml' ]"
}
testConfig() {
mkdir $tmpdir/testrootfs
cat <<EOF > $tmpdir/luet.yaml
general:
debug: true
system:
rootfs: $tmpdir/testrootfs
database_path: "/"
database_engine: "boltdb"
config_from_host: true
repositories:
- name: "main"
type: "disk"
enable: true
urls:
- "$tmpdir/testbuild"
EOF
luet config --config $tmpdir/luet.yaml
res=$?
assertEquals 'config test successfully' "$res" "0"
}
testInstall() {
luet install -y --config $tmpdir/luet.yaml test/foo test/bar
installst=$?
assertEquals 'install test succeeded' "$installst" "0"
assertTrue 'package bar installed' "[ -e '$tmpdir/testrootfs/bar' ]"
assertTrue 'package foo installed' "[ -e '$tmpdir/testrootfs/foo' ]"
}
# Load shUnit2.
. "$ROOT_DIR/tests/integration/shunit2"/shunit2

149
vendor/github.com/crillab/gophersat/explain/check.go generated vendored Normal file
View File

@@ -0,0 +1,149 @@
// Package explain provides facilities to check and understand UNSAT instances.
package explain
import (
"bufio"
"fmt"
"io"
"strconv"
"strings"
"github.com/crillab/gophersat/solver"
)
// Options is a set of options that can be set to true during the checking process.
type Options struct {
// If Verbose is true, information about resolution will be written on stdout.
Verbose bool
}
// Checks whether the clause satisfies the problem or not.
// Will return true if the problem is UNSAT, false if it is SAT or indeterminate.
func unsat(pb *Problem, clause []int) bool {
oldUnits := make([]int, len(pb.units))
copy(oldUnits, pb.units)
// lits is supposed to be implied by the problem.
// We add the negation of each lit as a unit clause to see if this is true.
for _, lit := range clause {
if lit > 0 {
pb.units[lit-1] = -1
} else {
pb.units[-lit-1] = 1
}
}
res := pb.unsat()
pb.units = oldUnits // We must restore the previous state
return res
}
// UnsatChan will wait RUP clauses from ch and use them as a certificate.
// It will return true iff the certificate is valid, i.e iff it makes the problem UNSAT
// through unit propagation.
// If pb.Options.ExtractSubset is true, a subset will also be extracted for that problem.
func (pb *Problem) UnsatChan(ch chan string) (valid bool, err error) {
defer pb.restore()
pb.initTagged()
for line := range ch {
fields := strings.Fields(line)
if len(fields) == 0 {
continue
}
if _, err := strconv.Atoi(fields[0]); err != nil {
// This is not a clause: ignore the line
continue
}
clause, err := parseClause(fields)
if err != nil {
return false, err
}
if !unsat(pb, clause) {
return false, nil
}
if len(clause) == 0 {
// This is the empty clause, and unit propagation made the problem UNSAT: we're done.
return true, nil
}
// Since clause is a logical consequence, append it to the problem
pb.Clauses = append(pb.Clauses, clause)
}
// If we did not send any information through the channel
// It implies that the problem is trivially unsatisfiable
// Since we had only unit clauses inside the channel.
return true, nil
}
// Unsat will parse a certificate, and return true iff the certificate is valid, i.e iff it makes the problem UNSAT
// through unit propagation.
// If pb.Options.ExtractSubset is true, a subset will also be extracted for that problem.
func (pb *Problem) Unsat(cert io.Reader) (valid bool, err error) {
defer pb.restore()
pb.initTagged()
sc := bufio.NewScanner(cert)
for sc.Scan() {
line := sc.Text()
fields := strings.Fields(line)
if len(fields) == 0 {
continue
}
if _, err := strconv.Atoi(fields[0]); err != nil {
// This is not a clause: ignore the line
continue
}
clause, err := parseClause(fields)
if err != nil {
return false, err
}
if !unsat(pb, clause) {
return false, nil
}
// Since clause is a logical consequence, append it to the problem
pb.Clauses = append(pb.Clauses, clause)
}
if err := sc.Err(); err != nil {
return false, fmt.Errorf("could not parse certificate: %v", err)
}
return true, nil
}
// ErrNotUnsat is the error returned when trying to get the MUS or UnsatSubset of a satisfiable problem.
var ErrNotUnsat = fmt.Errorf("problem is not UNSAT")
// UnsatSubset returns an unsatisfiable subset of the problem.
// The subset is not guaranteed to be a MUS, meaning some clauses of the resulting
// problem might be removed while still keeping the unsatisfiability of the problem.
// However, this method is much more efficient than extracting a MUS, as it only calls
// the SAT solver once.
func (pb *Problem) UnsatSubset() (subset *Problem, err error) {
solverPb := solver.ParseSlice(pb.Clauses)
if solverPb.Status == solver.Unsat {
// Problem is trivially UNSAT
// Make a copy so that pb and pb2 are not the same value.
pb2 := *pb
return &pb2, nil
}
s := solver.New(solver.ParseSlice(pb.Clauses))
s.Certified = true
s.CertChan = make(chan string)
status := solver.Unsat
go func() {
status = s.Solve()
close(s.CertChan)
}()
if valid, err := pb.UnsatChan(s.CertChan); !valid || status == solver.Sat {
return nil, ErrNotUnsat
} else if err != nil {
return nil, fmt.Errorf("could not solve problem: %v", err)
}
subset = &Problem{
NbVars: pb.NbVars,
}
for i, clause := range pb.Clauses {
if pb.tagged[i] {
// clause was used to prove pb is UNSAT: it's part of the subset
subset.Clauses = append(subset.Clauses, clause)
subset.NbClauses++
}
}
return subset, nil
}

213
vendor/github.com/crillab/gophersat/explain/mus.go generated vendored Normal file
View File

@@ -0,0 +1,213 @@
package explain
import (
"fmt"
"github.com/crillab/gophersat/solver"
)
// MUSMaxSat returns a Minimal Unsatisfiable Subset for the problem using the MaxSat strategy.
// A MUS is an unsatisfiable subset such that, if any of its clause is removed,
// the problem becomes satisfiable.
// A MUS can be useful to understand why a problem is UNSAT, but MUSes are expensive to compute since
// a SAT solver must be called several times on parts of the original problem to find them.
// With the MaxSat strategy, the function computes the MUS through several calls to MaxSat.
func (pb *Problem) MUSMaxSat() (mus *Problem, err error) {
pb2 := pb.clone()
nbVars := pb2.NbVars
NbClauses := pb2.NbClauses
weights := make([]int, NbClauses) // Weights of each clause
relaxLits := make([]solver.Lit, NbClauses) // Set of all relax lits
relaxLit := nbVars + 1 // Index of last used relax lit
for i, clause := range pb2.Clauses {
pb2.Clauses[i] = append(clause, relaxLit)
relaxLits[i] = solver.IntToLit(int32(relaxLit))
weights[i] = 1
relaxLit++
}
prob := solver.ParseSlice(pb2.Clauses)
prob.SetCostFunc(relaxLits, weights)
s := solver.New(prob)
s.Verbose = pb.Options.Verbose
var musClauses [][]int
done := make([]bool, NbClauses) // Indicates whether a clause is already part of MUS or not yet
for {
cost := s.Minimize()
if cost == -1 {
return makeMus(nbVars, musClauses), nil
}
if cost == 0 {
return nil, fmt.Errorf("cannot extract MUS from satisfiable problem")
}
model := s.Model()
for i, clause := range pb.Clauses {
if !done[i] && !satClause(clause, model) {
// The clause is part of the MUS
pb2.Clauses = append(pb2.Clauses, []int{-(nbVars + i + 1)}) // Now, relax lit has to be false
pb2.NbClauses++
musClauses = append(musClauses, clause)
done[i] = true
// Make it a hard clause before restarting solver
lits := make([]solver.Lit, len(clause))
for j, lit := range clause {
lits[j] = solver.IntToLit(int32(lit))
}
s.AppendClause(solver.NewClause(lits))
}
}
if pb.Options.Verbose {
fmt.Printf("c Currently %d/%d clauses in MUS\n", len(musClauses), NbClauses)
}
prob = solver.ParseSlice(pb2.Clauses)
prob.SetCostFunc(relaxLits, weights)
s = solver.New(prob)
s.Verbose = pb.Options.Verbose
}
}
// true iff the clause is satisfied by the model
func satClause(clause []int, model []bool) bool {
for _, lit := range clause {
if (lit > 0 && model[lit-1]) || (lit < 0 && !model[-lit-1]) {
return true
}
}
return false
}
func makeMus(nbVars int, clauses [][]int) *Problem {
mus := &Problem{
Clauses: clauses,
NbVars: nbVars,
NbClauses: len(clauses),
units: make([]int, nbVars),
}
for _, clause := range clauses {
if len(clause) == 1 {
lit := clause[0]
if lit > 0 {
mus.units[lit-1] = 1
} else {
mus.units[-lit-1] = -1
}
}
}
return mus
}
// MUSInsertion returns a Minimal Unsatisfiable Subset for the problem using the insertion method.
// A MUS is an unsatisfiable subset such that, if any of its clause is removed,
// the problem becomes satisfiable.
// A MUS can be useful to understand why a problem is UNSAT, but MUSes are expensive to compute since
// a SAT solver must be called several times on parts of the original problem to find them.
// The insertion algorithm is efficient in many cases, as it calls the same solver several times in a row.
// However, in some cases, the number of calls will be higher than using other methods.
// For instance, if called on a formula that is already a MUS, it will perform n*(n-1) calls to SAT, where
// n is the number of clauses of the problem.
func (pb *Problem) MUSInsertion() (mus *Problem, err error) {
pb2, err := pb.UnsatSubset()
if err != nil {
return nil, fmt.Errorf("could not extract MUS: %v", err)
}
mus = &Problem{NbVars: pb2.NbVars}
clauses := pb2.Clauses
for {
if pb.Options.Verbose {
fmt.Printf("c mus currently contains %d clauses\n", mus.NbClauses)
}
s := solver.New(solver.ParseSliceNb(mus.Clauses, mus.NbVars))
s.Verbose = pb.Options.Verbose
st := s.Solve()
if st == solver.Unsat { // Found the MUS
return mus, nil
}
// Add clauses until the problem becomes UNSAT
idx := 0
for st == solver.Sat {
clause := clauses[idx]
lits := make([]solver.Lit, len(clause))
for i, lit := range clause {
lits[i] = solver.IntToLit(int32(lit))
}
cl := solver.NewClause(lits)
s.AppendClause(cl)
idx++
st = s.Solve()
}
idx-- // We went one step too far, go back
mus.Clauses = append(mus.Clauses, clauses[idx]) // Last clause is part of the MUS
mus.NbClauses++
if pb.Options.Verbose {
fmt.Printf("c removing %d/%d clause(s)\n", len(clauses)-idx, len(clauses))
}
clauses = clauses[:idx] // Remaining clauses are not part of the MUS
}
}
// MUSDeletion returns a Minimal Unsatisfiable Subset for the problem using the deletion method.
// A MUS is an unsatisfiable subset such that, if any of its clause is removed,
// the problem becomes satisfiable.
// A MUS can be useful to understand why a problem is UNSAT, but MUSes are expensive to compute since
// a SAT solver must be called several times on parts of the original problem to find them.
// The deletion algorithm is guaranteed to call exactly n SAT solvers, where n is the number of clauses in the problem.
// It can be quite efficient, but each time the solver is called, it is starting from scratch.
// Other methods keep the solver "hot", so despite requiring more calls, these methods can be more efficient in practice.
func (pb *Problem) MUSDeletion() (mus *Problem, err error) {
pb2, err := pb.UnsatSubset()
if err != nil {
if err == ErrNotUnsat {
return nil, err
}
return nil, fmt.Errorf("could not extract MUS: %v", err)
}
pb2.NbVars += pb2.NbClauses // Add one relax var for each clause
for i, clause := range pb2.Clauses { // Add relax lit to each clause
newClause := make([]int, len(clause)+1)
copy(newClause, clause)
newClause[len(clause)] = pb.NbVars + i + 1 // Add relax lit to the clause
pb2.Clauses[i] = newClause
}
s := solver.New(solver.ParseSlice(pb2.Clauses))
asumptions := make([]solver.Lit, pb2.NbClauses)
for i := 0; i < pb2.NbClauses; i++ {
asumptions[i] = solver.IntToLit(int32(-(pb.NbVars + i + 1))) // At first, all asumptions are false
}
for i := range pb2.Clauses {
// Relax current clause
asumptions[i] = asumptions[i].Negation()
s.Assume(asumptions)
if s.Solve() == solver.Sat {
// It is now sat; reinsert the clause, i.e re-falsify the relax lit
asumptions[i] = asumptions[i].Negation()
if pb.Options.Verbose {
fmt.Printf("c clause %d/%d: kept\n", i+1, pb2.NbClauses)
}
} else if pb.Options.Verbose {
fmt.Printf("c clause %d/%d: removed\n", i+1, pb2.NbClauses)
}
}
mus = &Problem{
NbVars: pb.NbVars,
}
for i, val := range asumptions {
if !val.IsPositive() {
// Lit is not relaxed, meaning the clause is part of the MUS
clause := pb2.Clauses[i]
clause = clause[:len(clause)-1] // Remove relax lit
mus.Clauses = append(mus.Clauses, clause)
}
mus.NbClauses = len(mus.Clauses)
}
return mus, nil
}
// MUS returns a Minimal Unsatisfiable Subset for the problem.
// A MUS is an unsatisfiable subset such that, if any of its clause is removed,
// the problem becomes satisfiable.
// A MUS can be useful to understand why a problem is UNSAT, but MUSes are expensive to compute since
// a SAT solver must be called several times on parts of the original problem to find them.
// The exact algorithm used to compute the MUS is not guaranteed. If you want to use a given algorithm,
// use the relevant functions.
func (pb *Problem) MUS() (mus *Problem, err error) {
return pb.MUSDeletion()
}
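
For context, a hedged sketch of how this explain API (brought in by the gophersat bump) is consumed; it mirrors what luet's Explainer.Solve above does:

// A tiny UNSAT problem: x1, (!x1 or x2), !x2
pb, err := explain.ParseCNF(strings.NewReader("p cnf 2 3\n1 0\n-1 2 0\n-2 0"))
if err != nil {
    // malformed DIMACS input
}
mus, err := pb.MUS() // currently defaults to the deletion strategy
if err != nil {
    // ErrNotUnsat if the problem was satisfiable, or a solving error
}
fmt.Println(mus.CNF()) // the minimal unsatisfiable subset, again in DIMACS form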

104
vendor/github.com/crillab/gophersat/explain/parser.go generated vendored Normal file
View File

@@ -0,0 +1,104 @@
package explain
import (
"bufio"
"fmt"
"io"
"strconv"
"strings"
)
// parseClause parses a line representing a clause in the DIMACS CNF syntax.
func parseClause(fields []string) ([]int, error) {
clause := make([]int, 0, len(fields)-1)
for _, rawLit := range fields {
lit, err := strconv.Atoi(rawLit)
if err != nil {
return nil, fmt.Errorf("could not parse clause %v: %v", fields, err)
}
if lit != 0 {
clause = append(clause, lit)
}
}
return clause, nil
}
// ParseCNF parses a CNF and returns the associated problem.
func ParseCNF(r io.Reader) (*Problem, error) {
sc := bufio.NewScanner(r)
var pb Problem
for sc.Scan() {
line := sc.Text()
fields := strings.Fields(line)
if len(fields) == 0 {
continue
}
switch fields[0] {
case "c":
continue
case "p":
if err := pb.parseHeader(fields); err != nil {
return nil, fmt.Errorf("could not parse header %q: %v", line, err)
}
default:
if err := pb.parseClause(fields); err != nil {
return nil, fmt.Errorf("could not parse clause %q: %v", line, err)
}
}
}
if err := sc.Err(); err != nil {
return nil, fmt.Errorf("could not parse problem: %v", err)
}
return &pb, nil
}
func (pb *Problem) parseHeader(fields []string) error {
if len(fields) != 4 {
return fmt.Errorf("expected 4 fields, got %d", len(fields))
}
strVars := fields[2]
strClauses := fields[3]
var err error
pb.NbVars, err = strconv.Atoi(fields[2])
if err != nil {
return fmt.Errorf("invalid number of vars %q: %v", strVars, err)
}
if pb.NbVars < 0 {
return fmt.Errorf("negative number of vars %d", pb.NbVars)
}
pb.units = make([]int, pb.NbVars)
pb.NbClauses, err = strconv.Atoi(fields[3])
if err != nil {
return fmt.Errorf("invalid number of clauses %s: %v", strClauses, err)
}
if pb.NbClauses < 0 {
return fmt.Errorf("negative number of clauses %d", pb.NbClauses)
}
pb.Clauses = make([][]int, 0, pb.NbClauses)
return nil
}
func (pb *Problem) parseClause(fields []string) error {
clause, err := parseClause(fields)
if err != nil {
return err
}
pb.Clauses = append(pb.Clauses, clause)
if len(clause) == 1 {
lit := clause[0]
v := lit
if lit < 0 {
v = -v
}
if v > pb.NbVars {
// There was an error in the header
return fmt.Errorf("found lit %d but problem was supposed to hold only %d vars", lit, pb.NbVars)
}
if lit > 0 {
pb.units[v-1] = 1
} else {
pb.units[v-1] = -1
}
}
return nil
}

125
vendor/github.com/crillab/gophersat/explain/problem.go generated vendored Normal file
View File

@@ -0,0 +1,125 @@
package explain
import (
"fmt"
"strings"
)
// A Problem is a conjunction of Clauses.
// This package does not use solver's representation.
// We want this code to be as simple as possible to be easy to audit.
// On the other hand, solver's code must be as efficient as possible.
type Problem struct {
Clauses [][]int
NbVars int
NbClauses int
units []int // For each var, 0 if the var is unbound, 1 if true, -1 if false
Options Options
tagged []bool // List of clauses used while proving the problem is unsat. Initialized lazily
}
func (pb *Problem) initTagged() {
pb.tagged = make([]bool, pb.NbClauses)
for i, clause := range pb.Clauses {
// Unit clauses are tagged as they will probably be used during resolution
pb.tagged[i] = len(clause) == 1
}
}
func (pb *Problem) clone() *Problem {
pb2 := &Problem{
Clauses: make([][]int, pb.NbClauses),
NbVars: pb.NbVars,
NbClauses: pb.NbClauses,
units: make([]int, pb.NbVars),
}
copy(pb2.units, pb.units)
for i, clause := range pb.Clauses {
pb2.Clauses[i] = make([]int, len(clause))
copy(pb2.Clauses[i], clause)
}
return pb2
}
// restore removes all learned clauses, if any.
func (pb *Problem) restore() {
pb.Clauses = pb.Clauses[:pb.NbClauses]
}
// unsat will be true iff the problem can be proven unsat through unit propagation.
// This method modifies pb.units.
func (pb *Problem) unsat() bool {
done := make([]bool, len(pb.Clauses)) // clauses that were deemed sat during propagation
modified := true
for modified {
modified = false
for i, clause := range pb.Clauses {
if done[i] { // That clause was already proved true
continue
}
unbound := 0
var unit int // An unbound literal, if any
sat := false
for _, lit := range clause {
v := lit
if v < 0 {
v = -v
}
binding := pb.units[v-1]
if binding == 0 {
unbound++
if unbound == 1 {
unit = lit
} else {
break
}
} else if binding*lit == v { // (binding == -1 && lit < 0) || (binding == 1 && lit > 0) {
sat = true
break
}
}
if sat {
done[i] = true
continue
}
if unbound == 0 {
// All lits are false: problem is UNSAT
if i < pb.NbClauses {
pb.tagged[i] = true
}
return true
}
if unbound == 1 {
if unit < 0 {
pb.units[-unit-1] = -1
} else {
pb.units[unit-1] = 1
}
done[i] = true
if i < pb.NbClauses {
pb.tagged[i] = true
}
modified = true
}
}
}
// Problem is either sat or could not be proven unsat through unit propagation
return false
}
// CNF returns a representation of the problem using the Dimacs syntax.
func (pb *Problem) CNF() string {
lines := make([]string, 1, pb.NbClauses+1)
lines[0] = fmt.Sprintf("p cnf %d %d", pb.NbVars, pb.NbClauses)
for i := 0; i < pb.NbClauses; i++ {
clause := pb.Clauses[i]
strClause := make([]string, len(clause)+1)
for i, lit := range clause {
strClause[i] = fmt.Sprintf("%d", lit)
}
strClause[len(clause)] = "0"
line := strings.Join(strClause, " ")
lines = append(lines, line)
}
return strings.Join(lines, "\n")
}

View File

@@ -127,6 +127,25 @@ func New(problem *Problem) *Solver {
return s
}
// newVar is used to indicate a new variable must be added to the solver.
// This can be used when new clauses are appended and these clauses contain vars that were unseen so far.
// If the var already existed, nothing will happen.
func (s *Solver) newVar(v Var) {
if cnfVar := int(v.Int()); cnfVar > s.nbVars {
// If the var already existed, do nothing
for i := s.nbVars; i < cnfVar; i++ {
s.model = append(s.model, 0)
s.activity = append(s.activity, 0.)
s.polarity = append(s.polarity, false)
s.reason = append(s.reason, nil)
s.trailBuf = append(s.trailBuf, 0)
}
s.varQueue = newQueue(s.activity)
s.addVarWatcherList(v)
s.nbVars = cnfVar
}
}
// sets initial activity for optimization variables, if any.
func (s *Solver) initOptimActivity() {
for i, lit := range s.minLits {
@@ -682,6 +701,7 @@ func (s *Solver) AppendClause(clause *Clause) {
i := 0
for i < clause.Len() {
lit := clause.Get(i)
s.newVar(lit.Var())
switch s.litStatus(lit) {
case Sat:
w := clause.Weight(i)

View File

@@ -71,6 +71,11 @@ func (v Var) Lit() Lit {
return Lit(v * 2)
}
// Int converts a Var to a CNF variable.
func (v Var) Int() int32 {
return int32(v + 1)
}
// SignedLit returns the Lit associated to v, negated if 'signed', positive else.
func (v Var) SignedLit(signed bool) Lit {
if signed {

View File

@@ -39,6 +39,16 @@ func (s *Solver) initWatcherList(clauses []*Clause) {
}
}
// Should be called when new vars are added to the problem (see Solver.newVar)
func (s *Solver) addVarWatcherList(v Var) {
cnfVar := int(v.Int())
for i := s.nbVars; i < cnfVar; i++ {
s.wl.wlistBin = append(s.wl.wlistBin, nil, nil)
s.wl.wlist = append(s.wl.wlist, nil, nil)
s.wl.wlistPb = append(s.wl.wlistPb, nil, nil)
}
}
// appendClause appends the clause without checking whether the clause is already satisfiable, unit, or unsatisfiable.
// To perform those checks, call s.AppendClause.
// clause is supposed to be a problem clause, not a learned one.

3
vendor/modules.txt vendored
View File

@@ -115,9 +115,10 @@ github.com/containerd/ttrpc
github.com/containerd/typeurl
# github.com/cpuguy83/go-md2man/v2 v2.0.0
github.com/cpuguy83/go-md2man/v2/md2man
# github.com/crillab/gophersat v1.3.2-0.20201023142334-3fc2ac466765
# github.com/crillab/gophersat v1.3.2-0.20210701121804-72b19f5b6b38
## explicit
github.com/crillab/gophersat/bf
github.com/crillab/gophersat/explain
github.com/crillab/gophersat/solver
# github.com/cyphar/filepath-securejoin v0.2.2
github.com/cyphar/filepath-securejoin