Compare commits


34 Commits
0.3 ... 0.4

Author SHA1 Message Date
Ettore Di Giacinto
12c97c7a2a Prepare for 0.4 tag 2020-01-06 19:11:44 +01:00
Ettore Di Giacinto
f5e7c2ad92 Allow more matches, we select the best one anyway 2020-01-06 11:33:35 +01:00
Ettore Di Giacinto
d2abaa9cc1 Add message asserting skip. Also return pointer to allow editing Options 2020-01-05 16:26:42 +01:00
Ettore Di Giacinto
d23e1dee78 Default to Clean true in CompilerOptions 2020-01-05 16:08:40 +01:00
Ettore Di Giacinto
ee055e08b1 Set abs path when returning artifact from yaml 2020-01-05 16:08:39 +01:00
Ettore Di Giacinto
6d745ef915 Add build --clean to CLI 2020-01-05 16:08:39 +01:00
Ettore Di Giacinto
02c37c7451 Fix tests after constructor change 2020-01-05 16:08:39 +01:00
Ettore Di Giacinto
1d1efad18b Skip building if artifact already exists 2020-01-05 15:30:16 +01:00
Ettore Di Giacinto
bcc6ce19ea Move compiler options to its own struct
Also add Clean attribute, to handle a future build clean method
2020-01-05 14:32:26 +01:00
Ettore Di Giacinto
9db9c1bf19 Add integration test for reinstall warning 2020-01-01 12:04:26 +01:00
Ettore Di Giacinto
5e8a29caf5 Detect already installed packages when calling install
We weren't checking this previously, which was causing weird errors on the CLI
2020-01-01 11:58:33 +01:00
Ettore Di Giacinto
255f768cc0 Enhance simple integration test 2019-12-31 15:59:46 +01:00
Ettore Di Giacinto
1af235dfdc Add integration tests to run on travis 2019-12-31 15:44:00 +01:00
Ettore Di Giacinto
62ebe1a82b Add integration tests 2019-12-31 15:22:11 +01:00
Ettore Di Giacinto
efdfe72568 Treat CompressionType none as default
To provide backward compatibility with repos that didn't declare it explicitly
2019-12-31 12:29:53 +01:00
Ettore Di Giacinto
c193e4d320 Enhance output 2019-12-30 16:35:38 +01:00
Ettore Di Giacinto
3d5b723668 Add compression tests
Refers to #33
2019-12-30 16:35:35 +01:00
Ettore Di Giacinto
58eb483e32 Do not create new artifact on client
Otherwise we lose artifact metadata, such as checksum and compression type

Refers to #33
2019-12-30 16:35:33 +01:00
Ettore Di Giacinto
4f65d46d56 Drop CompressedPath, or we don't have a way to compare checksums
Refers to #33
2019-12-30 16:35:30 +01:00
Ettore Di Giacinto
d48f510f14 Propagate Checksum and CompressionType when building ArtifactIndex
Refers to #33
2019-12-30 16:35:28 +01:00
Ettore Di Giacinto
ea27ada6c0 Do not return errors after we uncompress successfully
Refers to #33
2019-12-30 16:35:25 +01:00
Ettore Di Giacinto
f71c9937c4 Add compression to build CLI
Also handle how concurrency is set now.

Adds also an accessor to compiler to set the desired compression type

Refers to #33
2019-12-30 16:35:21 +01:00
Ettore Di Giacinto
475b63be95 Consume concurrency from compiler
Refers to #33
2019-12-30 16:35:18 +01:00
Ettore Di Giacinto
a40ecaea40 Use a separate attribute to handle the compressed artifact
Refers to #33
2019-12-30 16:35:15 +01:00
Ettore Di Giacinto
5155681513 Fixup tests
Refers to #33
2019-12-30 16:35:12 +01:00
Ettore Di Giacinto
d2d72c3fc4 Add package compression type
TODO: Handle the path substitution in a separate field
Adds GZip support and allows the compiler to switch compression type.

It also adds Concurrency as a compiler attribute (not consumed yet)

Refers to #33
2019-12-30 16:34:41 +01:00
Ettore Di Giacinto
bb98259a48 Add sanity check test
To verify that we are actually comparing with some data

Closes #28
2019-12-29 14:14:06 +01:00
Ettore Di Giacinto
fea6061f89 Add hash test to artifact_test
Refers to #28
2019-12-29 14:14:03 +01:00
Ettore Di Giacinto
cb98a49917 Create new Checksum struct for Artifact
Refers to #28
2019-12-29 14:13:51 +01:00
Ettore Di Giacinto
2693ec2f8c Consume artifact verify mechanism
Refers to #28
2019-12-29 14:00:03 +01:00
Ettore Di Giacinto
eeb6719529 Add accessors to Hash and Verify artifacts
Refers to #28
2019-12-29 13:59:58 +01:00
Ettore Di Giacinto
17982e9527 Add package to calculate and compare artifact checksums
Refers to #28
2019-12-29 13:59:47 +01:00
Ettore Di Giacinto
2fa9c754ae Move archive helpers to artifact
This allows swapping in and providing archive/compression methods in the future without hijacking the code.

Refers to #33
2019-12-28 16:48:05 +01:00
Ettore Di Giacinto
8fffae31c7 Add dev version tag 2019-12-23 12:08:14 +01:00
23 changed files with 830 additions and 129 deletions

.gitignore (vendored): 2 lines changed

@@ -1,2 +1,4 @@
 *.swp
 luet
+tests/integration/shunit2
+tests/integration/bin


@@ -9,7 +9,7 @@ before_install:
 - make deps
 - curl -LO https://storage.googleapis.com/container-diff/latest/container-diff-linux-amd64 && chmod +x container-diff-linux-amd64 && mkdir -p $HOME/bin && export PATH=$PATH:$HOME/bin && mv container-diff-linux-amd64 $HOME/bin/container-diff
 script:
-- make multiarch-build test
+- make multiarch-build test test-integration
 after_success:
 - make coverage
 - bash <(curl -s https://codecov.io/bash)


@@ -21,6 +21,10 @@ test:
 GO111MODULE=off go get github.com/onsi/gomega/...
 ginkgo -race -r ./...
+.PHONY: test-integration
+test-integration:
+tests/integration/run.sh
 .PHONY: coverage
 coverage:
 go test ./... -race -coverprofile=coverage.txt -covermode=atomic
@@ -36,6 +40,8 @@ help:
 .PHONY: clean
 clean:
 rm -rf release/
+rm -rf tests/integration/shunit2
+rm -rf tests/integration/bin
 .PHONY: deps
 deps:


@@ -35,6 +35,7 @@ var buildCmd = &cobra.Command{
 Short: "build a package or a tree",
 Long: `build packages or trees from luet tree definitions. Packages are in [category]/[name]-[version] form`,
 PreRun: func(cmd *cobra.Command, args []string) {
+viper.BindPFlag("clean", cmd.Flags().Lookup("clean"))
 viper.BindPFlag("tree", cmd.Flags().Lookup("tree"))
 viper.BindPFlag("destination", cmd.Flags().Lookup("destination"))
 viper.BindPFlag("backend", cmd.Flags().Lookup("backend"))
@@ -43,9 +44,11 @@
 viper.BindPFlag("database", cmd.Flags().Lookup("database"))
 viper.BindPFlag("revdeps", cmd.Flags().Lookup("revdeps"))
 viper.BindPFlag("all", cmd.Flags().Lookup("all"))
+viper.BindPFlag("compression", cmd.Flags().Lookup("compression"))
 },
 Run: func(cmd *cobra.Command, args []string) {
+clean := viper.GetBool("clean")
 src := viper.GetString("tree")
 dst := viper.GetString("destination")
 concurrency := viper.GetInt("concurrency")
@@ -54,6 +57,7 @@
 revdeps := viper.GetBool("revdeps")
 all := viper.GetBool("all")
 databaseType := viper.GetString("database")
+compressionType := viper.GetString("compression")
 compilerSpecs := compiler.NewLuetCompilationspecs()
 var compilerBackend compiler.CompilerBackend
@@ -88,8 +92,11 @@
 if err != nil {
 Fatal("Error: " + err.Error())
 }
-luetCompiler := compiler.NewLuetCompiler(compilerBackend, generalRecipe.GetDatabase())
+opts := compiler.NewDefaultCompilerOptions()
+opts.Clean = clean
+luetCompiler := compiler.NewLuetCompiler(compilerBackend, generalRecipe.GetDatabase(), opts)
+luetCompiler.SetConcurrency(concurrency)
+luetCompiler.SetCompressionType(compiler.CompressionImplementation(compressionType))
 if !all {
 for _, a := range args {
 decodepackage, err := regexp.Compile(`^([<>]?\~?=?)((([^\/]+)\/)?(?U)(\S+))(-(\d+(\.\d+)*[a-z]?(_(alpha|beta|pre|rc|p)\d*)*(-r\d+)?))?$`)
@@ -125,10 +132,10 @@
 var artifact []compiler.Artifact
 var errs []error
 if revdeps {
-artifact, errs = luetCompiler.CompileWithReverseDeps(concurrency, privileged, compilerSpecs)
+artifact, errs = luetCompiler.CompileWithReverseDeps(privileged, compilerSpecs)
 } else {
-artifact, errs = luetCompiler.CompileParallel(concurrency, privileged, compilerSpecs)
+artifact, errs = luetCompiler.CompileParallel(privileged, compilerSpecs)
 }
 if len(errs) != 0 {
@@ -148,6 +155,7 @@ func init() {
 if err != nil {
 Fatal(err)
 }
+buildCmd.Flags().Bool("clean", true, "Build all packages without considering the packages present in the build directory")
 buildCmd.Flags().String("tree", path, "Source luet tree")
 buildCmd.Flags().String("backend", "docker", "backend used (docker,img)")
 buildCmd.Flags().Int("concurrency", runtime.NumCPU(), "Concurrency")
@@ -156,6 +164,7 @@
 buildCmd.Flags().Bool("revdeps", false, "Build with revdeps")
 buildCmd.Flags().Bool("all", false, "Build all packages in the tree")
 buildCmd.Flags().String("destination", path, "Destination folder")
+buildCmd.Flags().String("compression", "none", "Compression alg: none, gzip")
 RootCmd.AddCommand(buildCmd)
 }
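For reference, a minimal Go sketch (not part of this changeset) of the option wiring the new flags map to; buildCompiler and the chosen values are placeholders, while NewDefaultCompilerOptions, NewLuetCompiler, SetConcurrency, SetCompressionType and GZip are taken from the diff above.

package example

import (
	"github.com/mudler/luet/pkg/compiler"
	pkg "github.com/mudler/luet/pkg/package"
)

// buildCompiler mirrors what `luet build --clean=false --compression gzip --concurrency 4`
// now does internally; backend and db are assumed to be constructed as in the CLI above.
func buildCompiler(backend compiler.CompilerBackend, db pkg.PackageDatabase) compiler.Compiler {
	opts := compiler.NewDefaultCompilerOptions() // Clean=true, CompressionType=None, Concurrency=NumCPU()
	opts.Clean = false                           // reuse artifacts already present in the build directory
	c := compiler.NewLuetCompiler(backend, db, opts)
	c.SetConcurrency(4)
	c.SetCompressionType(compiler.GZip)
	return c
}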


@@ -30,7 +30,7 @@ import (
 var cfgFile string
 var Verbose bool
-const LuetCLIVersion = "0.3"
+const LuetCLIVersion = "0.4"
 // RootCmd represents the base command when called without any subcommands
 var RootCmd = &cobra.Command{

go.mod: 1 line changed

@@ -15,6 +15,7 @@ require (
 github.com/go-yaml/yaml v2.1.0+incompatible // indirect
 github.com/hashicorp/go-version v1.2.0
 github.com/jinzhu/copier v0.0.0-20180308034124-7e38e58719c3
+github.com/klauspost/pgzip v1.2.1
 github.com/kyokomi/emoji v2.1.0+incompatible
 github.com/logrusorgru/aurora v0.0.0-20190417123914-21d75270181e
 github.com/marcsauter/single v0.0.0-20181104081128-f8bf46f26ec0
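github.com/klauspost/pgzip is a parallel gzip implementation with a compress/gzip-compatible API; the artifact code further down relies on its SetConcurrency call. A standalone sketch of the same pattern, assuming the v1.2.x API; compressFile and the block size are illustrative.

package example

import (
	"io"
	"os"

	gzip "github.com/klauspost/pgzip"
)

// compressFile gzips src into dst with pgzip, compressing blocks on several goroutines.
func compressFile(src, dst string, concurrency int) error {
	in, err := os.Open(src)
	if err != nil {
		return err
	}
	defer in.Close()

	out, err := os.Create(dst)
	if err != nil {
		return err
	}
	defer out.Close()

	w := gzip.NewWriter(out)
	// block size and number of blocks compressed in parallel; values are illustrative
	if err := w.SetConcurrency(100000, concurrency); err != nil {
		return err
	}
	defer w.Close()

	_, err = io.Copy(w, in)
	return err
}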


@@ -16,22 +16,33 @@
package compiler package compiler
import ( import (
"archive/tar"
"bufio"
"io"
"io/ioutil" "io/ioutil"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
"regexp" "regexp"
gzip "github.com/klauspost/pgzip"
//"strconv" //"strconv"
"strings" "strings"
"sync" "sync"
"github.com/mudler/luet/pkg/helpers"
. "github.com/mudler/luet/pkg/logger" . "github.com/mudler/luet/pkg/logger"
"github.com/mudler/luet/pkg/solver" "github.com/mudler/luet/pkg/solver"
yaml "gopkg.in/yaml.v2"
"github.com/mudler/luet/pkg/helpers"
"github.com/pkg/errors" "github.com/pkg/errors"
yaml "gopkg.in/yaml.v2"
)
type CompressionImplementation string
const (
None CompressionImplementation = "none" // e.g. tar for standard packages
GZip CompressionImplementation = "gzip"
) )
type ArtifactIndex []Artifact type ArtifactIndex []Artifact
@@ -40,7 +51,15 @@ func (i ArtifactIndex) CleanPath() ArtifactIndex {
newIndex := ArtifactIndex{} newIndex := ArtifactIndex{}
for _, n := range i { for _, n := range i {
art := n.(*PackageArtifact) art := n.(*PackageArtifact)
newIndex = append(newIndex, &PackageArtifact{Path: path.Base(n.GetPath()), SourceAssertion: art.SourceAssertion, CompileSpec: art.CompileSpec, Dependencies: art.Dependencies}) // FIXME: This is a dup and makes difficult to add attributes to artifacts
newIndex = append(newIndex, &PackageArtifact{
Path: path.Base(n.GetPath()),
SourceAssertion: art.SourceAssertion,
CompileSpec: art.CompileSpec,
Dependencies: art.Dependencies,
CompressionType: art.CompressionType,
Checksums: art.Checksums,
})
} }
return newIndex return newIndex
//Update if exists, otherwise just create //Update if exists, otherwise just create
@@ -50,19 +69,21 @@ func (i ArtifactIndex) CleanPath() ArtifactIndex {
// which will consist in just of an repository.yaml which is just the repository structure with the list of package artifact. // which will consist in just of an repository.yaml which is just the repository structure with the list of package artifact.
// In this way a generic client can fetch the packages and, after unpacking the tree, performing queries to install packages. // In this way a generic client can fetch the packages and, after unpacking the tree, performing queries to install packages.
type PackageArtifact struct { type PackageArtifact struct {
Path string `json:"path"` Path string `json:"path"`
Dependencies []*PackageArtifact `json:"dependencies"`
CompileSpec *LuetCompilationSpec `json:"compilationspec"`
Dependencies []*PackageArtifact `json:"dependencies"`
CompileSpec *LuetCompilationSpec `json:"compilationspec"`
Checksums Checksums `json:"checksums"`
SourceAssertion solver.PackagesAssertions `json:"-"` SourceAssertion solver.PackagesAssertions `json:"-"`
CompressionType CompressionImplementation `json:"compressiontype"`
} }
func NewPackageArtifact(path string) Artifact { func NewPackageArtifact(path string) Artifact {
return &PackageArtifact{Path: path, Dependencies: []*PackageArtifact{}} return &PackageArtifact{Path: path, Dependencies: []*PackageArtifact{}, Checksums: Checksums{}, CompressionType: None}
} }
func NewPackageArtifactFromYaml(data []byte) (Artifact, error) { func NewPackageArtifactFromYaml(data []byte) (Artifact, error) {
p := &PackageArtifact{} p := &PackageArtifact{Checksums: Checksums{}}
err := yaml.Unmarshal(data, &p) err := yaml.Unmarshal(data, &p)
if err != nil { if err != nil {
return p, err return p, err
@@ -71,7 +92,50 @@ func NewPackageArtifactFromYaml(data []byte) (Artifact, error) {
return p, err return p, err
} }
func LoadArtifactFromYaml(spec CompilationSpec) (Artifact, error) {
metaFile := spec.GetPackage().GetFingerPrint() + ".metadata.yaml"
dat, err := ioutil.ReadFile(spec.Rel(metaFile))
if err != nil {
return nil, errors.Wrap(err, "Error reading file "+metaFile)
}
art, err := NewPackageArtifactFromYaml(dat)
if err != nil {
return nil, errors.Wrap(err, "Error writing file "+metaFile)
}
// It is relative, set it back to abs
art.SetPath(spec.Rel(art.GetPath()))
return art, nil
}
func (a *PackageArtifact) SetCompressionType(t CompressionImplementation) {
a.CompressionType = t
}
func (a *PackageArtifact) Hash() error {
return a.Checksums.Generate(a)
}
func (a *PackageArtifact) Verify() error {
sum := Checksums{}
err := sum.Generate(a)
if err != nil {
return err
}
err = sum.Compare(a.Checksums)
if err != nil {
return err
}
return nil
}
func (a *PackageArtifact) WriteYaml(dst string) error { func (a *PackageArtifact) WriteYaml(dst string) error {
// First compute checksum of artifact. When we write the yaml we want to write up-to-date information.
err := a.Hash()
if err != nil {
return errors.Wrap(err, "Failed generating checksums for artifact")
}
//p := a.CompileSpec.GetPackage().GetPath() //p := a.CompileSpec.GetPackage().GetPath()
//a.CompileSpec.GetPackage().SetPath("") //a.CompileSpec.GetPackage().SetPath("")
@@ -148,6 +212,154 @@ func (a *PackageArtifact) SetPath(p string) {
a.Path = p a.Path = p
} }
// Compress Archives and compress (TODO) to the artifact path
func (a *PackageArtifact) Compress(src string, concurrency int) error {
switch a.CompressionType {
case GZip:
err := helpers.Tar(src, a.Path)
if err != nil {
return err
}
original, err := os.Open(a.Path)
if err != nil {
return err
}
defer original.Close()
gzipfile := a.Path + ".gz"
bufferedReader := bufio.NewReader(original)
// Open a file for writing.
dst, err := os.Create(gzipfile)
if err != nil {
return err
}
// Create gzip writer.
w := gzip.NewWriter(dst)
w.SetConcurrency(concurrency, 10)
defer w.Close()
defer dst.Close()
_, err = io.Copy(w, bufferedReader)
if err != nil {
return err
}
w.Close()
os.RemoveAll(a.Path) // Remove original
// a.CompressedPath = gzipfile
a.Path = gzipfile
return nil
//a.Path = gzipfile
// Defaults to tar only (covers when "none" is supplied)
default:
return helpers.Tar(src, a.Path)
}
return errors.New("Compression type must be supplied")
}
// Unpack Untar and decompress (TODO) to the given path
func (a *PackageArtifact) Unpack(dst string, keepPerms bool) error {
switch a.CompressionType {
case GZip:
// Create the uncompressed archive
archive, err := os.Create(a.GetPath() + ".uncompressed")
if err != nil {
return err
}
defer os.RemoveAll(a.GetPath() + ".uncompressed")
defer archive.Close()
original, err := os.Open(a.Path)
if err != nil {
return errors.Wrap(err, "Cannot open "+a.Path)
}
defer original.Close()
bufferedReader := bufio.NewReader(original)
r, err := gzip.NewReader(bufferedReader)
if err != nil {
return err
}
defer r.Close()
_, err = io.Copy(archive, r)
if err != nil {
return errors.Wrap(err, "Cannot copy to "+a.GetPath()+".uncompressed")
}
err = helpers.Untar(a.GetPath()+".uncompressed", dst, keepPerms)
if err != nil {
return err
}
return nil
// Defaults to tar only (covers when "none" is supplied)
default:
return helpers.Untar(a.GetPath(), dst, keepPerms)
}
return errors.New("Compression type must be supplied")
}
func (a *PackageArtifact) FileList() ([]string, error) {
var tr *tar.Reader
switch a.CompressionType {
case GZip:
// Create the uncompressed archive
archive, err := os.Create(a.GetPath() + ".uncompressed")
if err != nil {
return []string{}, err
}
defer os.RemoveAll(a.GetPath() + ".uncompressed")
defer archive.Close()
original, err := os.Open(a.Path)
if err != nil {
return []string{}, errors.Wrap(err, "Cannot open "+a.Path)
}
defer original.Close()
bufferedReader := bufio.NewReader(original)
r, err := gzip.NewReader(bufferedReader)
if err != nil {
return []string{}, err
}
defer r.Close()
tr = tar.NewReader(r)
// Defaults to tar only (covers when "none" is supplied)
default:
tarFile, err := os.Open(a.GetPath())
if err != nil {
return []string{}, errors.Wrap(err, "Could not open package archive")
}
defer tarFile.Close()
tr = tar.NewReader(tarFile)
}
var files []string
// untar each segment
for {
hdr, err := tr.Next()
if err == io.EOF {
break
}
if err != nil {
return []string{}, err
}
// determine proper file path info
finfo := hdr.FileInfo()
fileName := hdr.Name
if finfo.Mode().IsDir() {
continue
}
files = append(files, fileName)
// if a dir, create it, then go to next segment
}
return files, nil
}
type CopyJob struct { type CopyJob struct {
Src, Dst string Src, Dst string
Artifact string Artifact string
@@ -175,7 +387,7 @@ func worker(i int, wg *sync.WaitGroup, s <-chan CopyJob) {
} }
// ExtractArtifactFromDelta extracts deltas from ArtifactLayer from an image in tar format // ExtractArtifactFromDelta extracts deltas from ArtifactLayer from an image in tar format
func ExtractArtifactFromDelta(src, dst string, layers []ArtifactLayer, concurrency int, keepPerms bool, includes []string) (Artifact, error) { func ExtractArtifactFromDelta(src, dst string, layers []ArtifactLayer, concurrency int, keepPerms bool, includes []string, t CompressionImplementation) (Artifact, error) {
archive, err := ioutil.TempDir(os.TempDir(), "archive") archive, err := ioutil.TempDir(os.TempDir(), "archive")
if err != nil { if err != nil {
@@ -239,10 +451,11 @@ func ExtractArtifactFromDelta(src, dst string, layers []ArtifactLayer, concurren
close(toCopy) close(toCopy)
wg.Wait() wg.Wait()
a := NewPackageArtifact(dst)
err = helpers.Tar(archive, dst) a.SetCompressionType(t)
err = a.Compress(archive, concurrency)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "Error met while creating package archive") return nil, errors.Wrap(err, "Error met while creating package archive")
} }
return NewPackageArtifact(dst), nil return a, nil
} }
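Taken together, the new artifact methods form a compress/checksum/verify/unpack round trip. A sketch of that flow (not part of the diff; roundTrip, the paths and the concurrency value are placeholders, the method names come from the changes above).

package example

import (
	"log"

	"github.com/mudler/luet/pkg/compiler"
)

// roundTrip tars and gzips a rootfs into pkgTar (the artifact path gains a .gz suffix),
// records and verifies its sha256 checksum, then unpacks it again.
func roundTrip(rootfs, pkgTar, unpackDir string) {
	a := compiler.NewPackageArtifact(pkgTar)
	a.SetCompressionType(compiler.GZip) // compiler.None would produce a plain tar

	if err := a.Compress(rootfs, 4); err != nil {
		log.Fatal(err)
	}
	if err := a.Hash(); err != nil { // fills the artifact's Checksums
		log.Fatal(err)
	}
	if err := a.Verify(); err != nil { // recomputes the sums and compares them with the stored ones
		log.Fatal(err)
	}
	if err := a.Unpack(unpackDir, false); err != nil { // decompresses and untars into unpackDir
		log.Fatal(err)
	}
}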


@@ -32,7 +32,7 @@ import (
 var _ = Describe("Artifact", func() {
 Context("Simple package build definition", func() {
-It("Generates a delta", func() {
+It("Generates a verified delta", func() {
 generalRecipe := tree.NewGeneralRecipe(pkg.NewInMemoryDatabase(false))
@@ -41,7 +41,7 @@ var _ = Describe("Artifact", func() {
 Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
-compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase())
+compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
 spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "enman", Category: "app-admin", Version: "1.4.0"})
 Expect(err).ToNot(HaveOccurred())
@@ -125,7 +125,7 @@ RUN echo bar > /test2`))
 err = b.ExtractRootfs(CompilerBackendOptions{SourcePath: filepath.Join(tmpdir, "output2.tar"), Destination: rootfs}, false)
 Expect(err).ToNot(HaveOccurred())
-artifact, err := ExtractArtifactFromDelta(rootfs, filepath.Join(tmpdir, "package.tar"), diffs, 2, false, []string{})
+artifact, err := ExtractArtifactFromDelta(rootfs, filepath.Join(tmpdir, "package.tar"), diffs, 2, false, []string{}, None)
 Expect(err).ToNot(HaveOccurred())
 Expect(helpers.Exists(filepath.Join(tmpdir, "package.tar"))).To(BeTrue())
 err = helpers.Untar(artifact.GetPath(), unpacked, false)
@@ -138,6 +138,15 @@ RUN echo bar > /test2`))
 content2, err := helpers.Read(filepath.Join(unpacked, "test2"))
 Expect(err).ToNot(HaveOccurred())
 Expect(content2).To(Equal("bar\n"))
+err = artifact.Hash()
+Expect(err).ToNot(HaveOccurred())
+err = artifact.Verify()
+Expect(err).ToNot(HaveOccurred())
+Expect(helpers.CopyFile(filepath.Join(tmpdir, "output2.tar"), filepath.Join(tmpdir, "package.tar"))).ToNot(HaveOccurred())
+err = artifact.Verify()
+Expect(err).To(HaveOccurred())
 })
 })


@@ -40,7 +40,7 @@ var _ = Describe("Docker backend", func() {
 Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
-compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase())
+compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
 spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "enman", Category: "app-admin", Version: "1.4.0"})
 Expect(err).ToNot(HaveOccurred())

pkg/compiler/checksum.go (new file): 78 lines

@@ -0,0 +1,78 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package compiler
import (
//"strconv"
"crypto/sha256"
"fmt"
"hash"
"io"
"os"
// . "github.com/mudler/luet/pkg/logger"
"github.com/pkg/errors"
)
type HashImplementation string
const (
SHA256 HashImplementation = "sha256"
)
type Checksums map[string]string
type HashOptions struct {
Hasher hash.Hash
Type HashImplementation
}
// Generate generates all Checksums supported for the artifact
func (c *Checksums) Generate(a Artifact) error {
return c.generateSHA256(a)
}
func (c Checksums) Compare(d Checksums) error {
for t, sum := range d {
if v, ok := c[t]; ok && v != sum {
return errors.New("Checksum mismatch")
}
}
return nil
}
func (c *Checksums) generateSHA256(a Artifact) error {
return c.generateSum(a, HashOptions{Hasher: sha256.New(), Type: SHA256})
}
func (c *Checksums) generateSum(a Artifact, opts HashOptions) error {
f, err := os.Open(a.GetPath())
if err != nil {
return err
}
defer f.Close()
if _, err := io.Copy(opts.Hasher, f); err != nil {
return err
}
sum := fmt.Sprintf("%x", opts.Hasher.Sum(nil))
(*c)[string(opts.Type)] = sum
return nil
}
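A short usage sketch for the new Checksums type, mirroring what the test below exercises; compareFiles and the paths are placeholders, Generate and Compare are the functions added here.

package example

import (
	"fmt"

	"github.com/mudler/luet/pkg/compiler"
)

// compareFiles generates sha256 sums for two files (wrapped as artifacts) and compares them.
// Compare only returns an error when a hash type present in both maps has different values.
func compareFiles(pathA, pathB string) error {
	sumA := compiler.Checksums{}
	sumB := compiler.Checksums{}

	if err := sumA.Generate(compiler.NewPackageArtifact(pathA)); err != nil {
		return err
	}
	if err := sumB.Generate(compiler.NewPackageArtifact(pathB)); err != nil {
		return err
	}
	if err := sumA.Compare(sumB); err != nil {
		return fmt.Errorf("artifacts differ: %v", err)
	}
	return nil
}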


@@ -0,0 +1,61 @@
// Copyright © 2019 Ettore Di Giacinto <mudler@gentoo.org>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see <http://www.gnu.org/licenses/>.
package compiler_test
import (
"io/ioutil"
"os"
. "github.com/mudler/luet/pkg/compiler"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Checksum", func() {
Context("Generation", func() {
It("Compares successfully", func() {
tmpdir, err := ioutil.TempDir("", "tree")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tmpdir) // clean up
buildsum := Checksums{}
definitionsum := Checksums{}
definitionsum2 := Checksums{}
Expect(len(buildsum)).To(Equal(0))
Expect(len(definitionsum)).To(Equal(0))
Expect(len(definitionsum2)).To(Equal(0))
err = buildsum.Generate(NewPackageArtifact("../../tests/fixtures/layers/alpine/build.yaml"))
Expect(err).ToNot(HaveOccurred())
err = definitionsum.Generate(NewPackageArtifact("../../tests/fixtures/layers/alpine/definition.yaml"))
Expect(err).ToNot(HaveOccurred())
err = definitionsum2.Generate(NewPackageArtifact("../../tests/fixtures/layers/alpine/definition.yaml"))
Expect(err).ToNot(HaveOccurred())
Expect(len(buildsum)).To(Equal(1))
Expect(len(definitionsum)).To(Equal(1))
Expect(len(definitionsum2)).To(Equal(1))
Expect(definitionsum.Compare(buildsum)).To(HaveOccurred())
Expect(definitionsum.Compare(definitionsum2)).ToNot(HaveOccurred())
})
})
})


@@ -36,13 +36,15 @@ const BuildFile = "build.yaml"
type LuetCompiler struct { type LuetCompiler struct {
*tree.CompilerRecipe *tree.CompilerRecipe
Backend CompilerBackend Backend CompilerBackend
Database pkg.PackageDatabase Database pkg.PackageDatabase
ImageRepository string ImageRepository string
PullFirst, KeepImg bool PullFirst, KeepImg, Clean bool
Concurrency int
CompressionType CompressionImplementation
} }
func NewLuetCompiler(backend CompilerBackend, db pkg.PackageDatabase) Compiler { func NewLuetCompiler(backend CompilerBackend, db pkg.PackageDatabase, opt *CompilerOptions) Compiler {
// The CompilerRecipe will gives us a tree with only build deps listed. // The CompilerRecipe will gives us a tree with only build deps listed.
return &LuetCompiler{ return &LuetCompiler{
Backend: backend, Backend: backend,
@@ -50,12 +52,23 @@ func NewLuetCompiler(backend CompilerBackend, db pkg.PackageDatabase) Compiler {
tree.Recipe{Database: db}, tree.Recipe{Database: db},
}, },
Database: db, Database: db,
ImageRepository: "luet/cache", ImageRepository: opt.ImageRepository,
PullFirst: true, PullFirst: opt.PullFirst,
KeepImg: true, CompressionType: opt.CompressionType,
KeepImg: opt.KeepImg,
Concurrency: opt.Concurrency,
Clean: opt.Clean,
} }
} }
func (cs *LuetCompiler) SetConcurrency(i int) {
cs.Concurrency = i
}
func (cs *LuetCompiler) SetCompressionType(t CompressionImplementation) {
cs.CompressionType = t
}
func (cs *LuetCompiler) compilerWorker(i int, wg *sync.WaitGroup, cspecs chan CompilationSpec, a *[]Artifact, m *sync.Mutex, concurrency int, keepPermissions bool, errors chan error) { func (cs *LuetCompiler) compilerWorker(i int, wg *sync.WaitGroup, cspecs chan CompilationSpec, a *[]Artifact, m *sync.Mutex, concurrency int, keepPermissions bool, errors chan error) {
defer wg.Done() defer wg.Done()
@@ -70,8 +83,9 @@ func (cs *LuetCompiler) compilerWorker(i int, wg *sync.WaitGroup, cspecs chan Co
m.Unlock() m.Unlock()
} }
} }
func (cs *LuetCompiler) CompileWithReverseDeps(concurrency int, keepPermissions bool, ps CompilationSpecs) ([]Artifact, []error) {
artifacts, err := cs.CompileParallel(concurrency, keepPermissions, ps) func (cs *LuetCompiler) CompileWithReverseDeps(keepPermissions bool, ps CompilationSpecs) ([]Artifact, []error) {
artifacts, err := cs.CompileParallel(keepPermissions, ps)
if len(err) != 0 { if len(err) != 0 {
return artifacts, err return artifacts, err
} }
@@ -119,11 +133,11 @@ func (cs *LuetCompiler) CompileWithReverseDeps(concurrency int, keepPermissions
Info(" :arrow_right_hook:", u.GetPackage().GetName(), ":leaves:", u.GetPackage().GetVersion(), "(", u.GetPackage().GetCategory(), ")") Info(" :arrow_right_hook:", u.GetPackage().GetName(), ":leaves:", u.GetPackage().GetVersion(), "(", u.GetPackage().GetCategory(), ")")
} }
artifacts2, err := cs.CompileParallel(concurrency, keepPermissions, uniques) artifacts2, err := cs.CompileParallel(keepPermissions, uniques)
return append(artifacts, artifacts2...), err return append(artifacts, artifacts2...), err
} }
func (cs *LuetCompiler) CompileParallel(concurrency int, keepPermissions bool, ps CompilationSpecs) ([]Artifact, []error) { func (cs *LuetCompiler) CompileParallel(keepPermissions bool, ps CompilationSpecs) ([]Artifact, []error) {
Spinner(22) Spinner(22)
defer SpinnerStop() defer SpinnerStop()
all := make(chan CompilationSpec) all := make(chan CompilationSpec)
@@ -131,9 +145,9 @@ func (cs *LuetCompiler) CompileParallel(concurrency int, keepPermissions bool, p
mutex := &sync.Mutex{} mutex := &sync.Mutex{}
errors := make(chan error, ps.Len()) errors := make(chan error, ps.Len())
var wg = new(sync.WaitGroup) var wg = new(sync.WaitGroup)
for i := 0; i < concurrency; i++ { for i := 0; i < cs.Concurrency; i++ {
wg.Add(1) wg.Add(1)
go cs.compilerWorker(i, wg, all, &artifacts, mutex, concurrency, keepPermissions, errors) go cs.compilerWorker(i, wg, all, &artifacts, mutex, cs.Concurrency, keepPermissions, errors)
} }
for _, p := range ps.All() { for _, p := range ps.All() {
@@ -213,6 +227,12 @@ func (cs *LuetCompiler) stripIncludesFromRootfs(includes []string, rootfs string
} }
func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage string, concurrency int, keepPermissions, keepImg bool, p CompilationSpec) (Artifact, error) { func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage string, concurrency int, keepPermissions, keepImg bool, p CompilationSpec) (Artifact, error) {
if !cs.Clean {
if art, err := LoadArtifactFromYaml(p); err == nil {
Debug("Artifact reloaded. Skipping build")
return art, err
}
}
pkgTag := ":package: " + p.GetPackage().GetName() pkgTag := ":package: " + p.GetPackage().GetName()
p.SetSeedImage(image) // In this case, we ignore the build deps as we suppose that the image has them - otherwise we recompose the tree with a solver, p.SetSeedImage(image) // In this case, we ignore the build deps as we suppose that the image has them - otherwise we recompose the tree with a solver,
@@ -339,21 +359,22 @@ func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage
// strip from includes // strip from includes
cs.stripIncludesFromRootfs(p.GetIncludes(), rootfs) cs.stripIncludesFromRootfs(p.GetIncludes(), rootfs)
} }
artifact = NewPackageArtifact(p.Rel(p.GetPackage().GetFingerPrint() + ".package.tar"))
err = helpers.Tar(rootfs, p.Rel(p.GetPackage().GetFingerPrint()+".package.tar")) artifact.SetCompressionType(cs.CompressionType)
err = artifact.Compress(rootfs, concurrency)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "Error met while creating package archive") return nil, errors.Wrap(err, "Error met while creating package archive")
} }
artifact = NewPackageArtifact(p.Rel(p.GetPackage().GetFingerPrint() + ".package.tar"))
artifact.SetCompileSpec(p) artifact.SetCompileSpec(p)
} else { } else {
Info(pkgTag, "Generating delta") Info(pkgTag, "Generating delta")
artifact, err = ExtractArtifactFromDelta(rootfs, p.Rel(p.GetPackage().GetFingerPrint()+".package.tar"), diffs, concurrency, keepPermissions, p.GetIncludes()) artifact, err = ExtractArtifactFromDelta(rootfs, p.Rel(p.GetPackage().GetFingerPrint()+".package.tar"), diffs, concurrency, keepPermissions, p.GetIncludes(), cs.CompressionType)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "Could not generate deltas") return nil, errors.Wrap(err, "Could not generate deltas")
} }
artifact.SetCompileSpec(p) artifact.SetCompileSpec(p)
} }
@@ -366,7 +387,13 @@ func (cs *LuetCompiler) compileWithImage(image, buildertaggedImage, packageImage
return artifact, nil return artifact, nil
} }
func (cs *LuetCompiler) packageFromImage(p CompilationSpec, tag string, keepPermissions, keepImg bool) (Artifact, error) { func (cs *LuetCompiler) packageFromImage(p CompilationSpec, tag string, keepPermissions, keepImg bool, concurrency int) (Artifact, error) {
if !cs.Clean {
if art, err := LoadArtifactFromYaml(p); err == nil {
Debug("Artifact reloaded. Skipping build")
return art, err
}
}
pkgTag := ":package: " + p.GetPackage().GetName() pkgTag := ":package: " + p.GetPackage().GetName()
Info(pkgTag, " 🍩 Build starts 🔨 🔨 🔨 ") Info(pkgTag, " 🍩 Build starts 🔨 🔨 🔨 ")
@@ -404,8 +431,11 @@ func (cs *LuetCompiler) packageFromImage(p CompilationSpec, tag string, keepPerm
if err != nil { if err != nil {
return nil, errors.Wrap(err, "Could not extract rootfs") return nil, errors.Wrap(err, "Could not extract rootfs")
} }
artifact := NewPackageArtifact(p.Rel(p.GetPackage().GetFingerPrint() + ".package.tar"))
artifact.SetCompileSpec(p)
artifact.SetCompressionType(cs.CompressionType)
err = helpers.Tar(rootfs, p.Rel(p.GetPackage().GetFingerPrint()+".package.tar")) err = artifact.Compress(rootfs, concurrency)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "Error met while creating package archive") return nil, errors.Wrap(err, "Error met while creating package archive")
} }
@@ -422,8 +452,7 @@ func (cs *LuetCompiler) packageFromImage(p CompilationSpec, tag string, keepPerm
} }
Info(pkgTag, " :white_check_mark: Done") Info(pkgTag, " :white_check_mark: Done")
artifact := NewPackageArtifact(p.Rel(p.GetPackage().GetFingerPrint() + ".package.tar"))
artifact.SetCompileSpec(p)
err = artifact.WriteYaml(p.GetOutputPath()) err = artifact.WriteYaml(p.GetOutputPath())
if err != nil { if err != nil {
return artifact, err return artifact, err
@@ -459,13 +488,13 @@ func (cs *LuetCompiler) ComputeDepTree(p CompilationSpec) (solver.PackagesAssert
} }
// Compile is non-parallel // Compile is non-parallel
func (cs *LuetCompiler) Compile(concurrency int, keepPermissions bool, p CompilationSpec) (Artifact, error) { func (cs *LuetCompiler) Compile(keepPermissions bool, p CompilationSpec) (Artifact, error) {
asserts, err := cs.ComputeDepTree(p) asserts, err := cs.ComputeDepTree(p)
if err != nil { if err != nil {
panic(err) panic(err)
} }
p.SetSourceAssertion(asserts) p.SetSourceAssertion(asserts)
return cs.compile(concurrency, keepPermissions, p) return cs.compile(cs.Concurrency, keepPermissions, p)
} }
func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p CompilationSpec) (Artifact, error) { func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p CompilationSpec) (Artifact, error) {
@@ -480,7 +509,7 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p Compila
// Treat last case (easier) first. The image is provided and we just compute a plain dockerfile with the images listed as above // Treat last case (easier) first. The image is provided and we just compute a plain dockerfile with the images listed as above
if p.GetImage() != "" { if p.GetImage() != "" {
if p.ImageUnpack() { // If it is just an entire image, create a package from it if p.ImageUnpack() { // If it is just an entire image, create a package from it
return cs.packageFromImage(p, "", keepPermissions, cs.KeepImg) return cs.packageFromImage(p, "", keepPermissions, cs.KeepImg, concurrency)
} }
return cs.compileWithImage(p.GetImage(), "", "", concurrency, keepPermissions, cs.KeepImg, p) return cs.compileWithImage(p.GetImage(), "", "", concurrency, keepPermissions, cs.KeepImg, p)
@@ -527,7 +556,7 @@ func (cs *LuetCompiler) compile(concurrency int, keepPermissions bool, p Compila
return nil, errors.New("No image defined for package: " + assertion.Package.GetName()) return nil, errors.New("No image defined for package: " + assertion.Package.GetName())
} }
Info(pkgTag, ":whale: Sourcing package from image", compileSpec.GetImage()) Info(pkgTag, ":whale: Sourcing package from image", compileSpec.GetImage())
artifact, err := cs.packageFromImage(compileSpec, currentPackageImageHash, keepPermissions, cs.KeepImg) artifact, err := cs.packageFromImage(compileSpec, currentPackageImageHash, keepPermissions, cs.KeepImg, concurrency)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "Failed compiling "+compileSpec.GetPackage().GetName()) return nil, errors.Wrap(err, "Failed compiling "+compileSpec.GetPackage().GetName())
} }


@@ -38,7 +38,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -53,7 +53,9 @@ var _ = Describe("Compiler", func() {
Expect(spec.GetPreBuildSteps()).To(Equal([]string{"echo foo > /test", "echo bar > /test2", "chmod +x generate.sh"})) Expect(spec.GetPreBuildSteps()).To(Equal([]string{"echo foo > /test", "echo bar > /test2", "chmod +x generate.sh"}))
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
artifact, err := compiler.Compile(2, false, spec) compiler.SetConcurrency(2)
artifact, err := compiler.Compile(false, spec)
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
Expect(helpers.Exists(artifact.GetPath())).To(BeTrue()) Expect(helpers.Exists(artifact.GetPath())).To(BeTrue())
Expect(helpers.Untar(artifact.GetPath(), tmpdir, false)).ToNot(HaveOccurred()) Expect(helpers.Untar(artifact.GetPath(), tmpdir, false)).ToNot(HaveOccurred())
@@ -80,7 +82,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -95,7 +97,8 @@ var _ = Describe("Compiler", func() {
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
spec2.SetOutputPath(tmpdir) spec2.SetOutputPath(tmpdir)
artifacts, errs := compiler.CompileParallel(2, false, NewLuetCompilationspecs(spec, spec2)) compiler.SetConcurrency(2)
artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec, spec2))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
for _, artifact := range artifacts { for _, artifact := range artifacts {
Expect(helpers.Exists(artifact.GetPath())).To(BeTrue()) Expect(helpers.Exists(artifact.GetPath())).To(BeTrue())
@@ -117,7 +120,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(4)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(4))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "c", Category: "test", Version: "1.0"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "c", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -134,8 +137,9 @@ var _ = Describe("Compiler", func() {
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
spec2.SetOutputPath(tmpdir) spec2.SetOutputPath(tmpdir)
spec3.SetOutputPath(tmpdir) spec3.SetOutputPath(tmpdir)
compiler.SetConcurrency(2)
artifacts, errs := compiler.CompileParallel(2, false, NewLuetCompilationspecs(spec, spec2, spec3)) artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec, spec2, spec3))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(3)) Expect(len(artifacts)).To(Equal(3))
@@ -173,7 +177,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "extra", Category: "layer", Version: "1.0"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "extra", Category: "layer", Version: "1.0"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -181,12 +185,12 @@ var _ = Describe("Compiler", func() {
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
spec2.SetOutputPath(tmpdir) spec2.SetOutputPath(tmpdir)
compiler.SetConcurrency(1)
artifacts, errs := compiler.CompileParallel(1, false, NewLuetCompilationspecs(spec)) artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1)) Expect(len(artifacts)).To(Equal(1))
artifacts2, errs := compiler.CompileParallel(1, false, NewLuetCompilationspecs(spec2)) artifacts2, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec2))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
Expect(len(artifacts2)).To(Equal(1)) Expect(len(artifacts2)).To(Equal(1))
@@ -215,7 +219,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -224,8 +228,9 @@ var _ = Describe("Compiler", func() {
// Expect(err).ToNot(HaveOccurred()) // Expect(err).ToNot(HaveOccurred())
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
compiler.SetConcurrency(1)
artifacts, errs := compiler.CompileParallel(1, false, NewLuetCompilationspecs(spec)) artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1)) Expect(len(artifacts)).To(Equal(1))
@@ -249,7 +254,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -258,8 +263,8 @@ var _ = Describe("Compiler", func() {
// Expect(err).ToNot(HaveOccurred()) // Expect(err).ToNot(HaveOccurred())
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
compiler.SetConcurrency(1)
artifacts, errs := compiler.CompileParallel(1, false, NewLuetCompilationspecs(spec)) artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1)) Expect(len(artifacts)).To(Equal(1))
@@ -287,7 +292,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "pkgs-checker", Category: "package", Version: "9999"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "pkgs-checker", Category: "package", Version: "9999"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -296,8 +301,9 @@ var _ = Describe("Compiler", func() {
// Expect(err).ToNot(HaveOccurred()) // Expect(err).ToNot(HaveOccurred())
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
compiler.SetConcurrency(1)
artifacts, errs := compiler.CompileParallel(1, false, NewLuetCompilationspecs(spec)) artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1)) Expect(len(artifacts)).To(Equal(1))
@@ -327,7 +333,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "d", Category: "test", Version: "1.0"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "d", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -336,8 +342,9 @@ var _ = Describe("Compiler", func() {
// Expect(err).ToNot(HaveOccurred()) // Expect(err).ToNot(HaveOccurred())
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
compiler.SetConcurrency(1)
artifacts, errs := compiler.CompileParallel(1, false, NewLuetCompilationspecs(spec)) artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1)) Expect(len(artifacts)).To(Equal(1))
Expect(len(artifacts[0].GetDependencies())).To(Equal(1)) Expect(len(artifacts[0].GetDependencies())).To(Equal(1))
@@ -370,7 +377,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "d", Category: "test", Version: "1.0"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "d", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -379,8 +386,9 @@ var _ = Describe("Compiler", func() {
// Expect(err).ToNot(HaveOccurred()) // Expect(err).ToNot(HaveOccurred())
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
compiler.SetConcurrency(1)
artifacts, errs := compiler.CompileParallel(1, false, NewLuetCompilationspecs(spec)) artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1)) Expect(len(artifacts)).To(Equal(1))
Expect(len(artifacts[0].GetDependencies())).To(Equal(1)) Expect(len(artifacts[0].GetDependencies())).To(Equal(1))
@@ -411,7 +419,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "extra", Category: "layer", Version: "0.1"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "extra", Category: "layer", Version: "0.1"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -421,7 +429,7 @@ var _ = Describe("Compiler", func() {
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
artifacts, errs := compiler.CompileWithReverseDeps(1, false, NewLuetCompilationspecs(spec)) artifacts, errs := compiler.CompileWithReverseDeps(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(2)) Expect(len(artifacts)).To(Equal(2))
@@ -449,7 +457,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(10)) Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(10))
compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase()) compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "vhba", Category: "sys-fs-5.4.2", Version: "20190410"}) spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "vhba", Category: "sys-fs-5.4.2", Version: "20190410"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@@ -459,7 +467,7 @@ var _ = Describe("Compiler", func() {
spec.SetOutputPath(tmpdir) spec.SetOutputPath(tmpdir)
artifacts, errs := compiler.CompileParallel(1, false, NewLuetCompilationspecs(spec)) artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil()) Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1)) Expect(len(artifacts)).To(Equal(1))
Expect(len(artifacts[0].GetDependencies())).To(Equal(6)) Expect(len(artifacts[0].GetDependencies())).To(Equal(6))
@@ -488,13 +496,13 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(4))
-compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase())
+compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
spec.SetOutputPath(tmpdir)
-artifacts, errs := compiler.CompileWithReverseDeps(1, false, NewLuetCompilationspecs(spec))
+artifacts, errs := compiler.CompileWithReverseDeps(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(4))
@@ -540,7 +548,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
-compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase())
+compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "c", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -552,7 +560,9 @@ var _ = Describe("Compiler", func() {
defer os.RemoveAll(tmpdir) // clean up
spec.SetOutputPath(tmpdir)
-artifacts, errs := compiler.CompileParallel(2, false, NewLuetCompilationspecs(spec))
+compiler.SetConcurrency(2)
+artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil())
for _, artifact := range artifacts {
Expect(helpers.Exists(artifact.GetPath())).To(BeTrue())
@@ -581,7 +591,7 @@ var _ = Describe("Compiler", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
-compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase())
+compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "runtime", Category: "layer", Version: "0.1"})
Expect(err).ToNot(HaveOccurred())
@@ -593,7 +603,9 @@ var _ = Describe("Compiler", func() {
defer os.RemoveAll(tmpdir) // clean up
spec.SetOutputPath(tmpdir)
-artifacts, errs := compiler.CompileParallel(2, false, NewLuetCompilationspecs(spec))
+compiler.SetConcurrency(1)
+artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
Expect(errs).To(BeNil())
Expect(len(artifacts)).To(Equal(1))
Expect(len(artifacts[0].GetDependencies())).To(Equal(1))
@@ -602,4 +614,40 @@ var _ = Describe("Compiler", func() {
Expect(helpers.Exists(spec.Rel("var"))).ToNot(BeTrue())
})
})
+Context("Compression", func() {
+It("Builds packages in gzip", func() {
+generalRecipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
+err := generalRecipe.Load("../../tests/fixtures/packagelayers")
+Expect(err).ToNot(HaveOccurred())
+Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(2))
+compiler := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
+spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "runtime", Category: "layer", Version: "0.1"})
+Expect(err).ToNot(HaveOccurred())
+compiler.SetCompressionType(GZip)
+Expect(spec.GetPackage().GetPath()).ToNot(Equal(""))
+tmpdir, err := ioutil.TempDir("", "tree")
+Expect(err).ToNot(HaveOccurred())
+defer os.RemoveAll(tmpdir) // clean up
+spec.SetOutputPath(tmpdir)
+compiler.SetConcurrency(1)
+artifacts, errs := compiler.CompileParallel(false, NewLuetCompilationspecs(spec))
+Expect(errs).To(BeNil())
+Expect(len(artifacts)).To(Equal(1))
+Expect(len(artifacts[0].GetDependencies())).To(Equal(1))
+Expect(helpers.Exists(spec.Rel("runtime-layer-0.1.package.tar.gz"))).To(BeTrue())
+Expect(helpers.Exists(spec.Rel("runtime-layer-0.1.package.tar"))).To(BeFalse())
+Expect(artifacts[0].Unpack(tmpdir, false)).ToNot(HaveOccurred())
+// Expect(helpers.Untar(spec.Rel("runtime-layer-0.1.package.tar"), tmpdir, false)).ToNot(HaveOccurred())
+Expect(helpers.Exists(spec.Rel("bin/busybox"))).To(BeTrue())
+Expect(helpers.Exists(spec.Rel("var"))).ToNot(BeTrue())
+})
+})
})
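
Taken together, the test changes above show the new shape of the compiler API in 0.4: concurrency and compression are no longer passed to every Compile/CompileParallel/CompileWithReverseDeps call but are configured once on the compiler. The sketch below is not part of the diff; it only mirrors the calls exercised by the tests above and assumes it sits alongside them so the same imports and identifiers (tree, pkg, sd) are in scope. buildRuntimeLayerExample and the /tmp/out path are hypothetical.

func buildRuntimeLayerExample() ([]Artifact, []error) {
	generalRecipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
	if err := generalRecipe.Load("../../tests/fixtures/packagelayers"); err != nil {
		return nil, []error{err}
	}
	c := NewLuetCompiler(sd.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
	c.SetConcurrency(2)        // replaces the former leading int argument
	c.SetCompressionType(GZip) // emits runtime-layer-0.1.package.tar.gz instead of a plain .tar
	spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "runtime", Category: "layer", Version: "0.1"})
	if err != nil {
		return nil, []error{err}
	}
	spec.SetOutputPath("/tmp/out") // hypothetical output directory
	return c.CompileParallel(false, NewLuetCompilationspecs(spec))
}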


@@ -16,20 +16,23 @@
package compiler
import (
+"runtime"
pkg "github.com/mudler/luet/pkg/package"
"github.com/mudler/luet/pkg/solver"
)
type Compiler interface {
-Compile(int, bool, CompilationSpec) (Artifact, error)
-CompileParallel(concurrency int, keepPermissions bool, ps CompilationSpecs) ([]Artifact, []error)
-CompileWithReverseDeps(concurrency int, keepPermissions bool, ps CompilationSpecs) ([]Artifact, []error)
+Compile(bool, CompilationSpec) (Artifact, error)
+CompileParallel(keepPermissions bool, ps CompilationSpecs) ([]Artifact, []error)
+CompileWithReverseDeps(keepPermissions bool, ps CompilationSpecs) ([]Artifact, []error)
ComputeDepTree(p CompilationSpec) (solver.PackagesAssertions, error)
+SetConcurrency(i int)
FromPackage(pkg.Package) (CompilationSpec, error)
SetBackend(CompilerBackend)
GetBackend() CompilerBackend
+SetCompressionType(t CompressionImplementation)
}
type CompilerBackendOptions struct {
@@ -39,6 +42,25 @@ type CompilerBackendOptions struct {
Destination string
}
+type CompilerOptions struct {
+ImageRepository string
+PullFirst, KeepImg bool
+Concurrency int
+CompressionType CompressionImplementation
+Clean bool
+}
+func NewDefaultCompilerOptions() *CompilerOptions {
+return &CompilerOptions{
+ImageRepository: "luet/cache",
+PullFirst: true,
+CompressionType: None,
+KeepImg: true,
+Concurrency: runtime.NumCPU(),
+Clean: true,
+}
+}
type CompilerBackend interface {
BuildImage(CompilerBackendOptions) error
ExportImage(CompilerBackendOptions) error
@@ -62,6 +84,12 @@ type Artifact interface {
SetCompileSpec(as CompilationSpec)
GetCompileSpec() CompilationSpec
WriteYaml(dst string) error
+Unpack(dst string, keepPerms bool) error
+Compress(src string, concurrency int) error
+SetCompressionType(t CompressionImplementation)
+FileList() ([]string, error)
+Hash() error
+Verify() error
}
type ArtifactNode struct {
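
NewDefaultCompilerOptions fills the new struct with the values shown above: ImageRepository "luet/cache", PullFirst, KeepImg and Clean set to true, CompressionType None (which, per the "Treat CompressionType none as default" commit, also matches repositories that never declared a compression type), and Concurrency set to runtime.NumCPU(). Callers that want different behaviour can build the options themselves instead of going through the setters; a minimal sketch, assuming only the fields and constructor shown in this compare (backend, db and spec stand in for a CompilerBackend, a package database and a CompilationSpec, and "example/cache" is a hypothetical image repository):

opts := &CompilerOptions{
	ImageRepository: "example/cache", // hypothetical repository
	PullFirst:       false,
	KeepImg:         false,
	Concurrency:     4,
	CompressionType: GZip,
	Clean:           true,
}
c := NewLuetCompiler(backend, db, opts)
artifact, err := c.Compile(false, spec) // spec obtained earlier via c.FromPackage(...)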


@@ -61,7 +61,7 @@ var _ = Describe("Spec", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(1))
-compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase())
+compiler := NewLuetCompiler(nil, generalRecipe.GetDatabase(), NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "enman", Category: "app-admin", Version: "1.4.0"})
Expect(err).ToNot(HaveOccurred())


@@ -64,8 +64,9 @@ func (c *HttpClient) DownloadArtifact(artifact compiler.Artifact) (compiler.Arti
}
err = helpers.CopyFile(filepath.Join(temp, artifactName), file.Name())
-return compiler.NewPackageArtifact(file.Name()), nil
+newart := artifact
+newart.SetPath(file.Name())
+return newart, nil
}
func (c *HttpClient) DownloadFile(name string) (string, error) {


@@ -45,8 +45,9 @@ func (c *LocalClient) DownloadArtifact(artifact compiler.Artifact) (compiler.Art
//defer os.Remove(file.Name())
err = helpers.CopyFile(filepath.Join(c.RepoData.Uri, artifactName), file.Name())
-return compiler.NewPackageArtifact(file.Name()), nil
+newart := artifact
+newart.SetPath(file.Name())
+return newart, nil
}
func (c *LocalClient) DownloadFile(name string) (string, error) {
Info("Downloading file", name, "from", c.RepoData.Uri)


@@ -16,8 +16,6 @@
package installer
import (
-"archive/tar"
-"io"
"io/ioutil"
"os"
"os/exec"
@@ -136,7 +134,29 @@ func (l *LuetInstaller) Upgrade(s *System) error {
return l.Install(toInstall, s)
}
-func (l *LuetInstaller) Install(p []pkg.Package, s *System) error {
+func (l *LuetInstaller) Install(cp []pkg.Package, s *System) error {
+var p []pkg.Package
+// Check if the package is installed first
+for _, pi := range cp {
+vers, _ := s.Database.FindPackageVersions(pi)
+if len(vers) >= 1 {
+Warning("Filtering out package " + pi.GetFingerPrint() + ", it has other versions already installed. Uninstall one of them first ")
+continue
+//return errors.New("Package " + pi.GetFingerPrint() + " has other versions already installed. Uninstall one of them first: " + strings.Join(vers, " "))
+}
+p = append(p, pi)
+}
+if len(p) == 0 {
+Warning("No package to install, bailing out with no errors")
+return nil
+}
// First get metas from all repos (and decodes trees)
Spinner(32)
@@ -170,7 +190,7 @@ func (l *LuetInstaller) Install(p []pkg.Package, s *System) error {
for _, assertion := range solution {
if assertion.Value {
matches := syncedRepos.PackageMatches([]pkg.Package{assertion.Package})
-if len(matches) != 1 {
+if len(matches) == 0 {
return errors.New("Failed matching solutions against repository - where are definitions coming from?!")
}
A:
@@ -264,39 +284,22 @@ func (l *LuetInstaller) Install(p []pkg.Package, s *System) error {
func (l *LuetInstaller) installPackage(a ArtifactMatch, s *System) error {
-// FIXME: Implement
+Info("Installing", a.Package.GetName())
artifact, err := a.Repository.Client().DownloadArtifact(a.Artifact)
defer os.Remove(artifact.GetPath())
-tarFile, err := os.Open(artifact.GetPath())
+err = artifact.Verify()
if err != nil {
-return errors.Wrap(err, "Could not open package archive")
+return errors.Wrap(err, "Artifact integrity check failure")
}
-defer tarFile.Close()
-tr := tar.NewReader(tarFile)
-var files []string
-// untar each segment
-for {
-hdr, err := tr.Next()
-if err == io.EOF {
-break
-}
-if err != nil {
-return err
-}
-// determine proper file path info
-finfo := hdr.FileInfo()
-fileName := hdr.Name
-if finfo.Mode().IsDir() {
-continue
-}
-files = append(files, fileName)
-// if a dir, create it, then go to next segment
-}
-err = helpers.Untar(artifact.GetPath(), s.Target, true)
+files, err := artifact.FileList()
+if err != nil {
+return errors.Wrap(err, "Could not get file list")
+}
+err = artifact.Unpack(s.Target, true)
if err != nil {
return errors.Wrap(err, "Error met while unpacking rootfs")
}
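
With the pre-flight loop added to Install above, asking for a package that already has an installed version is no longer an error: the request is filtered out with a warning and, if nothing is left to do, Install returns nil (the behaviour the new testReInstall integration test asserts). A minimal caller-side sketch, assuming the LuetInstaller, Repositories and System types used in the tests below; installTwiceExample, dbdir and rootfs are hypothetical:

func installTwiceExample(repos Repositories, dbdir, rootfs string) error {
	inst := NewLuetInstaller(1) // concurrency, as in the tests below
	inst.Repositories(repos)
	system := &System{Database: pkg.NewBoltDatabase(filepath.Join(dbdir, "db.db")), Target: rootfs}
	pkgs := []pkg.Package{&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"}}
	if err := inst.Install(pkgs, system); err != nil { // first call installs b/test/1.0
		return err
	}
	// Identical second call: the package is filtered out with a
	// "Filtering out package ..." warning and Install returns nil.
	return inst.Install(pkgs, system)
}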


@@ -47,7 +47,7 @@ var _ = Describe("Installer", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
-compiler := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase())
+compiler := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -62,7 +62,9 @@ var _ = Describe("Installer", func() {
Expect(spec.GetPreBuildSteps()).To(Equal([]string{"echo foo > /test", "echo bar > /test2", "chmod +x generate.sh"}))
spec.SetOutputPath(tmpdir)
-artifact, err := compiler.Compile(2, false, spec)
+compiler.SetConcurrency(2)
+artifact, err := compiler.Compile(false, spec)
Expect(err).ToNot(HaveOccurred())
Expect(helpers.Exists(artifact.GetPath())).To(BeTrue())
Expect(helpers.Untar(artifact.GetPath(), tmpdir, false)).ToNot(HaveOccurred())
@@ -157,7 +159,7 @@ uri: "`+tmpdir+`"
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
-compiler := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase())
+compiler := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -172,7 +174,9 @@ uri: "`+tmpdir+`"
Expect(spec.GetPreBuildSteps()).To(Equal([]string{"echo foo > /test", "echo bar > /test2", "chmod +x generate.sh"}))
spec.SetOutputPath(tmpdir)
-artifact, err := compiler.Compile(2, false, spec)
+compiler.SetConcurrency(2)
+artifact, err := compiler.Compile(false, spec)
Expect(err).ToNot(HaveOccurred())
Expect(helpers.Exists(artifact.GetPath())).To(BeTrue())
Expect(helpers.Untar(artifact.GetPath(), tmpdir, false)).ToNot(HaveOccurred())
@@ -273,7 +277,7 @@ uri: "`+tmpdir+`"
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(4))
-c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase())
+c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -291,7 +295,9 @@ uri: "`+tmpdir+`"
spec.SetOutputPath(tmpdir)
spec2.SetOutputPath(tmpdir)
spec3.SetOutputPath(tmpdir)
-_, errs := c.CompileParallel(2, false, compiler.NewLuetCompilationspecs(spec, spec2, spec3))
+c.SetConcurrency(2)
+_, errs := c.CompileParallel(false, compiler.NewLuetCompilationspecs(spec, spec2, spec3))
Expect(errs).To(BeEmpty())
@@ -369,4 +375,118 @@ uri: "`+tmpdir+`"
})
+Context("Compressed packages", func() {
+It("Installs", func() {
+//repo:=NewLuetRepository()
+tmpdir, err := ioutil.TempDir("", "tree")
+Expect(err).ToNot(HaveOccurred())
+defer os.RemoveAll(tmpdir) // clean up
+generalRecipe := tree.NewCompilerRecipe(pkg.NewInMemoryDatabase(false))
+err = generalRecipe.Load("../../tests/fixtures/upgrade")
+Expect(err).ToNot(HaveOccurred())
+Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(4))
+c := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
+spec, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
+Expect(err).ToNot(HaveOccurred())
+spec2, err := c.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.1"})
+Expect(err).ToNot(HaveOccurred())
+spec3, err := c.FromPackage(&pkg.DefaultPackage{Name: "c", Category: "test", Version: "1.0"})
+Expect(err).ToNot(HaveOccurred())
+Expect(spec.GetPackage().GetPath()).ToNot(Equal(""))
+tmpdir, err = ioutil.TempDir("", "tree")
+Expect(err).ToNot(HaveOccurred())
+defer os.RemoveAll(tmpdir) // clean up
+spec.SetOutputPath(tmpdir)
+spec2.SetOutputPath(tmpdir)
+spec3.SetOutputPath(tmpdir)
+c.SetConcurrency(2)
+c.SetCompressionType(compiler.GZip)
+_, errs := c.CompileParallel(false, compiler.NewLuetCompilationspecs(spec, spec2, spec3))
+Expect(errs).To(BeEmpty())
+repo, err := GenerateRepository("test", tmpdir, "local", 1, tmpdir, "../../tests/fixtures/upgrade", pkg.NewInMemoryDatabase(false))
+Expect(err).ToNot(HaveOccurred())
+Expect(repo.GetName()).To(Equal("test"))
+Expect(helpers.Exists(spec.Rel("repository.yaml"))).ToNot(BeTrue())
+Expect(helpers.Exists(spec.Rel("tree.tar"))).ToNot(BeTrue())
+err = repo.Write(tmpdir)
+Expect(err).ToNot(HaveOccurred())
+Expect(helpers.Exists(spec.Rel("b-test-1.1.package.tar.gz"))).To(BeTrue())
+Expect(helpers.Exists(spec.Rel("b-test-1.1.package.tar"))).ToNot(BeTrue())
+Expect(helpers.Exists(spec.Rel("repository.yaml"))).To(BeTrue())
+Expect(helpers.Exists(spec.Rel("tree.tar"))).To(BeTrue())
+Expect(repo.GetUri()).To(Equal(tmpdir))
+Expect(repo.GetType()).To(Equal("local"))
+fakeroot, err := ioutil.TempDir("", "fakeroot")
+Expect(err).ToNot(HaveOccurred())
+defer os.RemoveAll(fakeroot) // clean up
+inst := NewLuetInstaller(1)
+repo2, err := NewLuetRepositoryFromYaml([]byte(`
+name: "test"
+type: "local"
+uri: "`+tmpdir+`"
+`), pkg.NewInMemoryDatabase(false))
+Expect(err).ToNot(HaveOccurred())
+inst.Repositories(Repositories{repo2})
+Expect(repo.GetUri()).To(Equal(tmpdir))
+Expect(repo.GetType()).To(Equal("local"))
+bolt, err := ioutil.TempDir("", "db")
+Expect(err).ToNot(HaveOccurred())
+defer os.RemoveAll(bolt) // clean up
+systemDB := pkg.NewBoltDatabase(filepath.Join(bolt, "db.db"))
+system := &System{Database: systemDB, Target: fakeroot}
+err = inst.Install([]pkg.Package{&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"}}, system)
+Expect(err).ToNot(HaveOccurred())
+Expect(helpers.Exists(filepath.Join(fakeroot, "test5"))).To(BeTrue())
+Expect(helpers.Exists(filepath.Join(fakeroot, "test6"))).To(BeTrue())
+_, err = systemDB.FindPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
+Expect(err).ToNot(HaveOccurred())
+Expect(len(system.Database.GetPackages())).To(Equal(1))
+p, err := system.Database.GetPackage(system.Database.GetPackages()[0])
+Expect(err).ToNot(HaveOccurred())
+Expect(p.GetName()).To(Equal("b"))
+files, err := systemDB.GetPackageFiles(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
+Expect(files).To(Equal([]string{"artifact42", "test5", "test6"}))
+Expect(err).ToNot(HaveOccurred())
+err = inst.Upgrade(system)
+Expect(err).ToNot(HaveOccurred())
+// Nothing should be there anymore (files, packagedb entry)
+Expect(helpers.Exists(filepath.Join(fakeroot, "test5"))).ToNot(BeTrue())
+Expect(helpers.Exists(filepath.Join(fakeroot, "test6"))).ToNot(BeTrue())
+// New version - new files
+Expect(helpers.Exists(filepath.Join(fakeroot, "newc"))).To(BeTrue())
+_, err = system.Database.GetPackageFiles(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
+Expect(err).To(HaveOccurred())
+_, err = system.Database.FindPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
+Expect(err).To(HaveOccurred())
+// New package should be there
+_, err = system.Database.FindPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.1"})
+Expect(err).ToNot(HaveOccurred())
+})
+})
})


@@ -46,7 +46,7 @@ var _ = Describe("Repository", func() {
Expect(len(generalRecipe.GetDatabase().GetPackages())).To(Equal(3))
-compiler := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase())
+compiler := compiler.NewLuetCompiler(backend.NewSimpleDockerBackend(), generalRecipe.GetDatabase(), compiler.NewDefaultCompilerOptions())
spec, err := compiler.FromPackage(&pkg.DefaultPackage{Name: "b", Category: "test", Version: "1.0"})
Expect(err).ToNot(HaveOccurred())
@@ -61,7 +61,9 @@ var _ = Describe("Repository", func() {
Expect(spec.GetPreBuildSteps()).To(Equal([]string{"echo foo > /test", "echo bar > /test2", "chmod +x generate.sh"}))
spec.SetOutputPath(tmpdir)
-artifact, err := compiler.Compile(2, false, spec)
+compiler.SetConcurrency(1)
+artifact, err := compiler.Compile(false, spec)
Expect(err).ToNot(HaveOccurred())
Expect(helpers.Exists(artifact.GetPath())).To(BeTrue())
Expect(helpers.Untar(artifact.GetPath(), tmpdir, false)).ToNot(HaveOccurred())


@@ -196,7 +196,7 @@ func (assertions PackagesAssertions) Order(definitiondb pkg.PackageDatabase, fin
for _, res := range result {
a, ok := tmpMap[res]
if !ok {
-panic("fail")
+panic("fail looking for " + res)
// continue
}
orderedAssertions = append(orderedAssertions, a)

tests/integration/01_simple.sh (new executable file, 75 lines)

@@ -0,0 +1,75 @@
#!/bin/bash
export LUET_NOLOCK=true
oneTimeSetUp() {
export tmpdir="$(mktemp -d)"
}
oneTimeTearDown() {
rm -rf "$tmpdir"
}
testBuild() {
mkdir $tmpdir/testbuild
luet build --tree "$ROOT_DIR/tests/fixtures/buildableseed" --destination $tmpdir/testbuild --compression gzip test/c-1.0 > /dev/null
buildst=$?
assertEquals 'builds successfully' "$buildst" "0"
assertTrue 'create package dep B' "[ -e '$tmpdir/testbuild/b-test-1.0.package.tar.gz' ]"
assertTrue 'create package' "[ -e '$tmpdir/testbuild/c-test-1.0.package.tar.gz' ]"
}
testRepo() {
assertTrue 'no repository' "[ ! -e '$tmpdir/testbuild/repository.yaml' ]"
luet create-repo --tree "$ROOT_DIR/tests/fixtures/buildableseed" \
--output $tmpdir/testbuild \
--packages $tmpdir/testbuild \
--name "test" \
--uri $tmpdir/testrootfs \
--type local > /dev/null
createst=$?
assertEquals 'create repo successfully' "$createst" "0"
assertTrue 'create repository' "[ -e '$tmpdir/testbuild/repository.yaml' ]"
}
testInstall() {
mkdir $tmpdir/testrootfs
cat <<EOF > $tmpdir/luet.yaml
system-repositories:
- name: "main"
type: "local"
uri: "$tmpdir/testbuild"
EOF
luet install --config $tmpdir/luet.yaml --system-dbpath $tmpdir/testrootfs --system-target $tmpdir/testrootfs test/c-1.0 > /dev/null
installst=$?
assertEquals 'install test successfully' "$installst" "0"
assertTrue 'package installed' "[ -e '$tmpdir/testrootfs/c' ]"
}
testReInstall() {
output=$(luet install --config $tmpdir/luet.yaml --system-dbpath $tmpdir/testrootfs --system-target $tmpdir/testrootfs test/c-1.0)
installst=$?
assertEquals 'install test successfully' "$installst" "0"
assertContains 'contains warning' "$output" 'Filtering out'
}
testUnInstall() {
luet uninstall --config $tmpdir/luet.yaml --system-dbpath $tmpdir/testrootfs --system-target $tmpdir/testrootfs test/c-1.0 > /dev/null
installst=$?
assertEquals 'uninstall test successfully' "$installst" "0"
assertTrue 'package uninstalled' "[ ! -e '$tmpdir/testrootfs/c' ]"
}
testInstallAgain() {
assertTrue 'package uninstalled' "[ ! -e '$tmpdir/testrootfs/c' ]"
output=$(luet install --config $tmpdir/luet.yaml --system-dbpath $tmpdir/testrootfs --system-target $tmpdir/testrootfs test/c-1.0)
installst=$?
assertEquals 'install test successfully' "$installst" "0"
assertNotContains 'contains warning' "$output" 'Filtering out'
assertTrue 'package installed' "[ -e '$tmpdir/testrootfs/c' ]"
}
# Load shUnit2.
. "$ROOT_DIR/tests/integration/shunit2"/shunit2

tests/integration/run.sh (new executable file, 15 lines)

@@ -0,0 +1,15 @@
#!/bin/bash
set -e
export ROOT_DIR="$(git rev-parse --show-toplevel)"
pushd $ROOT_DIR
go build -o "$ROOT_DIR/tests/integration/bin/luet"
popd
[ ! -d "$ROOT_DIR/tests/integration/shunit2" ] && git clone https://github.com/kward/shunit2.git "$ROOT_DIR/tests/integration/shunit2"
export PATH=$ROOT_DIR/tests/integration/bin/:$PATH
"$ROOT_DIR/tests/integration/01_simple.sh"