Bump github.com/containers/storage from 1.16.0 to 1.16.1

Bumps [github.com/containers/storage](https://github.com/containers/storage) from 1.16.0 to 1.16.1.
- [Release notes](https://github.com/containers/storage/releases)
- [Changelog](https://github.com/containers/storage/blob/master/docs/containers-storage-changes.md)
- [Commits](https://github.com/containers/storage/compare/v1.16.0...v1.16.1)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
Signed-off-by: Daniel J Walsh <dwalsh@redhat.com>
Author: dependabot-preview[bot]
Date: 2020-03-04 09:16:57 +00:00
Committed by: Daniel J Walsh
Parent: 9b984e8eba
Commit: 7dcfc18309
37 changed files with 1642 additions and 333 deletions

View File

@@ -806,7 +806,7 @@ func (b *blockEnc) genCodes() {
mlH[v]++
if v > mlMax {
mlMax = v
-if debug && mlMax > maxMatchLengthSymbol {
+if debugAsserts && mlMax > maxMatchLengthSymbol {
panic(fmt.Errorf("mlMax > maxMatchLengthSymbol (%d), matchlen: %d", mlMax, seq.matchLen))
}
}
@@ -821,13 +821,13 @@ func (b *blockEnc) genCodes() {
}
return int(max)
}
-if mlMax > maxMatchLengthSymbol {
+if debugAsserts && mlMax > maxMatchLengthSymbol {
panic(fmt.Errorf("mlMax > maxMatchLengthSymbol (%d)", mlMax))
}
-if ofMax > maxOffsetBits {
+if debugAsserts && ofMax > maxOffsetBits {
panic(fmt.Errorf("ofMax > maxOffsetBits (%d)", ofMax))
}
-if llMax > maxLiteralLengthSymbol {
+if debugAsserts && llMax > maxLiteralLengthSymbol {
panic(fmt.Errorf("llMax > maxLiteralLengthSymbol (%d)", llMax))
}

View File

@@ -30,7 +30,7 @@ type byteBuffer interface {
type byteBuf []byte
func (b *byteBuf) readSmall(n int) []byte {
-if debug && n > 8 {
+if debugAsserts && n > 8 {
panic(fmt.Errorf("small read > 8 (%d). use readBig", n))
}
bb := *b
@@ -82,7 +82,7 @@ type readerWrapper struct {
}
func (r *readerWrapper) readSmall(n int) []byte {
-if debug && n > 8 {
+if debugAsserts && n > 8 {
panic(fmt.Errorf("small read > 8 (%d). use readBig", n))
}
n2, err := io.ReadFull(r.r, r.tmp[:n])

View File

@@ -315,7 +315,7 @@ func (d *Decoder) DecodeAll(input, dst []byte) ([]byte, error) {
if size > 1<<20 {
size = 1 << 20
}
-dst = make([]byte, 0, frame.WindowSize)
+dst = make([]byte, 0, size)
}
dst, err = frame.runDecoder(dst, block)

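Note on the DecodeAll hunk above: the initial dst allocation now uses the already-capped size estimate instead of frame.WindowSize, so a frame advertising a huge window no longer forces a large upfront allocation. Below is a minimal sketch of that pattern only; the names (decodeInto, declaredSize) are illustrative and not the library's API.

```go
package main

import "fmt"

// decodeInto mimics the allocation strategy in the hunk above: start from the
// frame's declared size but never preallocate more than 1 MiB up front; the
// slice simply grows via append if the content really is larger.
func decodeInto(dst []byte, declaredSize int) []byte {
	if cap(dst) == 0 {
		size := declaredSize
		if size > 1<<20 {
			size = 1 << 20
		}
		dst = make([]byte, 0, size)
	}
	// ...decoded blocks would be appended to dst here...
	return dst
}

func main() {
	out := decodeInto(nil, 1<<30) // header claims 1 GiB
	fmt.Println(cap(out))         // still only 1048576
}
```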
View File

@@ -4,6 +4,8 @@
package zstd
+import "fmt"
const (
dFastLongTableBits = 17 // Bits used in the long match table
dFastLongTableSize = 1 << dFastLongTableBits // Size of the table
@@ -29,7 +31,7 @@ func (e *doubleFastEncoder) Encode(blk *blockEnc, src []byte) {
)
// Protect against e.cur wraparound.
-for e.cur > (1<<30)+e.maxMatchOff {
+for e.cur >= bufferReset {
if len(e.hist) == 0 {
for i := range e.table[:] {
e.table[i] = tableEntry{}
@@ -61,6 +63,7 @@ func (e *doubleFastEncoder) Encode(blk *blockEnc, src []byte) {
e.longTable[i].offset = v
}
e.cur = e.maxMatchOff
+break
}
s := e.addBlock(src)
@@ -110,7 +113,7 @@ encodeLoop:
canRepeat := len(blk.sequences) > 2
for {
-if debug && canRepeat && offset1 == 0 {
+if debugAsserts && canRepeat && offset1 == 0 {
panic("offset0 was 0")
}
@@ -229,10 +232,10 @@ encodeLoop:
// Reference encoder checks all 8 bytes, we only check 4,
// but the likelihood of both the first 4 bytes and the hash matching should be enough.
t = candidateL.offset - e.cur
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
-if debug && s-t > e.maxMatchOff {
+if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
if debugMatches {
@@ -266,13 +269,13 @@ encodeLoop:
}
t = candidateS.offset - e.cur
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
-if debug && s-t > e.maxMatchOff {
+if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
-if debug && t < 0 {
+if debugAsserts && t < 0 {
panic("t<0")
}
if debugMatches {
@@ -294,11 +297,11 @@ encodeLoop:
offset2 = offset1
offset1 = s - t
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
-if debug && canRepeat && int(offset1) > len(src) {
+if debugAsserts && canRepeat && int(offset1) > len(src) {
panic("invalid offset")
}
@@ -424,7 +427,7 @@ func (e *doubleFastEncoder) EncodeNoHist(blk *blockEnc, src []byte) {
)
// Protect against e.cur wraparound.
-if e.cur > (1<<30)+e.maxMatchOff {
+if e.cur >= bufferReset {
for i := range e.table[:] {
e.table[i] = tableEntry{}
}
@@ -545,10 +548,10 @@ encodeLoop:
// Reference encoder checks all 8 bytes, we only check 4,
// but the likelihood of both the first 4 bytes and the hash matching should be enough.
t = candidateL.offset - e.cur
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
-if debug && s-t > e.maxMatchOff {
+if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
if debugMatches {
@@ -582,13 +585,13 @@ encodeLoop:
}
t = candidateS.offset - e.cur
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
-if debug && s-t > e.maxMatchOff {
+if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
-if debug && t < 0 {
+if debugAsserts && t < 0 {
panic("t<0")
}
if debugMatches {
@@ -610,8 +613,8 @@ encodeLoop:
offset2 = offset1
offset1 = s - t
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
// Extend the 4-byte match as long as possible.

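Note on the wraparound hunks in this file (`for e.cur >= bufferReset` plus the added `break`): once the running cursor approaches the int32 limit, the encoder either clears its match tables (no history) or rebases every stored offset, rather than letting cur-offset arithmetic overflow. A rough sketch of that guard follows; the type, table size, and constant names (encoder, resetAt, maxMatchOff) are invented stand-ins, not the package's real ones.

```go
package main

import "fmt"

const (
	maxMatchOff = 1 << 16           // hypothetical maximum match offset
	resetAt     = 1<<31 - 1 - 1<<29 // threshold analogous to bufferReset
)

type tableEntry struct{ offset int32 }

type encoder struct {
	cur   int32
	hist  []byte
	table [8]tableEntry
}

// guardWraparound mirrors the loop-with-break pattern from the diff: it runs
// at most once, and only when the cursor has reached the reset threshold.
func (e *encoder) guardWraparound() {
	for e.cur >= resetAt {
		if len(e.hist) == 0 {
			// No history to preserve: wiping the table is enough.
			for i := range e.table {
				e.table[i] = tableEntry{}
			}
			e.cur = maxMatchOff
			break
		}
		// History must stay reachable: rebase every stored offset instead.
		minOff := e.cur + int32(len(e.hist)) - maxMatchOff
		for i := range e.table {
			v := e.table[i].offset
			if v < minOff {
				v = 0
			} else {
				v = v - e.cur + maxMatchOff
			}
			e.table[i].offset = v
		}
		e.cur = maxMatchOff
		break
	}
}

func main() {
	e := &encoder{cur: resetAt + 5}
	e.guardWraparound()
	fmt.Println(e.cur) // back down to 65536
}
```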
View File

@@ -5,6 +5,7 @@
package zstd
import (
+"fmt"
"math/bits"
"github.com/klauspost/compress/zstd/internal/xxhash"
@@ -74,7 +75,7 @@ func (e *fastEncoder) Encode(blk *blockEnc, src []byte) {
)
// Protect against e.cur wraparound.
-for e.cur > (1<<30)+e.maxMatchOff {
+for e.cur >= bufferReset {
if len(e.hist) == 0 {
for i := range e.table[:] {
e.table[i] = tableEntry{}
@@ -94,6 +95,7 @@ func (e *fastEncoder) Encode(blk *blockEnc, src []byte) {
e.table[i].offset = v
}
e.cur = e.maxMatchOff
+break
}
s := e.addBlock(src)
@@ -151,7 +153,7 @@ encodeLoop:
canRepeat := len(blk.sequences) > 2
for {
-if debug && canRepeat && offset1 == 0 {
+if debugAsserts && canRepeat && offset1 == 0 {
panic("offset0 was 0")
}
@@ -212,10 +214,10 @@ encodeLoop:
if coffset0 < e.maxMatchOff && uint32(cv) == candidate.val {
// found a regular match
t = candidate.offset - e.cur
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
-if debug && s-t > e.maxMatchOff {
+if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
break
@@ -225,13 +227,13 @@ encodeLoop:
// found a regular match
t = candidate2.offset - e.cur
s++
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
-if debug && s-t > e.maxMatchOff {
+if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
-if debug && t < 0 {
+if debugAsserts && t < 0 {
panic("t<0")
}
break
@@ -246,11 +248,11 @@ encodeLoop:
offset2 = offset1
offset1 = s - t
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
-if debug && canRepeat && int(offset1) > len(src) {
+if debugAsserts && canRepeat && int(offset1) > len(src) {
panic("invalid offset")
}
@@ -343,7 +345,7 @@ func (e *fastEncoder) EncodeNoHist(blk *blockEnc, src []byte) {
}
}
// Protect against e.cur wraparound.
-if e.cur > (1<<30)+e.maxMatchOff {
+if e.cur >= bufferReset {
for i := range e.table[:] {
e.table[i] = tableEntry{}
}
@@ -456,10 +458,10 @@ encodeLoop:
if coffset0 < e.maxMatchOff && uint32(cv) == candidate.val {
// found a regular match
t = candidate.offset - e.cur
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
-if debug && s-t > e.maxMatchOff {
+if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
break
@@ -469,13 +471,13 @@ encodeLoop:
// found a regular match
t = candidate2.offset - e.cur
s++
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
-if debug && s-t > e.maxMatchOff {
+if debugAsserts && s-t > e.maxMatchOff {
panic("s - t >e.maxMatchOff")
}
-if debug && t < 0 {
+if debugAsserts && t < 0 {
panic("t<0")
}
break
@@ -490,8 +492,8 @@ encodeLoop:
offset2 = offset1
offset1 = s - t
-if debug && s <= t {
-panic("s <= t")
+if debugAsserts && s <= t {
+panic(fmt.Sprintf("s (%d) <= t (%d)", s, t))
}
// Extend the 4-byte match as long as possible.
@@ -570,6 +572,9 @@ encodeLoop:
}
func (e *fastEncoder) addBlock(src []byte) int32 {
+if debugAsserts && e.cur > bufferReset {
+panic(fmt.Sprintf("ecur (%d) > buffer reset (%d)", e.cur, bufferReset))
+}
// check if we have space already
if len(e.hist)+len(src) > cap(e.hist) {
if cap(e.hist) == 0 {
@@ -608,15 +613,18 @@ func (e *fastEncoder) matchlenNoHist(s, t int32, src []byte) int32 {
}
func (e *fastEncoder) matchlen(s, t int32, src []byte) int32 {
-if debug {
+if debugAsserts {
if s < 0 {
-panic("s<0")
+err := fmt.Sprintf("s (%d) < 0", s)
+panic(err)
}
if t < 0 {
-panic("t<0")
+err := fmt.Sprintf("t (%d) < 0", t)
+panic(err)
}
if s-t > e.maxMatchOff {
-panic(s - t)
+err := fmt.Sprintf("s (%d) - t (%d) > maxMatchOff (%d)", s, t, e.maxMatchOff)
+panic(err)
}
}
s1 := int(s) + maxMatchLength - 4
@@ -650,7 +658,10 @@ func (e *fastEncoder) Reset() {
}
e.hist = make([]byte, 0, l)
}
-// We offset current position so everything will be out of reach
-e.cur += e.maxMatchOff + int32(len(e.hist))
+// We offset current position so everything will be out of reach.
+// If above reset line, history will be purged.
+if e.cur < bufferReset {
+e.cur += e.maxMatchOff + int32(len(e.hist))
+}
e.hist = e.hist[:0]
}

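Note on the Reset hunk above: Reset avoids clearing the large match tables by skipping the cursor ahead of everything they reference, and the new guard stops advancing once the cursor is already past the reset line, because the next Encode call purges history there anyway. A simplified illustration with made-up names (encoder, resetAt), not the package's actual types:

```go
package main

import "fmt"

const (
	maxMatchOff = 1 << 16           // hypothetical maximum match offset
	resetAt     = 1<<31 - 1 - 1<<29 // threshold analogous to bufferReset
)

type encoder struct {
	cur  int32
	hist []byte
}

// reset forgets prior input without touching the match tables: the cursor
// jumps far enough ahead that every stored offset is now further away than
// maxMatchOff and will be rejected by the range checks during matching.
func (e *encoder) reset() {
	if e.cur < resetAt {
		e.cur += maxMatchOff + int32(len(e.hist))
	}
	// Past the reset line we deliberately do not advance further, to avoid
	// pushing the cursor toward int32 overflow.
	e.hist = e.hist[:0]
}

func main() {
	e := &encoder{cur: 100, hist: make([]byte, 1<<10)}
	e.reset()
	fmt.Println(e.cur, len(e.hist)) // 66660 0
}
```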
View File

@@ -156,7 +156,7 @@ func (e *Encoder) Write(p []byte) (n int, err error) {
if err != nil {
return n, err
}
-if debug && len(s.filling) > 0 {
+if debugAsserts && len(s.filling) > 0 {
panic(len(s.filling))
}
}

View File

@@ -50,7 +50,7 @@ type frameDec struct {
const (
// The minimum Window_Size is 1 KB.
MinWindowSize = 1 << 10
-MaxWindowSize = 1 << 30
+MaxWindowSize = 1 << 29
)
var (

View File

@@ -118,7 +118,7 @@ func (s *fseDecoder) readNCount(b *byteReader, maxSymbol uint16) error {
if int32(bitStream)&(threshold-1) < max {
count = int32(bitStream) & (threshold - 1)
-if debug && nbBits < 1 {
+if debugAsserts && nbBits < 1 {
panic("nbBits underflow")
}
bitCount += nbBits - 1

View File

@@ -327,7 +327,7 @@ func (s *fseEncoder) normalizeCount(length int) error {
if err != nil {
return err
}
-if debug {
+if debugAsserts {
err = s.validateNorm()
if err != nil {
return err
@@ -336,7 +336,7 @@ func (s *fseEncoder) normalizeCount(length int) error {
return s.buildCTable()
}
s.norm[largest] += stillToDistribute
-if debug {
+if debugAsserts {
err := s.validateNorm()
if err != nil {
return err
@@ -619,7 +619,7 @@ func (s *fseEncoder) writeCount(out []byte) ([]byte, error) {
func (s *fseEncoder) bitCost(symbolValue uint8, accuracyLog uint32) uint32 {
minNbBits := s.ct.symbolTT[symbolValue].deltaNbBits >> 16
threshold := (minNbBits + 1) << 16
-if debug {
+if debugAsserts {
if !(s.actualTableLog < 16) {
panic("!s.actualTableLog < 16")
}
@@ -633,7 +633,7 @@ func (s *fseEncoder) bitCost(symbolValue uint8, accuracyLog uint32) uint32 {
// linear interpolation (very approximate)
normalizedDeltaFromThreshold := (deltaFromThreshold << accuracyLog) >> s.actualTableLog
bitMultiplier := uint32(1) << accuracyLog
-if debug {
+if debugAsserts {
if s.ct.symbolTT[symbolValue].deltaNbBits+tableSize > threshold {
panic("s.ct.symbolTT[symbolValue].deltaNbBits+tableSize > threshold")
}

View File

@@ -179,13 +179,13 @@ TEXT ·writeBlocks(SB), NOSPLIT, $0-40
MOVQ ·prime2v(SB), R14
// Load slice.
-MOVQ b_base+8(FP), CX
-MOVQ b_len+16(FP), DX
+MOVQ arg1_base+8(FP), CX
+MOVQ arg1_len+16(FP), DX
LEAQ (CX)(DX*1), BX
SUBQ $32, BX
// Load vN from d.
-MOVQ d+0(FP), AX
+MOVQ arg+0(FP), AX
MOVQ 0(AX), R8 // v1
MOVQ 8(AX), R9 // v2
MOVQ 16(AX), R10 // v3
@@ -209,7 +209,7 @@ blockLoop:
MOVQ R11, 24(AX)
// The number of bytes written is CX minus the old base pointer.
-SUBQ b_base+8(FP), CX
+SUBQ arg1_base+8(FP), CX
MOVQ CX, ret+32(FP)
RET

View File

@@ -6,11 +6,20 @@ package zstd
import (
"errors"
"log"
+"math"
"math/bits"
)
// enable debug printing
const debug = false
+// Enable extra assertions.
+const debugAsserts = debug || false
// print sequence details
const debugSequences = false
// print detailed matching information
const debugMatches = false
// force encoder to use predefined tables.
@@ -19,6 +28,9 @@ const forcePreDef = false
// zstdMinMatch is the minimum zstd match length.
const zstdMinMatch = 3
+// Reset the buffer offset when reaching this.
+const bufferReset = math.MaxInt32 - MaxWindowSize
var (
// ErrReservedBlockType is returned when a reserved block type is found.
// Typically this indicates wrong or corrupted input.
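
Taken together, the new zstd.go constants tie the earlier hunks together: debugAsserts gates the panics that used to hang off debug, and bufferReset = math.MaxInt32 - MaxWindowSize (1610612735 with the new 1 << 29 window cap) is the cursor level at which the encoders reset. A hedged, standalone sketch of that pattern follows; the constants mirror the diff, while the addBlock helper below is an invented example, not the package's function.

```go
package main

import (
	"fmt"
	"math"
)

const (
	debug        = false
	debugAsserts = debug || false            // extra assertions, off by default
	maxWindow    = 1 << 29                   // mirrors the new MaxWindowSize
	bufferReset  = math.MaxInt32 - maxWindow // cursor reset threshold
)

// addBlock is a toy stand-in showing how the assertions are used: because
// debugAsserts is a compile-time constant, the branch (and its fmt work) is
// dead code in release builds and costs nothing unless the flag is flipped.
func addBlock(cur int32, n int) int32 {
	if debugAsserts && cur > bufferReset {
		panic(fmt.Sprintf("cur (%d) > bufferReset (%d)", cur, bufferReset))
	}
	return cur + int32(n)
}

func main() {
	fmt.Println(bufferReset)      // 1610612735
	fmt.Println(addBlock(10, 64)) // 74
}
```

Because the gate is a constant rather than a runtime flag, enabling the checks means editing zstd.go and rebuilding, which keeps the hot encode/decode paths free of extra branches in normal use.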