Add glide.yaml and vendor deps

Dalton Hubble 2016-12-03 22:43:32 -08:00
parent db918f12ad
commit 5b3d5e81bd
18880 changed files with 5166045 additions and 1 deletion

vendor/github.com/containers/image/copy/compression.go (61 lines, generated, vendored, new file)

@@ -0,0 +1,61 @@
package copy

import (
	"bytes"
	"compress/bzip2"
	"compress/gzip"
	"errors"
	"io"

	"github.com/Sirupsen/logrus"
)

// decompressorFunc, given a compressed stream, returns the decompressed stream.
type decompressorFunc func(io.Reader) (io.Reader, error)

func gzipDecompressor(r io.Reader) (io.Reader, error) {
	return gzip.NewReader(r)
}

func bzip2Decompressor(r io.Reader) (io.Reader, error) {
	return bzip2.NewReader(r), nil
}

func xzDecompressor(r io.Reader) (io.Reader, error) {
	return nil, errors.New("Decompressing xz streams is not supported")
}

// compressionAlgos is an internal implementation detail of detectCompression
var compressionAlgos = map[string]struct {
	prefix       []byte
	decompressor decompressorFunc
}{
	"gzip":  {[]byte{0x1F, 0x8B, 0x08}, gzipDecompressor},                 // gzip (RFC 1952)
	"bzip2": {[]byte{0x42, 0x5A, 0x68}, bzip2Decompressor},                // bzip2 (decompress.c:BZ2_decompress)
	"xz":    {[]byte{0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00}, xzDecompressor}, // xz (/usr/share/doc/xz/xz-file-format.txt)
}

// detectCompression returns a decompressorFunc if the input is recognized as a compressed format, nil otherwise.
// Because it consumes the start of input, other consumers must use the returned io.Reader instead to also read from the beginning.
func detectCompression(input io.Reader) (decompressorFunc, io.Reader, error) {
	buffer := [8]byte{}

	n, err := io.ReadAtLeast(input, buffer[:], len(buffer))
	if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
		// This is a “real” error. We could just ignore it this time, process the data we have, and hope that the source will report the same error again.
		// Instead, fail immediately with the original error cause instead of a possibly secondary/misleading error returned later.
		return nil, nil, err
	}

	var decompressor decompressorFunc
	for name, algo := range compressionAlgos {
		if bytes.HasPrefix(buffer[:n], algo.prefix) {
			logrus.Debugf("Detected compression format %s", name)
			decompressor = algo.decompressor
			break
		}
	}
	if decompressor == nil {
		logrus.Debugf("No compression detected")
	}

	return decompressor, io.MultiReader(bytes.NewReader(buffer[:n]), input), nil
}
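
The doc comment on detectCompression is easy to skim past: the function consumes the start of its input, so callers must switch to the returned io.Reader for all further reads. A minimal in-package sketch of the intended calling pattern (not part of this commit; readPossiblyCompressed is a hypothetical helper name):

package copy

import (
	"io"
	"io/ioutil"
	"os"
)

// readPossiblyCompressed opens path and returns its contents, transparently
// decompressing recognized gzip or bzip2 content; unrecognized content is
// returned as-is.
func readPossiblyCompressed(path string) ([]byte, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	// detectCompression reads the first bytes of f, so read from stream, not f.
	decompressor, stream, err := detectCompression(f)
	if err != nil {
		return nil, err
	}
	var r io.Reader = stream
	if decompressor != nil { // nil decompressor means no known magic prefix matched
		if r, err = decompressor(stream); err != nil {
			return nil, err
		}
	}
	return ioutil.ReadAll(r)
}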

vendor/github.com/containers/image/copy/compression_test.go (85 lines, generated, vendored, new file)

@@ -0,0 +1,85 @@
package copy

import (
	"bytes"
	"errors"
	"io"
	"io/ioutil"
	"os"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestDetectCompression(t *testing.T) {
	cases := []struct {
		filename      string
		unimplemented bool
	}{
		{"fixtures/Hello.uncompressed", false},
		{"fixtures/Hello.gz", false},
		{"fixtures/Hello.bz2", false},
		{"fixtures/Hello.xz", true},
	}

	// The original stream is preserved.
	for _, c := range cases {
		originalContents, err := ioutil.ReadFile(c.filename)
		require.NoError(t, err, c.filename)

		stream, err := os.Open(c.filename)
		require.NoError(t, err, c.filename)
		defer stream.Close()

		_, updatedStream, err := detectCompression(stream)
		require.NoError(t, err, c.filename)

		updatedContents, err := ioutil.ReadAll(updatedStream)
		require.NoError(t, err, c.filename)
		assert.Equal(t, originalContents, updatedContents, c.filename)
	}

	// The correct decompressor is chosen, and the result is as expected.
	for _, c := range cases {
		stream, err := os.Open(c.filename)
		require.NoError(t, err, c.filename)
		defer stream.Close()

		decompressor, updatedStream, err := detectCompression(stream)
		require.NoError(t, err, c.filename)

		var uncompressedStream io.Reader
		switch {
		case decompressor == nil:
			uncompressedStream = updatedStream
		case c.unimplemented:
			_, err := decompressor(updatedStream)
			assert.Error(t, err)
			continue
		default:
			s, err := decompressor(updatedStream)
			require.NoError(t, err)
			uncompressedStream = s
		}

		uncompressedContents, err := ioutil.ReadAll(uncompressedStream)
		require.NoError(t, err, c.filename)
		assert.Equal(t, []byte("Hello"), uncompressedContents, c.filename)
	}

	// Empty input is handled reasonably.
	decompressor, updatedStream, err := detectCompression(bytes.NewReader([]byte{}))
	require.NoError(t, err)
	assert.Nil(t, decompressor)
	updatedContents, err := ioutil.ReadAll(updatedStream)
	require.NoError(t, err)
	assert.Equal(t, []byte{}, updatedContents)

	// Error reading input
	reader, writer := io.Pipe()
	defer reader.Close()
	writer.CloseWithError(errors.New("Expected error reading input in detectCompression"))
	_, _, err = detectCompression(reader)
	assert.Error(t, err)
}

vendor/github.com/containers/image/copy/copy.go (538 lines, generated, vendored, new file)

File diff suppressed because it is too large.

vendor/github.com/containers/image/copy/copy_test.go (121 lines, generated, vendored, new file)

@@ -0,0 +1,121 @@
package copy

import (
	"bytes"
	"errors"
	"io"
	"os"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestNewDigestingReader(t *testing.T) {
	// Only the failure cases, success is tested in TestDigestingReaderRead below.
	source := bytes.NewReader([]byte("abc"))
	for _, input := range []string{
		"abc",             // Not algo:hexvalue
		"crc32:",          // Unknown algorithm, empty value
		"crc32:012345678", // Unknown algorithm
		"sha256:",         // Empty value
		"sha256:0",        // Invalid hex value
		"sha256:01",       // Invalid length of hex value
	} {
		_, err := newDigestingReader(source, input)
		assert.Error(t, err, input)
	}
}

func TestDigestingReaderRead(t *testing.T) {
	cases := []struct {
		input  []byte
		digest string
	}{
		{[]byte(""), "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"},
		{[]byte("abc"), "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"},
		{make([]byte, 65537, 65537), "sha256:3266304f31be278d06c3bd3eb9aa3e00c59bedec0a890de466568b0b90b0e01f"},
	}

	// Valid input
	for _, c := range cases {
		source := bytes.NewReader(c.input)
		reader, err := newDigestingReader(source, c.digest)
		require.NoError(t, err, c.digest)
		dest := bytes.Buffer{}
		n, err := io.Copy(&dest, reader)
		assert.NoError(t, err, c.digest)
		assert.Equal(t, int64(len(c.input)), n, c.digest)
		assert.Equal(t, c.input, dest.Bytes(), c.digest)
		assert.False(t, reader.validationFailed, c.digest)
	}

	// Modified input
	for _, c := range cases {
		source := bytes.NewReader(bytes.Join([][]byte{c.input, []byte("x")}, nil))
		reader, err := newDigestingReader(source, c.digest)
		require.NoError(t, err, c.digest)
		dest := bytes.Buffer{}
		_, err = io.Copy(&dest, reader)
		assert.Error(t, err, c.digest)
		assert.True(t, reader.validationFailed)
	}
}
func goDiffIDComputationGoroutineWithTimeout(layerStream io.ReadCloser, decompressor decompressorFunc) *diffIDResult {
	ch := make(chan diffIDResult)
	go diffIDComputationGoroutine(ch, layerStream, decompressor)
	timeout := time.After(time.Second)
	select {
	case res := <-ch:
		return &res
	case <-timeout:
		return nil
	}
}
func TestDiffIDComputationGoroutine(t *testing.T) {
	stream, err := os.Open("fixtures/Hello.uncompressed")
	require.NoError(t, err)
	res := goDiffIDComputationGoroutineWithTimeout(stream, nil)
	require.NotNil(t, res)
	assert.NoError(t, res.err)
	assert.Equal(t, "sha256:185f8db32271fe25f561a6fc938b2e264306ec304eda518007d1764826381969", res.digest)

	// Error reading input
	reader, writer := io.Pipe()
	writer.CloseWithError(errors.New("Expected error reading input in diffIDComputationGoroutine"))
	res = goDiffIDComputationGoroutineWithTimeout(reader, nil)
	require.NotNil(t, res)
	assert.Error(t, res.err)
}

func TestComputeDiffID(t *testing.T) {
	for _, c := range []struct {
		filename     string
		decompressor decompressorFunc
		result       string
	}{
		{"fixtures/Hello.uncompressed", nil, "sha256:185f8db32271fe25f561a6fc938b2e264306ec304eda518007d1764826381969"},
		{"fixtures/Hello.gz", nil, "sha256:0bd4409dcd76476a263b8f3221b4ce04eb4686dec40bfdcc2e86a7403de13609"},
		{"fixtures/Hello.gz", gzipDecompressor, "sha256:185f8db32271fe25f561a6fc938b2e264306ec304eda518007d1764826381969"},
	} {
		stream, err := os.Open(c.filename)
		require.NoError(t, err, c.filename)
		defer stream.Close()

		diffID, err := computeDiffID(stream, c.decompressor)
		require.NoError(t, err, c.filename)
		assert.Equal(t, c.result, diffID)
	}

	// Error initializing decompression
	_, err := computeDiffID(bytes.NewReader([]byte{}), gzipDecompressor)
	assert.Error(t, err)

	// Error reading input
	reader, writer := io.Pipe()
	defer reader.Close()
	writer.CloseWithError(errors.New("Expected error reading input in computeDiffID"))
	_, err = computeDiffID(reader, nil)
	assert.Error(t, err)
}
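
The digest strings in these test tables follow the "algo:hexvalue" format that newDigestingReader validates. A quick in-package sketch of producing such a value for an arbitrary payload (not part of this commit; expectedDigestFor is a hypothetical helper name):

package copy

import (
	"crypto/sha256"
	"encoding/hex"
)

// expectedDigestFor returns the "sha256:<hex>" digest string for payload,
// in the format newDigestingReader expects.
func expectedDigestFor(payload []byte) string {
	sum := sha256.Sum256(payload)
	return "sha256:" + hex.EncodeToString(sum[:])
}

For example, expectedDigestFor([]byte("abc")) yields the "sha256:ba7816bf..." entry used in the TestDigestingReaderRead table above.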

vendor/github.com/containers/image/copy/fixtures/Hello.bz2 (binary file not shown)

vendor/github.com/containers/image/copy/fixtures/Hello.gz (binary file not shown)

vendor/github.com/containers/image/copy/fixtures/Hello.uncompressed (1 line, new file)

@@ -0,0 +1 @@
Hello

vendor/github.com/containers/image/copy/fixtures/Hello.xz (binary file not shown)

vendor/github.com/containers/image/copy/fixtures/random_500b.bin (binary file not shown)

vendor/github.com/containers/image/copy/fixtures/random_512k.bin (binary file not shown)
(file name not shown) (54 lines, new file)

@@ -0,0 +1,54 @@
package copy

import (
	"bytes"
	"compress/gzip"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io/ioutil"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// Verify that the GZIP writer results are consistent across golang versions
//
//  * a large payload is used to ensure the compression code utilizes several blocks
//  * a small payload forces flushes in the compression code by using a partial block
//
// Verified against: go 1.7.3, go 1.6.3
func TestGzipWriter(t *testing.T) {
	cases := []struct {
		name   string
		digest string
	}{
		{"fixtures/random_512k.bin", "89ec079aa14317e6a96c53dc22081ec054c47533cea5858cc18644bfb148a363"},
		{"fixtures/random_500b.bin", "0b3a69e34b8ea6d544b1686aaabdcb162a4e3ea597a320a32107b6eb74c125e1"},
	}
	for _, test := range cases {
		payload, err := ioutil.ReadFile(test.name)
		require.NoError(t, err, "failed to read payload from disk")

		var buffer bytes.Buffer
		zipper := gzip.NewWriter(&buffer)
		_, err = zipper.Write(payload)
		require.NoError(t, err, "failed to write payload to gzip.Writer")

		// Flush and finalize all data being zipped
		err = zipper.Close()
		require.NoError(t, err, "failed to close gzip.Writer")

		var actual bytes.Buffer
		_, err = buffer.WriteTo(&actual)
		require.NoError(t, err, "failed to retrieve payload")

		sum := sha256.Sum256(actual.Bytes())
		digest := hex.EncodeToString(sum[:])

		// If the checksum does not match, then something has changed in the gzip library
		assert.Equal(t, test.digest, digest, fmt.Sprintf("Invalid payload sha256: %v", test.name))
	}
}
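
If these digests ever drift on a new Go release, the expected values could be regenerated with a small standalone program mirroring the test's gzip-then-sha256 pipeline (a sketch, not part of this commit; it assumes the fixtures are reachable at the paths below):

package main

import (
	"bytes"
	"compress/gzip"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io/ioutil"
	"log"
)

func main() {
	for _, name := range []string{"fixtures/random_512k.bin", "fixtures/random_500b.bin"} {
		payload, err := ioutil.ReadFile(name)
		if err != nil {
			log.Fatal(err)
		}
		var buf bytes.Buffer
		zw := gzip.NewWriter(&buf)
		if _, err := zw.Write(payload); err != nil {
			log.Fatal(err)
		}
		// Close flushes any buffered blocks and writes the gzip footer.
		if err := zw.Close(); err != nil {
			log.Fatal(err)
		}
		sum := sha256.Sum256(buf.Bytes())
		fmt.Printf("%s: %s\n", name, hex.EncodeToString(sum[:]))
	}
}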