All checks were successful
Test / Create distribution (push) Successful in 1m0s
Test / Sandbox (push) Successful in 2m41s
Test / Hakurei (push) Successful in 4m1s
Test / ShareFS (push) Successful in 4m1s
Test / Hpkg (push) Successful in 4m35s
Test / Sandbox (race detector) (push) Successful in 5m4s
Test / Hakurei (race detector) (push) Successful in 6m0s
Test / Flake checks (push) Successful in 1m46s
This should hopefully provide good separation between the artifact curing backend implementation and the (still work in progress) language. Making the IR parseable also guarantees uniqueness of the representation. Signed-off-by: Ophestra <cat@gensokyo.uk>
227 lines
6.5 KiB
Go
227 lines
6.5 KiB
Go
package pkg_test
|
|
|
|
import (
|
|
"archive/tar"
|
|
"bytes"
|
|
"compress/gzip"
|
|
"crypto/sha512"
|
|
"errors"
|
|
"io/fs"
|
|
"net/http"
|
|
"os"
|
|
"testing"
|
|
"testing/fstest"
|
|
|
|
"hakurei.app/container/check"
|
|
"hakurei.app/container/stub"
|
|
"hakurei.app/internal/pkg"
|
|
)
|
|
|
|
func TestTar(t *testing.T) {
|
|
t.Parallel()
|
|
|
|
checkWithCache(t, []cacheTestCase{
|
|
{"http", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
|
checkTarHTTP(t, base, c, fstest.MapFS{
|
|
".": {Mode: fs.ModeDir | 0700},
|
|
|
|
"checksum": {Mode: fs.ModeDir | 0700},
|
|
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP": {Mode: fs.ModeDir | 0700},
|
|
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/check": {Mode: 0400, Data: []byte{0, 0}},
|
|
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib": {Mode: fs.ModeDir | 0700},
|
|
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/pkgconfig": {Mode: fs.ModeDir | 0700},
|
|
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
|
|
|
|
"identifier": {Mode: fs.ModeDir | 0700},
|
|
"identifier/HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
|
|
"identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
|
|
|
|
"work": {Mode: fs.ModeDir | 0700},
|
|
}, pkg.MustDecode(
|
|
"cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM",
|
|
))
|
|
}, pkg.MustDecode("NQTlc466JmSVLIyWklm_u8_g95jEEb98PxJU-kjwxLpfdjwMWJq0G8ze9R4Vo1Vu")},
|
|
|
|
{"http expand", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
|
checkTarHTTP(t, base, c, fstest.MapFS{
|
|
".": {Mode: fs.ModeDir | 0700},
|
|
|
|
"lib": {Mode: fs.ModeDir | 0700},
|
|
"lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
|
|
}, pkg.MustDecode(
|
|
"CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN",
|
|
))
|
|
}, pkg.MustDecode("hSoSSgCYTNonX3Q8FjvjD1fBl-E-BQyA6OTXro2OadXqbST4tZ-akGXszdeqphRe")},
|
|
})
|
|
}
|
|
|
|
// checkTarHTTP builds a gzipped tar archive from testdataFsys, serves it over
// a stub file:// transport, and cures tar artifacts through cache c rooted at
// base, asserting the expected pathname, checksum, and error for each step.
//
// wantChecksum is the expected checksum of the cured output for the
// successful cases; failing cases expect a zero Checksum and an error.
func checkTarHTTP(
	t *testing.T,
	base *check.Absolute,
	c *pkg.Cache,
	testdataFsys fs.FS,
	wantChecksum pkg.Checksum,
) {
	// testdata holds the gzip-compressed tar serialisation of testdataFsys.
	var testdata string
	{
		var buf bytes.Buffer
		w := tar.NewWriter(&buf)
		if err := w.AddFS(testdataFsys); err != nil {
			t.Fatalf("AddFS: error = %v", err)
		}
		if err := w.Close(); err != nil {
			t.Fatalf("Close: error = %v", err)
		}

		// compress the raw tar stream with gzip
		var zbuf bytes.Buffer
		gw := gzip.NewWriter(&zbuf)
		if _, err := gw.Write(buf.Bytes()); err != nil {
			t.Fatalf("Write: error = %v", err)
		}
		if err := gw.Close(); err != nil {
			t.Fatalf("Close: error = %v", err)
		}
		testdata = zbuf.String()
	}

	// SHA-384 digest of the compressed archive; passed to the artifact as its
	// known checksum and folded into the expected identifier below.
	testdataChecksum := func() pkg.Checksum {
		h := sha512.New384()
		h.Write([]byte(testdata))
		return (pkg.Checksum)(h.Sum(nil))
	}()

	// Serve the archive at file:///testdata without touching the network.
	var transport http.Transport
	client := http.Client{Transport: &transport}
	transport.RegisterProtocol("file", http.NewFileTransportFS(fstest.MapFS{
		"testdata": {Data: []byte(testdata), Mode: 0400},
	}))

	// wantIdent reproduces by hand the IR hashing of a Tar artifact wrapping
	// an HTTPGet dependency: the outer hash covers the Tar kind, its single
	// dependency (by the dependency's own identifier hash h0), and the
	// compression field, terminated by an end marker.
	// NOTE(review): the field order and byte widths below must track the IR
	// serialisation implemented in the pkg package — confirm on change.
	wantIdent := func() pkg.ID {
		h := sha512.New384()

		// kind uint64
		h.Write([]byte{byte(pkg.KindTar), 0, 0, 0, 0, 0, 0, 0})
		// deps_sz uint64
		h.Write([]byte{1, 0, 0, 0, 0, 0, 0, 0})

		// kind uint64
		h.Write([]byte{byte(pkg.KindHTTPGet), 0, 0, 0, 0, 0, 0, 0})
		// ident ID
		h0 := sha512.New384()
		// kind uint64
		h0.Write([]byte{byte(pkg.KindHTTPGet), 0, 0, 0, 0, 0, 0, 0})
		// deps_sz uint64
		h0.Write([]byte{0, 0, 0, 0, 0, 0, 0, 0})
		// url string
		h0.Write([]byte{byte(pkg.IRKindString), 0, 0, 0})
		h0.Write([]byte{0x10, 0, 0, 0})
		h0.Write([]byte("file:///testdata"))
		// end(KnownChecksum)
		h0.Write([]byte{byte(pkg.IRKindEnd), 0, 0, 0})
		h0.Write([]byte{byte(pkg.IREndKnownChecksum), 0, 0, 0})
		// checksum Checksum
		h0.Write(testdataChecksum[:])
		h.Write(h0.Sum(nil))
		// compression uint32
		h.Write([]byte{byte(pkg.IRKindUint32), 0, 0, 0})
		h.Write([]byte{pkg.TarGzip, 0, 0, 0})
		// end
		h.Write([]byte{byte(pkg.IRKindEnd), 0, 0, 0})
		h.Write([]byte{0, 0, 0, 0})

		return pkg.ID(h.Sum(nil))
	}()

	// Tar artifact fetched over the stub transport; exercises the
	// URL-sourced path with a known checksum.
	a := pkg.NewHTTPGetTar(
		&client,
		"file:///testdata",
		testdataChecksum,
		pkg.TarGzip,
	)

	// Happy path: work directory containing exactly one regular file.
	tarDir := stubArtifact{
		kind: pkg.KindExec,
		params: []byte("directory containing a single regular file"),
		cure: func(t *pkg.TContext) error {
			work := t.GetWorkDir()
			if err := os.MkdirAll(work.String(), 0700); err != nil {
				return err
			}
			return os.WriteFile(
				work.Append("sample.tar.gz").String(),
				[]byte(testdata),
				0400,
			)
		},
	}
	// Failure path: an extra entry ("garbage") alongside the archive file.
	tarDirMulti := stubArtifact{
		kind: pkg.KindExec,
		params: []byte("directory containing a multiple entries"),
		cure: func(t *pkg.TContext) error {
			work := t.GetWorkDir()
			if err := os.MkdirAll(work.Append(
				"garbage",
			).String(), 0700); err != nil {
				return err
			}
			return os.WriteFile(
				work.Append("sample.tar.gz").String(),
				[]byte(testdata),
				0400,
			)
		},
	}
	// Failure path: the single entry is a symlink, not a regular file.
	tarDirType := stubArtifact{
		kind: pkg.KindExec,
		params: []byte("directory containing a symbolic link"),
		cure: func(t *pkg.TContext) error {
			work := t.GetWorkDir()
			if err := os.MkdirAll(work.String(), 0700); err != nil {
				return err
			}
			return os.Symlink(
				work.String(),
				work.Append("sample.tar.gz").String(),
			)
		},
	}
	// destroy these to avoid including it in flatten test case
	defer newDestroyArtifactFunc(&tarDir)(t, base, c)
	defer newDestroyArtifactFunc(&tarDirMulti)(t, base, c)
	defer newDestroyArtifactFunc(&tarDirType)(t, base, c)

	cureMany(t, c, []cureStep{
		// fetched archive must land under identifier/<wantIdent>
		{"file", a, base.Append(
			"identifier",
			pkg.Encode(wantIdent),
		), wantChecksum, nil},

		// directory-sourced archive; pathname not asserted
		{"directory", pkg.NewTar(
			&tarDir,
			pkg.TarGzip,
		), ignorePathname, wantChecksum, nil},

		{"multiple entries", pkg.NewTar(
			&tarDirMulti,
			pkg.TarGzip,
		), nil, pkg.Checksum{}, errors.New(
			"input directory does not contain a single regular file",
		)},

		{"bad type", pkg.NewTar(
			&tarDirType,
			pkg.TarGzip,
		), nil, pkg.Checksum{}, errors.New(
			"input directory does not contain a single regular file",
		)},

		// errors from the input artifact's cure surface unchanged
		{"error passthrough", pkg.NewTar(&stubArtifact{
			kind: pkg.KindExec,
			params: []byte("doomed artifact"),
			cure: func(t *pkg.TContext) error {
				return stub.UniqueError(0xcafe)
			},
		}, pkg.TarGzip), nil, pkg.Checksum{}, stub.UniqueError(0xcafe)},
	})
}
|