internal/pkg: make checksum available to cure

Dependency checksums are now carried through FContext as unique.Handle[pkg.Checksum] and exposed alongside the pathname via GetArtifact. This enables deduplication by value, as implemented in execArtifact.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-01-19 20:10:51 +09:00
parent 1c49c75f95
commit d933234784
8 changed files with 195 additions and 71 deletions

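To illustrate the "deduplication by value" mentioned above: the standard library unique package (Go 1.23+) interns comparable values, and two unique.Handle values compare equal exactly when the interned values are equal, so a set keyed by unique.Handle[Checksum] collapses duplicate checksums with a single comparison. A minimal standalone sketch, where Checksum is a stand-in for pkg.Checksum rather than the real definition:

package main

import (
	"fmt"
	"unique"
)

// Checksum stands in for pkg.Checksum: a fixed-size, comparable hash value.
type Checksum [48]byte

func main() {
	a := unique.Make(Checksum{0: 1})
	b := unique.Make(Checksum{0: 1}) // same value interned again
	c := unique.Make(Checksum{0: 2})

	fmt.Println(a == b) // true: equal values yield equal handles
	fmt.Println(a == c) // false

	// A set keyed by the handle deduplicates by value.
	seen := map[unique.Handle[Checksum]]struct{}{}
	for _, h := range []unique.Handle[Checksum]{a, b, c} {
		seen[h] = struct{}{}
	}
	fmt.Println(len(seen)) // 2
}
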
View File

@@ -10,6 +10,7 @@ import (
"path/filepath"
"runtime"
"syscall"
"unique"
"hakurei.app/command"
"hakurei.app/container"
@@ -113,7 +114,7 @@ func main() {
_, _, _, stage3 := rosa.Std.NewLLVM()
var (
pathname *check.Absolute
checksum [2]pkg.Checksum
checksum [2]unique.Handle[pkg.Checksum]
)
if pathname, checksum[0], err = cache.Cure(stage2); err != nil {
@@ -127,8 +128,8 @@ func main() {
if checksum[0] != checksum[1] {
err = &pkg.ChecksumMismatchError{
Got: checksum[0],
Want: checksum[1],
Got: checksum[0].Value(),
Want: checksum[1].Value(),
}
}
return

View File

@@ -463,6 +463,38 @@ func TestFlatten(t *testing.T) {
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("SITnQ6PTV12PAQQjIuLUxkvsXQiC9Gq_HJQlcb4BPL5YnRHnx8lsW7PRM9YMLBsx"), nil},
{"sample exec container layer promotion", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9": {Mode: fs.ModeDir | 0500},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/1tQZOGmVk_JkpyiG84AKW_BXmlK_MvHUbh5WtMuthGbHUq7i7nL1bvdF-LoJbqNh": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/O-6VjlIUxc4PYLf5v35uhIeL8kkYCbHYklqlmDjFPXe0m4j6GkUDg5qwTzBRESnf": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9"},
{Mode: 0400, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check", Data: []byte{0}},
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/1tQZOGmVk_JkpyiG84AKW_BXmlK_MvHUbh5WtMuthGbHUq7i7nL1bvdF-LoJbqNh", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/O-6VjlIUxc4PYLf5v35uhIeL8kkYCbHYklqlmDjFPXe0m4j6GkUDg5qwTzBRESnf", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("fuC20BhMKr86TYzNPP2A-9P7mGLvdcOiG10exlhRvZm8ySI7csf0LhW3im_26l1N"), nil},
{"sample file short", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},

View File

@@ -13,6 +13,7 @@ import (
"strconv"
"syscall"
"time"
"unique"
"hakurei.app/container"
"hakurei.app/container/check"
@@ -37,6 +38,31 @@ type ExecPath struct {
W bool
}
// layers returns pathnames collected from A deduplicated by checksum.
func (p *ExecPath) layers(f *FContext) []*check.Absolute {
msg := f.GetMessage()
layers := make([]*check.Absolute, 0, len(p.A))
checksums := make(map[unique.Handle[Checksum]]struct{}, len(p.A))
for i := range p.A {
d := p.A[len(p.A)-1-i]
pathname, checksum := f.GetArtifact(d)
if _, ok := checksums[checksum]; ok {
if msg.IsVerbose() {
msg.Verbosef(
"promoted layer %d as %s",
len(p.A)-1-i, reportName(d, f.cache.Ident(d)),
)
}
continue
}
checksums[checksum] = struct{}{}
layers = append(layers, pathname)
}
slices.Reverse(layers)
return layers
}
// Path returns a populated [ExecPath].
func Path(pathname *check.Absolute, writable bool, a ...Artifact) ExecPath {
return ExecPath{pathname, a, writable}
@@ -299,11 +325,6 @@ func (a *execArtifact) cure(f *FContext, hostNet bool) (err error) {
temp, work := f.GetTempDir(), f.GetWorkDir()
for i, b := range a.paths {
layers := make([]*check.Absolute, len(b.A))
for j, d := range b.A {
layers[j] = f.Pathname(d)
}
if i == overlayWorkIndex {
if err = os.MkdirAll(work.String(), 0700); err != nil {
return
@@ -316,7 +337,7 @@ func (a *execArtifact) cure(f *FContext, hostNet bool) (err error) {
AbsWork,
work,
tempWork,
layers...,
b.layers(f)...,
)
continue
}
@@ -333,11 +354,12 @@ func (a *execArtifact) cure(f *FContext, hostNet bool) (err error) {
if err = os.MkdirAll(tempWork.String(), 0700); err != nil {
return
}
z.Overlay(b.P, tempUpper, tempWork, layers...)
} else if len(layers) == 1 {
z.Bind(layers[0], b.P, 0)
z.Overlay(b.P, tempUpper, tempWork, b.layers(f)...)
} else if len(b.A) == 1 {
pathname, _ := f.GetArtifact(b.A[0])
z.Bind(pathname, b.P, 0)
} else {
z.OverlayReadonly(b.P, layers...)
z.OverlayReadonly(b.P, b.layers(f)...)
}
}
if overlayWorkIndex < 0 {

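The layers method added above performs the deduplication at mount time: it walks p.A in reverse, skips any artifact whose checksum handle has already been seen, and reverses the survivors back into their original order before handing them to Overlay or OverlayReadonly. A rough standalone sketch of that walk, with plain strings standing in for artifact pathnames and checksum handles (the helper name is illustrative, not from the package):

package main

import (
	"fmt"
	"slices"
)

// dedupReverse keeps, for each repeated value, only the occurrence nearest
// the end of the slice, preserving the relative order of the survivors.
func dedupReverse(layers []string) []string {
	seen := make(map[string]struct{}, len(layers))
	out := make([]string, 0, len(layers))
	for i := len(layers) - 1; i >= 0; i-- { // reverse walk, as in layers()
		l := layers[i]
		if _, ok := seen[l]; ok {
			continue // duplicate of a later entry: dropped ("promoted")
		}
		seen[l] = struct{}{}
		out = append(out, l)
	}
	slices.Reverse(out) // restore original order
	return out
}

func main() {
	fmt.Println(dedupReverse([]string{"base", "tools", "base", "app"}))
	// Output: [tools base app]
}

In the committed code the map key is the unique.Handle[Checksum] returned by GetArtifact, so two distinct Artifact values that cured to identical content are still detected as the same layer.
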
View File

@@ -39,7 +39,7 @@ func TestExec(t *testing.T) {
cureMany(t, c, []cureStep{
{"container", pkg.NewExec(
"", nil, 0,
"exec-offline", nil, 0,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1"},
check.MustAbs("/opt/bin/testtool"),
@@ -120,7 +120,7 @@ func TestExec(t *testing.T) {
)
cureMany(t, c, []cureStep{
{"container", pkg.NewExec(
"", &wantChecksum, 0,
"exec-net", &wantChecksum, 0,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1"},
check.MustAbs("/opt/bin/testtool"),
@@ -152,7 +152,7 @@ func TestExec(t *testing.T) {
cureMany(t, c, []cureStep{
{"container", pkg.NewExec(
"", nil, 0,
"exec-overlay-root", nil, 0,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1", "HAKUREI_ROOT=1"},
check.MustAbs("/opt/bin/testtool"),
@@ -178,7 +178,7 @@ func TestExec(t *testing.T) {
cureMany(t, c, []cureStep{
{"container", pkg.NewExec(
"", nil, 0,
"exec-overlay-work", nil, 0,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1", "HAKUREI_ROOT=1"},
check.MustAbs("/work/bin/testtool"),
@@ -209,7 +209,7 @@ func TestExec(t *testing.T) {
cureMany(t, c, []cureStep{
{"container", pkg.NewExec(
"", nil, 0,
"exec-multiple-layers", nil, 0,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1", "HAKUREI_ROOT=1"},
check.MustAbs("/opt/bin/testtool"),
@@ -253,6 +253,38 @@ func TestExec(t *testing.T) {
testtoolDestroy(t, base, c)
}, pkg.MustDecode("SITnQ6PTV12PAQQjIuLUxkvsXQiC9Gq_HJQlcb4BPL5YnRHnx8lsW7PRM9YMLBsx")},
{"overlay layer promotion", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
c.SetStrict(true)
testtool, testtoolDestroy := newTesttool()
cureMany(t, c, []cureStep{
{"container", pkg.NewExec(
"exec-layer-promotion", nil, 0,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1", "HAKUREI_ROOT=1"},
check.MustAbs("/opt/bin/testtool"),
[]string{"testtool", "promote"},
pkg.MustPath("/", true, &stubArtifact{
kind: pkg.KindTar,
params: []byte("another empty directory"),
cure: func(t *pkg.TContext) error {
return os.MkdirAll(t.GetWorkDir().String(), 0700)
},
}, &stubArtifact{
kind: pkg.KindTar,
params: []byte("empty directory"),
cure: func(t *pkg.TContext) error {
return os.MkdirAll(t.GetWorkDir().String(), 0700)
},
}),
pkg.MustPath("/opt", false, testtool),
), ignorePathname, wantChecksumOffline, nil},
})
testtoolDestroy(t, base, c)
}, pkg.MustDecode("fuC20BhMKr86TYzNPP2A-9P7mGLvdcOiG10exlhRvZm8ySI7csf0LhW3im_26l1N")},
})
}

View File

@@ -6,6 +6,7 @@ import (
"reflect"
"testing"
"testing/fstest"
"unique"
"hakurei.app/container/check"
"hakurei.app/internal/pkg"
@@ -16,10 +17,10 @@ func TestHTTPGet(t *testing.T) {
const testdata = "\x7f\xe1\x69\xa2\xdd\x63\x96\x26\x83\x79\x61\x8b\xf0\x3f\xd5\x16\x9a\x39\x3a\xdb\xcf\xb1\xbc\x8d\x33\xff\x75\xee\x62\x56\xa9\xf0\x27\xac\x13\x94\x69"
testdataChecksum := func() pkg.Checksum {
testdataChecksum := func() unique.Handle[pkg.Checksum] {
h := sha512.New384()
h.Write([]byte(testdata))
return (pkg.Checksum)(h.Sum(nil))
return unique.Make(pkg.Checksum(h.Sum(nil)))
}()
var transport http.Transport
@@ -33,7 +34,7 @@ func TestHTTPGet(t *testing.T) {
f := pkg.NewHTTPGet(
&client,
"file:///testdata",
testdataChecksum,
testdataChecksum.Value(),
)
if got, err := f.Cure(t.Context()); err != nil {
t.Fatalf("Cure: error = %v", err)
@@ -48,7 +49,7 @@ func TestHTTPGet(t *testing.T) {
pkg.Checksum{},
)
wantErrMismatch := &pkg.ChecksumMismatchError{
Got: testdataChecksum,
Got: testdataChecksum.Value(),
}
if _, err := f.Cure(t.Context()); !reflect.DeepEqual(err, wantErrMismatch) {
t.Fatalf("Cure: error = %#v, want %#v", err, wantErrMismatch)
@@ -70,7 +71,7 @@ func TestHTTPGet(t *testing.T) {
f := pkg.NewHTTPGet(
&client,
"file:///testdata",
testdataChecksum,
testdataChecksum.Value(),
)
wantPathname := base.Append(
"identifier",
@@ -81,7 +82,7 @@ func TestHTTPGet(t *testing.T) {
} else if !pathname.Is(wantPathname) {
t.Fatalf("Cure: %q, want %q", pathname, wantPathname)
} else if checksum != testdataChecksum {
t.Fatalf("Cure: %x, want %x", checksum, testdataChecksum)
t.Fatalf("Cure: %x, want %x", checksum.Value(), testdataChecksum.Value())
}
if got, err := f.Cure(t.Context()); err != nil {
@@ -94,7 +95,7 @@ func TestHTTPGet(t *testing.T) {
f = pkg.NewHTTPGet(
&client,
"file:///testdata",
testdataChecksum,
testdataChecksum.Value(),
)
if got, err := f.Cure(t.Context()); err != nil {
t.Fatalf("Cure: error = %v", err)

View File

@@ -187,7 +187,7 @@ type FContext struct {
TContext
// Cured top-level dependencies looked up by Pathname.
deps map[Artifact]*check.Absolute
deps map[Artifact]cureRes
}
// InvalidLookupError is the identifier of non-dependency [Artifact] looked up
@@ -200,15 +200,17 @@ func (e InvalidLookupError) Error() string {
var _ error = InvalidLookupError{}
// Pathname returns the identifier pathname of an [Artifact]. Calling Pathname
// with an [Artifact] not part of the slice returned by [Artifact.Dependencies]
// panics.
func (f *FContext) Pathname(a Artifact) *check.Absolute {
if p, ok := f.deps[a]; ok {
return p
} else {
panic(InvalidLookupError(f.cache.Ident(a).Value()))
// GetArtifact returns the identifier pathname and checksum of an [Artifact].
// Calling GetArtifact with an [Artifact] not part of the slice returned by
// [Artifact.Dependencies] panics.
func (f *FContext) GetArtifact(a Artifact) (
pathname *check.Absolute,
checksum unique.Handle[Checksum],
) {
if res, ok := f.deps[a]; ok {
return res.pathname, res.checksum
}
panic(InvalidLookupError(f.cache.Ident(a).Value()))
}
// An Artifact is a read-only reference to a piece of data that may be created
@@ -378,6 +380,12 @@ const (
checksumLinknamePrefix = "../" + dirChecksum + "/"
)
// cureRes are the non-error results returned by [Cache.Cure].
type cureRes struct {
pathname *check.Absolute
checksum unique.Handle[Checksum]
}
// A pendingArtifactDep is a dependency [Artifact] pending concurrent curing,
// subject to the cures limit. Values pointed to by result addresses are safe
// to access after the [sync.WaitGroup] associated with this pendingArtifactDep
@@ -389,7 +397,7 @@ type pendingArtifactDep struct {
// Address of result pathname populated during [Cache.Cure] and dereferenced
// if curing succeeds.
resP **check.Absolute
resP *cureRes
// Address of result error slice populated during [Cache.Cure], dereferenced
// after acquiring errsMu if curing fails. No additional action is taken,
@@ -437,7 +445,7 @@ type Cache struct {
checksumMu sync.RWMutex
// Identifier to content pair cache.
ident map[unique.Handle[ID]]Checksum
ident map[unique.Handle[ID]]unique.Handle[Checksum]
// Identifier to error pair for unrecoverably faulted [Artifact].
identErr map[unique.Handle[ID]]error
// Pending identifiers, accessed through Cure for entries not in ident.
@@ -650,7 +658,7 @@ func (c *Cache) Scrub(checks int) error {
c.checksumMu.Lock()
defer c.checksumMu.Unlock()
c.ident = make(map[unique.Handle[ID]]Checksum)
c.ident = make(map[unique.Handle[ID]]unique.Handle[Checksum])
c.identErr = make(map[unique.Handle[ID]]error)
c.artifact.Clear()
@@ -848,7 +856,7 @@ func (c *Cache) Scrub(checks int) error {
// identifier is stored in identPending and a non-nil channel is returned.
func (c *Cache) loadOrStoreIdent(id unique.Handle[ID]) (
done chan<- struct{},
checksum Checksum,
checksum unique.Handle[Checksum],
err error,
) {
var ok bool
@@ -887,14 +895,14 @@ func (c *Cache) loadOrStoreIdent(id unique.Handle[ID]) (
func (c *Cache) finaliseIdent(
done chan<- struct{},
id unique.Handle[ID],
checksum *Checksum,
checksum unique.Handle[Checksum],
err error,
) {
c.identMu.Lock()
if err != nil {
c.identErr[id] = err
} else {
c.ident[id] = *checksum
c.ident[id] = checksum
}
delete(c.identPending, id)
c.identMu.Unlock()
@@ -1091,7 +1099,7 @@ func (e DependencyError) Error() string {
// calls to Cure are not subject to the cures limit.
func (c *Cache) Cure(a Artifact) (
pathname *check.Absolute,
checksum Checksum,
checksum unique.Handle[Checksum],
err error,
) {
if c.threshold > 0 {
@@ -1181,7 +1189,7 @@ func (e *DependencyCureError) Error() string {
// cure implements Cure without checking the full dependency graph.
func (c *Cache) cure(a Artifact) (
pathname *check.Absolute,
checksum Checksum,
checksum unique.Handle[Checksum],
err error,
) {
id := c.Ident(a)
@@ -1193,7 +1201,7 @@ func (c *Cache) cure(a Artifact) (
defer func() {
if err != nil {
pathname = nil
checksum = Checksum{}
checksum = unique.Handle[Checksum]{}
}
}()
@@ -1202,7 +1210,7 @@ func (c *Cache) cure(a Artifact) (
if done == nil {
return
} else {
defer func() { c.finaliseIdent(done, id, &checksum, err) }()
defer func() { c.finaliseIdent(done, id, checksum, err) }()
}
_, err = os.Lstat(pathname.String())
@@ -1211,7 +1219,12 @@ func (c *Cache) cure(a Artifact) (
if name, err = os.Readlink(pathname.String()); err != nil {
return
}
err = Decode(&checksum, path.Base(name))
buf := c.getIdentBuf()
err = Decode((*Checksum)(buf[:]), path.Base(name))
if err == nil {
checksum = unique.Make(Checksum(buf[:]))
}
c.putIdentBuf(buf)
return
}
if !errors.Is(err, os.ErrNotExist) {
@@ -1234,8 +1247,8 @@ func (c *Cache) cure(a Artifact) (
var checksumPathname *check.Absolute
var checksumFi os.FileInfo
if kc, ok := a.(KnownChecksum); ok {
checksum = kc.Checksum()
checksums = Encode(checksum)
checksum = unique.Make(kc.Checksum())
checksums = Encode(checksum.Value())
checksumPathname = c.base.Append(
dirChecksum,
checksums,
@@ -1288,8 +1301,11 @@ func (c *Cache) cure(a Artifact) (
if checksumPathname == nil {
h := sha512.New384()
h.Write(data)
h.Sum(checksum[:0])
checksums = Encode(checksum)
buf := c.getIdentBuf()
h.Sum(buf[:0])
checksum = unique.Make(Checksum(buf[:]))
checksums = Encode(Checksum(buf[:]))
c.putIdentBuf(buf)
checksumPathname = c.base.Append(
dirChecksum,
checksums,
@@ -1297,10 +1313,10 @@ func (c *Cache) cure(a Artifact) (
} else if c.IsStrict() {
h := sha512.New384()
h.Write(data)
if got := Checksum(h.Sum(nil)); got != checksum {
if got := Checksum(h.Sum(nil)); got != checksum.Value() {
err = &ChecksumMismatchError{
Got: got,
Want: checksum,
Want: checksum.Value(),
}
return
}
@@ -1355,11 +1371,11 @@ func (c *Cache) cure(a Artifact) (
case FloodArtifact:
deps := a.Dependencies()
f := FContext{t, make(map[Artifact]*check.Absolute, len(deps))}
f := FContext{t, make(map[Artifact]cureRes, len(deps))}
var wg sync.WaitGroup
wg.Add(len(deps))
res := make([]*check.Absolute, len(deps))
res := make([]cureRes, len(deps))
errs := make(DependencyCureError, 0, len(deps))
var errsMu sync.Mutex
for i, d := range deps {
@@ -1422,21 +1438,19 @@ func (c *Cache) cure(a Artifact) (
}
if checksumPathname == nil {
checksum = gotChecksum
checksums = Encode(checksum)
checksum = unique.Make(gotChecksum)
checksums = Encode(gotChecksum)
checksumPathname = c.base.Append(
dirChecksum,
checksums,
)
} else {
if gotChecksum != checksum {
} else if gotChecksum != checksum.Value() {
err = &ChecksumMismatchError{
Got: gotChecksum,
Want: checksum,
Want: checksum.Value(),
}
return
}
}
if err = os.Chmod(t.work.String(), 0700); err != nil {
return
@@ -1470,9 +1484,9 @@ func (c *Cache) cure(a Artifact) (
func (pending *pendingArtifactDep) cure(c *Cache) {
defer pending.Done()
pathname, _, err := c.cure(pending.a)
var err error
pending.resP.pathname, pending.resP.checksum, err = c.cure(pending.a)
if err == nil {
*pending.resP = pathname
return
}
@@ -1536,7 +1550,7 @@ func open(
msg: msg,
base: base,
ident: make(map[unique.Handle[ID]]Checksum),
ident: make(map[unique.Handle[ID]]unique.Handle[Checksum]),
identErr: make(map[unique.Handle[ID]]error),
identPending: make(map[unique.Handle[ID]]<-chan struct{}),
}

View File

@@ -159,7 +159,7 @@ func destroyArtifact(
} else {
p := base.Append(
"checksum",
pkg.Encode(checksum),
pkg.Encode(checksum.Value()),
)
if err = filepath.WalkDir(p.String(), func(
path string,
@@ -362,20 +362,34 @@ var ignorePathname = check.MustAbs("/\x00")
func cureMany(t *testing.T, c *pkg.Cache, steps []cureStep) {
t.Helper()
makeChecksumH := func(checksum pkg.Checksum) unique.Handle[pkg.Checksum] {
if checksum == (pkg.Checksum{}) {
return unique.Handle[pkg.Checksum]{}
}
return unique.Make(checksum)
}
for _, step := range steps {
t.Log("cure step:", step.name)
if pathname, checksum, err := c.Cure(step.a); !reflect.DeepEqual(err, step.err) {
t.Fatalf("Cure: error = %v, want %v", err, step.err)
} else if step.pathname != ignorePathname && !pathname.Is(step.pathname) {
t.Fatalf("Cure: pathname = %q, want %q", pathname, step.pathname)
} else if checksum != step.checksum {
t.Fatalf("Cure: checksum = %s, want %s", pkg.Encode(checksum), pkg.Encode(step.checksum))
} else if checksum != makeChecksumH(step.checksum) {
t.Fatalf(
"Cure: checksum = %s, want %s",
pkg.Encode(checksum.Value()), pkg.Encode(step.checksum),
)
} else {
v := any(err)
if err == nil {
v = pathname
}
t.Log(pkg.Encode(checksum)+":", v)
var checksumVal pkg.Checksum
if checksum != (unique.Handle[pkg.Checksum]{}) {
checksumVal = checksum.Value()
}
t.Log(pkg.Encode(checksumVal)+":", v)
}
}
}

View File

@@ -21,7 +21,7 @@ func main() {
log.SetFlags(0)
log.SetPrefix("testtool: ")
var hostNet, layers bool
var hostNet, layers, promote bool
if len(os.Args) == 2 && os.Args[0] == "testtool" {
switch os.Args[1] {
case "net":
@@ -34,6 +34,10 @@ func main() {
log.SetPrefix("testtool(layers): ")
break
case "promote":
promote = true
log.SetPrefix("testtool(promote): ")
default:
log.Fatalf("Args: %q", os.Args)
return
@@ -46,7 +50,7 @@ func main() {
wantEnv := []string{"HAKUREI_TEST=1"}
if len(os.Environ()) == 2 {
overlayRoot = true
if !layers {
if !layers && !promote {
log.SetPrefix("testtool(overlay root): ")
}
wantEnv = []string{"HAKUREI_TEST=1", "HAKUREI_ROOT=1"}
@@ -208,6 +212,10 @@ func main() {
}
}
if promote {
ident = "O-6VjlIUxc4PYLf5v35uhIeL8kkYCbHYklqlmDjFPXe0m4j6GkUDg5qwTzBRESnf"
}
next() // testtool artifact
next()