5 Commits

Author | SHA1 | Message | Date
840d8f68bf internal/rosa/git: disable flaky test
All checks were successful
Test / Create distribution (push) Successful in 1m13s
Test / Sandbox (push) Successful in 3m15s
Test / ShareFS (push) Successful in 4m0s
Test / Hakurei (push) Successful in 4m15s
Test / Sandbox (race detector) (push) Successful in 5m38s
Test / Hakurei (race detector) (push) Successful in 6m41s
Test / Flake checks (push) Successful in 1m26s
Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-05-13 00:38:59 +09:00
4bede7ecdd internal/pkg: discontinue DCE resolution on signal
All checks were successful
Test / Create distribution (push) Successful in 1m4s
Test / Sandbox (push) Successful in 2m47s
Test / ShareFS (push) Successful in 3m45s
Test / Hakurei (push) Successful in 3m53s
Test / Sandbox (race detector) (push) Successful in 5m22s
Test / Hakurei (race detector) (push) Successful in 6m24s
Test / Flake checks (push) Successful in 1m28s
This serves as a stopgap measure to skip long-running DCE resolutions.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-05-13 00:29:01 +09:00
487a03b5a3 internal/pkg: deduplicate DCE by ident
All checks were successful
Test / Create distribution (push) Successful in 1m19s
Test / Sandbox (push) Successful in 3m19s
Test / ShareFS (push) Successful in 4m21s
Test / Hakurei (push) Successful in 4m33s
Test / Sandbox (race detector) (push) Successful in 5m48s
Test / Hakurei (race detector) (push) Successful in 6m52s
Test / Flake checks (push) Successful in 1m23s
This eliminates edge cases where target artifacts do not compare equal.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-05-13 00:18:27 +09:00
8f3c22896a internal/pkg: DCE benchmark unwrap only
All checks were successful
Test / Create distribution (push) Successful in 1m43s
Test / Sandbox (push) Successful in 3m54s
Test / ShareFS (push) Successful in 5m21s
Test / Hakurei (push) Successful in 5m28s
Test / Sandbox (race detector) (push) Successful in 6m50s
Test / Hakurei (race detector) (push) Successful in 7m52s
Test / Flake checks (push) Successful in 1m28s
This eliminates noise at lower depths.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-05-12 19:56:59 +09:00
a167c1aba5 internal/pkg: hold artifact in DCE
All checks were successful
Test / Create distribution (push) Successful in 1m3s
Test / Sandbox (push) Successful in 2m42s
Test / ShareFS (push) Successful in 3m43s
Test / Hakurei (push) Successful in 3m56s
Test / Sandbox (race detector) (push) Successful in 5m20s
Test / Hakurei (race detector) (push) Successful in 6m24s
Test / Flake checks (push) Successful in 1m21s
This is significantly slower but enables much better error reporting.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-05-12 19:45:25 +09:00
4 changed files with 111 additions and 69 deletions

View File

@@ -11,7 +11,6 @@ import (
"path/filepath" "path/filepath"
"slices" "slices"
"testing" "testing"
"unique"
"hakurei.app/check" "hakurei.app/check"
"hakurei.app/container" "hakurei.app/container"
@@ -50,6 +49,13 @@ func TestExec(t *testing.T) {
"check": {Mode: 0400, Data: []byte{0}}, "check": {Mode: 0400, Data: []byte{0}},
} }
wantOfflineEncode := pkg.Encode(wantOffline.hash()) wantOfflineEncode := pkg.Encode(wantOffline.hash())
failingArtifact := &stubArtifact{
kind: pkg.KindTar,
params: []byte("doomed artifact"),
cure: func(t *pkg.TContext) error {
return stub.UniqueError(0xcafe)
},
}
checkWithCache(t, []cacheTestCase{ checkWithCache(t, []cacheTestCase{
{"offline", pkg.CValidateKnown | checkDestroySubstitutes, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) { {"offline", pkg.CValidateKnown | checkDestroySubstitutes, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
@@ -86,18 +92,10 @@ func TestExec(t *testing.T) {
check.MustAbs("/opt/bin/testtool"), check.MustAbs("/opt/bin/testtool"),
[]string{"testtool"}, []string{"testtool"},
pkg.MustPath("/proc/nonexistent", false, &stubArtifact{ pkg.MustPath("/proc/nonexistent", false, failingArtifact),
kind: pkg.KindTar,
params: []byte("doomed artifact"),
cure: func(t *pkg.TContext) error {
return stub.UniqueError(0xcafe)
},
}),
), nil, nil, &pkg.DependencyCureError{ ), nil, nil, &pkg.DependencyCureError{
{ {
Ident: unique.Make(pkg.ID(pkg.MustDecode( A: failingArtifact,
"Sowo6oZRmG6xVtUaxB6bDWZhVsqAJsIJWUp0OPKlE103cY0lodx7dem8J-qQF0Z1",
))),
Err: stub.UniqueError(0xcafe), Err: stub.UniqueError(0xcafe),
}, },
}}, }},

View File

@@ -15,6 +15,7 @@ import (
"io/fs" "io/fs"
"maps" "maps"
"os" "os"
"os/signal"
"path/filepath" "path/filepath"
"runtime" "runtime"
"slices" "slices"
@@ -1485,7 +1486,7 @@ func (c *Cache) Cure(a Artifact) (
// CureError wraps a non-nil error returned attempting to cure an [Artifact]. // CureError wraps a non-nil error returned attempting to cure an [Artifact].
type CureError struct { type CureError struct {
Ident unique.Handle[ID] A Artifact
Err error Err error
} }
@@ -1499,40 +1500,63 @@ func (e *CureError) Error() string { return e.Err.Error() }
type DependencyCureError []*CureError type DependencyCureError []*CureError
// unwrapM recursively expands underlying errors into a caller-supplied map. // unwrapM recursively expands underlying errors into a caller-supplied map.
func (e *DependencyCureError) unwrapM(me map[unique.Handle[ID]]*CureError) { func (e *DependencyCureError) unwrapM(
ctx context.Context,
ir *IRCache,
me map[unique.Handle[ID]]*CureError,
) {
for _, err := range *e { for _, err := range *e {
if _, ok := me[err.Ident]; ok { if ctx.Err() != nil {
break
}
id := ir.Ident(err.A)
if _, ok := me[id]; ok {
continue continue
} }
if _e, ok := err.Err.(*DependencyCureError); ok { if _e, ok := err.Err.(*DependencyCureError); ok {
_e.unwrapM(me) _e.unwrapM(ctx, ir, me)
continue continue
} }
me[err.Ident] = err me[id] = err
} }
} }
// unwrap recursively expands and deduplicates underlying errors. // unwrap recursively expands and deduplicates underlying errors.
func (e *DependencyCureError) unwrap() DependencyCureError { func (e *DependencyCureError) unwrap(
ctx context.Context,
ir *IRCache,
) DependencyCureError {
me := make(map[unique.Handle[ID]]*CureError) me := make(map[unique.Handle[ID]]*CureError)
e.unwrapM(me) e.unwrapM(ctx, ir, me)
errs := slices.AppendSeq( type ent struct {
make(DependencyCureError, 0, len(me)), id unique.Handle[ID]
maps.Values(me), err *CureError
) }
errs := make([]*ent, 0, len(me))
for id, err := range me {
errs = append(errs, &ent{id, err})
}
var identBuf [2]ID var identBuf [2]ID
slices.SortFunc(errs, func(a, b *CureError) int { slices.SortFunc(errs, func(a, b *ent) int {
identBuf[0], identBuf[1] = a.Ident.Value(), b.Ident.Value() identBuf[0], identBuf[1] = a.id.Value(), b.id.Value()
return slices.Compare(identBuf[0][:], identBuf[1][:]) return slices.Compare(identBuf[0][:], identBuf[1][:])
}) })
return errs _errs := make(DependencyCureError, len(errs))
for i, v := range errs {
_errs[i] = v.err
}
return _errs
} }
// Unwrap returns a deduplicated slice of underlying errors. // Unwrap returns a deduplicated slice of underlying errors.
func (e *DependencyCureError) Unwrap() []error { func (e *DependencyCureError) Unwrap() []error {
errs := e.unwrap() ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt)
defer cancel()
errs := e.unwrap(ctx, NewIR())
_errs := make([]error, len(errs)) _errs := make([]error, len(errs))
for i, err := range errs { for i, err := range errs {
_errs[i] = err _errs[i] = err
@@ -1542,14 +1566,23 @@ func (e *DependencyCureError) Unwrap() []error {
// Error returns a user-facing multiline error message. // Error returns a user-facing multiline error message.
func (e *DependencyCureError) Error() string { func (e *DependencyCureError) Error() string {
errs := e.unwrap() ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt)
defer cancel()
ir := NewIR()
errs := e.unwrap(ctx, ir)
if len(errs) == 0 { if len(errs) == 0 {
return "invalid dependency cure outcome" return "invalid dependency cure outcome"
} }
var buf strings.Builder var buf strings.Builder
buf.WriteString("errors curing dependencies:") buf.WriteString("errors curing dependencies:")
for _, err := range errs { for _, err := range errs {
buf.WriteString("\n\t" + Encode(err.Ident.Value()) + ": " + err.Error()) buf.WriteString("\n\t" +
reportName(err.A, ir.Ident(err.A)) + ": " +
err.Error())
}
if ctx.Err() != nil {
buf.WriteString("\nerror resolution cancelled")
} }
return buf.String() return buf.String()
} }
@@ -2082,7 +2115,7 @@ func (pending *pendingArtifactDep) cure(c *Cache) {
} }
pending.errsMu.Lock() pending.errsMu.Lock()
*pending.errs = append(*pending.errs, &CureError{c.Ident(pending.a), err}) *pending.errs = append(*pending.errs, &CureError{pending.a, err})
pending.errsMu.Unlock() pending.errsMu.Unlock()
} }

View File

@@ -651,6 +651,15 @@ func TestCache(t *testing.T) {
"identifier", "identifier",
"cafebabecafebabecafebabecafebabecafebabecafebabecafebabecafebabe", "cafebabecafebabecafebabecafebabecafebabecafebabecafebabecafebabe",
) )
failingFile := newStubFile(
pkg.KindHTTPGet,
pkg.ID{0xff, 3},
nil,
nil, struct {
_ []byte
stub.UniqueError
}{UniqueError: 0xbad},
)
cureMany(t, c, []cureStep{ cureMany(t, c, []cureStep{
{"initial file", newStubFile( {"initial file", newStubFile(
@@ -732,22 +741,14 @@ func TestCache(t *testing.T) {
{"noncomparable error", &stubArtifactF{ {"noncomparable error", &stubArtifactF{
kind: pkg.KindExec, kind: pkg.KindExec,
params: []byte("artifact with dependency returning noncomparable error"), params: []byte("artifact with dependency returning noncomparable error"),
deps: []pkg.Artifact{newStubFile( deps: []pkg.Artifact{failingFile},
pkg.KindHTTPGet,
pkg.ID{0xff, 3},
nil,
nil, struct {
_ []byte
stub.UniqueError
}{UniqueError: 0xbad},
)},
cure: func(f *pkg.FContext) error { cure: func(f *pkg.FContext) error {
panic("attempting to cure impossible artifact") panic("attempting to cure impossible artifact")
}, },
}, nil, nil, &pkg.DependencyCureError{ }, nil, nil, &pkg.DependencyCureError{
{ {
Ident: unique.Make(pkg.ID{0xff, 3}), A: failingFile,
Err: struct { Err: struct {
_ []byte _ []byte
stub.UniqueError stub.UniqueError
@@ -1511,6 +1512,14 @@ errors during scrub:
func TestDependencyCureError(t *testing.T) { func TestDependencyCureError(t *testing.T) {
t.Parallel() t.Parallel()
makeIdent := func(ident ...byte) pkg.Artifact {
var a overrideIdent
copy(a.id[:], ident)
// does not compare equal
a.TrivialArtifact = new(stubArtifact)
return a
}
testCases := []struct { testCases := []struct {
name string name string
err pkg.DependencyCureError err pkg.DependencyCureError
@@ -1518,51 +1527,51 @@ func TestDependencyCureError(t *testing.T) {
unwrap []error unwrap []error
}{ }{
{"simple", pkg.DependencyCureError{ {"simple", pkg.DependencyCureError{
{Ident: unique.Make(pkg.ID{0xff, 9}), Err: stub.UniqueError(0xbad09)}, {A: makeIdent(0xff, 9), Err: stub.UniqueError(0xbad09)},
{Ident: unique.Make(pkg.ID{0xff, 0}), Err: stub.UniqueError(0xbad00)}, {A: makeIdent(0xff, 0), Err: stub.UniqueError(0xbad00)},
{Ident: unique.Make(pkg.ID{0xff, 0xf}), Err: stub.UniqueError(0xbad0f)}, {A: makeIdent(0xff, 0xf), Err: stub.UniqueError(0xbad0f)},
{Ident: unique.Make(pkg.ID{0xff, 1}), Err: stub.UniqueError(0xbad01)}, {A: makeIdent(0xff, 1), Err: stub.UniqueError(0xbad01)},
}, `errors curing dependencies: }, `errors curing dependencies:
_wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765184 injected by the test suite _wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765184 injected by the test suite
_wEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765185 injected by the test suite _wEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765185 injected by the test suite
_wkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765193 injected by the test suite _wkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765193 injected by the test suite
_w8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765199 injected by the test suite`, []error{ _w8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765199 injected by the test suite`, []error{
&pkg.CureError{Ident: unique.Make(pkg.ID{0xff, 0}), Err: stub.UniqueError(0xbad00)}, &pkg.CureError{A: makeIdent(0xff, 0), Err: stub.UniqueError(0xbad00)},
&pkg.CureError{Ident: unique.Make(pkg.ID{0xff, 1}), Err: stub.UniqueError(0xbad01)}, &pkg.CureError{A: makeIdent(0xff, 1), Err: stub.UniqueError(0xbad01)},
&pkg.CureError{Ident: unique.Make(pkg.ID{0xff, 9}), Err: stub.UniqueError(0xbad09)}, &pkg.CureError{A: makeIdent(0xff, 9), Err: stub.UniqueError(0xbad09)},
&pkg.CureError{Ident: unique.Make(pkg.ID{0xff, 0xf}), Err: stub.UniqueError(0xbad0f)}, &pkg.CureError{A: makeIdent(0xff, 0xf), Err: stub.UniqueError(0xbad0f)},
}}, }},
{"dedup", pkg.DependencyCureError{ {"dedup", pkg.DependencyCureError{
{Ident: unique.Make(pkg.ID{0xff, 9}), Err: stub.UniqueError(0xbad09)}, {A: makeIdent(0xff, 9), Err: stub.UniqueError(0xbad09)},
{Ident: unique.Make(pkg.ID{0xff, 0}), Err: stub.UniqueError(0xbad00)}, {A: makeIdent(0xff, 0), Err: stub.UniqueError(0xbad00)},
{Ident: unique.Make(pkg.ID{0xff, 0xfd}), Err: &pkg.DependencyCureError{ {A: makeIdent(0xff, 0xfd), Err: &pkg.DependencyCureError{
{Ident: unique.Make(pkg.ID{0xff, 9}), Err: stub.UniqueError(0xbad09)}, {A: makeIdent(0xff, 9), Err: stub.UniqueError(0xbad09)},
{Ident: unique.Make(pkg.ID{0xff, 0xc}), Err: &pkg.DependencyCureError{ {A: makeIdent(0xff, 0xc), Err: &pkg.DependencyCureError{
{Ident: unique.Make(pkg.ID{0xff, 0xf}), Err: stub.UniqueError(0xbad0f)}, {A: makeIdent(0xff, 0xf), Err: stub.UniqueError(0xbad0f)},
{Ident: unique.Make(pkg.ID{0xff, 0}), Err: stub.UniqueError(0xbad00)}, {A: makeIdent(0xff, 0), Err: stub.UniqueError(0xbad00)},
}}, }},
{Ident: unique.Make(pkg.ID{0xff, 0}), Err: stub.UniqueError(0xbad00)}, {A: makeIdent(0xff, 0), Err: stub.UniqueError(0xbad00)},
{Ident: unique.Make(pkg.ID{0xff, 0}), Err: stub.UniqueError(0xbad00)}, {A: makeIdent(0xff, 0), Err: stub.UniqueError(0xbad00)},
}}, }},
{Ident: unique.Make(pkg.ID{0xff, 0xff}), Err: &pkg.DependencyCureError{ {A: makeIdent(0xff, 0xff), Err: &pkg.DependencyCureError{
{Ident: unique.Make(pkg.ID{0xff, 9}), Err: stub.UniqueError(0xbad09)}, {A: makeIdent(0xff, 9), Err: stub.UniqueError(0xbad09)},
{Ident: unique.Make(pkg.ID{0xff, 0xc}), Err: &pkg.DependencyCureError{ {A: makeIdent(0xff, 0xc), Err: &pkg.DependencyCureError{
{Ident: unique.Make(pkg.ID{0xff, 0}), Err: stub.UniqueError(0xbad00)}, {A: makeIdent(0xff, 0), Err: stub.UniqueError(0xbad00)},
}}, }},
{Ident: unique.Make(pkg.ID{0xff, 0}), Err: stub.UniqueError(0xbad00)}, {A: makeIdent(0xff, 0), Err: stub.UniqueError(0xbad00)},
}}, }},
{Ident: unique.Make(pkg.ID{0xff, 0xf}), Err: stub.UniqueError(0xbad0f)}, {A: makeIdent(0xff, 0xf), Err: stub.UniqueError(0xbad0f)},
{Ident: unique.Make(pkg.ID{0xff, 1}), Err: stub.UniqueError(0xbad01)}, {A: makeIdent(0xff, 1), Err: stub.UniqueError(0xbad01)},
}, `errors curing dependencies: }, `errors curing dependencies:
_wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765184 injected by the test suite _wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765184 injected by the test suite
_wEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765185 injected by the test suite _wEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765185 injected by the test suite
_wkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765193 injected by the test suite _wkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765193 injected by the test suite
_w8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765199 injected by the test suite`, []error{ _w8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA: unique error 765199 injected by the test suite`, []error{
&pkg.CureError{Ident: unique.Make(pkg.ID{0xff, 0}), Err: stub.UniqueError(0xbad00)}, &pkg.CureError{A: makeIdent(0xff, 0), Err: stub.UniqueError(0xbad00)},
&pkg.CureError{Ident: unique.Make(pkg.ID{0xff, 1}), Err: stub.UniqueError(0xbad01)}, &pkg.CureError{A: makeIdent(0xff, 1), Err: stub.UniqueError(0xbad01)},
&pkg.CureError{Ident: unique.Make(pkg.ID{0xff, 9}), Err: stub.UniqueError(0xbad09)}, &pkg.CureError{A: makeIdent(0xff, 9), Err: stub.UniqueError(0xbad09)},
&pkg.CureError{Ident: unique.Make(pkg.ID{0xff, 0xf}), Err: stub.UniqueError(0xbad0f)}, &pkg.CureError{A: makeIdent(0xff, 0xf), Err: stub.UniqueError(0xbad0f)},
}}, }},
} }
for _, tc := range testCases { for _, tc := range testCases {
@@ -1615,9 +1624,10 @@ func BenchmarkEarlyDCE(b *testing.B) {
b.Fatalf("Cure: error = %v", err) b.Fatalf("Cure: error = %v", err)
} }
c.Close() c.Close()
dce := err.(*pkg.DependencyCureError)
for b.Loop() { for b.Loop() {
err.Error() dce.Unwrap()
} }
} }

View File

@@ -61,6 +61,7 @@ disable_test t7703-repack-geometric
disable_test t7002-mv-sparse-checkout disable_test t7002-mv-sparse-checkout
disable_test t1451-fsck-buffer disable_test t1451-fsck-buffer
disable_test t4104-apply-boundary disable_test t4104-apply-boundary
disable_test t4200-rerere
`, `,
Check: []string{ Check: []string{
"-C t", "-C t",