35 Commits

Author SHA1 Message Date
c4f64f7606 dist: include target in filename
The backport patch will be removed in the next release.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-08 16:03:41 +09:00
a9e2a5e59f internal/rosa/go: 1.25.6 to 1.25.7
Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-08 15:42:58 +09:00
9fb0722cdf internal/rosa/go: alternative bootstrap path
For targets where the bootstrap toolchain is not available.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-08 15:04:03 +09:00
2f3e323c46 internal/rosa/gnu: gcc toolchain artifact
This toolchain is hacked to pieces. It works well enough to bootstrap Go, though.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-08 01:00:15 +09:00
1fc9c3200f internal/rosa: libucontext artifact
Required by GCC on musl.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-07 22:33:12 +09:00
096a25ad3a cmd/mbf: dump IR of artifact presets
This exposes IR outside test cases, which is useful for verifying the correctness of alternative IR emitters.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-07 17:21:43 +09:00
ffd2f979fb internal/pkg: skip duplicate early
This significantly improves IR generation performance.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-07 17:11:41 +09:00
31a8cc9b5c internal/rosa/gnu: binutils artifact
Appears to be required by GCC? It complains when only the tools installed by LLVM are available.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-07 14:02:23 +09:00
bb3f60fc74 internal/rosa/gnu: gmp, mpfr, mpc artifacts
Required by GCC.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-07 13:55:40 +09:00
697c91e04d internal/rosa/cmake: expose earlier build script
This allows for more flexible build setups.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-07 13:23:13 +09:00
3f7b8b4332 internal/rosa/git: git clone helper
For obtaining sources of projects that stubbornly refuse to provide release tarballs.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-06 21:10:59 +09:00
fa94155f42 internal/rosa/etc: resolv.conf
Required by programs that download from the internet inside a measured execArtifact.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-06 21:04:59 +09:00
233bd163fb internal/rosa/git: disable flaky test
This fails intermittently.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-06 20:45:52 +09:00
f9b69c94bc internal/rosa/ssl: prefix CA paths
This makes the prefixes consistent with everything else, since these files will end up in the final Rosa OS image.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-06 20:41:58 +09:00
68aefa6d59 internal/rosa/openssl: fix paths
Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-06 19:53:51 +09:00
159fd55dbb internal/rosa/ssl: fix dependencies
These used to be provided by busybox.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 19:12:48 +09:00
ce6b3ff53b internal/rosa: unzip artifact
Because the zip format is too awful and cannot be streamed anyway, supporting it natively comes with no benefit.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 19:10:32 +09:00
30afa0e2ab internal/rosa/git: compile with http support
This should be able to fetch repositories deterministically.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 18:51:02 +09:00
9b751de078 internal/rosa/gnu: fix test suite flags
This sets the correct flag and also avoids changing ident per system.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 18:29:47 +09:00
d77ad3bb6e internal/rosa: curl artifact
Required for http support in git.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 18:15:16 +09:00
0142fc90b0 internal/rosa/make: post-configure script
Required for some projects with broken build scripts.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 18:13:48 +09:00
3c9f7cfcd0 internal/rosa: libpsl artifact
Required by curl.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 18:06:33 +09:00
a3526b3ceb internal/rosa: openssl artifact
Optional for many programs.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 18:03:18 +09:00
6ad21e2288 internal/rosa: register custom artifacts
This also encodes extra information for iana-etc.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 17:50:48 +09:00
27e2e3f996 internal/rosa/llvm: drop git dependency
This was added quite early and has no effect. Remove it to avoid compiling git early.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 17:44:58 +09:00
e0c720681b internal/pkg: standardise artifact IR
This should provide good separation between the artifact curing backend implementation and the (still work-in-progress) language. Making the IR parseable also guarantees uniqueness of the representation.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 08:24:09 +09:00
f982b13a59 internal/pkg: improve error resolution
This was taking way too long for early failures.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-03 10:01:44 +09:00
443911ada1 internal/rosa: use stage3 mirror
These get taken down periodically, which causes way too many rebuilds. Use a mirror until a more elegant solution is available.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-02 23:43:34 +09:00
d7a3706db3 internal/rosa/x: regenerate build system
These come with 16-year-old scripts that do not understand aarch64 or really anything else relevant to Rosa OS.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-02 19:57:39 +09:00
3226dc44dc internal/rosa/gnu: libtool artifact
Required when generating autotools build systems.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-02 19:52:08 +09:00
9f98d12ad8 internal/rosa/gnu: automake artifact
This is very expensive. Avoid.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-02 18:49:18 +09:00
550e83dda9 internal/rosa/gnu: grep artifact
Some GNU software does not like the grep in toybox.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-02 18:38:01 +09:00
7877b4e627 cmd/mbf: print extra stage3 information
This includes the ident of all three stages and the matching checksum if the check passes.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-02 18:33:16 +09:00
47ce6f5bd0 internal/rosa/llvm: conditionally add Rosa OS paths
This change also moves rpath flags to a more appropriate method.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-02 07:29:35 +09:00
48f4ccba33 internal/rosa/llvm: add rosa vendor
This cleans up checks specific to Rosa OS and fixes a stack overflow in llvm under certain conditions.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-01-31 22:47:13 +09:00
35 changed files with 2159 additions and 356 deletions


@@ -113,6 +113,7 @@ func main() {
"stage3",
"Check for toolchain 3-stage non-determinism",
func(args []string) (err error) {
_, _, _, stage1 := (rosa.Std - 2).NewLLVM()
_, _, _, stage2 := (rosa.Std - 1).NewLLVM()
_, _, _, stage3 := rosa.Std.NewLLVM()
var (
@@ -120,6 +121,11 @@ func main() {
checksum [2]unique.Handle[pkg.Checksum]
)
if pathname, _, err = cache.Cure(stage1); err != nil {
return err
}
log.Println("stage1:", pathname)
if pathname, checksum[0], err = cache.Cure(stage2); err != nil {
return err
}
@@ -134,29 +140,60 @@ func main() {
Got: checksum[0].Value(),
Want: checksum[1].Value(),
}
} else {
log.Println(
"stage2 is identical to stage3",
"("+pkg.Encode(checksum[0].Value())+")",
)
}
return
},
)
c.NewCommand(
"cure",
"Cure the named artifact and show its path",
func(args []string) error {
if len(args) != 1 {
return errors.New("cure requires 1 argument")
}
if p, ok := rosa.ResolveName(args[0]); !ok {
return fmt.Errorf("unsupported artifact %q", args[0])
} else {
pathname, _, err := cache.Cure(rosa.Std.Load(p))
if err == nil {
log.Println(pathname)
{
var (
flagDump string
)
c.NewCommand(
"cure",
"Cure the named artifact and show its path",
func(args []string) error {
if len(args) != 1 {
return errors.New("cure requires 1 argument")
}
return err
}
},
)
if p, ok := rosa.ResolveName(args[0]); !ok {
return fmt.Errorf("unsupported artifact %q", args[0])
} else if flagDump == "" {
pathname, _, err := cache.Cure(rosa.Std.Load(p))
if err == nil {
log.Println(pathname)
}
return err
} else {
f, err := os.OpenFile(
flagDump,
os.O_WRONLY|os.O_CREATE|os.O_EXCL,
0644,
)
if err != nil {
return err
}
if err = cache.EncodeAll(f, rosa.Std.Load(p)); err != nil {
_ = f.Close()
return err
}
return f.Close()
}
},
).
Flag(
&flagDump,
"dump", command.StringFlag(""),
"Write IR to specified pathname and terminate",
)
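// Illustrative usage, not part of this change: assuming the command package
// accepts flags before the artifact name, something like
//
//	mbf cure -dump llvm.ir llvm
//
// would write the IR of the hypothetical "llvm" preset to llvm.ir and
// terminate without curing it, while a plain "mbf cure llvm" keeps the
// original behaviour of curing the artifact and printing its path.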
}
c.MustParse(os.Args[1:], func(err error) {
if cache != nil {

dist/release.sh vendored

@@ -1,7 +1,7 @@
#!/bin/sh -e
cd "$(dirname -- "$0")/.."
VERSION="${HAKUREI_VERSION:-untagged}"
pname="hakurei-${VERSION}"
pname="hakurei-${VERSION}-$(go env GOARCH)"
out="${DESTDIR:-dist}/${pname}"
echo '# Preparing distribution files.'


@@ -28,14 +28,6 @@ type FlatEntry struct {
| data []byte |
*/
// wordSize is the boundary which binary segments are always aligned to.
const wordSize = 8
// alignSize returns the padded size for aligning sz.
func alignSize(sz int) int {
return sz + (wordSize-(sz)%wordSize)%wordSize
}
// Encode encodes the entry for transmission or hashing.
func (ent *FlatEntry) Encode(w io.Writer) (n int, err error) {
pPathSize := alignSize(len(ent.Path))


@@ -76,7 +76,7 @@ func TestFlatten(t *testing.T) {
"checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU": {Mode: 0400, Data: []byte("\x7f\xe1\x69\xa2\xdd\x63\x96\x26\x83\x79\x61\x8b\xf0\x3f\xd5\x16\x9a\x39\x3a\xdb\xcf\xb1\xbc\x8d\x33\xff\x75\xee\x62\x56\xa9\xf0\x27\xac\x13\x94\x69")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/00BNNr-PsNMtowTpEG86ZeI7eQKoD-pjSCPAal1e5MYqr_N7FLpyXKdXLXE8WEBF": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
"identifier/oM-2pUlk-mOxK1t3aMWZer69UdOQlAXiAgMrpZ1476VoOqpYVP1aGFS9_HYy-D8_": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
@@ -86,10 +86,10 @@ func TestFlatten(t *testing.T) {
{Mode: 0400, Path: "checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU", Data: []byte("\x7f\xe1\x69\xa2\xdd\x63\x96\x26\x83\x79\x61\x8b\xf0\x3f\xd5\x16\x9a\x39\x3a\xdb\xcf\xb1\xbc\x8d\x33\xff\x75\xee\x62\x56\xa9\xf0\x27\xac\x13\x94\x69")},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/00BNNr-PsNMtowTpEG86ZeI7eQKoD-pjSCPAal1e5MYqr_N7FLpyXKdXLXE8WEBF", Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/oM-2pUlk-mOxK1t3aMWZer69UdOQlAXiAgMrpZ1476VoOqpYVP1aGFS9_HYy-D8_", Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("KkdL8x2a84V8iYZop5jSTyba54xSgf_NZ1R0c4nSp9xTdk3SK_zUKGhNJ2uK8wMY"), nil},
}, pkg.MustDecode("L_0RFHpr9JUS4Zp14rz2dESSRvfLzpvqsLhR1-YjQt8hYlmEdVl7vI3_-v8UNPKs"), nil},
{"sample directory step simple", fstest.MapFS{
".": {Mode: fs.ModeDir | 0500},
@@ -208,8 +208,8 @@ func TestFlatten(t *testing.T) {
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/work": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/5-NfJKBlUgVhAP_JeCjjo1UxF72x8QVMgeKPWd8s0J-RYvRJy6veTQDwmgxOvr6v": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
"identifier/VWakDFDSjNMvdzwxG0Y1IKFdCzExgiQnjg-vv2srsZObwh-5WOJx7H5HtCgDXHcq": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
"identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
"identifier/rg7F1D5hwv6o4xctjD5zDq4i5MD0mArTsUIWfhUbik8xC6Bsyt3mjXXOm3goojTz": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
@@ -230,12 +230,12 @@ func TestFlatten(t *testing.T) {
{Mode: fs.ModeDir | 0500, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/work"},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/5-NfJKBlUgVhAP_JeCjjo1UxF72x8QVMgeKPWd8s0J-RYvRJy6veTQDwmgxOvr6v", Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/VWakDFDSjNMvdzwxG0Y1IKFdCzExgiQnjg-vv2srsZObwh-5WOJx7H5HtCgDXHcq", Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw", Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/rg7F1D5hwv6o4xctjD5zDq4i5MD0mArTsUIWfhUbik8xC6Bsyt3mjXXOm3goojTz", Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("nnOiyjjjvgZChsGtO4rA1JHckwYBBbxwNfecPJp62OFP6aoYUxHQ5UtYsrDpnwan"), nil},
}, pkg.MustDecode("NQTlc466JmSVLIyWklm_u8_g95jEEb98PxJU-kjwxLpfdjwMWJq0G8ze9R4Vo1Vu"), nil},
{"sample tar expand step unpack", fstest.MapFS{
".": {Mode: fs.ModeDir | 0500},
@@ -255,8 +255,8 @@ func TestFlatten(t *testing.T) {
"checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/VWakDFDSjNMvdzwxG0Y1IKFdCzExgiQnjg-vv2srsZObwh-5WOJx7H5HtCgDXHcq": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
"identifier/v3z1m-ofUqJz4_rasXRlTw5NgKk63RLvd5JKBpDeiNaYiUKSGN5KJbJGJHMt7cTf": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
"identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
"identifier/_v1blm2h-_KA-dVaawdpLas6MjHc6rbhhFS8JWwx8iJxZGUu8EBbRrhr5AaZ9PJL": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
@@ -268,12 +268,12 @@ func TestFlatten(t *testing.T) {
{Mode: fs.ModeSymlink | 0777, Path: "checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN/libedac.so", Data: []byte("/proc/nonexistent/libedac.so")},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/VWakDFDSjNMvdzwxG0Y1IKFdCzExgiQnjg-vv2srsZObwh-5WOJx7H5HtCgDXHcq", Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/v3z1m-ofUqJz4_rasXRlTw5NgKk63RLvd5JKBpDeiNaYiUKSGN5KJbJGJHMt7cTf", Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw", Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_v1blm2h-_KA-dVaawdpLas6MjHc6rbhhFS8JWwx8iJxZGUu8EBbRrhr5AaZ9PJL", Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("bQVH19N7dX50SdQ6JNVYbFdDZV4t8IaM4dhxGvjACpdoEgJ2jZJfYKLH4ya7ZD_s"), nil},
}, pkg.MustDecode("hSoSSgCYTNonX3Q8FjvjD1fBl-E-BQyA6OTXro2OadXqbST4tZ-akGXszdeqphRe"), nil},
{"testtool", fstest.MapFS{
".": {Mode: fs.ModeDir | 0500},
@@ -295,9 +295,9 @@ func TestFlatten(t *testing.T) {
"checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb": {Mode: 0400, Data: []byte{}},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/LRxdkRYNKnZT6bKiu5W8ATeAAmq3n_5AAJkF6G0EpAOEloiZvADJBkfixgtgF1Z9": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
"identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/dztPS6jRjiZtCF4_p8AzfnxGp6obkhrgFVsxdodbKWUoAEVtDz3MykepJB4kI_ks": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
@@ -311,13 +311,13 @@ func TestFlatten(t *testing.T) {
{Mode: 0400, Path: "checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb", Data: []byte{}},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/LRxdkRYNKnZT6bKiu5W8ATeAAmq3n_5AAJkF6G0EpAOEloiZvADJBkfixgtgF1Z9", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/dztPS6jRjiZtCF4_p8AzfnxGp6obkhrgFVsxdodbKWUoAEVtDz3MykepJB4kI_ks", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("BQb5SCAo0Rw0YBGqjGemK1jH3jk0cgxAQ-JyeqVRqaqmEevJ3jtXNL8HB470XNnB"), nil},
}, pkg.MustDecode("Q5DluWQCAeohLoiGRImurwFp3vdz9IfQCoj7Fuhh73s4KQPRHpEQEnHTdNHmB8Fx"), nil},
{"testtool net", fstest.MapFS{
".": {Mode: fs.ModeDir | 0500},
@@ -339,9 +339,9 @@ func TestFlatten(t *testing.T) {
"checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W/check": {Mode: 0400, Data: []byte("net")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/G8qPxD9puvvoOVV7lrT80eyDeIl3G_CCFoKw12c8mCjMdG1zF7NEPkwYpNubClK3": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
"identifier/TAspufRsG2I_TsxUUj2b7bUnCHgcVSdh6aOZpzL0W5Bjn4EZmOGzjofaOWd8J11H": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
"identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
@@ -355,13 +355,13 @@ func TestFlatten(t *testing.T) {
{Mode: 0400, Path: "checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W/check", Data: []byte("net")},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/TAspufRsG2I_TsxUUj2b7bUnCHgcVSdh6aOZpzL0W5Bjn4EZmOGzjofaOWd8J11H", Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/G8qPxD9puvvoOVV7lrT80eyDeIl3G_CCFoKw12c8mCjMdG1zF7NEPkwYpNubClK3", Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("hJ7nCMLea_09Z7Fi4ALXOgubMNwK7C61THdQobpQJhH3tnr7PJ86aY98Mte3rBje"), nil},
}, pkg.MustDecode("bPYvvqxpfV7xcC1EptqyKNK1klLJgYHMDkzBcoOyK6j_Aj5hb0mXNPwTwPSK5F6Z"), nil},
{"sample exec container overlay root", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
@@ -372,8 +372,8 @@ func TestFlatten(t *testing.T) {
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/UB9HPeMgMPJf3Ut4jLWwnCtu_P3Lr29i8Erf084bHe8jjzBMKPDNxQ3RMrirkH6H": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/RdMA-mubnrHuu3Ky1wWyxauSYCO0ZH_zCPUj3uDHqkfwv5sGcByoF_g5PjlGiClb": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
@@ -386,12 +386,12 @@ func TestFlatten(t *testing.T) {
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/UB9HPeMgMPJf3Ut4jLWwnCtu_P3Lr29i8Erf084bHe8jjzBMKPDNxQ3RMrirkH6H", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/RdMA-mubnrHuu3Ky1wWyxauSYCO0ZH_zCPUj3uDHqkfwv5sGcByoF_g5PjlGiClb", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("PAZyVTxxROg4eCQX3yKuiIlB1k9VFXmtvvyy7QxoqaFhYwGZpT4wYec4R2dTtfyh"), nil},
}, pkg.MustDecode("PO2DSSCa4yoSgEYRcCSZfQfwow1yRigL3Ry-hI0RDI4aGuFBha-EfXeSJnG_5_Rl"), nil},
{"sample exec container overlay work", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
@@ -402,8 +402,8 @@ func TestFlatten(t *testing.T) {
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/Fud5ldJfpsgLt-rkLWrLO-aVYhQm-esTswetjxydPeQMK4jHNJ_1fGHVahaiCZ9y": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/5hlaukCirnXE4W_RSLJFOZN47Z5RiHnacXzdFp_70cLgiJUGR6cSb_HaFftkzi0-": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
@@ -416,12 +416,12 @@ func TestFlatten(t *testing.T) {
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/Fud5ldJfpsgLt-rkLWrLO-aVYhQm-esTswetjxydPeQMK4jHNJ_1fGHVahaiCZ9y", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/5hlaukCirnXE4W_RSLJFOZN47Z5RiHnacXzdFp_70cLgiJUGR6cSb_HaFftkzi0-", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("uMZyWOQGjhI1oNKfJyw8I6EtUmWkOsZNeUEZLjy1lmkAV7cR1hmOKsOlXs4RkuEC"), nil},
}, pkg.MustDecode("iaRt6l_Wm2n-h5UsDewZxQkCmjZjyL8r7wv32QT2kyV55-Lx09Dq4gfg9BiwPnKs"), nil},
{"sample exec container multiple layers", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
@@ -435,10 +435,10 @@ func TestFlatten(t *testing.T) {
"checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK/check": {Mode: 0400, Data: []byte("layers")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/YK1yDoi_qaUuXSPeVLJnaL8CBuZC4LoCarId5vdBCTLU82-vZFIfLDlaJuLM1iBj": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
"identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/g6gj2JWNXN-oNikou626vDqcMeZCn_TcV4xKuizBaPAWcasG2sVvItb5kZovMrzE": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/B-kc5iJMx8GtlCua4dz6BiJHnDAOUfPjgpbKq4e-QEn0_CZkSYs3fOA1ve06qMs2": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
"identifier/p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
@@ -454,14 +454,14 @@ func TestFlatten(t *testing.T) {
{Mode: 0400, Path: "checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK/check", Data: []byte("layers")},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/YK1yDoi_qaUuXSPeVLJnaL8CBuZC4LoCarId5vdBCTLU82-vZFIfLDlaJuLM1iBj", Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/B-kc5iJMx8GtlCua4dz6BiJHnDAOUfPjgpbKq4e-QEn0_CZkSYs3fOA1ve06qMs2", Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/g6gj2JWNXN-oNikou626vDqcMeZCn_TcV4xKuizBaPAWcasG2sVvItb5kZovMrzE", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("OG6C_fL-U4dZndkiKJvXf31qrM7DNpmCGxbWASwhWK_e8twIwC_ZvMvw142pVqz-"), nil},
}, pkg.MustDecode("O2YzyR7IUGU5J2CADy0hUZ3A5NkP_Vwzs4UadEdn2oMZZVWRtH0xZGJ3HXiimTnZ"), nil},
{"sample exec container layer promotion", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
@@ -472,9 +472,9 @@ func TestFlatten(t *testing.T) {
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/CuKcA4aAApOvWqI6-KzZEDyGLltRdBjOTyrTibam2fFVxtXmGL_RVuElOFTVlKfq": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/ywzI31S5McuYu7vzI2kqpSC_nsNzpWBXVCwPoLAYi9QVT0mODgzqoo9jYYaczPbf": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/kvJIqZo5DKFOxC2ZQ-8_nPaQzEAz9cIm3p6guO-uLqm-xaiPu7oRkSnsu411jd_U": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/xXTIYcXmgJWNLC91c417RRrNM9cjELwEZHpGvf8Fk_GNP5agRJp_SicD0w9aMeLJ": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
@@ -487,13 +487,13 @@ func TestFlatten(t *testing.T) {
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/CuKcA4aAApOvWqI6-KzZEDyGLltRdBjOTyrTibam2fFVxtXmGL_RVuElOFTVlKfq", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/cGvuAdKA2tThRa4w3ZI2c5S5TXDx4j2qvfFM7pzf7y5vBz7NisBV06ThBXw_97xK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/ywzI31S5McuYu7vzI2kqpSC_nsNzpWBXVCwPoLAYi9QVT0mODgzqoo9jYYaczPbf", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/kvJIqZo5DKFOxC2ZQ-8_nPaQzEAz9cIm3p6guO-uLqm-xaiPu7oRkSnsu411jd_U", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/xXTIYcXmgJWNLC91c417RRrNM9cjELwEZHpGvf8Fk_GNP5agRJp_SicD0w9aMeLJ", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("CKt376G_wCHRR26hBxLnoXyz5boEOX12AiHbQM2qz4bejKtfmys3Swqh60eKFn9y"), nil},
}, pkg.MustDecode("3EaW6WibLi9gl03_UieiFPaFcPy5p4x3JPxrnLJxGaTI-bh3HU9DK9IMx7c3rrNm"), nil},
{"sample file short", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
@@ -502,7 +502,7 @@ func TestFlatten(t *testing.T) {
"checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX": {Mode: 0400, Data: []byte{0}},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/3nNZXzfgfDW2aHqmgf1VpJUYxe1GMzU7eA9Q_NnNVTDPpPOCTYKEVX-yscOiLT-e": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
"identifier/3376ALA7hIUm2LbzH2fDvRezgzod1eTK_G6XjyOgbM2u-6swvkFaF0BOwSl_juBi": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
@@ -511,10 +511,10 @@ func TestFlatten(t *testing.T) {
{Mode: 0400, Path: "checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX", Data: []byte{0}},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/3nNZXzfgfDW2aHqmgf1VpJUYxe1GMzU7eA9Q_NnNVTDPpPOCTYKEVX-yscOiLT-e", Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/3376ALA7hIUm2LbzH2fDvRezgzod1eTK_G6XjyOgbM2u-6swvkFaF0BOwSl_juBi", Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("azjTvqkTjLhFzvPDM4DEHiHyLupOnRq9GCikVN6DEElR1Gxz_BDo4SA0zZzaYUGa"), nil},
}, pkg.MustDecode("iR6H5OIsyOW4EwEgtm9rGzGF6DVtyHLySEtwnFE8bnus9VJcoCbR4JIek7Lw-vwT"), nil},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {


@@ -103,8 +103,7 @@ type execArtifact struct {
args []string
// Duration the initial process is allowed to run. The zero value is
// equivalent to execTimeoutDefault. This value is never encoded in Params
// because it cannot affect outcome.
// equivalent to [ExecTimeoutDefault].
timeout time.Duration
// Caller-supplied exclusivity value, returned as is by IsExclusive.
@@ -129,12 +128,6 @@ func (a *execNetArtifact) Checksum() Checksum { return a.checksum }
// Kind returns the hardcoded [Kind] constant.
func (*execNetArtifact) Kind() Kind { return KindExecNet }
// Params is [Checksum] concatenated with [KindExec] params.
func (a *execNetArtifact) Params(ctx *IContext) {
ctx.GetHash().Write(a.checksum[:])
a.execArtifact.Params(ctx)
}
// Cure cures the [Artifact] in the container described by the caller. The
// container retains host networking.
func (a *execNetArtifact) Cure(f *FContext) error {
@@ -198,38 +191,131 @@ func (*execArtifact) Kind() Kind { return KindExec }
// Params writes paths, executable pathname and args.
func (a *execArtifact) Params(ctx *IContext) {
h := ctx.GetHash()
ctx.WriteString(a.name)
_0, _1 := []byte{0}, []byte{1}
ctx.WriteUint32(uint32(len(a.paths)))
for _, p := range a.paths {
if p.W {
h.Write(_1)
} else {
h.Write(_0)
}
if p.P != nil {
h.Write([]byte(p.P.String()))
ctx.WriteString(p.P.String())
} else {
h.Write([]byte("invalid P\x00"))
ctx.WriteString("invalid P\x00")
}
h.Write(_0)
ctx.WriteUint32(uint32(len(p.A)))
for _, d := range p.A {
ctx.WriteIdent(d)
}
h.Write(_0)
if p.W {
ctx.WriteUint32(1)
} else {
ctx.WriteUint32(0)
}
}
h.Write(_0)
h.Write([]byte(a.dir.String()))
h.Write(_0)
ctx.WriteString(a.dir.String())
ctx.WriteUint32(uint32(len(a.env)))
for _, e := range a.env {
h.Write([]byte(e))
ctx.WriteString(e)
}
h.Write(_0)
h.Write([]byte(a.path.String()))
h.Write(_0)
ctx.WriteString(a.path.String())
ctx.WriteUint32(uint32(len(a.args)))
for _, arg := range a.args {
h.Write([]byte(arg))
ctx.WriteString(arg)
}
ctx.WriteUint32(uint32(a.timeout & 0xffffffff))
ctx.WriteUint32(uint32(a.timeout >> 32))
if a.exclusive {
ctx.WriteUint32(1)
} else {
ctx.WriteUint32(0)
}
}
// readExecArtifact interprets IR values and returns the address of execArtifact
// or execNetArtifact.
func readExecArtifact(r *IRReader, net bool) Artifact {
r.DiscardAll()
name := r.ReadString()
sz := r.ReadUint32()
if sz > irMaxDeps {
panic(ErrIRDepend)
}
paths := make([]ExecPath, sz)
for i := range paths {
paths[i].P = check.MustAbs(r.ReadString())
sz = r.ReadUint32()
if sz > irMaxDeps {
panic(ErrIRDepend)
}
paths[i].A = make([]Artifact, sz)
for j := range paths[i].A {
paths[i].A[j] = r.ReadIdent()
}
paths[i].W = r.ReadUint32() != 0
}
dir := check.MustAbs(r.ReadString())
sz = r.ReadUint32()
if sz > irMaxValues {
panic(ErrIRValues)
}
env := make([]string, sz)
for i := range env {
env[i] = r.ReadString()
}
pathname := check.MustAbs(r.ReadString())
sz = r.ReadUint32()
if sz > irMaxValues {
panic(ErrIRValues)
}
args := make([]string, sz)
for i := range args {
args[i] = r.ReadString()
}
timeout := time.Duration(r.ReadUint32())
timeout |= time.Duration(r.ReadUint32()) << 32
exclusive := r.ReadUint32() != 0
checksum, ok := r.Finalise()
var checksumP *Checksum
if net {
if !ok {
panic(ErrExpectedChecksum)
}
checksumVal := checksum.Value()
checksumP = &checksumVal
} else {
if ok {
panic(ErrUnexpectedChecksum)
}
}
return NewExec(
name, checksumP, timeout, exclusive, dir, env, pathname, args, paths...,
)
}
func init() {
register(KindExec,
func(r *IRReader) Artifact { return readExecArtifact(r, false) })
register(KindExecNet,
func(r *IRReader) Artifact { return readExecArtifact(r, true) })
}
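// Assumed invariant (not asserted by this diff): because each kind registers
// a reader that rebuilds the artifact purely from its IR, encoding an
// artifact, decoding it, and re-encoding the result should yield
// byte-identical IR. That round-trip is what makes the IR parseable and
// guarantees uniqueness of the representation.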
// Dependencies returns a slice of all artifacts collected from caller-supplied


@@ -78,7 +78,7 @@ func TestExec(t *testing.T) {
), nil, pkg.Checksum{}, &pkg.DependencyCureError{
{
Ident: unique.Make(pkg.ID(pkg.MustDecode(
"zegItlAz7Lr1xFyzCtHTz_eL08KNuccT3S8b7zqekz3lZxqdELdqTlGjvUYPVykW",
"Sowo6oZRmG6xVtUaxB6bDWZhVsqAJsIJWUp0OPKlE103cY0lodx7dem8J-qQF0Z1",
))),
Err: stub.UniqueError(0xcafe),
},
@@ -109,7 +109,7 @@ func TestExec(t *testing.T) {
}
testtoolDestroy(t, base, c)
}, pkg.MustDecode("BQb5SCAo0Rw0YBGqjGemK1jH3jk0cgxAQ-JyeqVRqaqmEevJ3jtXNL8HB470XNnB")},
}, pkg.MustDecode("Q5DluWQCAeohLoiGRImurwFp3vdz9IfQCoj7Fuhh73s4KQPRHpEQEnHTdNHmB8Fx")},
{"net", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
c.SetStrict(true)
@@ -144,7 +144,7 @@ func TestExec(t *testing.T) {
})
testtoolDestroy(t, base, c)
}, pkg.MustDecode("hJ7nCMLea_09Z7Fi4ALXOgubMNwK7C61THdQobpQJhH3tnr7PJ86aY98Mte3rBje")},
}, pkg.MustDecode("bPYvvqxpfV7xcC1EptqyKNK1klLJgYHMDkzBcoOyK6j_Aj5hb0mXNPwTwPSK5F6Z")},
{"overlay root", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
c.SetStrict(true)
@@ -170,7 +170,7 @@ func TestExec(t *testing.T) {
})
testtoolDestroy(t, base, c)
}, pkg.MustDecode("PAZyVTxxROg4eCQX3yKuiIlB1k9VFXmtvvyy7QxoqaFhYwGZpT4wYec4R2dTtfyh")},
}, pkg.MustDecode("PO2DSSCa4yoSgEYRcCSZfQfwow1yRigL3Ry-hI0RDI4aGuFBha-EfXeSJnG_5_Rl")},
{"overlay work", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
c.SetStrict(true)
@@ -201,7 +201,7 @@ func TestExec(t *testing.T) {
})
testtoolDestroy(t, base, c)
}, pkg.MustDecode("uMZyWOQGjhI1oNKfJyw8I6EtUmWkOsZNeUEZLjy1lmkAV7cR1hmOKsOlXs4RkuEC")},
}, pkg.MustDecode("iaRt6l_Wm2n-h5UsDewZxQkCmjZjyL8r7wv32QT2kyV55-Lx09Dq4gfg9BiwPnKs")},
{"multiple layers", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
c.SetStrict(true)
@@ -254,7 +254,7 @@ func TestExec(t *testing.T) {
})
testtoolDestroy(t, base, c)
}, pkg.MustDecode("OG6C_fL-U4dZndkiKJvXf31qrM7DNpmCGxbWASwhWK_e8twIwC_ZvMvw142pVqz-")},
}, pkg.MustDecode("O2YzyR7IUGU5J2CADy0hUZ3A5NkP_Vwzs4UadEdn2oMZZVWRtH0xZGJ3HXiimTnZ")},
{"overlay layer promotion", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
c.SetStrict(true)
@@ -286,7 +286,7 @@ func TestExec(t *testing.T) {
})
testtoolDestroy(t, base, c)
}, pkg.MustDecode("CKt376G_wCHRR26hBxLnoXyz5boEOX12AiHbQM2qz4bejKtfmys3Swqh60eKFn9y")},
}, pkg.MustDecode("3EaW6WibLi9gl03_UieiFPaFcPy5p4x3JPxrnLJxGaTI-bh3HU9DK9IMx7c3rrNm")},
})
}


@@ -25,6 +25,12 @@ var _ KnownChecksum = new(fileArtifactNamed)
// String returns the caller-supplied reporting name.
func (a *fileArtifactNamed) String() string { return a.name }
// Params writes the caller-supplied reporting name and the file body.
func (a *fileArtifactNamed) Params(ctx *IContext) {
ctx.WriteString(a.name)
ctx.Write(a.fileArtifact)
}
// NewFile returns a [FileArtifact] that cures into a caller-supplied byte slice.
//
// Caller must not modify data after NewFile returns.
@@ -39,8 +45,22 @@ func NewFile(name string, data []byte) FileArtifact {
// Kind returns the hardcoded [Kind] constant.
func (*fileArtifact) Kind() Kind { return KindFile }
// Params writes the result of Cure.
func (a *fileArtifact) Params(ctx *IContext) { ctx.GetHash().Write(*a) }
// Params writes an empty string and the file body.
func (a *fileArtifact) Params(ctx *IContext) {
ctx.WriteString("")
ctx.Write(*a)
}
func init() {
register(KindFile, func(r *IRReader) Artifact {
name := r.ReadString()
data := r.ReadStringBytes()
if _, ok := r.Finalise(); !ok {
panic(ErrExpectedChecksum)
}
return NewFile(name, data)
})
}
// Dependencies returns a nil slice.
func (*fileArtifact) Dependencies() []Artifact { return nil }

View File

@@ -17,13 +17,13 @@ func TestFile(t *testing.T) {
cureMany(t, c, []cureStep{
{"short", pkg.NewFile("null", []byte{0}), base.Append(
"identifier",
"3nNZXzfgfDW2aHqmgf1VpJUYxe1GMzU7eA9Q_NnNVTDPpPOCTYKEVX-yscOiLT-e",
"3376ALA7hIUm2LbzH2fDvRezgzod1eTK_G6XjyOgbM2u-6swvkFaF0BOwSl_juBi",
), pkg.MustDecode(
"vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX",
), nil},
})
}, pkg.MustDecode(
"azjTvqkTjLhFzvPDM4DEHiHyLupOnRq9GCikVN6DEElR1Gxz_BDo4SA0zZzaYUGa",
"iR6H5OIsyOW4EwEgtm9rGzGF6DVtyHLySEtwnFE8bnus9VJcoCbR4JIek7Lw-vwT",
)},
})
}

762
internal/pkg/ir.go Normal file
View File

@@ -0,0 +1,762 @@
package pkg
import (
"bufio"
"bytes"
"context"
"crypto/sha512"
"encoding/binary"
"errors"
"fmt"
"io"
"slices"
"strconv"
"syscall"
"unique"
"unsafe"
)
// wordSize is the boundary to which binary segments are always aligned.
const wordSize = 8
// alignSize returns the padded size for aligning sz.
func alignSize(sz int) int {
return sz + (wordSize-(sz)%wordSize)%wordSize
}
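As a quick standalone sketch of the padding arithmetic (the values below are illustrative, not taken from the change): every encoded segment is rounded up to the next multiple of wordSize.

package main

import "fmt"

const wordSize = 8

// alignSize mirrors the definition above: round sz up to the next multiple
// of wordSize.
func alignSize(sz int) int { return sz + (wordSize-sz%wordSize)%wordSize }

func main() {
	for _, sz := range []int{0, 1, 8, 13, 16} {
		fmt.Printf("alignSize(%d) = %d\n", sz, alignSize(sz))
	}
	// Prints 0, 8, 8, 16, 16 respectively.
}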
// panicToError recovers from a panic and replaces a nil error with the panicked
// error value. If the value does not implement error, it is re-panicked.
func panicToError(errP *error) {
r := recover()
if r == nil {
return
}
if err, ok := r.(error); !ok {
panic(r)
} else if *errP == nil {
*errP = err
}
}
// IContext is passed to [Artifact.Params] and provides methods for writing
// values to the IR writer. It does not expose the underlying [io.Writer].
//
// IContext is valid until [Artifact.Params] returns.
type IContext struct {
// Address of underlying [Cache], should be zeroed or made unusable after
// [Artifact.Params] returns and must not be exposed directly.
cache *Cache
// Written to by various methods, should be zeroed after [Artifact.Params]
// returns and must not be exposed directly.
w io.Writer
}
// Unwrap returns the underlying [context.Context].
func (i *IContext) Unwrap() context.Context { return i.cache.ctx }
// irZero is a zero IR word.
var irZero [wordSize]byte
// IRValueKind denotes the kind of encoded value.
type IRValueKind uint32
const (
// IRKindEnd denotes the end of the current parameters stream. The ancillary
// value is interpreted as [IREndFlag].
IRKindEnd IRValueKind = iota
// IRKindIdent denotes the identifier of a dependency [Artifact]. The
// ancillary value is reserved for future use.
IRKindIdent
// IRKindUint32 denotes an inlined uint32 value.
IRKindUint32
// IRKindString denotes a string with its true length encoded in the header's
// ancillary data. Its wire length is always aligned to an 8-byte boundary.
IRKindString
irHeaderShift = 32
irHeaderMask = 0xffffffff
)
// String returns a user-facing name of k.
func (k IRValueKind) String() string {
switch k {
case IRKindEnd:
return "terminator"
case IRKindIdent:
return "ident"
case IRKindUint32:
return "uint32"
case IRKindString:
return "string"
default:
return "invalid kind " + strconv.Itoa(int(k))
}
}
// irValueHeader encodes [IRValueKind] and a 32-bit ancillary value.
type irValueHeader uint64
// encodeHeader returns irValueHeader encoding [IRValueKind] and ancillary data.
func (k IRValueKind) encodeHeader(v uint32) irValueHeader {
return irValueHeader(v)<<irHeaderShift | irValueHeader(k)
}
// put stores h in b[0:8].
func (h irValueHeader) put(b []byte) {
binary.LittleEndian.PutUint64(b[:], uint64(h))
}
// append appends the bytes of h to b and returns the appended slice.
func (h irValueHeader) append(b []byte) []byte {
return binary.LittleEndian.AppendUint64(b, uint64(h))
}
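A standalone sketch of how a value header packs into a single word; the kind value 2 for IRKindUint32 follows from the iota order above, and the ancillary value is arbitrary.

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	const (
		kind      = 2      // IRKindUint32, per the iota order above
		ancillary = 0xcafe // arbitrary ancillary data
	)
	// Pack: ancillary data in the high 32 bits, kind in the low 32 bits.
	h := uint64(ancillary)<<32 | uint64(kind)
	var buf [8]byte
	binary.LittleEndian.PutUint64(buf[:], h)
	fmt.Printf("wire bytes: % x\n", buf) // 02 00 00 00 fe ca 00 00
	// Unpack, mirroring the kind and value accessors further down.
	fmt.Println("kind:", h&0xffffffff, "ancillary:", h>>32) // 2, 51966
}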
// IREndFlag is ancillary data encoded in the header of an [IRKindEnd] value and
// specifies the presence of optional fields in the remaining [IRKindEnd] data.
// Present fields appear in the order of their corresponding constants defined
// below.
type IREndFlag uint32
const (
// IREndKnownChecksum denotes a [KnownChecksum] artifact. For an [IRKindEnd]
// value with this flag set, the remaining data contains the [Checksum].
IREndKnownChecksum IREndFlag = 1 << iota
)
// mustWrite writes to IContext.w and panics on error. The panic is recovered
// from by the caller and used as the return value.
func (i *IContext) mustWrite(p []byte) {
if _, err := i.w.Write(p); err != nil {
panic(err)
}
}
// WriteIdent writes the identifier of [Artifact] to the IR. The behaviour of
// WriteIdent is not defined for an [Artifact] not part of the slice returned by
// [Artifact.Dependencies].
func (i *IContext) WriteIdent(a Artifact) {
buf := i.cache.getIdentBuf()
defer i.cache.putIdentBuf(buf)
IRKindIdent.encodeHeader(0).put(buf[:])
*(*ID)(buf[wordSize:]) = i.cache.Ident(a).Value()
i.mustWrite(buf[:])
}
// WriteUint32 writes a uint32 value to the IR.
func (i *IContext) WriteUint32(v uint32) {
i.mustWrite(IRKindUint32.encodeHeader(v).append(nil))
}
// irMaxStringLength is the maximum acceptable wire size of [IRKindString].
const irMaxStringLength = 1 << 20
// IRStringError is a string value too big to encode in IR.
type IRStringError string
func (IRStringError) Error() string {
return "params value too big to encode in IR"
}
// Write writes p as a string value to the IR.
func (i *IContext) Write(p []byte) {
sz := alignSize(len(p))
if len(p) > irMaxStringLength || sz > irMaxStringLength {
panic(IRStringError(p))
}
i.mustWrite(IRKindString.encodeHeader(uint32(len(p))).append(nil))
i.mustWrite(p)
psz := sz - len(p)
if psz > 0 {
i.mustWrite(irZero[:psz])
}
}
// WriteString writes s as a string value to the IR.
func (i *IContext) WriteString(s string) {
p := unsafe.Slice(unsafe.StringData(s), len(s))
i.Write(p)
}
// Encode writes a deterministic, efficient representation of a to w and returns
// the first non-nil error encountered while writing to w.
func (c *Cache) Encode(w io.Writer, a Artifact) (err error) {
deps := a.Dependencies()
idents := make([]*extIdent, len(deps))
for i, d := range deps {
dbuf, did := c.unsafeIdent(d, true)
if dbuf == nil {
dbuf = c.getIdentBuf()
binary.LittleEndian.PutUint64(dbuf[:], uint64(d.Kind()))
*(*ID)(dbuf[wordSize:]) = did.Value()
} else {
c.storeIdent(d, dbuf)
}
defer c.putIdentBuf(dbuf)
idents[i] = dbuf
}
slices.SortFunc(idents, func(a, b *extIdent) int {
return bytes.Compare(a[:], b[:])
})
idents = slices.CompactFunc(idents, func(a, b *extIdent) bool {
return *a == *b
})
// kind uint64 | deps_sz uint64
var buf [wordSize * 2]byte
binary.LittleEndian.PutUint64(buf[:], uint64(a.Kind()))
binary.LittleEndian.PutUint64(buf[wordSize:], uint64(len(idents)))
if _, err = w.Write(buf[:]); err != nil {
return
}
for _, dn := range idents {
// kind uint64 | ident ID
if _, err = w.Write(dn[:]); err != nil {
return
}
}
func() {
i := IContext{c, w}
defer panicToError(&err)
defer func() { i.cache, i.w = nil, nil }()
a.Params(&i)
}()
if err != nil {
return
}
var f IREndFlag
kcBuf := c.getIdentBuf()
sz := wordSize
if kc, ok := a.(KnownChecksum); ok {
f |= IREndKnownChecksum
*(*Checksum)(kcBuf[wordSize:]) = kc.Checksum()
sz += len(Checksum{})
}
IRKindEnd.encodeHeader(uint32(f)).put(kcBuf[:])
_, err = w.Write(kcBuf[:sz])
c.putIdentBuf(kcBuf)
return
}
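Read together, the writes above give the following per-artifact wire layout; this summary is derived from Encode itself and from the hand-computed identifier in the tar test later in this change.

// Per-artifact layout, every segment little endian and 8-byte aligned:
//
//	kind    uint64
//	deps_sz uint64
//	deps    deps_sz entries of (kind uint64 | ident ID)
//	params  zero or more IR values written by Artifact.Params
//	end     IRKindEnd header, followed by the Checksum when
//	        IREndKnownChecksum is set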
// encodeAll implements EncodeAll by recursively encoding dependencies,
// deduplicating by value via the encoded map.
func (c *Cache) encodeAll(
w io.Writer,
a Artifact,
encoded map[Artifact]struct{},
) (err error) {
if _, ok := encoded[a]; ok {
return
}
for _, d := range a.Dependencies() {
if err = c.encodeAll(w, d, encoded); err != nil {
return
}
}
encoded[a] = struct{}{}
return c.Encode(w, a)
}
// EncodeAll writes a self-describing IR stream of a to w and returns the first
// non-nil error encountered while writing to w.
//
// EncodeAll tries to avoid encoding the same [Artifact] more than once;
// however, it cannot do so for artifacts that do not compare equal by value,
// as that would require buffering and greatly reduce performance. It is
// therefore up to the caller to avoid representing dependencies in a way that
// makes two equivalent artifacts compare unequal. While an IR stream with
// repeated artifacts is valid, it is somewhat inefficient, and the reference
// [IRDecoder] implementation produces a warning for it.
//
// Note that while EncodeAll makes use of the ident free list, it does not use
// the ident cache, nor does it contribute identifiers it computes back to the
// ident cache. Because of this, multiple invocations of EncodeAll have
// similar cost, and the work does not amortise when combined with a call to Cure.
func (c *Cache) EncodeAll(w io.Writer, a Artifact) error {
return c.encodeAll(w, a, make(map[Artifact]struct{}))
}
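A hedged usage sketch of the encode/decode round trip, shaped after the pipe-based test later in this change; the in-memory buffer and the helper name are illustrative only.

// roundtrip is a sketch only: it streams the full graph of a through
// EncodeAll and reads the final Artifact back via an IRDecoder. It relies on
// the "bytes" import already present in this file.
func roundtrip(c *Cache, a Artifact) (Artifact, error) {
	var buf bytes.Buffer
	if err := c.EncodeAll(&buf, a); err != nil {
		return nil, err
	}
	return c.NewDecoder(&buf).Decode()
}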
// ErrRemainingIR is returned for an [IRReadFunc] that fails to call
// [IRReader.Finalise] before returning.
var ErrRemainingIR = errors.New("implementation did not consume final value")
// DanglingIdentError is an identifier in an [IRKindIdent] value that was never
// described in the IR stream before it was encountered.
type DanglingIdentError unique.Handle[ID]
func (e DanglingIdentError) Error() string {
return "artifact " + Encode(unique.Handle[ID](e).Value()) +
" was never described"
}
type (
// IRDecoder decodes [Artifact] from an IR stream. The stream is read to
// EOF and the final [Artifact] is returned. Previous artifacts may be
// looked up by their identifier.
//
// An [Artifact] may appear more than once in the same IR stream. A
// repeating [Artifact] generates a warning via [Cache], which is only shown
// when verbose logging is enabled. Artifacts may only depend on artifacts
// previously described in the IR stream.
//
// Methods of IRDecoder are not safe for concurrent use.
IRDecoder struct {
// Address of underlying [Cache], must not be exposed directly.
c *Cache
// Underlying IR reader. Methods of [IRReader] must not use this as it
// bypasses ident measurement.
r io.Reader
// Artifacts already seen in the IR stream.
ident map[unique.Handle[ID]]Artifact
// Whether Decode returned, and the entire IR stream was decoded.
done, ok bool
}
// IRReader provides methods to decode the IR wire format and read values
// from the reader embedded in the underlying [IRDecoder]. It is
// deliberately impossible to obtain the [IRValueKind] of the next value,
// and callers must never recover from panics in any read method.
//
// It is the responsibility of the caller to call Finalise after all IR
// values have been read. Failure to call Finalise causes the resulting
// [Artifact] to be rejected with [ErrRemainingIR].
//
// For an [Artifact] expected to have dependencies, the caller must consume
// all dependencies by calling Next until they are depleted, or
// call DiscardAll to explicitly discard them and rely on values encoded as
// [IRKindIdent] instead. Failure to consume all unstructured dependencies
// causes the resulting [Artifact] to be rejected with [MissedDependencyError].
//
// Requesting the value of an unstructured dependency not yet described in
// the IR stream via Next, or reading an [IRKindIdent] value not part of
// unstructured dependencies via ReadIdent may cause the resulting
// [Artifact] to be rejected with [DanglingIdentError]; however, either
// method may return a non-nil [Artifact] implementation of unspecified
// value.
IRReader struct {
// Address of underlying [IRDecoder], should be zeroed or made unusable
// after finalisation and must not be exposed directly.
d *IRDecoder
// Common buffer for word-sized reads.
buf [wordSize]byte
// Dependencies sent before params, sorted by identifier. Resliced on
// each call to Next and checked to be depleted during Finalise.
deps []*extIdent
// Number of values already read, -1 denotes a finalised IRReader.
count int
// Header of value currently being read.
h irValueHeader
// Measured IR reader. All reads for the current [Artifact] must go
// through this to produce a correct ident.
r io.Reader
// Buffers writes for ident measurement. Flushed and returned during Finalise.
ibw *bufio.Writer
}
// IRReadFunc reads IR values written by [Artifact.Params] to produce an
// instance of [Artifact] identical to the one to produce these values.
IRReadFunc func(r *IRReader) Artifact
)
// kind returns the [IRValueKind] encoded in h.
func (h irValueHeader) kind() IRValueKind {
return IRValueKind(h & irHeaderMask)
}
// value returns ancillary data encoded in h.
func (h irValueHeader) value() uint32 {
return uint32(h >> irHeaderShift)
}
// irArtifact refers to artifact IR interpretation functions and must not be
// written to directly.
var irArtifact = make(map[Kind]IRReadFunc)
// InvalidKindError is an unregistered [Kind] value.
type InvalidKindError Kind
func (e InvalidKindError) Error() string {
return "invalid artifact kind " + strconv.Itoa(int(e))
}
// register records the [IRReadFunc] of an implementation of [Artifact] under
// the specified [Kind]. Expected to be used only during initialization, it
// panics if the mapping between [Kind] and [IRReadFunc] is not a bijection.
//
// register is not safe for concurrent use. register must not be called after
// the first instance of [Cache] has been opened.
func register(k Kind, f IRReadFunc) {
if _, ok := irArtifact[k]; ok {
panic("attempting to register " + strconv.Itoa(int(k)) + " twice")
}
irArtifact[k] = f
}
// Register records the [IRReadFunc] of a custom implementation of [Artifact]
// under the specified [Kind]. Expected to be used only during initialization,
// it panics if the mapping between [Kind] and [IRReadFunc] is not a bijection,
// or the specified [Kind] is below [KindCustomOffset].
//
// Register is not safe for concurrent use. Register must not be called after
// the first instance of [Cache] has been opened.
func Register(k Kind, f IRReadFunc) {
if k < KindCustomOffset {
panic("attempting to register within internal kind range")
}
register(k, f)
}
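A hypothetical sketch of the symmetry Register expects between Params and the registered IRReadFunc; the package, the kind value, the artifact type and the FContext-based Cure shape are all assumptions modelled on the test stubs elsewhere in this change.

package custom // hypothetical consumer of pkg.Register

import "hakurei.app/internal/pkg"

// exampleArtifact carries a single uint32 parameter and has no dependencies.
type exampleArtifact uint32

// kindExample is assumed to be an unused custom kind.
const kindExample = pkg.KindCustomOffset + 0x40

func (exampleArtifact) Kind() pkg.Kind               { return kindExample }
func (a exampleArtifact) Params(ctx *pkg.IContext)   { ctx.WriteUint32(uint32(a)) }
func (exampleArtifact) Dependencies() []pkg.Artifact { return nil }
func (exampleArtifact) IsExclusive() bool            { return false }
func (exampleArtifact) Cure(*pkg.FContext) error     { return nil }

func init() {
	// The read function mirrors Params exactly and rejects a checksum, as
	// exampleArtifact does not implement KnownChecksum.
	pkg.Register(kindExample, func(r *pkg.IRReader) pkg.Artifact {
		a := exampleArtifact(r.ReadUint32())
		if _, ok := r.Finalise(); ok {
			panic(pkg.ErrUnexpectedChecksum)
		}
		return a
	})
}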
// NewDecoder returns a new [IRDecoder] that reads from the [io.Reader].
func (c *Cache) NewDecoder(r io.Reader) *IRDecoder {
return &IRDecoder{c, r, make(map[unique.Handle[ID]]Artifact), false, false}
}
const (
// irMaxValues is the arbitrary maximum number of values allowed to be
// written by [Artifact.Params] and subsequently read via [IRReader].
irMaxValues = 1 << 12
// irMaxDeps is the arbitrary maximum number of direct dependencies allowed
// to be returned by [Artifact.Dependencies] and subsequently decoded by
// [IRDecoder].
irMaxDeps = 1 << 10
)
var (
// ErrIRValues is returned for an [Artifact] with too many parameter values.
ErrIRValues = errors.New("artifact has too many IR parameter values")
// ErrIRDepend is returned for an [Artifact] with too many dependencies.
ErrIRDepend = errors.New("artifact has too many dependencies")
// ErrAlreadyFinalised is returned when attempting to use an [IRReader] that
// has already been finalised.
ErrAlreadyFinalised = errors.New("reader has already finalised")
)
// enterReader panics with an appropriate error for an out-of-bounds count and
// must be called at some point in any exported method.
func (ir *IRReader) enterReader(read bool) {
if ir.count < 0 {
panic(ErrAlreadyFinalised)
}
if ir.count >= irMaxValues {
panic(ErrIRValues)
}
if read {
ir.count++
}
}
// IRKindError describes an attempt to read an IR value of unexpected kind.
type IRKindError struct {
Got, Want IRValueKind
Ancillary uint32
}
func (e *IRKindError) Error() string {
return fmt.Sprintf(
"got %s IR value (%#x) instead of %s",
e.Got, e.Ancillary, e.Want,
)
}
// readFull reads until either p is filled or an error is encountered.
func (ir *IRReader) readFull(p []byte) (n int, err error) {
for n < len(p) && err == nil {
var nn int
nn, err = ir.r.Read(p[n:])
n += nn
}
return
}
// mustRead reads from the underlying measured reader and panics on error. If
// an [io.EOF] is encountered after p has been filled, the error is discarded;
// if it is encountered before p has been filled, it is promoted to
// [io.ErrUnexpectedEOF].
func (ir *IRReader) mustRead(p []byte) {
n, err := ir.readFull(p)
if err == nil {
return
}
if errors.Is(err, io.EOF) {
if n == len(p) {
return
}
err = io.ErrUnexpectedEOF
}
panic(err)
}
// mustReadHeader reads the next value header via the measured reader and checks its kind.
func (ir *IRReader) mustReadHeader(k IRValueKind) {
ir.mustRead(ir.buf[:])
ir.h = irValueHeader(binary.LittleEndian.Uint64(ir.buf[:]))
if wk := ir.h.kind(); wk != k {
panic(&IRKindError{wk, k, ir.h.value()})
}
}
// putAll returns all dependency buffers to the underlying [Cache].
func (ir *IRReader) putAll() {
for _, buf := range ir.deps {
ir.d.c.putIdentBuf(buf)
}
ir.deps = nil
}
// DiscardAll discards all unstructured dependencies. This is useful for
// implementations that encode dependencies as [IRKindIdent] values, which are
// read back via ReadIdent.
func (ir *IRReader) DiscardAll() {
if ir.deps == nil {
panic("attempting to discard dependencies twice")
}
ir.putAll()
}
// ErrDependencyDepleted is returned when attempting to advance to the next
// unstructured dependency while none remain.
var ErrDependencyDepleted = errors.New("reading past end of dependencies")
// Next returns the next unstructured dependency.
func (ir *IRReader) Next() Artifact {
if len(ir.deps) == 0 {
panic(ErrDependencyDepleted)
}
id := unique.Make(ID(ir.deps[0][wordSize:]))
ir.d.c.putIdentBuf(ir.deps[0])
ir.deps = ir.deps[1:]
if a, ok := ir.d.ident[id]; !ok {
ir.putAll()
panic(DanglingIdentError(id))
} else {
return a
}
}
// MissedDependencyError is the number of unstructured dependencies remaining
// in [IRReader] that were never requested or explicitly discarded before
// finalisation.
type MissedDependencyError int
func (e MissedDependencyError) Error() string {
return "missed " + strconv.Itoa(int(e)) + " unstructured dependencies"
}
var (
// ErrUnexpectedChecksum is returned by an [IRReadFunc] that does not expect
// a checksum but receives one in [IRKindEnd] anyway.
ErrUnexpectedChecksum = errors.New("checksum specified on unsupported artifact")
// ErrExpectedChecksum is returned by an [IRReadFunc] that expects a checksum
// but does not receive one in [IRKindEnd].
ErrExpectedChecksum = errors.New("checksum required but not specified")
)
// Finalise reads the final [IRKindEnd] value and marks ir as finalised. Methods
// of ir are invalid upon entry into Finalise. If a [Checksum] is available via
// [IREndKnownChecksum], its handle is returned and the caller must store its
// value in the resulting [Artifact].
func (ir *IRReader) Finalise() (checksum unique.Handle[Checksum], ok bool) {
ir.enterReader(true)
ir.count = -1
ir.mustReadHeader(IRKindEnd)
f := IREndFlag(ir.h.value())
if f&IREndKnownChecksum != 0 {
buf := ir.d.c.getIdentBuf()
defer ir.d.c.putIdentBuf(buf)
ir.mustRead(buf[wordSize:])
checksum = unique.Make(Checksum(buf[wordSize:]))
ok = true
}
if err := ir.ibw.Flush(); err != nil {
panic(err)
}
ir.r, ir.ibw = nil, nil
if len(ir.deps) != 0 {
panic(MissedDependencyError(len(ir.deps)))
}
return
}
// ReadIdent reads the next value as [IRKindIdent].
func (ir *IRReader) ReadIdent() Artifact {
ir.enterReader(true)
ir.mustReadHeader(IRKindIdent)
buf := ir.d.c.getIdentBuf()
defer ir.d.c.putIdentBuf(buf)
ir.mustRead(buf[wordSize:])
id := unique.Make(ID(buf[wordSize:]))
if a, ok := ir.d.ident[id]; !ok {
panic(DanglingIdentError(id))
} else {
return a
}
}
// ReadUint32 reads the next value as [IRKindUint32].
func (ir *IRReader) ReadUint32() uint32 {
ir.enterReader(true)
ir.mustReadHeader(IRKindUint32)
return ir.h.value()
}
// ReadStringBytes reads the next value as [IRKindString] but returns it as a
// byte slice instead.
func (ir *IRReader) ReadStringBytes() []byte {
ir.enterReader(true)
ir.mustReadHeader(IRKindString)
sz := int(ir.h.value())
szWire := alignSize(sz)
if szWire > irMaxStringLength {
panic(IRStringError("\x00"))
}
p := make([]byte, szWire)
ir.mustRead(p)
return p[:sz]
}
// ReadString reads the next value as [IRKindString].
func (ir *IRReader) ReadString() string {
p := ir.ReadStringBytes()
return unsafe.String(unsafe.SliceData(p), len(p))
}
// decode decodes the next [Artifact] in the IR stream, returning any buffers
// borrowed from [Cache] before it returns. decode returns [io.EOF] if and
// only if the underlying [io.Reader] has already been read to EOF.
func (d *IRDecoder) decode() (a Artifact, err error) {
defer panicToError(&err)
var ir IRReader
defer func() { ir.d = nil }()
ir.d = d
h := sha512.New384()
ir.ibw = d.c.getWriter(h)
defer d.c.putWriter(ir.ibw)
ir.r = io.TeeReader(d.r, ir.ibw)
if n, _err := ir.readFull(ir.buf[:]); _err != nil {
if errors.Is(_err, io.EOF) {
if n != 0 {
_err = io.ErrUnexpectedEOF
}
}
err = _err
return
}
ak := Kind(binary.LittleEndian.Uint64(ir.buf[:]))
f, ok := irArtifact[ak]
if !ok {
err = InvalidKindError(ak)
return
}
defer ir.putAll()
ir.mustRead(ir.buf[:])
sz := binary.LittleEndian.Uint64(ir.buf[:])
if sz > irMaxDeps {
err = ErrIRDepend
return
}
ir.deps = make([]*extIdent, sz)
for i := range ir.deps {
ir.deps[i] = d.c.getIdentBuf()
}
for _, buf := range ir.deps {
ir.mustRead(buf[:])
}
a = f(&ir)
if a == nil {
err = syscall.ENOTRECOVERABLE
return
}
if ir.count != -1 {
err = ErrRemainingIR
return
}
buf := d.c.getIdentBuf()
h.Sum(buf[wordSize:wordSize])
id := unique.Make(ID(buf[wordSize:]))
d.c.putIdentBuf(buf)
if _, ok = d.ident[id]; !ok {
d.ident[id] = a
} else {
d.c.msg.Verbosef(
"artifact %s appeared more than once in IR stream",
Encode(id.Value()),
)
}
return
}
// Decode consumes the IR stream to EOF and returns the final [Artifact]. After
// Decode returns, Lookup is available and Decode must not be called again.
func (d *IRDecoder) Decode() (a Artifact, err error) {
if d.done {
panic("attempting to decode an IR stream twice")
}
defer func() { d.done = true }()
var cur Artifact
next:
a, err = d.decode()
if err == nil {
cur = a
goto next
}
if errors.Is(err, io.EOF) {
a, err = cur, nil
d.ok = true
}
return
}
// Lookup looks up an [Artifact] described by the IR stream by its identifier.
func (d *IRDecoder) Lookup(id unique.Handle[ID]) (a Artifact, ok bool) {
if !d.ok {
panic("attempting to look up artifact without full IR stream")
}
a, ok = d.ident[id]
return
}
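A brief sketch of the post-Decode lookup contract; the helper is illustrative and assumes the caller already holds the identifier of interest.

// decodeAndLookup is a sketch only: Lookup is valid solely after Decode has
// consumed the entire IR stream.
func decodeAndLookup(c *Cache, r io.Reader, id unique.Handle[ID]) (Artifact, bool) {
	d := c.NewDecoder(r)
	if _, err := d.Decode(); err != nil {
		return nil, false
	}
	return d.Lookup(id)
}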

114
internal/pkg/ir_test.go Normal file
View File

@@ -0,0 +1,114 @@
package pkg_test
import (
"bytes"
"io"
"reflect"
"testing"
"hakurei.app/container/check"
"hakurei.app/internal/pkg"
)
func TestIRRoundtrip(t *testing.T) {
t.Parallel()
testCases := []struct {
name string
a pkg.Artifact
}{
{"http get aligned", pkg.NewHTTPGet(
nil, "file:///testdata",
pkg.Checksum(bytes.Repeat([]byte{0xfd}, len(pkg.Checksum{}))),
)},
{"http get unaligned", pkg.NewHTTPGet(
nil, "https://hakurei.app",
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
)},
{"http get tar", pkg.NewHTTPGetTar(
nil, "file:///testdata",
pkg.Checksum(bytes.Repeat([]byte{0xff}, len(pkg.Checksum{}))),
pkg.TarBzip2,
)},
{"http get tar unaligned", pkg.NewHTTPGetTar(
nil, "https://hakurei.app",
pkg.Checksum(bytes.Repeat([]byte{0xfe}, len(pkg.Checksum{}))),
pkg.TarUncompressed,
)},
{"exec offline", pkg.NewExec(
"exec-offline", nil, 0, false,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1"},
check.MustAbs("/opt/bin/testtool"),
[]string{"testtool"},
pkg.MustPath("/file", false, pkg.NewFile("file", []byte(
"stub file",
))), pkg.MustPath("/.hakurei", false, pkg.NewHTTPGetTar(
nil, "file:///hakurei.tar",
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
pkg.TarUncompressed,
)), pkg.MustPath("/opt", false, pkg.NewHTTPGetTar(
nil, "file:///testtool.tar.gz",
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
pkg.TarGzip,
)),
)},
{"exec net", pkg.NewExec(
"exec-net",
(*pkg.Checksum)(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
0, false,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1"},
check.MustAbs("/opt/bin/testtool"),
[]string{"testtool", "net"},
pkg.MustPath("/file", false, pkg.NewFile("file", []byte(
"stub file",
))), pkg.MustPath("/.hakurei", false, pkg.NewHTTPGetTar(
nil, "file:///hakurei.tar",
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
pkg.TarUncompressed,
)), pkg.MustPath("/opt", false, pkg.NewHTTPGetTar(
nil, "file:///testtool.tar.gz",
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
pkg.TarGzip,
)),
)},
{"file anonymous", pkg.NewFile("", []byte{0})},
{"file", pkg.NewFile("stub", []byte("stub"))},
}
testCasesCache := make([]cacheTestCase, len(testCases))
for i, tc := range testCases {
want := tc.a
testCasesCache[i] = cacheTestCase{tc.name, nil,
func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
r, w := io.Pipe()
done := make(chan error, 1)
go func() {
t.Helper()
done <- c.EncodeAll(w, want)
_ = w.Close()
}()
if got, err := c.NewDecoder(r).Decode(); err != nil {
t.Fatalf("Decode: error = %v", err)
} else if !reflect.DeepEqual(got, want) {
t.Fatalf("Decode: %#v, want %#v", got, want)
}
if err := <-done; err != nil {
t.Fatalf("EncodeAll: error = %v", err)
}
}, pkg.MustDecode(
"E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C",
),
}
}
checkWithCache(t, testCasesCache)
}

View File

@@ -19,8 +19,8 @@ type httpArtifact struct {
// closing the [io.ReadCloser] returned by Cure.
checksum unique.Handle[Checksum]
// doFunc is the Do method of [http.Client] supplied by the caller.
doFunc func(req *http.Request) (*http.Response, error)
// client is the address of the caller-supplied [http.Client].
client *http.Client
}
var _ KnownChecksum = new(httpArtifact)
@@ -33,10 +33,7 @@ func NewHTTPGet(
url string,
checksum Checksum,
) FileArtifact {
if c == nil {
c = http.DefaultClient
}
return &httpArtifact{url: url, checksum: unique.Make(checksum), doFunc: c.Do}
return &httpArtifact{url: url, checksum: unique.Make(checksum), client: c}
}
// Kind returns the hardcoded [Kind] constant.
@@ -44,8 +41,17 @@ func (*httpArtifact) Kind() Kind { return KindHTTPGet }
// Params writes the backing url string. Client is not represented as it does
// not affect [Cache.Cure] outcome.
func (a *httpArtifact) Params(ctx *IContext) {
ctx.GetHash().Write([]byte(a.url))
func (a *httpArtifact) Params(ctx *IContext) { ctx.WriteString(a.url) }
func init() {
register(KindHTTPGet, func(r *IRReader) Artifact {
url := r.ReadString()
checksum, ok := r.Finalise()
if !ok {
panic(ErrExpectedChecksum)
}
return NewHTTPGet(nil, url, checksum.Value())
})
}
// Dependencies returns a nil slice.
@@ -80,8 +86,13 @@ func (a *httpArtifact) Cure(r *RContext) (rc io.ReadCloser, err error) {
}
req.Header.Set("User-Agent", "Hakurei/1.1")
c := a.client
if c == nil {
c = http.DefaultClient
}
var resp *http.Response
if resp, err = a.doFunc(req); err != nil {
if resp, err = c.Do(req); err != nil {
return
}

View File

@@ -109,7 +109,7 @@ func TestHTTPGet(t *testing.T) {
)
wantPathname := base.Append(
"identifier",
"00BNNr-PsNMtowTpEG86ZeI7eQKoD-pjSCPAal1e5MYqr_N7FLpyXKdXLXE8WEBF",
"oM-2pUlk-mOxK1t3aMWZer69UdOQlAXiAgMrpZ1476VoOqpYVP1aGFS9_HYy-D8_",
)
if pathname, checksum, err := c.Cure(f); err != nil {
t.Fatalf("Cure: error = %v", err)
@@ -156,6 +156,6 @@ func TestHTTPGet(t *testing.T) {
if _, _, err := c.Cure(f); !reflect.DeepEqual(err, wantErrNotFound) {
t.Fatalf("Pathname: error = %#v, want %#v", err, wantErrNotFound)
}
}, pkg.MustDecode("KkdL8x2a84V8iYZop5jSTyba54xSgf_NZ1R0c4nSp9xTdk3SK_zUKGhNJ2uK8wMY")},
}, pkg.MustDecode("L_0RFHpr9JUS4Zp14rz2dESSRvfLzpvqsLhR1-YjQt8hYlmEdVl7vI3_-v8UNPKs")},
})
}

View File

@@ -65,35 +65,6 @@ func MustDecode(s string) (checksum Checksum) {
return
}
// IContext is passed to [Artifact.Params] and provides identifier information
// and the target [hash.Hash] for writing params into.
//
// Methods of IContext are safe for concurrent use. IContext is valid
// until [Artifact.Params] returns.
type IContext struct {
// Address of underlying [Cache], should be zeroed or made unusable after
// [Artifact.Params] returns and must not be exposed directly.
cache *Cache
// Made available for writing, should be zeroed after [Artifact.Params]
// returns. Internal state must not be inspected.
h hash.Hash
}
// Unwrap returns the underlying [context.Context].
func (i *IContext) Unwrap() context.Context { return i.cache.ctx }
// GetHash returns the underlying [hash.Hash] for writing. Callers must not
// attempt to inspect its internal state.
func (i *IContext) GetHash() hash.Hash { return i.h }
// WriteIdent writes the identifier of [Artifact] to the underlying [hash.Hash].
func (i *IContext) WriteIdent(a Artifact) {
buf := i.cache.getIdentBuf()
*(*ID)(buf[wordSize:]) = i.cache.Ident(a).Value()
i.h.Write(buf[wordSize:])
i.cache.putIdentBuf(buf)
}
// TContext is passed to [TrivialArtifact.Cure] and provides information and
// methods required for curing the [TrivialArtifact].
//
@@ -238,10 +209,12 @@ type Artifact interface {
// [Artifact] is allowed to return the same [Kind] value.
Kind() Kind
// Params writes opaque bytes that describes [Artifact]. Implementations
// Params writes deterministic values describing [Artifact]. Implementations
// must guarantee that these values are unique among differing instances
// of the same implementation with the same dependencies. Callers must not
// attempt to interpret these params.
// of the same implementation with identical dependencies and convey enough
// information to create another instance of [Artifact] identical to the
// instance emitting these values. The new instance created via [IRReadFunc]
// from these values must then produce identical IR values.
//
// Result must remain identical across multiple invocations.
Params(ctx *IContext)
@@ -564,47 +537,13 @@ func (c *Cache) unsafeIdent(a Artifact, encodeKind bool) (
return
}
deps := a.Dependencies()
idents := make([]*extIdent, len(deps))
for i, d := range deps {
dbuf, did := c.unsafeIdent(d, true)
if dbuf == nil {
dbuf = c.getIdentBuf()
binary.LittleEndian.PutUint64(dbuf[:], uint64(d.Kind()))
*(*ID)(dbuf[wordSize:]) = did.Value()
} else {
c.storeIdent(d, dbuf)
}
defer c.putIdentBuf(dbuf)
idents[i] = dbuf
}
slices.SortFunc(idents, func(a, b *extIdent) int {
return bytes.Compare(a[:], b[:])
})
idents = slices.CompactFunc(idents, func(a, b *extIdent) bool {
return *a == *b
})
buf = c.getIdentBuf()
h := sha512.New384()
if err := c.Encode(h, a); err != nil {
// unreachable
panic(err)
}
binary.LittleEndian.PutUint64(buf[:], uint64(a.Kind()))
h.Write(buf[:wordSize])
for _, dn := range idents {
h.Write(dn[:])
}
kcBuf := c.getIdentBuf()
if kc, ok := a.(KnownChecksum); ok {
*(*Checksum)(kcBuf[:]) = kc.Checksum()
} else {
*(*Checksum)(kcBuf[:]) = Checksum{}
}
h.Write((*Checksum)(kcBuf[:])[:])
c.putIdentBuf(kcBuf)
i := IContext{c, h}
a.Params(&i)
i.cache, i.h = nil, nil
h.Sum(buf[wordSize:wordSize])
return
}
@@ -1000,8 +939,9 @@ func (c *Cache) openFile(f FileArtifact) (r io.ReadCloser, err error) {
return
}
// InvalidFileModeError describes an [Artifact.Cure] that did not result in
// a regular file or directory located at the work pathname.
// InvalidFileModeError describes a [FloodArtifact.Cure] or
// [TrivialArtifact.Cure] that did not result in a regular file or directory
// located at the work pathname.
type InvalidFileModeError fs.FileMode
// Error returns a constant string.
@@ -1009,8 +949,8 @@ func (e InvalidFileModeError) Error() string {
return "artifact did not produce a regular file or directory"
}
// NoOutputError describes an [Artifact.Cure] that did not populate its
// work pathname despite completing successfully.
// NoOutputError describes a [FloodArtifact.Cure] or [TrivialArtifact.Cure]
// that did not populate its work pathname despite completing successfully.
type NoOutputError struct{}
// Unwrap returns [os.ErrNotExist].
@@ -1188,39 +1128,38 @@ func (e *CureError) Error() string { return e.Err.Error() }
// A DependencyCureError wraps errors returned while curing dependencies.
type DependencyCureError []*CureError
// sort sorts underlying errors by their identifier.
func (e *DependencyCureError) sort() {
var identBuf [2]ID
slices.SortFunc(*e, func(a, b *CureError) int {
identBuf[0], identBuf[1] = a.Ident.Value(), b.Ident.Value()
return slices.Compare(identBuf[0][:], identBuf[1][:])
})
// unwrapM recursively expands underlying errors into a caller-supplied map.
func (e *DependencyCureError) unwrapM(me map[unique.Handle[ID]]*CureError) {
for _, err := range *e {
if _e, ok := err.Err.(*DependencyCureError); ok {
_e.unwrapM(me)
continue
}
me[err.Ident] = err
}
}
// unwrap recursively expands and deduplicates underlying errors.
func (e *DependencyCureError) unwrap() DependencyCureError {
errs := make(DependencyCureError, 0, len(*e))
for _, err := range *e {
if _e, ok := err.Err.(*DependencyCureError); ok {
errs = append(errs, _e.unwrap()...)
continue
}
errs = append(errs, err)
}
me := make(map[unique.Handle[ID]]*CureError, len(errs))
for _, err := range errs {
me[err.Ident] = err
}
return slices.AppendSeq(
me := make(map[unique.Handle[ID]]*CureError)
e.unwrapM(me)
errs := slices.AppendSeq(
make(DependencyCureError, 0, len(me)),
maps.Values(me),
)
var identBuf [2]ID
slices.SortFunc(errs, func(a, b *CureError) int {
identBuf[0], identBuf[1] = a.Ident.Value(), b.Ident.Value()
return slices.Compare(identBuf[0][:], identBuf[1][:])
})
return errs
}
// Unwrap returns a deduplicated slice of underlying errors.
func (e *DependencyCureError) Unwrap() []error {
errs := e.unwrap()
errs.sort()
_errs := make([]error, len(errs))
for i, err := range errs {
_errs[i] = err
@@ -1231,7 +1170,6 @@ func (e *DependencyCureError) Unwrap() []error {
// Error returns a user-facing multiline error message.
func (e *DependencyCureError) Error() string {
errs := e.unwrap()
errs.sort()
if len(errs) == 0 {
return "invalid dependency cure outcome"
}

View File

@@ -15,6 +15,7 @@ import (
"os"
"path/filepath"
"reflect"
"strconv"
"syscall"
"testing"
"unique"
@@ -93,7 +94,7 @@ type stubArtifact struct {
}
func (a *stubArtifact) Kind() pkg.Kind { return a.kind }
func (a *stubArtifact) Params(ctx *pkg.IContext) { ctx.GetHash().Write(a.params) }
func (a *stubArtifact) Params(ctx *pkg.IContext) { ctx.Write(a.params) }
func (a *stubArtifact) Dependencies() []pkg.Artifact { return a.deps }
func (a *stubArtifact) Cure(t *pkg.TContext) error { return a.cure(t) }
func (*stubArtifact) IsExclusive() bool { return false }
@@ -109,7 +110,7 @@ type stubArtifactF struct {
}
func (a *stubArtifactF) Kind() pkg.Kind { return a.kind }
func (a *stubArtifactF) Params(ctx *pkg.IContext) { ctx.GetHash().Write(a.params) }
func (a *stubArtifactF) Params(ctx *pkg.IContext) { ctx.Write(a.params) }
func (a *stubArtifactF) Dependencies() []pkg.Artifact { return a.deps }
func (a *stubArtifactF) Cure(f *pkg.FContext) error { return a.cure(f) }
func (a *stubArtifactF) IsExclusive() bool { return a.excl }
@@ -218,7 +219,7 @@ func TestIdent(t *testing.T) {
},
nil,
}, unique.Make[pkg.ID](pkg.MustDecode(
"v86qCz5fDqUsjA3KY_4LIrEh3aQnp04plNiWJ5_ap06McHSSBlROyKIFEwx3c0O7",
"WKErnjTOVbuH2P9a0gM4OcAAO4p-CoX2HQu7CbZrg8ZOzApvWoO3-ISzPw6av_rN",
))},
}
@@ -531,7 +532,7 @@ func TestCache(t *testing.T) {
kind: pkg.KindExec,
params: []byte("artifact overridden to be incomplete"),
}}, nil, pkg.Checksum{}, pkg.InvalidArtifactError(pkg.MustDecode(
"0z3fA0YngFaRRCQRrxKburhpAGz3gkYIZ346X_tAwOr_ldelYg1nTifI3-WX8hQD",
"E__uZ1sLIvb84vzSm5Uezb03RogsiaeTt1nfIVv8TKnnf4LqwtSi-smdHhlkZrUJ",
))},
{"error passthrough", newStubFile(
@@ -953,6 +954,17 @@ func TestErrors(t *testing.T) {
{"NoOutputError", pkg.NoOutputError{
// empty struct
}, "artifact cured successfully but did not produce any output"},
{"IRKindError", &pkg.IRKindError{
Got: pkg.IRKindEnd,
Want: pkg.IRKindIdent,
Ancillary: 0xcafebabe,
}, "got terminator IR value (0xcafebabe) instead of ident"},
{"IRKindError invalid", &pkg.IRKindError{
Got: 0xbeef,
Want: pkg.IRKindIdent,
Ancillary: 0xcafe,
}, "got invalid kind 48879 IR value (0xcafe) instead of ident"},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
@@ -1137,6 +1149,40 @@ func TestDependencyCureError(t *testing.T) {
}
}
// earlyFailureF is a [FloodArtifact] with a large dependency graph resulting in
// a large [DependencyCureError].
type earlyFailureF int
func (earlyFailureF) Kind() pkg.Kind { return pkg.KindExec }
func (earlyFailureF) Params(*pkg.IContext) {}
func (earlyFailureF) IsExclusive() bool { return false }
func (a earlyFailureF) Dependencies() []pkg.Artifact {
deps := make([]pkg.Artifact, a)
for i := range deps {
deps[i] = a - 1
}
return deps
}
func (a earlyFailureF) Cure(*pkg.FContext) error {
if a != 0 {
panic("unexpected cure on " + strconv.Itoa(int(a)))
}
return stub.UniqueError(0xcafe)
}
func TestDependencyCureErrorEarly(t *testing.T) {
checkWithCache(t, []cacheTestCase{
{"early", nil, func(t *testing.T, _ *check.Absolute, c *pkg.Cache) {
_, _, err := c.Cure(earlyFailureF(8))
if !errors.Is(err, stub.UniqueError(0xcafe)) {
t.Fatalf("Cure: error = %v", err)
}
}, pkg.MustDecode("E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C")},
})
}
func TestNew(t *testing.T) {
t.Parallel()

View File

@@ -4,7 +4,6 @@ import (
"archive/tar"
"compress/bzip2"
"compress/gzip"
"encoding/binary"
"errors"
"fmt"
"io"
@@ -29,7 +28,7 @@ type tarArtifact struct {
// Caller-supplied backing tarball.
f Artifact
// Compression on top of the tarball.
compression uint64
compression uint32
}
// tarArtifactNamed embeds tarArtifact for a [fmt.Stringer] tarball.
@@ -47,7 +46,7 @@ func (a *tarArtifactNamed) String() string { return a.name + "-unpack" }
// NewTar returns a new [Artifact] backed by the supplied [Artifact] and
// compression method. The source [Artifact] must be compatible with
// [TContext.Open].
func NewTar(a Artifact, compression uint64) Artifact {
func NewTar(a Artifact, compression uint32) Artifact {
ta := tarArtifact{a, compression}
if s, ok := a.(fmt.Stringer); ok {
if name := s.String(); name != "" {
@@ -62,7 +61,7 @@ func NewHTTPGetTar(
hc *http.Client,
url string,
checksum Checksum,
compression uint64,
compression uint32,
) Artifact {
return NewTar(NewHTTPGet(hc, url, checksum), compression)
}
@@ -71,8 +70,16 @@ func NewHTTPGetTar(
func (a *tarArtifact) Kind() Kind { return KindTar }
// Params writes compression encoded in little endian.
func (a *tarArtifact) Params(ctx *IContext) {
ctx.GetHash().Write(binary.LittleEndian.AppendUint64(nil, a.compression))
func (a *tarArtifact) Params(ctx *IContext) { ctx.WriteUint32(a.compression) }
func init() {
register(KindTar, func(r *IRReader) Artifact {
a := NewTar(r.Next(), r.ReadUint32())
if _, ok := r.Finalise(); ok {
panic(ErrUnexpectedChecksum)
}
return a
})
}
// Dependencies returns a slice containing the backing file.

View File

@@ -40,7 +40,7 @@ func TestTar(t *testing.T) {
}, pkg.MustDecode(
"cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM",
))
}, pkg.MustDecode("nnOiyjjjvgZChsGtO4rA1JHckwYBBbxwNfecPJp62OFP6aoYUxHQ5UtYsrDpnwan")},
}, pkg.MustDecode("NQTlc466JmSVLIyWklm_u8_g95jEEb98PxJU-kjwxLpfdjwMWJq0G8ze9R4Vo1Vu")},
{"http expand", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
checkTarHTTP(t, base, c, fstest.MapFS{
@@ -51,7 +51,7 @@ func TestTar(t *testing.T) {
}, pkg.MustDecode(
"CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN",
))
}, pkg.MustDecode("bQVH19N7dX50SdQ6JNVYbFdDZV4t8IaM4dhxGvjACpdoEgJ2jZJfYKLH4ya7ZD_s")},
}, pkg.MustDecode("hSoSSgCYTNonX3Q8FjvjD1fBl-E-BQyA6OTXro2OadXqbST4tZ-akGXszdeqphRe")},
})
}
@@ -98,17 +98,36 @@ func checkTarHTTP(
wantIdent := func() pkg.ID {
h := sha512.New384()
// kind uint64
h.Write([]byte{byte(pkg.KindTar), 0, 0, 0, 0, 0, 0, 0})
// deps_sz uint64
h.Write([]byte{1, 0, 0, 0, 0, 0, 0, 0})
// kind uint64
h.Write([]byte{byte(pkg.KindHTTPGet), 0, 0, 0, 0, 0, 0, 0})
// ident ID
h0 := sha512.New384()
// kind uint64
h0.Write([]byte{byte(pkg.KindHTTPGet), 0, 0, 0, 0, 0, 0, 0})
h0.Write(testdataChecksum[:])
// deps_sz uint64
h0.Write([]byte{0, 0, 0, 0, 0, 0, 0, 0})
// url string
h0.Write([]byte{byte(pkg.IRKindString), 0, 0, 0})
h0.Write([]byte{0x10, 0, 0, 0})
h0.Write([]byte("file:///testdata"))
// end(KnownChecksum)
h0.Write([]byte{byte(pkg.IRKindEnd), 0, 0, 0})
h0.Write([]byte{byte(pkg.IREndKnownChecksum), 0, 0, 0})
// checksum Checksum
h0.Write(testdataChecksum[:])
h.Write(h0.Sum(nil))
h.Write(make([]byte, len(pkg.Checksum{})))
h.Write([]byte{pkg.TarGzip, 0, 0, 0, 0, 0, 0, 0})
// compression uint32
h.Write([]byte{byte(pkg.IRKindUint32), 0, 0, 0})
h.Write([]byte{pkg.TarGzip, 0, 0, 0})
// end
h.Write([]byte{byte(pkg.IRKindEnd), 0, 0, 0})
h.Write([]byte{0, 0, 0, 0})
return pkg.ID(h.Sum(nil))
}()

View File

@@ -142,12 +142,12 @@ func main() {
}
const checksumEmptyDir = "MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"
ident := "LRxdkRYNKnZT6bKiu5W8ATeAAmq3n_5AAJkF6G0EpAOEloiZvADJBkfixgtgF1Z9"
ident := "dztPS6jRjiZtCF4_p8AzfnxGp6obkhrgFVsxdodbKWUoAEVtDz3MykepJB4kI_ks"
log.Println(m)
next := func() { m = m.Next; log.Println(m) }
if overlayRoot {
ident = "UB9HPeMgMPJf3Ut4jLWwnCtu_P3Lr29i8Erf084bHe8jjzBMKPDNxQ3RMrirkH6H"
ident = "RdMA-mubnrHuu3Ky1wWyxauSYCO0ZH_zCPUj3uDHqkfwv5sGcByoF_g5PjlGiClb"
if m.Root != "/" || m.Target != "/" ||
m.Source != "overlay" || m.FsType != "overlay" {
@@ -165,7 +165,7 @@ func main() {
log.Fatal("unexpected artifact checksum")
}
} else {
ident = "g6gj2JWNXN-oNikou626vDqcMeZCn_TcV4xKuizBaPAWcasG2sVvItb5kZovMrzE"
ident = "p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT"
lowerdirsEscaped := strings.Split(lowerdir, ":")
lowerdirs := lowerdirsEscaped[:0]
@@ -194,7 +194,7 @@ func main() {
}
} else {
if hostNet {
ident = "TAspufRsG2I_TsxUUj2b7bUnCHgcVSdh6aOZpzL0W5Bjn4EZmOGzjofaOWd8J11H"
ident = "G8qPxD9puvvoOVV7lrT80eyDeIl3G_CCFoKw12c8mCjMdG1zF7NEPkwYpNubClK3"
}
if m.Root != "/sysroot" || m.Target != "/" {
@@ -213,14 +213,14 @@ func main() {
}
if promote {
ident = "ywzI31S5McuYu7vzI2kqpSC_nsNzpWBXVCwPoLAYi9QVT0mODgzqoo9jYYaczPbf"
ident = "xXTIYcXmgJWNLC91c417RRrNM9cjELwEZHpGvf8Fk_GNP5agRJp_SicD0w9aMeLJ"
}
next() // testtool artifact
next()
if overlayWork {
ident = "Fud5ldJfpsgLt-rkLWrLO-aVYhQm-esTswetjxydPeQMK4jHNJ_1fGHVahaiCZ9y"
ident = "5hlaukCirnXE4W_RSLJFOZN47Z5RiHnacXzdFp_70cLgiJUGR6cSb_HaFftkzi0-"
if m.Root != "/" || m.Target != "/work" ||
m.Source != "overlay" || m.FsType != "overlay" {
log.Fatal("unexpected work mount entry")

View File

@@ -13,17 +13,22 @@ const (
ACL PArtifact = iota
Attr
Autoconf
Automake
Bash
Binutils
CMake
Coreutils
Curl
Diffutils
Findutils
Fuse
Gawk
GMP
Gettext
Git
Go
Gperf
Grep
Gzip
Hakurei
HakureiDist
@@ -31,17 +36,23 @@ const (
KernelHeaders
LibXau
Libexpat
Libpsl
Libffi
Libgd
Libtool
Libseccomp
Libucontext
Libxml2
M4
MPC
MPFR
Make
Meson
Mksh
NSS
NSSCACert
Ninja
OpenSSL
Packaging
Patch
Perl
@@ -55,6 +66,8 @@ const (
Setuptools
Toybox
toyboxEarly
Unzip
utilMacros
Wayland
WaylandProtocols
XCB
@@ -65,6 +78,10 @@ const (
buildcatrust
// gcc is a hacked-to-pieces GCC toolchain meant for use in intermediate
// stages only. This preset and its direct output must never be exposed.
gcc
// _presetEnd is the total number of presets and does not denote a preset.
_presetEnd
)
@@ -93,17 +110,22 @@ func ResolveName(name string) (p PArtifact, ok bool) {
"acl": ACL,
"attr": Attr,
"autoconf": Autoconf,
"automake": Automake,
"bash": Bash,
"binutils": Binutils,
"cmake": CMake,
"coreutils": Coreutils,
"curl": Curl,
"diffutils": Diffutils,
"findutils": Findutils,
"fuse": Fuse,
"gawk": Gawk,
"gmp": GMP,
"gettext": Gettext,
"git": Git,
"go": Go,
"gperf": Gperf,
"grep": Grep,
"gzip": Gzip,
"hakurei": Hakurei,
"hakurei-dist": HakureiDist,
@@ -111,17 +133,23 @@ func ResolveName(name string) (p PArtifact, ok bool) {
"kernel-headers": KernelHeaders,
"libXau": LibXau,
"libexpat": Libexpat,
"libpsl": Libpsl,
"libseccomp": Libseccomp,
"libucontext": Libucontext,
"libxml2": Libxml2,
"libffi": Libffi,
"libgd": Libgd,
"libtool": Libtool,
"m4": M4,
"mpc": MPC,
"mpfr": MPFR,
"make": Make,
"meson": Meson,
"mksh": Mksh,
"nss": NSS,
"nss-cacert": NSSCACert,
"ninja": Ninja,
"openssl": OpenSSL,
"packaging": Packaging,
"patch": Patch,
"perl": Perl,
@@ -134,6 +162,7 @@ func ResolveName(name string) (p PArtifact, ok bool) {
"sed": Sed,
"setuptools": Setuptools,
"toybox": Toybox,
"unzip": Unzip,
"wayland": Wayland,
"wayland-protocols": WaylandProtocols,
"xcb": XCB,

View File

@@ -32,6 +32,16 @@ func (a busyboxBin) Dependencies() []pkg.Artifact {
return []pkg.Artifact{a.bin}
}
func init() {
pkg.Register(kindBusyboxBin, func(r *pkg.IRReader) pkg.Artifact {
a := busyboxBin{r.Next().(pkg.FileArtifact)}
if _, ok := r.Finalise(); ok {
panic(pkg.ErrUnexpectedChecksum)
}
return a
})
}
// String returns the reporting name of the underlying file prefixed with expand.
func (a busyboxBin) String() string {
return "expand-" + a.bin.(fmt.Stringer).String()

View File

@@ -51,12 +51,16 @@ type CMakeAttr struct {
Env []string
// Runs before cmake.
ScriptEarly string
// Runs after cmake.
// Runs after cmake, replacing the default build and install steps.
ScriptConfigured string
// Runs after install.
Script string
// Override the default installation prefix [AbsSystem].
Prefix *check.Absolute
// Passed through to [Toolchain.New].
Paths []pkg.ExecPath
// Passed through to [Toolchain.New].
Flag int
}
@@ -82,11 +86,9 @@ func (t Toolchain) NewViaCMake(
panic("CACHE must be non-empty")
}
scriptEarly := attr.ScriptEarly
if attr.Writable {
scriptEarly = `
chmod -R +w "${ROSA_SOURCE}"
` + scriptEarly
scriptConfigured := "cmake --build .\ncmake --install .\n"
if attr.ScriptConfigured != "" {
scriptConfigured = attr.ScriptConfigured
}
prefix := attr.Prefix
@@ -102,7 +104,7 @@ chmod -R +w "${ROSA_SOURCE}"
"ROSA_SOURCE=" + sourcePath.String(),
"ROSA_CMAKE_SOURCE=" + sourcePath.Append(attr.Append...).String(),
"ROSA_INSTALL_PREFIX=/work" + prefix.String(),
}, attr.Env), scriptEarly+`
}, attr.Env), attr.ScriptEarly+`
mkdir /cure && cd /cure
cmake -G Ninja \
-DCMAKE_C_COMPILER_TARGET="${ROSA_TRIPLE}" \
@@ -117,7 +119,7 @@ cmake -G Ninja \
}), " \\\n\t")+` \
-DCMAKE_INSTALL_PREFIX="${ROSA_INSTALL_PREFIX}" \
"${ROSA_CMAKE_SOURCE}"
cmake --build .
cmake --install .
`+attr.Script, pkg.Path(sourcePath, attr.Writable, source))
`+scriptConfigured+attr.Script, slices.Concat([]pkg.ExecPath{
pkg.Path(sourcePath, attr.Writable, source),
}, attr.Paths)...)
}

32
internal/rosa/curl.go Normal file
View File

@@ -0,0 +1,32 @@
package rosa
import "hakurei.app/internal/pkg"
func (t Toolchain) newCurl() pkg.Artifact {
const (
version = "8.18.0"
checksum = "YpOolP_sx1DIrCEJ3elgVAu0wTLDS-EZMZFvOP0eha7FaLueZUlEpuMwDzJNyi7i"
)
return t.NewViaMake("curl", version, pkg.NewHTTPGetTar(
nil, "https://curl.se/download/curl-"+version+".tar.bz2",
mustDecode(checksum),
pkg.TarBzip2,
), &MakeAttr{
Env: []string{
"TFLAGS=-j256",
},
Configure: [][2]string{
{"with-openssl"},
{"with-ca-bundle", "/system/etc/ssl/certs/ca-bundle.crt"},
},
ScriptConfigured: `
make "-j$(nproc)"
`,
},
t.Load(Perl),
t.Load(Libpsl),
t.Load(OpenSSL),
)
}
func init() { artifactsF[Curl] = Toolchain.newCurl }

View File

@@ -4,8 +4,10 @@ import (
"errors"
"io"
"os"
"sync"
"syscall"
"hakurei.app/container/fhs"
"hakurei.app/internal/pkg"
)
@@ -86,8 +88,27 @@ nobody:x:65534:
// Kind returns the hardcoded [pkg.Kind] value.
func (cureEtc) Kind() pkg.Kind { return kindEtc }
// Params is a noop.
func (cureEtc) Params(*pkg.IContext) {}
// Params writes whether iana-etc is populated.
func (a cureEtc) Params(ctx *pkg.IContext) {
if a.iana != nil {
ctx.WriteUint32(1)
} else {
ctx.WriteUint32(0)
}
}
func init() {
pkg.Register(kindEtc, func(r *pkg.IRReader) pkg.Artifact {
a := cureEtc{}
if r.ReadUint32() != 0 {
a.iana = r.Next()
}
if _, ok := r.Finalise(); ok {
panic(pkg.ErrUnexpectedChecksum)
}
return a
})
}
// IsExclusive returns false: Cure performs a few trivial filesystem writes.
func (cureEtc) IsExclusive() bool { return false }
@@ -121,3 +142,22 @@ func newIANAEtc() pkg.Artifact {
pkg.TarGzip,
)
}
var (
resolvconfPath pkg.ExecPath
resolvconfOnce sync.Once
)
// resolvconf returns a hardcoded /etc/resolv.conf file.
func resolvconf() pkg.ExecPath {
resolvconfOnce.Do(func() {
resolvconfPath = pkg.Path(
fhs.AbsEtc.Append("resolv.conf"), false,
pkg.NewFile("resolv.conf", []byte(`
nameserver 1.1.1.1
nameserver 1.0.0.1
`)),
)
})
return resolvconfPath
}

View File

@@ -53,6 +53,9 @@ disable_test t4301-merge-tree-write-tree
disable_test t8005-blame-i18n
disable_test t9350-fast-export
disable_test t9300-fast-import
disable_test t0211-trace2-perf
disable_test t1517-outside-repo
disable_test t2200-add-update
make \
-C t \
@@ -67,6 +70,32 @@ make \
t.Load(Gettext),
t.Load(Zlib),
t.Load(Curl),
t.Load(OpenSSL),
t.Load(Libexpat),
)
}
func init() { artifactsF[Git] = Toolchain.newGit }
// NewViaGit returns a [pkg.Artifact] for cloning a git repository.
func (t Toolchain) NewViaGit(
name, url, rev string,
checksum pkg.Checksum,
) pkg.Artifact {
return t.New(name+"-"+rev, 0, []pkg.Artifact{
t.Load(NSSCACert),
t.Load(OpenSSL),
t.Load(Libpsl),
t.Load(Curl),
t.Load(Libexpat),
t.Load(Git),
}, &checksum, nil, `
git \
-c advice.detachedHead=false \
clone \
--revision=`+rev+` \
`+url+` \
/work
rm -rf /work/.git
`, resolvconf())
}

View File

@@ -1,11 +1,6 @@
package rosa
import (
"runtime"
"strconv"
"hakurei.app/internal/pkg"
)
import "hakurei.app/internal/pkg"
func (t Toolchain) newM4() pkg.Artifact {
const (
@@ -49,14 +44,14 @@ func (t Toolchain) newAutoconf() pkg.Artifact {
checksum = "-c5blYkC-xLDer3TWEqJTyh1RLbOd1c5dnRLKsDnIrg_wWNOLBpaqMY8FvmUFJ33"
)
return t.NewViaMake("autoconf", version, pkg.NewHTTPGetTar(
nil,
"https://ftpmirror.gnu.org/gnu/autoconf/autoconf-"+version+".tar.gz",
nil, "https://ftpmirror.gnu.org/gnu/autoconf/autoconf-"+version+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), &MakeAttr{
Env: []string{
"TESTSUITEFLAGS=" + strconv.Itoa(runtime.NumCPU()),
Make: []string{
`TESTSUITEFLAGS="-j$(nproc)"`,
},
Flag: TExclusive,
},
t.Load(M4),
t.Load(Perl),
@@ -66,6 +61,60 @@ func (t Toolchain) newAutoconf() pkg.Artifact {
}
func init() { artifactsF[Autoconf] = Toolchain.newAutoconf }
func (t Toolchain) newAutomake() pkg.Artifact {
const (
version = "1.18.1"
checksum = "FjvLG_GdQP7cThTZJLDMxYpRcKdpAVG-YDs1Fj1yaHlSdh_Kx6nRGN14E0r_BjcG"
)
return t.NewViaMake("automake", version, pkg.NewHTTPGetTar(
nil, "https://ftpmirror.gnu.org/gnu/automake/automake-"+version+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), &MakeAttr{
Writable: true,
ScriptEarly: `
cd /usr/src/automake
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
test_disable '#!/bin/sh' t/objcxx-minidemo.sh
test_disable '#!/bin/sh' t/objcxx-deps.sh
test_disable '#!/bin/sh' t/dist-no-built-sources.sh
test_disable '#!/bin/sh' t/distname.sh
test_disable '#!/bin/sh' t/pr9.sh
`,
},
t.Load(M4),
t.Load(Perl),
t.Load(Grep),
t.Load(Gzip),
t.Load(Autoconf),
t.Load(Diffutils),
)
}
func init() { artifactsF[Automake] = Toolchain.newAutomake }
func (t Toolchain) newLibtool() pkg.Artifact {
const (
version = "2.5.4"
checksum = "pa6LSrQggh8mSJHQfwGjysAApmZlGJt8wif2cCLzqAAa2jpsTY0jZ-6stS3BWZ2Q"
)
return t.NewViaMake("libtool", version, pkg.NewHTTPGetTar(
nil, "https://ftpmirror.gnu.org/gnu/libtool/libtool-"+version+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), &MakeAttr{
Make: []string{
`TESTSUITEFLAGS=32`,
},
},
t.Load(M4),
t.Load(Diffutils),
)
}
func init() { artifactsF[Libtool] = Toolchain.newLibtool }
func (t Toolchain) newGzip() pkg.Artifact {
const (
version = "1.14"
@@ -250,6 +299,31 @@ func (t Toolchain) newGawk() pkg.Artifact {
}
func init() { artifactsF[Gawk] = Toolchain.newGawk }
func (t Toolchain) newGrep() pkg.Artifact {
const (
version = "3.12"
checksum = "qMB4RjaPNRRYsxix6YOrjE8gyAT1zVSTy4nW4wKW9fqa0CHYAuWgPwDTirENzm_1"
)
return t.NewViaMake("grep", version, pkg.NewHTTPGetTar(
nil, "https://ftpmirror.gnu.org/gnu/grep/grep-"+version+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), &MakeAttr{
Writable: true,
ScriptEarly: `
cd /usr/src/grep
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
test_disable '#!/bin/sh' gnulib-tests/test-c32ispunct.sh
test_disable 'int main(){return 0;}' gnulib-tests/test-c32ispunct.c
`,
},
t.Load(Diffutils),
)
}
func init() { artifactsF[Grep] = Toolchain.newGrep }
func (t Toolchain) newFindutils() pkg.Artifact {
const (
version = "4.10.0"
@@ -276,3 +350,274 @@ echo 'int main(){return 0;}' > tests/xargs/test-sigusr.c
)
}
func init() { artifactsF[Findutils] = Toolchain.newFindutils }
func (t Toolchain) newBinutils() pkg.Artifact {
const (
version = "2.45"
checksum = "hlLtqqHDmzAT2OQVHaKEd_io2DGFvJkaeS-igBuK8bRRir7LUKGHgHYNkDVKaHTT"
)
return t.NewViaMake("binutils", version, pkg.NewHTTPGetTar(
nil, "https://ftpmirror.gnu.org/gnu/binutils/binutils-"+version+".tar.bz2",
mustDecode(checksum),
pkg.TarBzip2,
), &MakeAttr{
ScriptConfigured: `
make "-j$(nproc)"
`,
},
t.Load(Bash),
)
}
func init() { artifactsF[Binutils] = Toolchain.newBinutils }
func (t Toolchain) newGMP() pkg.Artifact {
const (
version = "6.3.0"
checksum = "yrgbgEDWKDdMWVHh7gPbVl56-sRtVVhfvv0M_LX7xMUUk_mvZ1QOJEAnt7g4i3k5"
)
return t.NewViaMake("gmp", version, pkg.NewHTTPGetTar(
nil, "https://gcc.gnu.org/pub/gcc/infrastructure/"+
"gmp-"+version+".tar.bz2",
mustDecode(checksum),
pkg.TarBzip2,
), &MakeAttr{
ScriptConfigured: `
make "-j$(nproc)"
`,
},
t.Load(M4),
)
}
func init() { artifactsF[GMP] = Toolchain.newGMP }
func (t Toolchain) newMPFR() pkg.Artifact {
const (
version = "4.2.2"
checksum = "wN3gx0zfIuCn9r3VAn_9bmfvAYILwrRfgBjYSD1IjLqyLrLojNN5vKyQuTE9kA-B"
)
return t.NewViaMake("mpfr", version, pkg.NewHTTPGetTar(
nil, "https://gcc.gnu.org/pub/gcc/infrastructure/"+
"mpfr-"+version+".tar.bz2",
mustDecode(checksum),
pkg.TarBzip2,
), nil,
t.Load(GMP),
)
}
func init() { artifactsF[MPFR] = Toolchain.newMPFR }
func (t Toolchain) newMPC() pkg.Artifact {
const (
version = "1.3.1"
checksum = "o8r8K9R4x7PuRx0-JE3-bC5jZQrtxGV2nkB773aqJ3uaxOiBDCID1gKjPaaDxX4V"
)
return t.NewViaMake("mpc", version, pkg.NewHTTPGetTar(
nil, "https://gcc.gnu.org/pub/gcc/infrastructure/"+
"mpc-"+version+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), nil,
t.Load(GMP),
t.Load(MPFR),
)
}
func init() { artifactsF[MPC] = Toolchain.newMPC }
func (t Toolchain) newGCC() pkg.Artifact {
const (
version = "15.2.0"
checksum = "TXJ5WrbXlGLzy1swghQTr4qxgDCyIZFgJry51XEPTBZ8QYbVmFeB4lZbSMtPJ-a1"
)
return t.NewViaMake("gcc", version, t.NewPatchedSource(
"gcc", version,
pkg.NewHTTPGetTar(
nil, "https://ftp.tsukuba.wide.ad.jp/software/gcc/releases/"+
"gcc-"+version+"/gcc-"+version+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), true, [2]string{"musl-off64_t-loff_t", `diff --git a/libgo/sysinfo.c b/libgo/sysinfo.c
index 180f5c31d74..44d7ea73f7d 100644
--- a/libgo/sysinfo.c
+++ b/libgo/sysinfo.c
@@ -365,11 +365,7 @@ enum {
typedef loff_t libgo_loff_t_type;
#endif
-#if defined(HAVE_OFF64_T)
-typedef off64_t libgo_off_t_type;
-#else
typedef off_t libgo_off_t_type;
-#endif
// The following section introduces explicit references to types and
// constants of interest to support bootstrapping libgo using a
`}, [2]string{"musl-legacy-lfs", `diff --git a/libgo/go/internal/syscall/unix/at_largefile.go b/libgo/go/internal/syscall/unix/at_largefile.go
index 82e0dcfd074..16151ecad1b 100644
--- a/libgo/go/internal/syscall/unix/at_largefile.go
+++ b/libgo/go/internal/syscall/unix/at_largefile.go
@@ -10,5 +10,5 @@ import (
"syscall"
)
-//extern fstatat64
+//extern fstatat
func fstatat(int32, *byte, *syscall.Stat_t, int32) int32
diff --git a/libgo/go/os/dir_largefile.go b/libgo/go/os/dir_largefile.go
index 1fc5ee0771f..0c6dffe1a75 100644
--- a/libgo/go/os/dir_largefile.go
+++ b/libgo/go/os/dir_largefile.go
@@ -11,5 +11,5 @@ package os
import "syscall"
-//extern readdir64
+//extern readdir
func libc_readdir(*syscall.DIR) *syscall.Dirent
diff --git a/libgo/go/syscall/libcall_glibc.go b/libgo/go/syscall/libcall_glibc.go
index 5c1ec483c75..5a1245ed44b 100644
--- a/libgo/go/syscall/libcall_glibc.go
+++ b/libgo/go/syscall/libcall_glibc.go
@@ -114,7 +114,7 @@ func Pipe2(p []int, flags int) (err error) {
}
//sys sendfile(outfd int, infd int, offset *Offset_t, count int) (written int, err error)
-//sendfile64(outfd _C_int, infd _C_int, offset *Offset_t, count Size_t) Ssize_t
+//sendfile(outfd _C_int, infd _C_int, offset *Offset_t, count Size_t) Ssize_t
func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) {
if race.Enabled {
diff --git a/libgo/go/syscall/libcall_linux.go b/libgo/go/syscall/libcall_linux.go
index 03ca7261b59..ad21fd0b3ac 100644
--- a/libgo/go/syscall/libcall_linux.go
+++ b/libgo/go/syscall/libcall_linux.go
@@ -158,7 +158,7 @@ func Reboot(cmd int) (err error) {
//adjtimex(buf *Timex) _C_int
//sys Fstatfs(fd int, buf *Statfs_t) (err error)
-//fstatfs64(fd _C_int, buf *Statfs_t) _C_int
+//fstatfs(fd _C_int, buf *Statfs_t) _C_int
func Gettid() (tid int) {
r1, _, _ := Syscall(SYS_GETTID, 0, 0, 0)
@@ -245,7 +245,7 @@ func Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n i
}
//sys Statfs(path string, buf *Statfs_t) (err error)
-//statfs64(path *byte, buf *Statfs_t) _C_int
+//statfs(path *byte, buf *Statfs_t) _C_int
//sysnb Sysinfo(info *Sysinfo_t) (err error)
//sysinfo(info *Sysinfo_t) _C_int
diff --git a/libgo/go/syscall/libcall_posix_largefile.go b/libgo/go/syscall/libcall_posix_largefile.go
index f90055bb29a..334212f0af1 100644
--- a/libgo/go/syscall/libcall_posix_largefile.go
+++ b/libgo/go/syscall/libcall_posix_largefile.go
@@ -10,40 +10,40 @@
package syscall
//sys Creat(path string, mode uint32) (fd int, err error)
-//creat64(path *byte, mode Mode_t) _C_int
+//creat(path *byte, mode Mode_t) _C_int
//sys Fstat(fd int, stat *Stat_t) (err error)
-//fstat64(fd _C_int, stat *Stat_t) _C_int
+//fstat(fd _C_int, stat *Stat_t) _C_int
//sys Ftruncate(fd int, length int64) (err error)
-//ftruncate64(fd _C_int, length Offset_t) _C_int
+//ftruncate(fd _C_int, length Offset_t) _C_int
//sysnb Getrlimit(resource int, rlim *Rlimit) (err error)
-//getrlimit64(resource _C_int, rlim *Rlimit) _C_int
+//getrlimit(resource _C_int, rlim *Rlimit) _C_int
//sys Lstat(path string, stat *Stat_t) (err error)
-//lstat64(path *byte, stat *Stat_t) _C_int
+//lstat(path *byte, stat *Stat_t) _C_int
//sys mmap(addr uintptr, length uintptr, prot int, flags int, fd int, offset int64) (xaddr uintptr, err error)
-//mmap64(addr *byte, length Size_t, prot _C_int, flags _C_int, fd _C_int, offset Offset_t) *byte
+//mmap(addr *byte, length Size_t, prot _C_int, flags _C_int, fd _C_int, offset Offset_t) *byte
//sys Open(path string, mode int, perm uint32) (fd int, err error)
-//__go_open64(path *byte, mode _C_int, perm Mode_t) _C_int
+//__go_open(path *byte, mode _C_int, perm Mode_t) _C_int
//sys Pread(fd int, p []byte, offset int64) (n int, err error)
-//pread64(fd _C_int, buf *byte, count Size_t, offset Offset_t) Ssize_t
+//pread(fd _C_int, buf *byte, count Size_t, offset Offset_t) Ssize_t
//sys Pwrite(fd int, p []byte, offset int64) (n int, err error)
-//pwrite64(fd _C_int, buf *byte, count Size_t, offset Offset_t) Ssize_t
+//pwrite(fd _C_int, buf *byte, count Size_t, offset Offset_t) Ssize_t
//sys Seek(fd int, offset int64, whence int) (off int64, err error)
-//lseek64(fd _C_int, offset Offset_t, whence _C_int) Offset_t
+//lseek(fd _C_int, offset Offset_t, whence _C_int) Offset_t
//sysnb Setrlimit(resource int, rlim *Rlimit) (err error)
-//setrlimit64(resource int, rlim *Rlimit) _C_int
+//setrlimit(resource int, rlim *Rlimit) _C_int
//sys Stat(path string, stat *Stat_t) (err error)
-//stat64(path *byte, stat *Stat_t) _C_int
+//stat(path *byte, stat *Stat_t) _C_int
//sys Truncate(path string, length int64) (err error)
-//truncate64(path *byte, length Offset_t) _C_int
+//truncate(path *byte, length Offset_t) _C_int
diff --git a/libgo/runtime/go-varargs.c b/libgo/runtime/go-varargs.c
index f84860891e6..7efc9615985 100644
--- a/libgo/runtime/go-varargs.c
+++ b/libgo/runtime/go-varargs.c
@@ -84,7 +84,7 @@ __go_ioctl_ptr (int d, int request, void *arg)
int
__go_open64 (char *path, int mode, mode_t perm)
{
- return open64 (path, mode, perm);
+ return open (path, mode, perm);
}
#endif
`}), &MakeAttr{
ScriptEarly: `
ln -s system/lib /
ln -s system/lib /work/
`,
Configure: [][2]string{
{"disable-multilib"},
{"with-multilib-list", `""`},
{"enable-default-pie"},
{"disable-nls"},
{"with-gnu-as"},
{"with-gnu-ld"},
{"with-system-zlib"},
{"enable-languages", "c,c++,go"},
{"with-native-system-header-dir", "/system/include"},
},
Make: []string{
"BOOT_CFLAGS='-O2 -g'",
"bootstrap",
},
// This toolchain is hacked to pieces; it is not expected to ever work
// well in its current state. That does not matter as long as the
// toolchain it produces passes its own test suite.
SkipCheck: true,
// GCC spends most of its time in its many configure scripts; however,
// it also saturates the CPU for a significant amount of time.
Flag: TExclusive,
},
t.Load(Binutils),
t.Load(GMP),
t.Load(MPFR),
t.Load(MPC),
t.Load(Zlib),
t.Load(Libucontext),
t.Load(KernelHeaders),
)
}
func init() { artifactsF[gcc] = Toolchain.newGCC }
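The Configure field above is a list of [2]string pairs. Judging by autoconf conventions and the values used here, a single-element pair becomes --key and a two-element pair becomes --key=value, so {"enable-languages", "c,c++,go"} turns into --enable-languages=c,c++,go. The actual assembly code is not part of this diff; the standalone sketch below only illustrates that presumed mapping.

package main

import "fmt"

// configureArgs illustrates the presumed mapping from MakeAttr.Configure
// pairs to autoconf flags: {"k"} -> --k, {"k", "v"} -> --k=v. It is not the
// package's real implementation.
func configureArgs(pairs [][2]string) []string {
	args := make([]string, 0, len(pairs))
	for _, p := range pairs {
		if p[1] == "" {
			args = append(args, "--"+p[0])
		} else {
			args = append(args, "--"+p[0]+"="+p[1])
		}
	}
	return args
}

func main() {
	// A few of the gcc Configure pairs from above.
	fmt.Println(configureArgs([][2]string{
		{"disable-multilib"},
		{"enable-default-pie"},
		{"enable-languages", "c,c++,go"},
		{"with-native-system-header-dir", "/system/include"},
	}))
}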

View File

@@ -15,15 +15,12 @@ func (t Toolchain) newGoBootstrap() pkg.Artifact {
}, nil, []string{
"CGO_ENABLED=0",
}, `
mkdir -p /var/tmp
cp -r /usr/src/go /work
cd /work/go/src
mkdir -p /var/tmp/ /work/system/
cp -r /usr/src/go /work/system/
cd /work/system/go/src
chmod -R +w ..
./make.bash
cd /work/
mkdir system/
mv go/ system/
`, pkg.Path(AbsUsrSrc.Append("go"), false, pkg.NewHTTPGetTar(
nil, "https://dl.google.com/go/go1.4-bootstrap-20171003.tar.gz",
mustDecode(checksum),
@@ -66,18 +63,54 @@ ln -s \
}
func (t Toolchain) newGoLatest() pkg.Artifact {
var (
bootstrapEnv []string
bootstrapExtra []pkg.Artifact
finalEnv []string
)
switch runtime.GOARCH {
case "amd64":
bootstrapExtra = append(bootstrapExtra, t.newGoBootstrap())
case "arm64":
bootstrapEnv = append(bootstrapEnv,
"GOROOT_BOOTSTRAP=/system",
)
bootstrapExtra = append(bootstrapExtra,
t.Load(Binutils),
t.Load(GMP),
t.Load(MPFR),
t.Load(MPC),
t.Load(Zlib),
t.Load(Libucontext),
t.Load(gcc),
)
finalEnv = append(finalEnv, "CGO_ENABLED=0")
default:
panic("unsupported target " + runtime.GOARCH)
}
go119 := t.newGo(
"1.19",
"9_e0aFHsIkVxWVGsp9T2RvvjOc3p4n9o9S8tkNe9Cvgzk_zI2FhRQB7ioQkeAAro",
[]string{"CGO_ENABLED=0"}, `
append(bootstrapEnv, "CGO_ENABLED=0"), `
rm \
crypto/tls/handshake_client_test.go \
cmd/pprof/pprof_test.go \
os/os_unix_test.go
sed -i \
's/os\.Getenv("GCCGO")$/"nonexistent"/' \
go/internal/gccgoimporter/importer_test.go
echo \
'type syscallDescriptor = int' >> \
os/rawconn_test.go
`, t.newGoBootstrap(),
)
`, bootstrapExtra...)
go121 := t.newGo(
"1.21.13",
@@ -100,7 +133,7 @@ echo \
go123 := t.newGo(
"1.23.12",
"wcI32bl1tkqbgcelGtGWPI4RtlEddd-PTd76Eb-k7nXA5LbE9yTNdIL9QSOOxMOs",
nil, `
[]string{"CGO_ENABLED=0"}, `
sed -i \
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
cmd/link/internal/`+runtime.GOARCH+`/obj.go
@@ -108,9 +141,9 @@ sed -i \
)
go125 := t.newGo(
"1.25.6",
"x0z430qoDvQbbw_fftjW0rh_GSoh0VJhPzttWk_0hj9yz9AKOjuwRMupF_Q0dbt7",
nil, `
"1.25.7",
"fyylHdBVRUobnBjYj3NKBaYPUw3kGmo2mEELiZonOYurPfbarNU1x77B99Fjut7Q",
finalEnv, `
sed -i \
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
cmd/link/internal/`+runtime.GOARCH+`/obj.go
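The sed expressions above splice linuxArch() into the musl dynamic loader path (/lib/ld-musl-<arch>.so.1), so for the pattern to match Go's obj.go sources the helper has to yield musl-style architecture names. Its real body is not shown in this diff; the following is only a plausible reconstruction, covering the two targets handled elsewhere in the package.

package main

import (
	"fmt"
	"runtime"
)

// linuxArch is a guess at the helper referenced above: it maps Go's GOARCH
// names to the architecture names musl uses in its loader path. Only amd64
// and arm64 appear in this diff; anything else is treated as unsupported,
// mirroring the panics in newGoLatest and the stage3 selection.
func linuxArch() string {
	switch runtime.GOARCH {
	case "amd64":
		return "x86_64"
	case "arm64":
		return "aarch64"
	default:
		panic("unsupported target " + runtime.GOARCH)
	}
}

func main() {
	fmt.Println("/lib/ld-musl-" + linuxArch() + ".so.1")
}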

View File

@@ -49,7 +49,7 @@ HAKUREI_VERSION='v`+version+`'
"v"+version+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), true, [2]string{"dist", `From 67e453f5c4de915de23ecbe5980e595758f0f2fb Mon Sep 17 00:00:00 2001
), true, [2]string{"dist-00-tests", `From 67e453f5c4de915de23ecbe5980e595758f0f2fb Mon Sep 17 00:00:00 2001
From: Ophestra <cat@gensokyo.uk>
Date: Tue, 27 Jan 2026 06:49:48 +0900
Subject: [PATCH] dist: run tests
@@ -236,7 +236,20 @@ index d737a18..98713cb 100644
+
if !cur.EqualWithIgnore(mnt[i], "\x00") {
fail = true
log.Printf("[FAIL] %s", cur)`}),
log.Printf("[FAIL] %s", cur)`}, [2]string{"dist-01-tarball-name", `diff --git a/dist/release.sh b/dist/release.sh
index 0ba9104..2990ee1 100755
--- a/dist/release.sh
+++ b/dist/release.sh
@@ -1,7 +1,7 @@
#!/bin/sh -e
cd "$(dirname -- "$0")/.."
VERSION="${HAKUREI_VERSION:-untagged}"
-pname="hakurei-${VERSION}"
+pname="hakurei-${VERSION}-$(go env GOARCH)"
out="${DESTDIR:-dist}/${pname}"
echo '# Preparing distribution files.'
`}),
), pkg.Path(AbsUsrSrc.Append("hostname", "main.go"), false, pkg.NewFile(
"hostname.go",
[]byte(`

28
internal/rosa/libpsl.go Normal file
View File

@@ -0,0 +1,28 @@
package rosa
import "hakurei.app/internal/pkg"
func (t Toolchain) newLibpsl() pkg.Artifact {
const (
version = "0.21.5"
checksum = "XjfxSzh7peG2Vg4vJlL8z4JZJLcXqbuP6pLWkrGCmRxlnYUFTKNBqWGHCxEOlCad"
)
return t.NewViaMake("libpsl", version, pkg.NewHTTPGetTar(
nil, "https://github.com/rockdaboot/libpsl/releases/download/"+
version+"/libpsl-"+version+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), &MakeAttr{
Writable: true,
ScriptEarly: `
cd /usr/src/libpsl
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
test_disable 'int main(){return 0;}' tests/test-is-public-builtin.c
`,
},
t.Load(Python),
)
}
func init() { artifactsF[Libpsl] = Toolchain.newLibpsl }

View File

@@ -0,0 +1,40 @@
package rosa
import "hakurei.app/internal/pkg"
func (t Toolchain) newLibucontext() pkg.Artifact {
const (
version = "1.5"
checksum = "Ggk7FMmDNBdCx1Z9PcNWWW6LSpjGYssn2vU0GK5BLXJYw7ZxZbA2m_eSgT9TFnIG"
)
return t.New("libucontext", 0, []pkg.Artifact{
t.Load(Make),
}, nil, []string{
"ARCH=" + linuxArch(),
}, `
cd /usr/src/libucontext
make check
make DESTDIR=/work install
`, pkg.Path(AbsUsrSrc.Append("libucontext"), true,
t.NewPatchedSource("libucontext", version, pkg.NewHTTPGetTar(
nil, "https://github.com/kaniini/libucontext/archive/refs/tags/"+
"libucontext-"+version+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), true, [2]string{"rosa-prefix", `diff --git a/Makefile b/Makefile
index c80e574..4a8c1d3 100644
--- a/Makefile
+++ b/Makefile
@@ -17,7 +17,7 @@ ifeq ($(ARCH),$(filter $(ARCH),arm64))
override ARCH = aarch64
endif
-prefix = /usr
+prefix = /system
libdir = ${prefix}/lib
shared_libdir = ${libdir}
static_libdir = ${libdir}
`}),
))
}
func init() { artifactsF[Libucontext] = Toolchain.newLibucontext }

View File

@@ -22,12 +22,18 @@ type llvmAttr struct {
cmake [][2]string
// Override CMakeAttr.Append.
append []string
// Concatenated with default dependencies for CMakeAttr.Extra.
// Concatenated with default dependencies for Toolchain.NewViaCMake.
extra []pkg.Artifact
// Passed through to CMakeAttr.Paths.
paths []pkg.ExecPath
// Passed through to CMakeAttr.ScriptConfigured.
scriptConfigured string
// Concatenated with default fixup for CMakeAttr.Script.
script string
// Passed through to CMakeAttr.Prefix.
prefix *check.Absolute
// Passed through to CMakeAttr.Writable.
writable bool
// Patch name and body pairs.
patches [][2]string
@@ -186,9 +192,13 @@ cp -r /system/include /usr/include && rm -rf /system/include
"ROSA_LLVM_PROJECTS=" + strings.Join(projects, ";"),
"ROSA_LLVM_RUNTIMES=" + strings.Join(runtimes, ";"),
}, attr.env),
ScriptEarly: scriptEarly, Script: script + attr.script,
ScriptEarly: scriptEarly,
ScriptConfigured: attr.scriptConfigured,
Script: script + attr.script,
Writable: attr.writable,
Flag: TExclusive,
Paths: attr.paths,
Flag: TExclusive,
}, stage3Concat(t, attr.extra,
t.Load(Libffi),
t.Load(Python),
@@ -312,7 +322,6 @@ ln -s \
musl,
compilerRT,
runtimes,
t.Load(Git),
},
script: `
ln -s clang /work/system/bin/cc
@@ -322,6 +331,40 @@ ninja check-all
`,
patches: [][2]string{
{"add-rosa-vendor", `diff --git a/llvm/include/llvm/TargetParser/Triple.h b/llvm/include/llvm/TargetParser/Triple.h
index 657f4230379e..12c305756184 100644
--- a/llvm/include/llvm/TargetParser/Triple.h
+++ b/llvm/include/llvm/TargetParser/Triple.h
@@ -185,6 +185,7 @@ public:
Apple,
PC,
+ Rosa,
SCEI,
Freescale,
IBM,
diff --git a/llvm/lib/TargetParser/Triple.cpp b/llvm/lib/TargetParser/Triple.cpp
index 0584c941d2e6..e4d6ef963cc7 100644
--- a/llvm/lib/TargetParser/Triple.cpp
+++ b/llvm/lib/TargetParser/Triple.cpp
@@ -269,6 +269,7 @@ StringRef Triple::getVendorTypeName(VendorType Kind) {
case NVIDIA: return "nvidia";
case OpenEmbedded: return "oe";
case PC: return "pc";
+ case Rosa: return "rosa";
case SCEI: return "scei";
case SUSE: return "suse";
}
@@ -669,6 +670,7 @@ static Triple::VendorType parseVendor(StringRef VendorName) {
.Case("suse", Triple::SUSE)
.Case("oe", Triple::OpenEmbedded)
.Case("intel", Triple::Intel)
+ .Case("rosa", Triple::Rosa)
.Default(Triple::UnknownVendor);
}
`},
{"xfail-broken-tests", `diff --git a/clang/test/Modules/timestamps.c b/clang/test/Modules/timestamps.c
index 50fdce630255..4b4465a75617 100644
--- a/clang/test/Modules/timestamps.c
@@ -361,66 +404,85 @@ index cdbf21fb9026..dd052858700d 100644
addSystemInclude(DriverArgs, CC1Args, ResourceDirInclude);
`},
{"path-system-libraries", `diff --git a/clang/lib/Driver/ToolChains/CommonArgs.cpp b/clang/lib/Driver/ToolChains/CommonArgs.cpp
index 8d3775de9be5..1e126e2d6f24 100644
--- a/clang/lib/Driver/ToolChains/CommonArgs.cpp
+++ b/clang/lib/Driver/ToolChains/CommonArgs.cpp
@@ -463,6 +463,15 @@ void tools::AddLinkerInputs(const ToolChain &TC, const InputInfoList &Inputs,
if (!TC.isCrossCompiling())
addDirectoryList(Args, CmdArgs, "-L", "LIBRARY_PATH");
+ const std::string RosaSuffix = "-rosa-linux-musl";
+ if (TC.getTripleString().size() > RosaSuffix.size() &&
+ std::equal(RosaSuffix.rbegin(), RosaSuffix.rend(), TC.getTripleString().rbegin())) {
+ CmdArgs.push_back("-rpath");
+ CmdArgs.push_back("/system/lib");
+ CmdArgs.push_back("-rpath");
+ CmdArgs.push_back(("/system/lib/" + TC.getTripleString()).c_str());
+ }
+
for (const auto &II : Inputs) {
// If the current tool chain refers to an OpenMP offloading host, we
// should ignore inputs that refer to OpenMP offloading devices -
diff --git a/clang/lib/Driver/ToolChains/Linux.cpp b/clang/lib/Driver/ToolChains/Linux.cpp
index 8ac8d4eb9181..795995bb53cb 100644
{"path-system-libraries", `diff --git a/clang/lib/Driver/ToolChains/Linux.cpp b/clang/lib/Driver/ToolChains/Linux.cpp
index 8ac8d4eb9181..f4d1347ab64d 100644
--- a/clang/lib/Driver/ToolChains/Linux.cpp
+++ b/clang/lib/Driver/ToolChains/Linux.cpp
@@ -324,6 +324,7 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
@@ -282,6 +282,7 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
const bool IsHexagon = Arch == llvm::Triple::hexagon;
const bool IsRISCV = Triple.isRISCV();
const bool IsCSKY = Triple.isCSKY();
+ const bool IsRosa = Triple.getVendor() == llvm::Triple::Rosa;
if (IsCSKY && !SelectedMultilibs.empty())
SysRoot = SysRoot + SelectedMultilibs.back().osSuffix();
@@ -318,12 +319,23 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
const std::string OSLibDir = std::string(getOSLibDir(Triple, Args));
const std::string MultiarchTriple = getMultiarchTriple(D, Triple, SysRoot);
+ if (IsRosa) {
+ ExtraOpts.push_back("-rpath");
+ ExtraOpts.push_back("/system/lib");
+ ExtraOpts.push_back("-rpath");
+ ExtraOpts.push_back(concat("/system/lib", MultiarchTriple));
+ }
+
// mips32: Debian multilib, we use /libo32, while in other case, /lib is
// used. We need add both libo32 and /lib.
if (Arch == llvm::Triple::mips || Arch == llvm::Triple::mipsel) {
Generic_GCC::AddMultilibPaths(D, SysRoot, "libo32", MultiarchTriple, Paths);
addPathIfExists(D, concat(SysRoot, "/libo32"), Paths);
addPathIfExists(D, concat(SysRoot, "/usr/libo32"), Paths);
+ addPathIfExists(D, concat(SysRoot, "/system/libo32"), Paths);
- addPathIfExists(D, concat(SysRoot, "/libo32"), Paths);
- addPathIfExists(D, concat(SysRoot, "/usr/libo32"), Paths);
+ if (!IsRosa) {
+ addPathIfExists(D, concat(SysRoot, "/libo32"), Paths);
+ addPathIfExists(D, concat(SysRoot, "/usr/libo32"), Paths);
+ } else {
+ addPathIfExists(D, concat(SysRoot, "/system/libo32"), Paths);
+ }
}
Generic_GCC::AddMultilibPaths(D, SysRoot, OSLibDir, MultiarchTriple, Paths);
@@ -343,16 +344,20 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
@@ -341,18 +353,30 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
Paths);
}
addPathIfExists(D, concat(SysRoot, "/usr/lib", MultiarchTriple), Paths);
addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir), Paths);
+ addPathIfExists(D, concat(SysRoot, "/system/lib", MultiarchTriple), Paths);
+ addPathIfExists(D, concat(SysRoot, "/system", OSLibDir), Paths);
- addPathIfExists(D, concat(SysRoot, "/usr/lib", MultiarchTriple), Paths);
- addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir), Paths);
+ if (!IsRosa) {
+ addPathIfExists(D, concat(SysRoot, "/usr/lib", MultiarchTriple), Paths);
+ addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir), Paths);
+ } else {
+ addPathIfExists(D, concat(SysRoot, "/system/lib", MultiarchTriple), Paths);
+ addPathIfExists(D, concat(SysRoot, "/system", OSLibDir), Paths);
+ }
if (IsRISCV) {
StringRef ABIName = tools::riscv::getRISCVABI(Args, Triple);
addPathIfExists(D, concat(SysRoot, "/", OSLibDir, ABIName), Paths);
addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir, ABIName), Paths);
+ addPathIfExists(D, concat(SysRoot, "/system", OSLibDir, ABIName), Paths);
- addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir, ABIName), Paths);
+ if (!IsRosa)
+ addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir, ABIName), Paths);
+ else
+ addPathIfExists(D, concat(SysRoot, "/system", OSLibDir, ABIName), Paths);
}
Generic_GCC::AddMultiarchPaths(D, SysRoot, OSLibDir, Paths);
addPathIfExists(D, concat(SysRoot, "/lib"), Paths);
addPathIfExists(D, concat(SysRoot, "/usr/lib"), Paths);
+ addPathIfExists(D, concat(SysRoot, "/system/lib"), Paths);
- addPathIfExists(D, concat(SysRoot, "/lib"), Paths);
- addPathIfExists(D, concat(SysRoot, "/usr/lib"), Paths);
+ if (!IsRosa) {
+ addPathIfExists(D, concat(SysRoot, "/lib"), Paths);
+ addPathIfExists(D, concat(SysRoot, "/usr/lib"), Paths);
+ } else {
+ addPathIfExists(D, concat(SysRoot, "/system/lib"), Paths);
+ }
}
ToolChain::RuntimeLibType Linux::GetDefaultRuntimeLibType() const {
@@ -457,6 +462,11 @@ std::string Linux::getDynamicLinker(const ArgList &Args) const {
@@ -457,6 +481,9 @@ std::string Linux::getDynamicLinker(const ArgList &Args) const {
return Triple.isArch64Bit() ? "/system/bin/linker64" : "/system/bin/linker";
}
if (Triple.isMusl()) {
+ const std::string RosaSuffix = "-rosa-linux-musl";
+ if (Triple.str().size() > RosaSuffix.size() &&
+ std::equal(RosaSuffix.rbegin(), RosaSuffix.rend(), Triple.str().rbegin()))
+ if (Triple.getVendor() == llvm::Triple::Rosa)
+ return "/system/bin/linker";
+
std::string ArchName;

View File

@@ -41,9 +41,11 @@ type MakeAttr struct {
// Additional environment variables.
Env []string
// Runs before cmake.
// Runs before configure.
ScriptEarly string
// Runs after cmake.
// Runs after configure.
ScriptConfigured string
// Runs after install.
Script string
// Remain in working directory set up during ScriptEarly.
@@ -151,7 +153,7 @@ func (t Toolchain) NewViaMake(
finalExtra...,
), nil, attr.Env, scriptEarly+`
/usr/src/`+name+`/configure \
--prefix=/system`+buildFlag+configureFlags+`
--prefix=/system`+buildFlag+configureFlags+attr.ScriptConfigured+`
make "-j$(nproc)"`+strings.Join(makeTargets, " ")+`
make DESTDIR=/work install
`+attr.Script, pkg.Path(AbsUsrSrc.Append(
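Taken together with the hook comments above, the generated build script runs ScriptEarly, then configure --prefix=/system with the assembled flags, then ScriptConfigured, then make, then make DESTDIR=/work install, then Script. Below is a sketch of a hypothetical artifact wiring up all three hooks; the "example" name, URL, checksum, Example key and Zlib dependency are placeholders chosen for illustration, not part of the repository.

func (t Toolchain) newExample() pkg.Artifact {
	const (
		version = "1.0"
		// Placeholder; a real artifact needs the archive's actual digest.
		checksum = "REPLACE-WITH-REAL-DIGEST"
	)
	return t.NewViaMake("example", version, pkg.NewHTTPGetTar(
		nil, "https://example.org/example-"+version+".tar.gz",
		mustDecode(checksum),
		pkg.TarGzip,
	), &MakeAttr{
		// Allow modifying the unpacked source tree, as libpsl does above.
		Writable: true,
		// Runs before the configure invocation.
		ScriptEarly: `
cd /usr/src/example
`,
		// Spliced in directly after configure returns, before make.
		ScriptConfigured: `
sed -i 's/-Werror//g' Makefile
`,
		Configure: [][2]string{
			{"disable-nls"},
		},
		// Runs after make DESTDIR=/work install.
		Script: `
rm -rf /work/system/share/doc
`,
	},
		t.Load(Zlib),
	)
}

// Example is a hypothetical artifact key used only in this sketch.
func init() { artifactsF[Example] = Toolchain.newExample }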

36
internal/rosa/openssl.go Normal file
View File

@@ -0,0 +1,36 @@
package rosa
import "hakurei.app/internal/pkg"
func (t Toolchain) newOpenSSL() pkg.Artifact {
const (
version = "3.5.5"
checksum = "I2Hp1LxcTR8j4G6LFEQMVy6EJH-Na1byI9Ti-ThBot6EMLNRnjGXGq-WXrim3Fkz"
)
return t.New("openssl-"+version, 0, []pkg.Artifact{
t.Load(Perl),
t.Load(Make),
t.Load(Zlib),
t.Load(KernelHeaders),
}, nil, []string{
"CC=cc",
}, `
cd "$(mktemp -d)"
/usr/src/openssl/Configure \
--prefix=/system \
--libdir=lib \
--openssldir=etc/ssl
make \
"-j$(nproc)" \
HARNESS_JOBS=256 \
test
make DESTDIR=/work install
`, pkg.Path(AbsUsrSrc.Append("openssl"), false, pkg.NewHTTPGetTar(
nil, "https://github.com/openssl/openssl/releases/download/"+
"openssl-"+version+"/openssl-"+version+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
)))
}
func init() { artifactsF[OpenSSL] = Toolchain.newOpenSSL }

View File

@@ -210,14 +210,12 @@ func (t Toolchain) New(
case toolchainStage3:
name += "-boot"
var version, checksum string
var seed string
switch runtime.GOARCH {
case "amd64":
version = "20260111T160052Z"
checksum = "c5_FwMnRN8RZpTdBLGYkL4RR8ampdaZN2JbkgrFLe8-QHQAVQy08APVvIL6eT7KW"
seed = "c5_FwMnRN8RZpTdBLGYkL4RR8ampdaZN2JbkgrFLe8-QHQAVQy08APVvIL6eT7KW"
case "arm64":
version = "20260125T234618Z"
checksum = "79uRbRI44PyknQQ9RlFUQrwqplup7vImiIk6klefL8TN-fT42TXMS_v4XszwexCb"
seed = "79uRbRI44PyknQQ9RlFUQrwqplup7vImiIk6klefL8TN-fT42TXMS_v4XszwexCb"
default:
panic("unsupported target " + runtime.GOARCH)
@@ -226,16 +224,14 @@ func (t Toolchain) New(
args[0] = "bash"
support = slices.Concat([]pkg.Artifact{
cureEtc{},
toolchainBusybox.New("stage3-"+version, 0, nil, nil, nil, `
toolchainBusybox.New("stage3", 0, nil, nil, nil, `
tar -C /work -xf /usr/src/stage3.tar.xz
rm -rf /work/dev/ /work/proc/
ln -vs ../usr/bin /work/bin
`, pkg.Path(AbsUsrSrc.Append("stage3.tar.xz"), false,
pkg.NewHTTPGet(
nil, "https://distfiles.gentoo.org/releases/"+
runtime.GOARCH+"/autobuilds/"+version+
"/stage3-"+runtime.GOARCH+"-musl-llvm-"+version+".tar.xz",
mustDecode(checksum),
nil, "https://basement.gensokyo.uk/seed/"+seed,
mustDecode(seed),
),
)),
}, extra)

View File

@@ -13,17 +13,18 @@ func (t Toolchain) newNSS() pkg.Artifact {
checksum0 = "25x2uJeQnOHIiq_zj17b4sYqKgeoU8-IsySUptoPcdHZ52PohFZfGuIisBreWzx0"
)
return t.New("nss-"+version, 0, []pkg.Artifact{
t.Load(Make),
t.Load(Perl),
t.Load(Python),
t.Load(Unzip),
t.Load(Make),
t.Load(Gawk),
t.Load(Coreutils),
t.Load(Zlib),
t.Load(KernelHeaders),
}, nil, nil, `
unzip /usr/src/nspr.zip -d /usr/src
mv '/usr/src/nspr-NSPR_`+version0+`_RTM' /usr/src/nspr
chmod -R +w /usr/src/nss
cd /usr/src/nss
make \
@@ -38,11 +39,13 @@ cp -r \
/usr/src/dist/. \
lib/ckfw/builtins/certdata.txt \
/work/system/nss
`, pkg.Path(AbsUsrSrc.Append("nss"), true, pkg.NewHTTPGetTar(
nil, "https://github.com/nss-dev/nss/archive/refs/tags/"+
"NSS_"+version+"_RTM.tar.gz",
mustDecode(checksum),
pkg.TarGzip,
`, pkg.Path(AbsUsrSrc.Append("nss"), true, t.NewPatchedSource(
"nss", version, pkg.NewHTTPGetTar(
nil, "https://github.com/nss-dev/nss/archive/refs/tags/"+
"NSS_"+version+"_RTM.tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), false,
)), pkg.Path(AbsUsrSrc.Append("nspr.zip"), false, pkg.NewHTTPGet(
nil, "https://hg-edge.mozilla.org/projects/nspr/archive/"+
"NSPR_"+version0+"_RTM.zip",
@@ -62,19 +65,20 @@ func init() { artifactsF[buildcatrust] = Toolchain.newBuildCATrust }
func (t Toolchain) newNSSCACert() pkg.Artifact {
return t.New("nss-cacert", 0, []pkg.Artifact{
t.Load(Bash),
t.Load(Python),
t.Load(NSS),
t.Load(buildcatrust),
}, nil, nil, `
mkdir -p /work/etc/ssl/{certs/unbundled,certs/hashed,trust-source}
mkdir -p /work/system/etc/ssl/{certs/unbundled,certs/hashed,trust-source}
buildcatrust \
--certdata_input /system/nss/certdata.txt \
--ca_bundle_output /work/etc/ssl/certs/ca-bundle.crt \
--ca_standard_bundle_output /work/etc/ssl/certs/ca-no-trust-rules-bundle.crt \
--ca_unpacked_output /work/etc/ssl/certs/unbundled \
--ca_hashed_unpacked_output /work/etc/ssl/certs/hashed \
--p11kit_output /work/etc/ssl/trust-source/ca-bundle.trust.p11-kit
--ca_bundle_output /work/system/etc/ssl/certs/ca-bundle.crt \
--ca_standard_bundle_output /work/system/etc/ssl/certs/ca-no-trust-rules-bundle.crt \
--ca_unpacked_output /work/system/etc/ssl/certs/unbundled \
--ca_hashed_unpacked_output /work/system/etc/ssl/certs/hashed \
--p11kit_output /work/system/etc/ssl/trust-source/ca-bundle.trust.p11-kit
`)
}
func init() { artifactsF[NSSCACert] = Toolchain.newNSSCACert }

34
internal/rosa/unzip.go Normal file
View File

@@ -0,0 +1,34 @@
package rosa
import (
"strings"
"hakurei.app/internal/pkg"
)
func (t Toolchain) newUnzip() pkg.Artifact {
const (
version = "6.0"
checksum = "fcqjB1IOVRNJ16K5gTGEDt3zCJDVBc7EDSra9w3H93stqkNwH1vaPQs_QGOpQZu1"
)
return t.New("unzip-"+version, 0, []pkg.Artifact{
t.Load(Make),
t.Load(Coreutils),
}, nil, nil, `
cd /usr/src/unzip/
unix/configure
make -f unix/Makefile generic1
mkdir -p /work/system/bin/
mv unzip /work/system/bin/
`, pkg.Path(AbsUsrSrc.Append("unzip"), true, t.NewPatchedSource(
"unzip", version, pkg.NewHTTPGetTar(
nil, "https://downloads.sourceforge.net/project/infozip/"+
"UnZip%206.x%20%28latest%29/UnZip%20"+version+"/"+
"unzip"+strings.ReplaceAll(version, ".", "")+".tar.gz",
mustDecode(checksum),
pkg.TarGzip,
), false,
)))
}
func init() { artifactsF[Unzip] = Toolchain.newUnzip }

View File

@@ -2,6 +2,20 @@ package rosa
import "hakurei.app/internal/pkg"
func (t Toolchain) newUtilMacros() pkg.Artifact {
const (
version = "1.17"
checksum = "vYPO4Qq3B_WGcsBjG0-lfwZ6DZ7ayyrOLqfDrVOgTDcyLChuMGOAAVAa_UXLu5tD"
)
return t.NewViaMake("util-macros", version, pkg.NewHTTPGetTar(
nil, "https://www.x.org/releases/X11R7.7/src/util/"+
"util-macros-"+version+".tar.bz2",
mustDecode(checksum),
pkg.TarBzip2,
), nil)
}
func init() { artifactsF[utilMacros] = Toolchain.newUtilMacros }
func (t Toolchain) newXproto() pkg.Artifact {
const (
version = "7.0.23"
@@ -13,10 +27,21 @@ func (t Toolchain) newXproto() pkg.Artifact {
mustDecode(checksum),
pkg.TarBzip2,
), &MakeAttr{
// buggy configure script
Build: `""`,
Writable: true,
// ancient configure script
ScriptEarly: `
cd /usr/src/xproto
autoreconf -if
`,
},
t.Load(M4),
t.Load(Perl),
t.Load(Autoconf),
t.Load(Automake),
t.Load(PkgConfig),
t.Load(utilMacros),
)
}
func init() { artifactsF[Xproto] = Toolchain.newXproto }
@@ -32,15 +57,26 @@ func (t Toolchain) newLibXau() pkg.Artifact {
mustDecode(checksum),
pkg.TarBzip2,
), &MakeAttr{
Writable: true,
// ancient configure script
ScriptEarly: `
cd /usr/src/libXau
autoreconf -if
`,
Configure: [][2]string{
{"enable-static"},
},
// buggy configure script
Build: `""`,
},
t.Load(M4),
t.Load(Perl),
t.Load(Autoconf),
t.Load(Automake),
t.Load(Libtool),
t.Load(PkgConfig),
t.Load(utilMacros),
t.Load(Xproto),
)
}