46 Commits

Author SHA1 Message Date
Kat
954b60dbab TODO: consider writing tests for the test runner. 2026-04-19 02:35:47 +10:00
Kat
a06e622a84 TODO: actually write tests lol. 2026-04-19 02:35:47 +10:00
Kat
a2af3a0db0 cmd/pkgserver/ui_test: implement skipping from DSL 2026-04-19 02:35:47 +10:00
Kat
4c9d1c9d87 cmd/pkgserver/ui_test: add JSON reporter for go test integration 2026-04-19 02:35:47 +10:00
Kat
178cb4d2d2 cmd/pkgserver/ui_test: implement DSL and runner 2026-04-19 02:35:47 +10:00
Kat
cd3224e4af cmd/pkgserver/ui_test: add DOM reporter 2026-04-19 02:35:47 +10:00
Kat
bae2735a0d cmd/pkgserver/ui_test: add basic CLI reporter 2026-04-19 02:35:47 +10:00
mae
8a38b614c6 cmd/pkgserver: update 2026-04-18 11:32:08 -05:00
mae
3286fff076 cmd/pkgserver: fix gitignore 2026-04-18 11:30:57 -05:00
mae
fd1884a84b cmd/pkgserver: better no results handling 2026-04-18 11:30:57 -05:00
mae
fe6424bd6d cmd/pkgserver: better no results handling 2026-04-18 11:30:57 -05:00
mae
004ac511a9 cmd/pkgserver: finish search implementation 2026-04-18 11:30:57 -05:00
mae
ba17f9d4f3 cmd/pkgserver: remove get endpoint count field 2026-04-18 11:30:56 -05:00
mae
ea62f64b8f cmd/pkgserver: search endpoint 2026-04-18 11:30:56 -05:00
mae
86669363ac cmd/pkgserver: pagination bugfix 2026-04-18 11:30:56 -05:00
6f5b7964f4 cmd/pkgserver: guard sass/ts behind build tag
Packaging nodejs and ruby is an immense burden for the Rosa OS base system, and these files diff poorly.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
mae
a195c3760c cmd/pkgserver: add size 2026-04-18 11:30:56 -05:00
cfe52dce82 cmd/pkgserver: expose size and store pre-encoded ident
This change also handles SIGSEGV correctly in newStatusHandler, and makes serving status fully zero copy.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
8483d8a005 cmd/pkgserver: look up status by name once
This has far less overhead.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
5bc5aed024 cmd/pkgserver: refer to preset in index
This enables referencing back to internal/rosa through an entry obtained via the index.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
9465649d13 cmd/pkgserver: handle unversioned value
This omits the field for an unversioned artifact, and only does so once on startup.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
33c461aa67 cmd/pkgserver: determine disposition route in mux
This removes duplicate checks and uses the more sound check in mux.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
dee0204fc0 cmd/pkgserver: format get error messages
This improves source code readability on smaller displays.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
2f916ed0c0 cmd/pkgserver: constant string in pattern
This resolves patterns at compile time.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
55ce3a2f90 cmd/pkgserver: satisfy handler signature in method
This is somewhat cleaner.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
3f6a07ef59 cmd/pkgserver: log instead of write encoding error
This message is unlikely to be useful to the user, and output may be partially written at this point, causing the error to be even less intelligible.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
02941e7c23 cmd/pkgserver: appropriately mark test helpers
This improves usefulness of test log messages.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
b9601881b7 cmd/pkgserver: do not omit report field
Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
58596f0af5 cmd/pkgserver: gracefully shut down on signal
Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
02cde40289 cmd/pkgserver: specify full addr string in flag
This allows greater flexibility.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
5014534884 cmd/pkgserver: make report argument optional
This allows serving metadata only without a populated report. This also removes the out-of-bounds read on args when no arguments are passed.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
13cf99ced4 cmd/pkgserver: embed internal/rosa metadata
This change also cleans up and reduces some unnecessary copies.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
6bfb258fd0 cmd/pkgserver: do not assume default mux
This helps with testing.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
b649645189 cmd/pkgserver: create index without report
This is useful for testing, where report testdata is not available.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-04-18 11:30:56 -05:00
mae
3ddba4e21f cmd/pkgserver: add sort orders, change pagination rules 2026-04-18 11:30:56 -05:00
mae
40a906c6c2 cmd/pkgserver: add /status endpoint 2026-04-18 11:30:56 -05:00
mae
06894e2104 cmd/pkgserver: minimum viable frontend 2026-04-18 11:30:56 -05:00
mae
56f0392b86 cmd/pkgserver: api versioning 2026-04-18 11:30:56 -05:00
mae
e2315f6c1a cmd/pkgserver: add get endpoint 2026-04-18 11:30:56 -05:00
mae
e4aee49eb0 cmd/pkgserver: add count endpoint and restructure 2026-04-18 11:30:56 -05:00
mae
6c03cc8b8a cmd/pkgserver: add status endpoint 2026-04-18 11:30:56 -05:00
mae
59ade6a86b cmd/pkgserver: add createPackageIndex 2026-04-18 11:30:56 -05:00
mae
59ab493035 cmd/pkgserver: add command handler 2026-04-18 11:30:56 -05:00
mae
d80a3346e2 cmd/pkgserver: replace favicon 2026-04-18 11:30:56 -05:00
mae
327a34aacb cmd/pkgserver: pagination 2026-04-18 11:30:56 -05:00
mae
ea7c6b3b48 cmd/pkgserver: basic web ui 2026-04-18 11:30:56 -05:00
113 changed files with 2564 additions and 3223 deletions

5
.gitignore vendored
View File

@@ -7,8 +7,9 @@
# go generate
/cmd/hakurei/LICENSE
/cmd/mbf/internal/pkgserver/ui/static
/internal/pkg/internal/testtool/testtool
/cmd/pkgserver/ui/static/*.js
/cmd/pkgserver/ui_test/static
/internal/pkg/testdata/testtool
/internal/rosa/hakurei_current.tar.gz
# cmd/dist default destination

8
all.sh
View File

@@ -2,9 +2,5 @@
TOOLCHAIN_VERSION="$(go version)"
cd "$(dirname -- "$0")/"
echo "Building cmd/dist using ${TOOLCHAIN_VERSION}."
FLAGS=''
if test -n "$VERBOSE"; then
FLAGS="$FLAGS -v"
fi
go run $FLAGS --tags=dist ./cmd/dist
echo "# Building cmd/dist using ${TOOLCHAIN_VERSION}."
go run -v --tags=dist ./cmd/dist

View File

@@ -4,23 +4,15 @@ import "strings"
const (
// SpecialOverlayEscape is the escape string for overlay mount options.
//
// Deprecated: This is no longer used and will be removed in 0.5.
SpecialOverlayEscape = `\`
// SpecialOverlayOption is the separator string between overlay mount options.
//
// Deprecated: This is no longer used and will be removed in 0.5.
SpecialOverlayOption = ","
// SpecialOverlayPath is the separator string between overlay paths.
//
// Deprecated: This is no longer used and will be removed in 0.5.
SpecialOverlayPath = ":"
)
// EscapeOverlayDataSegment escapes a string for formatting into the data
// argument of an overlay mount system call.
//
// Deprecated: This is no longer used and will be removed in 0.5.
func EscapeOverlayDataSegment(s string) string {
if s == "" {
return ""

27
cmd/dist/main.go vendored
View File

@@ -42,18 +42,14 @@ func mustRun(ctx context.Context, name string, arg ...string) {
var comp []byte
func main() {
fmt.Println()
log.SetFlags(0)
log.SetPrefix("")
log.SetPrefix("# ")
verbose := os.Getenv("VERBOSE") != ""
version := getenv("HAKUREI_VERSION", "untagged")
prefix := getenv("PREFIX", "/usr")
destdir := getenv("DESTDIR", "dist")
if verbose {
log.Println()
}
if err := os.MkdirAll(destdir, 0755); err != nil {
log.Fatal(err)
}
@@ -80,17 +76,12 @@ func main() {
ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt)
defer cancel()
verboseFlag := "-v"
if !verbose {
verboseFlag = "-buildvcs=false"
}
log.Printf("Building hakurei for %s/%s.", runtime.GOOS, runtime.GOARCH)
log.Println("Building hakurei.")
mustRun(ctx, "go", "generate", "./...")
mustRun(
ctx, "go", "build",
"-trimpath",
verboseFlag, "-o", s,
"-v", "-o", s,
"-ldflags=-s -w "+
"-buildid= -linkmode external -extldflags=-static "+
"-X hakurei.app/internal/info.buildVersion="+version+" "+
@@ -99,17 +90,17 @@ func main() {
"-X main.hakureiPath="+prefix+"/bin/hakurei",
"./...",
)
log.Println()
fmt.Println()
log.Println("##### Testing Hakurei.")
log.Println("Testing Hakurei.")
mustRun(
ctx, "go", "test",
"-ldflags=-buildid= -linkmode external -extldflags=-static",
"./...",
)
log.Println()
fmt.Println()
log.Println("##### Creating distribution.")
log.Println("Creating distribution.")
const suffix = ".tar.gz"
distName := "hakurei-" + version + "-" + runtime.GOARCH
var f *os.File
@@ -130,7 +121,7 @@ func main() {
}()
h := sha512.New()
gw, _ := gzip.NewWriterLevel(io.MultiWriter(f, h), gzip.BestCompression)
gw := gzip.NewWriter(io.MultiWriter(f, h))
tw := tar.NewWriter(gw)
mustWriteHeader := func(name string, size int64, mode os.FileMode) {

View File

@@ -21,7 +21,6 @@ type cache struct {
cures, jobs int
hostAbstract, idle bool
verboseInit bool
base string
}
@@ -32,6 +31,9 @@ func (cache *cache) open() (err error) {
return os.ErrInvalid
}
if cache.base == "" {
cache.base = "cache"
}
var base *check.Absolute
if cache.base, err = filepath.Abs(cache.base); err != nil {
return
@@ -46,9 +48,6 @@ func (cache *cache) open() (err error) {
if cache.hostAbstract {
flags |= pkg.CHostAbstract
}
if !cache.verboseInit {
flags |= pkg.CSuppressInit
}
done := make(chan struct{})
defer close(done)

View File

@@ -99,9 +99,10 @@ func cancelIdent(
var ident pkg.ID
if _, err := io.ReadFull(conn, ident[:]); err != nil {
return nil, false, errors.Join(err, conn.Close())
} else if err = conn.Close(); err != nil {
return nil, false, err
}
ok := cache.Cancel(unique.Make(ident))
return &ident, ok, conn.Close()
return &ident, cache.Cancel(unique.Make(ident)), nil
}
// serve services connections from a [net.UnixListener].
@@ -193,11 +194,11 @@ func serve(
}
case specialAbort:
log.Println("aborting all pending cures")
cm.c.Abort()
if _err := conn.Close(); _err != nil {
log.Println(_err)
}
log.Println("aborting all pending cures")
cm.c.Abort()
}
return
@@ -305,7 +306,6 @@ func cancelRemote(
ctx context.Context,
addr *net.UnixAddr,
a pkg.Artifact,
wait bool,
) error {
done, conn, err := dial(ctx, addr)
if err != nil {
@@ -324,19 +324,13 @@ func cancelRemote(
} else if n != len(id) {
return errors.Join(io.ErrShortWrite, conn.Close())
}
if wait {
if _, err = conn.Read(make([]byte, 1)); err == io.EOF {
err = nil
}
}
return errors.Join(err, conn.Close())
return conn.Close()
}
// abortRemote aborts all [pkg.Artifact] curing on a daemon.
func abortRemote(
ctx context.Context,
addr *net.UnixAddr,
wait bool,
) error {
done, conn, err := dial(ctx, addr)
if err != nil {
@@ -345,10 +339,5 @@ func abortRemote(
defer close(done)
err = writeSpecialHeader(conn, specialAbort)
if wait && err == nil {
if _, err = conn.Read(make([]byte, 1)); err == io.EOF {
err = nil
}
}
return errors.Join(err, conn.Close())
}

View File

@@ -106,11 +106,11 @@ func TestDaemon(t *testing.T) {
}
}()
if err = cancelRemote(ctx, &addr, pkg.NewFile("nonexistent", nil), true); err != nil {
if err = cancelRemote(ctx, &addr, pkg.NewFile("nonexistent", nil)); err != nil {
t.Fatalf("cancelRemote: error = %v", err)
}
if err = abortRemote(ctx, &addr, true); err != nil {
if err = abortRemote(ctx, &addr); err != nil {
t.Fatalf("abortRemote: error = %v", err)
}

View File

@@ -17,12 +17,25 @@ func commandInfo(
args []string,
w io.Writer,
writeStatus bool,
r *rosa.Report,
reportPath string,
) (err error) {
if len(args) == 0 {
return errors.New("info requires at least 1 argument")
}
var r *rosa.Report
if reportPath != "" {
if r, err = rosa.OpenReport(reportPath); err != nil {
return err
}
defer func() {
if closeErr := r.Close(); err == nil {
err = closeErr
}
}()
defer r.HandleAccess(&err)()
}
// recovered by HandleAccess
mustPrintln := func(a ...any) {
if _, _err := fmt.Fprintln(w, a...); _err != nil {

View File

@@ -95,7 +95,7 @@ status : not in report
var (
cm *cache
buf strings.Builder
r *rosa.Report
rp string
)
if tc.status != nil || tc.report != "" {
@@ -108,25 +108,14 @@ status : not in report
}
if tc.report != "" {
pathname := filepath.Join(t.TempDir(), "report")
err := os.WriteFile(
pathname,
rp = filepath.Join(t.TempDir(), "report")
if err := os.WriteFile(
rp,
unsafe.Slice(unsafe.StringData(tc.report), len(tc.report)),
0400,
)
if err != nil {
); err != nil {
t.Fatal(err)
}
r, err = rosa.OpenReport(pathname)
if err != nil {
t.Fatal(err)
}
defer func() {
if err = r.Close(); err != nil {
t.Fatal(err)
}
}()
}
if tc.status != nil {
@@ -168,7 +157,7 @@ status : not in report
tc.args,
&buf,
cm != nil,
r,
rp,
); !reflect.DeepEqual(err, wantErr) {
t.Fatalf("commandInfo: error = %v, want %v", err, wantErr)
}

View File

@@ -1,9 +0,0 @@
// Package ui holds the static web UI.
package ui
import "net/http"
// Register arranges for mux to serve the embedded frontend.
func Register(mux *http.ServeMux) {
mux.Handle("GET /", http.FileServer(http.FS(static)))
}

View File

@@ -1,21 +0,0 @@
//go:build frontend
package ui
import (
"embed"
"io/fs"
)
//go:generate tsc
//go:generate cp index.html style.css static
//go:embed static
var _static embed.FS
var static = func() fs.FS {
if f, err := fs.Sub(_static, "static"); err != nil {
panic(err)
} else {
return f
}
}()

View File

@@ -20,7 +20,6 @@ import (
"io"
"log"
"net"
"net/http"
"os"
"os/signal"
"path/filepath"
@@ -42,9 +41,6 @@ import (
"hakurei.app/internal/pkg"
"hakurei.app/internal/rosa"
"hakurei.app/message"
"hakurei.app/cmd/mbf/internal/pkgserver"
"hakurei.app/cmd/mbf/internal/pkgserver/ui"
)
func main() {
@@ -63,12 +59,17 @@ func main() {
defer stop()
var cm cache
defer func() { cm.Close() }()
defer func() {
cm.Close()
if r := recover(); r != nil {
fmt.Println(r)
log.Fatal("consider scrubbing the on-disk cache")
}
}()
var (
flagQuiet bool
flagCheck bool
flagLTO bool
addr net.UnixAddr
)
@@ -76,42 +77,18 @@ func main() {
msg.SwapVerbose(!flagQuiet)
cm.ctx, cm.msg = ctx, msg
cm.base = os.ExpandEnv(cm.base)
if cm.base == "" {
cm.base = "cache"
}
addr.Net = "unix"
addr.Name = os.ExpandEnv(addr.Name)
if addr.Name == "" {
addr.Name = filepath.Join(cm.base, "daemon")
addr.Name = "daemon"
}
var flags int
if !flagCheck {
flags |= rosa.OptSkipCheck
}
if !flagLTO {
flags |= rosa.OptLLVMNoLTO
}
rosa.DropCaches(flags)
return nil
}).Flag(
&flagQuiet,
"q", command.BoolFlag(false),
"Do not print cure messages",
).Flag(
&flagLTO,
"lto", command.BoolFlag(false),
"Enable LTO in stage2 and stage3 LLVM toolchains",
).Flag(
&flagCheck,
"check", command.BoolFlag(true),
"Run test suites",
).Flag(
&cm.verboseInit,
"v", command.BoolFlag(false),
"Do not suppress verbose output from init",
).Flag(
&cm.cures,
"cures", command.IntFlag(0),
@@ -144,17 +121,7 @@ func main() {
c.NewCommand(
"checksum", "Compute checksum of data read from standard input",
func([]string) error {
done := make(chan struct{})
defer close(done)
go func() {
select {
case <-ctx.Done():
os.Exit(1)
case <-done:
return
}
}()
go func() { <-ctx.Done(); os.Exit(1) }()
h := sha512.New384()
if _, err := io.Copy(h, os.Stdin); err != nil {
return err
@@ -188,7 +155,6 @@ func main() {
{
var (
flagBind string
flagStatus bool
flagReport string
)
@@ -196,52 +162,8 @@ func main() {
"info",
"Display out-of-band metadata of an artifact",
func(args []string) (err error) {
const shutdownTimeout = 15 * time.Second
var r *rosa.Report
if flagReport != "" {
if r, err = rosa.OpenReport(flagReport); err != nil {
return err
}
defer func() {
if closeErr := r.Close(); err == nil {
err = closeErr
}
}()
defer r.HandleAccess(&err)()
}
if flagBind == "" {
return commandInfo(&cm, args, os.Stdout, flagStatus, r)
}
var mux http.ServeMux
ui.Register(&mux)
if err = pkgserver.Register(ctx, &mux, r); err != nil {
return
}
server := http.Server{Addr: flagBind, Handler: &mux}
go func() {
<-ctx.Done()
cc, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
defer cancel()
if _err := server.Shutdown(cc); _err != nil {
log.Fatal(_err)
}
}()
msg.Verbosef("listening on %q", flagBind)
err = server.ListenAndServe()
if errors.Is(err, http.ErrServerClosed) {
err = nil
}
return
return commandInfo(&cm, args, os.Stdout, flagStatus, flagReport)
},
).Flag(
&flagBind,
"bind", command.StringFlag(""),
"TCP address for the server to listen on",
).Flag(
&flagStatus,
"status", command.BoolFlag(false),
@@ -400,7 +322,7 @@ func main() {
if err = cm.Do(func(cache *pkg.Cache) (err error) {
pathname, _, err = cache.Cure(
(t - 2).Load(rosa.LLVM),
(t - 2).Load(rosa.Clang),
)
return
}); err != nil {
@@ -410,7 +332,7 @@ func main() {
if err = cm.Do(func(cache *pkg.Cache) (err error) {
pathname, checksum[0], err = cache.Cure(
(t - 1).Load(rosa.LLVM),
(t - 1).Load(rosa.Clang),
)
return
}); err != nil {
@@ -419,7 +341,7 @@ func main() {
log.Println("stage2:", pathname)
if err = cm.Do(func(cache *pkg.Cache) (err error) {
pathname, checksum[1], err = cache.Cure(
t.Load(rosa.LLVM),
t.Load(rosa.Clang),
)
return
}); err != nil {
@@ -475,9 +397,6 @@ func main() {
flagExport string
flagRemote bool
flagNoReply bool
flagBoot bool
flagStd bool
)
c.NewCommand(
"cure",
@@ -491,18 +410,11 @@ func main() {
return fmt.Errorf("unknown artifact %q", args[0])
}
t := rosa.Std
if flagBoot {
t -= 2
} else if flagStd {
t -= 1
}
switch {
default:
var pathname *check.Absolute
err := cm.Do(func(cache *pkg.Cache) (err error) {
pathname, _, err = cache.Cure(t.Load(p))
pathname, _, err = cache.Cure(rosa.Std.Load(p))
return
})
if err != nil {
@@ -555,7 +467,7 @@ func main() {
return cm.Do(func(cache *pkg.Cache) error {
return cache.EnterExec(
ctx,
t.Load(p),
rosa.Std.Load(p),
true, os.Stdin, os.Stdout, os.Stderr,
rosa.AbsSystem.Append("bin", "mksh"),
"sh",
@@ -567,7 +479,7 @@ func main() {
if flagNoReply {
flags |= remoteNoReply
}
a := t.Load(p)
a := rosa.Std.Load(p)
pathname, err := cureRemote(ctx, &addr, a, flags)
if !flagNoReply && err == nil {
log.Println(pathname)
@@ -577,7 +489,7 @@ func main() {
cc, cancel := context.WithDeadline(context.Background(), daemonDeadline())
defer cancel()
if _err := cancelRemote(cc, &addr, a, false); _err != nil {
if _err := cancelRemote(cc, &addr, a); _err != nil {
log.Println(err)
}
}
@@ -605,21 +517,13 @@ func main() {
&flagNoReply,
"no-reply", command.BoolFlag(false),
"Do not receive a reply from the daemon",
).Flag(
&flagBoot,
"boot", command.BoolFlag(false),
"Build on the stage0 toolchain",
).Flag(
&flagStd,
"std", command.BoolFlag(false),
"Build on the intermediate toolchain",
)
}
c.NewCommand(
"abort",
"Abort all pending cures on the daemon",
func([]string) error { return abortRemote(ctx, &addr, false) },
func([]string) error { return abortRemote(ctx, &addr) },
)
{
@@ -642,7 +546,7 @@ func main() {
presets[i] = p
}
base := rosa.LLVM
base := rosa.Clang
if !flagWithToolchain {
base = rosa.Musl
}
@@ -714,7 +618,6 @@ func main() {
z.Hostname = "localhost"
z.Uid, z.Gid = (1<<10)-1, (1<<10)-1
z.Stdin, z.Stdout, z.Stderr = os.Stdin, os.Stdout, os.Stderr
z.Quiet = !cm.verboseInit
if s, ok := os.LookupEnv("TERM"); ok {
z.Env = append(z.Env, "TERM="+s)
}

View File

@@ -1,47 +0,0 @@
package main
import (
"net"
"os"
"testing"
"hakurei.app/internal/rosa"
)
func TestMain(m *testing.M) {
rosa.DropCaches(rosa.OptLLVMNoLTO)
os.Exit(m.Run())
}
func TestCureAll(t *testing.T) {
t.Parallel()
const env = "ROSA_TEST_DAEMON"
if !testing.Verbose() {
t.Skip("verbose flag not set")
}
pathname, ok := os.LookupEnv(env)
if !ok {
t.Skip(env + " not set")
}
addr := net.UnixAddr{Net: "unix", Name: pathname}
t.Cleanup(func() {
if t.Failed() {
if err := abortRemote(t.Context(), &addr, false); err != nil {
t.Fatal(err)
}
}
})
for i := range rosa.PresetEnd {
p := rosa.PArtifact(i)
t.Run(rosa.GetMetadata(p).Name, func(t *testing.T) {
_, err := cureRemote(t.Context(), &addr, rosa.Std.Load(p), 0)
if err != nil {
t.Error(err)
}
})
}
}

View File

@@ -1,8 +1,6 @@
// Package pkgserver implements the package metadata service backend.
package pkgserver
package main
import (
"context"
"encoding/json"
"log"
"net/http"
@@ -10,7 +8,6 @@ import (
"path"
"strconv"
"sync"
"time"
"hakurei.app/internal/info"
"hakurei.app/internal/rosa"
@@ -161,29 +158,6 @@ func (index *packageIndex) registerAPI(mux *http.ServeMux) {
mux.HandleFunc("GET /status/", index.newStatusHandler(true))
}
// Register arranges for mux to service API requests.
func Register(ctx context.Context, mux *http.ServeMux, report *rosa.Report) error {
var index packageIndex
index.search = make(searchCache)
if err := index.populate(report); err != nil {
return err
}
ticker := time.NewTicker(1 * time.Minute)
go func() {
for {
select {
case <-ctx.Done():
ticker.Stop()
return
case <-ticker.C:
index.search.clean()
}
}
}()
index.registerAPI(mux)
return nil
}
// writeAPIPayload sets headers common to API responses and encodes payload as
// JSON for the response body.
func writeAPIPayload(w http.ResponseWriter, payload any) {

View File

@@ -1,4 +1,4 @@
package pkgserver
package main
import (
"net/http"
@@ -118,31 +118,33 @@ func TestAPIGet(t *testing.T) {
checkStatus(t, resp, http.StatusOK)
checkAPIHeader(t, w.Header())
checkPayloadFunc(t, resp, func(got *struct {
Count int `json:"count"`
Values []*metadata `json:"values"`
}) bool {
return slices.EqualFunc(got.Values, want, func(a, b *metadata) bool {
return (a.Version == b.Version ||
a.Version == rosa.Unversioned ||
b.Version == rosa.Unversioned) &&
a.HasReport == b.HasReport &&
a.Name == b.Name &&
a.Description == b.Description &&
a.Website == b.Website
})
return got.Count == len(want) &&
slices.EqualFunc(got.Values, want, func(a, b *metadata) bool {
return (a.Version == b.Version ||
a.Version == rosa.Unversioned ||
b.Version == rosa.Unversioned) &&
a.HasReport == b.HasReport &&
a.Name == b.Name &&
a.Description == b.Description &&
a.Website == b.Website
})
})
})
}
checkWithSuffix("declarationAscending", "?limit=2&index=1&sort=0", []*metadata{
checkWithSuffix("declarationAscending", "?limit=2&index=0&sort=0", []*metadata{
{
Metadata: rosa.GetMetadata(0),
Version: rosa.Std.Version(0),
},
{
Metadata: rosa.GetMetadata(1),
Version: rosa.Std.Version(1),
},
{
Metadata: rosa.GetMetadata(2),
Version: rosa.Std.Version(2),
},
})
checkWithSuffix("declarationAscending offset", "?limit=3&index=5&sort=0", []*metadata{
{

View File

@@ -1,4 +1,4 @@
package pkgserver
package main
import (
"cmp"
@@ -50,7 +50,7 @@ type metadata struct {
}
// populate deterministically populates packageIndex, optionally with a report.
func (index *packageIndex) populate(report *rosa.Report) (err error) {
func (index *packageIndex) populate(cache *pkg.Cache, report *rosa.Report) (err error) {
if report != nil {
defer report.HandleAccess(&err)()
index.handleAccess = report.HandleAccess
@@ -58,7 +58,6 @@ func (index *packageIndex) populate(report *rosa.Report) (err error) {
var work [rosa.PresetUnexportedStart]*metadata
index.names = make(map[string]*metadata)
ir := pkg.NewIR()
for p := range rosa.PresetUnexportedStart {
m := metadata{
p: p,
@@ -73,8 +72,8 @@ func (index *packageIndex) populate(report *rosa.Report) (err error) {
m.Version = ""
}
if report != nil {
id := ir.Ident(rosa.Std.Load(p))
if cache != nil && report != nil {
id := cache.Ident(rosa.Std.Load(p))
m.ids = pkg.Encode(id.Value())
m.status, m.Size = report.ArtifactOf(id)
m.HasReport = m.Size >= 0

115
cmd/pkgserver/main.go Normal file
View File

@@ -0,0 +1,115 @@
package main
import (
"context"
"errors"
"log"
"net/http"
"os"
"os/signal"
"syscall"
"time"
"hakurei.app/check"
"hakurei.app/command"
"hakurei.app/internal/pkg"
"hakurei.app/internal/rosa"
"hakurei.app/message"
)
const shutdownTimeout = 15 * time.Second
func main() {
log.SetFlags(0)
log.SetPrefix("pkgserver: ")
var (
flagBaseDir string
flagAddr string
)
ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM, syscall.SIGHUP)
defer stop()
msg := message.New(log.Default())
c := command.New(os.Stderr, log.Printf, "pkgserver", func(args []string) error {
var (
cache *pkg.Cache
report *rosa.Report
)
switch len(args) {
case 0:
break
case 1:
baseDir, err := check.NewAbs(flagBaseDir)
if err != nil {
return err
}
cache, err = pkg.Open(ctx, msg, 0, 0, 0, baseDir)
if err != nil {
return err
}
defer cache.Close()
report, err = rosa.OpenReport(args[0])
if err != nil {
return err
}
default:
return errors.New("pkgserver requires 1 argument")
}
var index packageIndex
index.search = make(searchCache)
if err := index.populate(cache, report); err != nil {
return err
}
ticker := time.NewTicker(1 * time.Minute)
go func() {
for {
select {
case <-ctx.Done():
ticker.Stop()
return
case <-ticker.C:
index.search.clean()
}
}
}()
var mux http.ServeMux
uiRoutes(&mux)
testUIRoutes(&mux)
index.registerAPI(&mux)
server := http.Server{
Addr: flagAddr,
Handler: &mux,
}
go func() {
<-ctx.Done()
c, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
defer cancel()
if err := server.Shutdown(c); err != nil {
log.Fatal(err)
}
}()
return server.ListenAndServe()
}).Flag(
&flagBaseDir,
"b", command.StringFlag(""),
"base directory for cache",
).Flag(
&flagAddr,
"addr", command.StringFlag(":8067"),
"TCP network address to listen on",
)
c.MustParse(os.Args[1:], func(err error) {
if errors.Is(err, http.ErrServerClosed) {
os.Exit(0)
}
log.Fatal(err)
})
}

View File

@@ -1,4 +1,4 @@
package pkgserver
package main
import (
"bytes"
@@ -15,7 +15,7 @@ func newIndex(t *testing.T) *packageIndex {
t.Helper()
var index packageIndex
if err := index.populate(nil); err != nil {
if err := index.populate(nil, nil); err != nil {
t.Fatalf("populate: error = %v", err)
}
return &index

View File

@@ -1,4 +1,4 @@
package pkgserver
package main
import (
"cmp"

35
cmd/pkgserver/test_ui.go Normal file
View File

@@ -0,0 +1,35 @@
//go:build frontend && frontend_test
package main
import (
"embed"
"io/fs"
"net/http"
)
// Always remove ui_test/ui; if the previous tsc run failed, the rm never
// executes.
//go:generate sh -c "rm -r ui_test/ui/ 2>/dev/null || true"
//go:generate mkdir ui_test/ui
//go:generate sh -c "cp ui/static/*.ts ui_test/ui/"
//go:generate tsc -p ui_test
//go:generate rm -r ui_test/ui/
//go:generate cp ui_test/lib/ui.css ui_test/static/style.css
//go:generate cp ui_test/lib/ui.html ui_test/static/index.html
//go:generate sh -c "cd ui_test/lib && cp *.svg ../static/"
//go:embed ui_test/static
var _staticTest embed.FS
var staticTest = func() fs.FS {
if f, err := fs.Sub(_staticTest, "ui_test/static"); err != nil {
panic(err)
} else {
return f
}
}()
func testUIRoutes(mux *http.ServeMux) {
mux.Handle("GET /test/", http.StripPrefix("/test", http.FileServer(http.FS(staticTest))))
}

View File

@@ -0,0 +1,7 @@
//go:build !(frontend && frontend_test)
package main
import "net/http"
func testUIRoutes(mux *http.ServeMux) {}

33
cmd/pkgserver/ui.go Normal file
View File

@@ -0,0 +1,33 @@
package main
import "net/http"
func serveWebUI(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
w.Header().Set("Pragma", "no-cache")
w.Header().Set("Expires", "0")
w.Header().Set("X-Content-Type-Options", "nosniff")
w.Header().Set("X-XSS-Protection", "1")
w.Header().Set("X-Frame-Options", "DENY")
http.ServeFileFS(w, r, content, "ui/index.html")
}
func serveStaticContent(w http.ResponseWriter, r *http.Request) {
switch r.URL.Path {
case "/static/style.css":
http.ServeFileFS(w, r, content, "ui/static/style.css")
case "/favicon.ico":
http.ServeFileFS(w, r, content, "ui/static/favicon.ico")
case "/static/index.js":
http.ServeFileFS(w, r, content, "ui/static/index.js")
default:
http.NotFound(w, r)
}
}
func uiRoutes(mux *http.ServeMux) {
mux.HandleFunc("GET /{$}", serveWebUI)
mux.HandleFunc("GET /favicon.ico", serveStaticContent)
mux.HandleFunc("GET /static/", serveStaticContent)
}

View File

@@ -3,9 +3,9 @@
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="stylesheet" href="style.css">
<link rel="stylesheet" href="static/style.css">
<title>Hakurei PkgServer</title>
<script src="index.js"></script>
<script src="static/index.js"></script>
</head>
<body>
<h1>Hakurei PkgServer</h1>

Binary file not shown.

After

Width:  |  Height:  |  Size: 17 KiB

9
cmd/pkgserver/ui_full.go Normal file
View File

@@ -0,0 +1,9 @@
//go:build frontend
package main
import "embed"
//go:generate tsc -p ui
//go:embed ui/*
var content embed.FS

View File

@@ -1,7 +1,7 @@
//go:build !frontend
package ui
package main
import "testing/fstest"
var static fstest.MapFS
var content fstest.MapFS

View File

@@ -0,0 +1,2 @@
// Import all test files to register their test suites.
import "./index_test.js";

View File

@@ -0,0 +1,2 @@
import { suite, test } from "./lib/test.js";
import "./ui/index.js";

View File

@@ -0,0 +1,48 @@
#!/usr/bin/env node
// Many editors have terminal emulators built in, so running tests with NodeJS
// provides faster iteration, especially for those acclimated to test-driven
// development.
import "../all_tests.js";
import { StreamReporter, GLOBAL_REGISTRAR } from "./test.js";
// TypeScript doesn't like process and Deno as their type definitions aren't
// installed, but doesn't seem to complain if they're accessed through
// globalThis.
const process: any = (globalThis as any).process;
const Deno: any = (globalThis as any).Deno;
function getArgs(): string[] {
if (process) {
const [runtime, program, ...args] = process.argv;
return args;
}
if (Deno) return Deno.args;
return [];
}
function exit(code?: number): never {
if (Deno) Deno.exit(code);
if (process) process.exit(code);
throw `exited with code ${code ?? 0}`;
}
const args = getArgs();
let verbose = false;
if (args.length > 1) {
console.error("Too many arguments");
exit(1);
}
if (args.length === 1) {
if (args[0] === "-v" || args[0] === "--verbose" || args[0] === "-verbose") {
verbose = true;
} else if (args[0] !== "--") {
console.error(`Unknown argument '${args[0]}'`);
exit(1);
}
}
let reporter = new StreamReporter({ writeln: console.log }, verbose);
GLOBAL_REGISTRAR.run(reporter);
exit(reporter.succeeded() ? 0 : 1);

View File

@@ -0,0 +1,13 @@
<?xml version="1.0"?>
<!-- See failure-open.svg for an explanation of the view box dimensions. -->
<svg xmlns="http://www.w3.org/2000/svg" width="20" viewBox="-20,-20 160 130">
<!-- This triangle should match success-closed.svg, fill and stroke color notwithstanding. -->
<polygon points="0,0 100,50 0,100" fill="red" stroke="red" stroke-width="15" stroke-linejoin="round"/>
<!--
! y-coordinates go before x-coordinates here to highlight the difference
! (or, lack thereof) between these numbers and the ones in failure-open.svg;
! try a textual diff. Make sure to keep the numbers in sync!
-->
<line y1="30" x1="10" y2="70" x2="50" stroke="white" stroke-width="16"/>
<line y1="30" x1="50" y2="70" x2="10" stroke="white" stroke-width="16"/>
</svg>

After

Width:  |  Height:  |  Size: 788 B

View File

@@ -0,0 +1,35 @@
<?xml version="1.0"?>
<!--
! This view box is a bit weird: the strokes assume they're working in a view
! box that spans from (0,0) to (100,100), and indeed that is convenient for
! conceptualizing the strokes, but the stroke itself has a considerable width
! that gets clipped by restrictive view box dimensions. Hence, the view is
! shifted from (0,0)(100,100) to (-20,-20)(120,120), to make room for the
! clipped stroke, while leaving behind an illusion of working in a view box
! spanning from (0,0) to (100,100).
!
! However, the resulting SVG is too close to the summary text, and CSS
! properties to add padding do not seem to work with `content:` (likely because
! they're anonymous replaced elements); thus, the width of the view is
! increased considerably to provide padding in the SVG itself, while leaving
! the strokes oblivious.
!
! It gets worse: the summary text isn't vertically aligned with the icon! As
! a flexbox cannot be used in a summary to align the marker with the text, the
! simplest and most effective solution is to reduce the height of the view box
! from 140 to 130, thereby removing some of the bottom padding present.
!
! All six SVGs use the same view box (and indeed, they refer to this comment)
! so that they all appear to be the same size and position relative to each
! other on the DOM—indeed, the view box dimensions, alongside the width,
! directly control their placement on the DOM.
!
! TL;DR: CSS is janky, overflow is weird, and SVG is awesome!
-->
<svg xmlns="http://www.w3.org/2000/svg" width="20" viewBox="-20,-20 160 130">
<!-- This triangle should match success-open.svg, fill and stroke color notwithstanding. -->
<polygon points="0,0 100,0 50,100" fill="red" stroke="red" stroke-width="15" stroke-linejoin="round"/>
<!-- See the comment in failure-closed.svg before modifying this. -->
<line x1="30" y1="10" x2="70" y2="50" stroke="white" stroke-width="16"/>
<line x1="30" y1="50" x2="70" y2="10" stroke="white" stroke-width="16"/>
</svg>

View File

@@ -0,0 +1,3 @@
// Entry point for the `go test` integration: importing all_tests.js
// (presumably registers every suite into GLOBAL_REGISTRAR — confirm against
// that module), then run everything through the JSON-emitting reporter that
// the Go-side glue code consumes.
import "../all_tests.js";
import { GoTestReporter, GLOBAL_REGISTRAR } from "./test.js";
GLOBAL_REGISTRAR.run(new GoTestReporter());

View File

@@ -0,0 +1,21 @@
<?xml version="1.0"?>
<!-- NOTE(review): presumably skip-closed.svg — blue, right-pointing (closed)
   ! triangle, and its path comment names skip-open.svg as the sibling;
   ! confirm the filename. -->
<!-- See failure-open.svg for an explanation of the view box dimensions. -->
<svg xmlns="http://www.w3.org/2000/svg" width="20" viewBox="-20,-20 160 130">
<!-- This triangle should match success-closed.svg, fill and stroke color notwithstanding. -->
<polygon points="0,0 100,50 0,100" fill="blue" stroke="blue" stroke-width="15" stroke-linejoin="round"/>
<!--
! White circular-arrow glyph marking a skipped test.
!
! This path is extremely similar to the one in skip-open.svg; before
! making minor modifications, diff the two to understand how they should
! remain in sync.
-->
<path
d="M 50,50
A 23,23 270,1,1 30,30
l -10,20
m 10,-20
l -20,-10"
fill="none"
stroke="white"
stroke-width="12"
stroke-linejoin="round"/>
</svg>

After

Width:  |  Height:  |  Size: 812 B

View File

@@ -0,0 +1,21 @@
<?xml version="1.0"?>
<!-- NOTE(review): presumably skip-open.svg — blue, downward-pointing (open)
   ! triangle, and its path comment names skip-closed.svg as the sibling;
   ! confirm the filename. -->
<!-- See failure-open.svg for an explanation of the view box dimensions. -->
<svg xmlns="http://www.w3.org/2000/svg" width="20" viewBox="-20,-20 160 130">
<!-- This triangle should match success-open.svg, fill and stroke color notwithstanding. -->
<polygon points="0,0 100,0 50,100" fill="blue" stroke="blue" stroke-width="15" stroke-linejoin="round"/>
<!--
! White circular-arrow glyph marking a skipped test.
!
! This path is extremely similar to the one in skip-closed.svg; before
! making minor modifications, diff the two to understand how they should
! remain in sync.
-->
<path
d="M 50,50
A 23,23 270,1,1 70,30
l 10,-20
m -10,20
l -20,-10"
fill="none"
stroke="white"
stroke-width="12"
stroke-linejoin="round"/>
</svg>

After

Width:  |  Height:  |  Size: 812 B

View File

@@ -0,0 +1,16 @@
<?xml version="1.0"?>
<!-- NOTE(review): presumably success-closed.svg — the CSS success marker
   ! rules reference success-closed/success-open, and this is the
   ! right-pointing (closed) outline variant; confirm the filename. -->
<!-- See failure-open.svg for an explanation of the view box dimensions. -->
<svg xmlns="http://www.w3.org/2000/svg" width="20" viewBox="-20,-20 160 130">
<style>
/* Stroke follows the page theme (see also --fg in style.css). */
.adaptive-stroke {
stroke: black;
}
@media (prefers-color-scheme: dark) {
.adaptive-stroke {
stroke: ghostwhite;
}
}
</style>
<!-- When updating this triangle, also update the other five SVGs. -->
<polygon points="0,0 100,50 0,100" fill="none" class="adaptive-stroke" stroke-width="15" stroke-linejoin="round"/>
</svg>

After

Width:  |  Height:  |  Size: 572 B

View File

@@ -0,0 +1,16 @@
<?xml version="1.0"?>
<!-- NOTE(review): presumably success-open.svg — the CSS success[open] marker
   ! rule references it, and this is the downward-pointing (open) outline
   ! variant; confirm the filename. -->
<!-- See failure-open.svg for an explanation of the view box dimensions. -->
<svg xmlns="http://www.w3.org/2000/svg" width="20" viewBox="-20,-20 160 130">
<style>
/* Stroke follows the page theme (see also --fg in style.css). */
.adaptive-stroke {
stroke: black;
}
@media (prefers-color-scheme: dark) {
.adaptive-stroke {
stroke: ghostwhite;
}
}
</style>
<!-- When updating this triangle, also update the other five SVGs. -->
<polygon points="0,0 100,0 50,100" fill="none" class="adaptive-stroke" stroke-width="15" stroke-linejoin="round"/>
</svg>

After

Width:  |  Height:  |  Size: 572 B

View File

@@ -0,0 +1,403 @@
// =============================================================================
// DSL
// A node in the test tree: either an inner group or a leaf test.
type TestTree = TestGroup | Test;
// Named collection of nested groups and tests.
type TestGroup = { name: string; children: TestTree[] };
// Leaf node: a named test callback receiving the per-test controller.
type Test = { name: string; test: (t: TestController) => void };
// Collects top-level suites and drives a Reporter over all of them.
export class TestRegistrar {
    #suites: TestGroup[] = [];

    // Register a top-level suite; sibling names within it must be unique
    // (a duplicate raises RangeError).
    suite(name: string, children: TestTree[]) {
        checkDuplicates(name, children);
        this.#suites.push({ name, children });
    }

    // Announce the full tree to the reporter, execute every registered test
    // depth-first, then let the reporter flush its summary.
    run(reporter: Reporter) {
        reporter.register(this.#suites);
        for (const { name, children } of this.#suites) {
            children.forEach((child) => runTests(reporter, [name], child));
        }
        reporter.finalize();
    }
}
// Shared registrar instance; presumably populated by all_tests.js and run by
// the per-reporter entry points — confirm against those modules.
export let GLOBAL_REGISTRAR = new TestRegistrar();
// Register a suite in the global registrar.
export function suite(name: string, children: TestTree[]) {
GLOBAL_REGISTRAR.suite(name, children);
}
export function group(name: string, children: TestTree[]): TestTree {
checkDuplicates(name, children);
return { name, children };
}
export const context = group;
export const describe = group;
export function test(name: string, test: (t: TestController) => void): TestTree {
return { name, test };
}
// Raise RangeError when two entries under the same parent share a name.
function checkDuplicates(parent: string, names: { name: string }[]) {
    const seen = new Set<string>();
    for (const entry of names) {
        if (seen.has(entry.name)) {
            throw new RangeError(`duplicate name '${entry.name}' in '${parent}'`);
        }
        seen.add(entry.name);
    }
}
// Final state of a single test; "failure" wins over "skip" when both occur
// (see TestController.skip).
export type TestState = "success" | "failure" | "skip";
// Thrown internally to abort a test body early (failNow/skip); swallowed by
// the runner, never surfaced to reporters.
class AbortSentinel {}
// Per-test handle passed to each test callback; its method set (fail,
// failNow, log, error, fatal, skip) parallels Go's testing.T.
export class TestController {
    #state: TestState = "success";
    logs: string[] = [];

    // Current state; "failure" is sticky and wins over "skip".
    getState(): TestState {
        return this.#state;
    }
    failed(): boolean {
        return this.#state === "failure";
    }
    skipped(): boolean {
        return this.#state === "skip";
    }

    // Record a log line without affecting the state.
    log(message: string) {
        this.logs.push(message);
    }
    // Mark the test failed but keep running.
    fail() {
        this.#state = "failure";
    }
    // Mark failed and abort the test body immediately.
    failNow(): never {
        this.fail();
        throw new AbortSentinel();
    }
    // log + fail, without aborting.
    error(message: string) {
        this.log(message);
        this.fail();
    }
    // log + fail + abort.
    fatal(message: string): never {
        this.log(message);
        this.failNow();
    }
    // Abort the test; it becomes "skip" unless it has already failed.
    skip(message?: string): never {
        if (message != null) this.log(message);
        if (this.#state !== "failure") this.#state = "skip";
        throw new AbortSentinel();
    }
}
// =============================================================================
// Execution
// Outcome of one completed test: its final state plus accumulated log lines.
export interface TestResult {
state: TestState;
logs: string[];
}
function runTests(reporter: Reporter, parents: string[], node: TestTree) {
const path = [...parents, node.name];
if ("children" in node) {
for (const c of node.children) runTests(reporter, path, c);
return;
}
let controller = new TestController();
try {
node.test(controller);
} catch (e) {
if (!(e instanceof AbortSentinel)) {
controller.error(extractExceptionString(e));
}
}
reporter.update(path, { state: controller.getState(), logs: controller.logs });
}
// Render an arbitrary thrown value as a string, appending the stack trace
// for Error instances.
//
// Takes `unknown` rather than `any` so the compiler enforces narrowing
// before property access (backward compatible: every argument accepted by
// `any` is accepted by `unknown`).
function extractExceptionString(e: unknown): string {
    // String() instead of .toString() as null and undefined don't have
    // properties.
    const s = String(e);
    if (!(e instanceof Error && e.stack)) return s;
    // v8 (Chromium, NodeJS) includes the error message in .stack, while
    // Firefox and WebKit do not; avoid printing the message twice.
    if (e.stack.startsWith(s)) return e.stack;
    return `${s}\n${e.stack}`;
}
// =============================================================================
// Reporting
// Sink for test results. TestRegistrar.run calls register once with the full
// tree before any test executes, update once per completed test (path is the
// chain of suite/group/test names), and finalize once at the end.
export interface Reporter {
register(suites: TestGroup[]): void;
update(path: string[], result: TestResult): void;
finalize(): void;
}
export class NoOpReporter implements Reporter {
suites: TestGroup[];
results: ({ path: string[] } & TestResult)[];
finalized: boolean;
constructor() {
this.suites = [];
this.results = [];
this.finalized = false;
}
register(suites: TestGroup[]) {
this.suites = suites;
}
update(path: string[], result: TestResult) {
this.results.push({ path, ...result });
}
finalize() {
this.finalized = true;
}
}
// Minimal line-oriented output sink consumed by StreamReporter.
export interface Stream {
writeln(s: string): void;
}
// Separator used when flattening a test path into a single line.
const SEP = " ";
// Reporter that writes human-readable results to a line stream (e.g. a CLI).
// Failures and skips are echoed as they finish; successes only when verbose.
// finalize() then prints grouped per-test output sections and a final tally.
export class StreamReporter implements Reporter {
stream: Stream;
verbose: boolean;
#successes: ({ path: string[] } & TestResult)[];
#failures: ({ path: string[] } & TestResult)[];
#skips: ({ path: string[] } & TestResult)[];
constructor(stream: Stream, verbose: boolean = false) {
this.stream = stream;
this.verbose = verbose;
this.#successes = [];
this.#failures = [];
this.#skips = [];
}
// True only when at least one test succeeded and none failed; note an
// empty run does not count as success.
succeeded(): boolean {
return this.#successes.length > 0 && this.#failures.length === 0;
}
register(suites: TestGroup[]) {}
// Record one result and echo it immediately (successes only if verbose).
update(path: string[], result: TestResult) {
if (path.length === 0) throw new RangeError("path is empty");
const pathStr = path.join(SEP);
switch (result.state) {
case "success":
this.#successes.push({ path, ...result });
if (this.verbose) this.stream.writeln(`✅️ ${pathStr}`);
break;
case "failure":
this.#failures.push({ path, ...result });
this.stream.writeln(`⚠️ ${pathStr}`);
break;
case "skip":
this.#skips.push({ path, ...result });
this.stream.writeln(`⏭️ ${pathStr}`);
break;
}
}
// Print the per-category sections, then the one-line totals. Successes
// are only sectioned in verbose mode, and even then only those with logs.
finalize() {
if (this.verbose) this.#displaySection("successes", this.#successes, true);
this.#displaySection("failures", this.#failures);
this.#displaySection("skips", this.#skips);
this.stream.writeln("");
this.stream.writeln(
`${this.#successes.length} succeeded, ${this.#failures.length} failed` +
(this.#skips.length ? `, ${this.#skips.length} skipped` : ""),
);
}
// Print one titled section, grouping tests that share a parent path.
// When ignoreEmpty is set, tests without log output are dropped.
#displaySection(name: string, data: ({ path: string[] } & TestResult)[], ignoreEmpty: boolean = false) {
if (!data.length) return;
// Transform [{ path: ["a", "b", "c"] }, { path: ["a", "b", "d"] }]
// into { "a b": ["c", "d"] }.
let pathMap = new Map<string, ({ name: string } & TestResult)[]>();
for (const t of data) {
if (t.path.length === 0) throw new RangeError("path is empty");
const key = t.path.slice(0, -1).join(SEP);
if (!pathMap.has(key)) pathMap.set(key, []);
pathMap.get(key)!.push({ name: t.path.at(-1)!, ...t });
}
this.stream.writeln("");
this.stream.writeln(name.toUpperCase());
this.stream.writeln("=".repeat(name.length));
for (let [path, tests] of pathMap) {
if (ignoreEmpty) tests = tests.filter((t) => t.logs.length);
if (tests.length === 0) continue;
if (tests.length === 1) {
// A lone test goes on one line, prefixed with its full parent path.
this.#writeOutput(tests[0], path ? `${path}${SEP}` : "", false);
} else {
// Siblings are listed beneath their shared parent path.
this.stream.writeln(path);
for (const t of tests) this.#writeOutput(t, " - ", true);
}
}
}
// Write one test's name and logs; multi-line logs are indented beneath the
// name (deeper when the entry is nested in a sibling list).
#writeOutput(test: { name: string } & TestResult, prefix: string, nested: boolean) {
let output = "";
if (test.logs.length) {
// Individual logs might span multiple lines, so join them together
// then split it again.
const logStr = test.logs.join("\n");
const lines = logStr.split("\n");
if (lines.length <= 1) {
output = `: ${logStr}`;
} else {
const padding = nested ? " " : " ";
output = ":\n" + lines.map((line) => padding + line).join("\n");
}
}
this.stream.writeln(`${prefix}${test.name}${output}`);
}
}
// Like document.getElementById, but throws ReferenceError instead of
// returning null when the element is absent.
function assertGetElementById(id: string): HTMLElement {
    const elem = document.getElementById(id);
    if (elem === null) {
        throw new ReferenceError(`element with ID '${id}' missing from DOM`);
    }
    return elem;
}
// Reporter that renders results into the page as nested <details> elements
// under #root, and bumps the #success-/#failure-/#skip-counter tallies.
// Requires the static skeleton from test.html (counters, hidden description
// paragraphs, #root) to be present — assertGetElementById throws otherwise.
export class DOMReporter implements Reporter {
register(suites: TestGroup[]) {}
update(path: string[], result: TestResult) {
if (path.length === 0) throw new RangeError("path is empty");
// Reveal the ", N skipped" fragment the first time anything is skipped.
if (result.state === "skip") {
assertGetElementById("skip-counter-text").hidden = false;
}
const counter = assertGetElementById(`${result.state}-counter`);
counter.innerText = (Number(counter.innerText) + 1).toString();
// Walk (and lazily build) one <details> level per path component.
let parent = assertGetElementById("root");
for (const node of path) {
let child: HTMLDetailsElement | null = null;
let summary: HTMLElement | null = null;
let d: Element;
// Look for an existing <details> whose <summary> text matches this
// path component.
outer: for (d of parent.children) {
if (!(d instanceof HTMLDetailsElement)) continue;
for (const s of d.children) {
if (!(s instanceof HTMLElement)) continue;
if (!(s.tagName === "SUMMARY" && s.innerText === node)) continue;
child = d;
summary = s;
break outer;
}
}
// Not found: create the <details>/<summary> pair for this level.
if (!child) {
child = document.createElement("details");
child.className = "test-node";
child.ariaRoleDescription = "test";
summary = document.createElement("summary");
summary.appendChild(document.createTextNode(node));
summary.ariaRoleDescription = "test name";
child.appendChild(summary);
parent.appendChild(child);
}
if (!summary) throw new Error("unreachable as assigned above");
// Propagate state along the chain: failure beats skip beats success,
// and a failing subtree is forced open.
switch (result.state) {
case "failure":
child.open = true;
child.classList.add("failure");
child.classList.remove("skip");
child.classList.remove("success");
// The summary marker does not appear in the AOM, so setting its
// alt text is fruitless; label the summary itself instead.
summary.setAttribute("aria-labelledby", "failure-description");
break;
case "skip":
if (child.classList.contains("failure")) break;
child.classList.add("skip");
child.classList.remove("success");
summary.setAttribute("aria-labelledby", "skip-description");
break;
case "success":
if (child.classList.contains("failure") || child.classList.contains("skip")) break;
child.classList.add("success");
summary.setAttribute("aria-labelledby", "success-description");
break;
}
parent = child;
}
// Leaf payload: the test's log output, or an italic placeholder.
const p = document.createElement("p");
p.classList.add("test-desc");
if (result.logs.length) {
const pre = document.createElement("pre");
pre.appendChild(document.createTextNode(result.logs.join("\n")));
p.appendChild(pre);
} else {
p.classList.add("italic");
p.appendChild(document.createTextNode("No output."));
}
parent.appendChild(p);
}
finalize() {}
}
// Shape of a serialized test-tree node consumed by the Go side; leaves have
// no subtests field (undefined is dropped by JSON.stringify).
interface GoNode {
name: string;
subtests?: GoNode[];
}
// Used to display results via `go test`, via some glue code from the Go side.
export class GoTestReporter implements Reporter {
register(suites: TestGroup[]) {
console.log(JSON.stringify(suites.map(GoTestReporter.serialize)));
}
// Convert a test tree into the one expected by the Go code.
static serialize(node: TestTree): GoNode {
return {
name: node.name,
subtests: "children" in node ? node.children.map(GoTestReporter.serialize) : undefined,
};
}
update(path: string[], result: TestResult) {
let state: number;
switch (result.state) {
case "success": state = 0; break;
case "failure": state = 1; break;
case "skip": state = 2; break;
}
console.log(JSON.stringify({ path, state, logs: result.logs }));
}
finalize() {
console.log("null");
}
}

View File

@@ -0,0 +1,87 @@
/*
 * Stylesheet for the in-browser test report (test.html + DOMReporter).
 *
 * When updating the theme colors, also update them in success-closed.svg and
 * success-open.svg!
 */
:root {
--bg: #d3d3d3;
--fg: black;
}
@media (prefers-color-scheme: dark) {
:root {
--bg: #2c2c2c;
--fg: ghostwhite;
}
}
html {
background-color: var(--bg);
color: var(--fg);
}
h1, p, summary, noscript {
font-family: sans-serif;
}
noscript {
font-size: 16pt;
}
/*
 * NOTE(review): test.html uses id="root" (not class="root") for the results
 * container — confirm some markup actually carries this class, otherwise
 * this rule is dead.
 */
.root {
margin: 1rem 0;
}
/*
 * One nested <details> per test-path component, built by DOMReporter; the
 * success/failure/skip classes below are set by it as results arrive.
 */
details.test-node {
margin-left: 1rem;
padding: 0.2rem 0.5rem;
border-left: 2px dashed var(--fg);
> summary {
cursor: pointer;
}
&.success > summary::marker {
/*
 * WebKit only supports color and font-size properties in ::marker [1], and
 * its ::-webkit-details-marker only supports hiding the marker entirely
 * [2], contrary to mdn's example [3]; thus, set a color as a fallback:
 * while it may not be accessible for colorblind individuals, it's better
 * than no indication of a test's state for anyone, as that there's no other
 * way to include an indication in the marker on WebKit.
 *
 * [1]: https://developer.mozilla.org/en-US/docs/Web/CSS/Reference/Selectors/::marker#browser_compatibility
 * [2]: https://developer.mozilla.org/en-US/docs/Web/HTML/Reference/Elements/summary#default_style
 * [3]: https://developer.mozilla.org/en-US/docs/Web/HTML/Reference/Elements/summary#changing_the_summarys_icon
 */
color: var(--fg);
content: url("/test/success-closed.svg") / "success";
}
&.success[open] > summary::marker {
content: url("/test/success-open.svg") / "success";
}
&.failure > summary::marker {
color: red;
content: url("/test/failure-closed.svg") / "failure";
}
&.failure[open] > summary::marker {
content: url("/test/failure-open.svg") / "failure";
}
&.skip > summary::marker {
color: blue;
content: url("/test/skip-closed.svg") / "skip";
}
&.skip[open] > summary::marker {
content: url("/test/skip-open.svg") / "skip";
}
}
/* Per-test output paragraph appended by DOMReporter beneath each leaf. */
p.test-desc {
margin: 0 0 0 1rem;
padding: 2px 0;
> pre {
margin: 0;
}
}
.italic {
font-style: italic;
}

View File

@@ -0,0 +1,39 @@
<!DOCTYPE html>
<!-- Static skeleton for the in-browser test report; DOMReporter (lib/test.js)
     fills in the counters and builds the results tree under #root. -->
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="stylesheet" href="/test/style.css">
<title>PkgServer Tests</title>
</head>
<body>
<noscript>
I hate JavaScript as much as you, but this page runs tests written in
JavaScript to test the functionality of code written in JavaScript, so it
wouldn't make sense for it to work without JavaScript. <strong>Please turn
JavaScript on!</strong>
</noscript>
<h1>PkgServer Tests</h1>
<main>
<!-- Live tallies; DOMReporter increments these and unhides the skip
     fragment the first time a test is skipped. -->
<p id="counters">
<span id="success-counter">0</span> succeeded, <span id="failure-counter">0</span>
failed<span id="skip-counter-text" hidden>, <span id="skip-counter">0</span> skipped</span>.
</p>
<!-- Hidden labels referenced via aria-labelledby from each test's summary. -->
<p hidden id="success-description">Successful test</p>
<p hidden id="failure-description">Failed test</p>
<p hidden id="skip-description">Partially or fully skipped test</p>
<!-- DOMReporter builds the nested <details> results tree in here. -->
<div id="root">
</div>
<script type="module">
import "/test/all_tests.js";
import { DOMReporter, GLOBAL_REGISTRAR } from "/test/lib/test.js";
GLOBAL_REGISTRAR.run(new DOMReporter());
</script>
</main>
</body>
</html>

View File

@@ -0,0 +1,8 @@
{
"compilerOptions": {
"target": "ES2024",
"strict": true,
"alwaysStrict": true,
"outDir": "static"
}
}

View File

@@ -67,9 +67,6 @@ type (
// Copied to the underlying [exec.Cmd].
WaitDelay time.Duration
// Suppress verbose output of init.
Quiet bool
cmd *exec.Cmd
ctx context.Context
msg message.Msg
@@ -345,6 +342,8 @@ func (p *Container) Start() error {
Err: ENOSYS,
Origin: true,
}
} else {
p.msg.Verbosef("landlock abi version %d", abi)
}
if rulesetFd, err := rulesetAttr.Create(0); err != nil {
@@ -354,6 +353,7 @@ func (p *Container) Start() error {
Err: err,
}
} else {
p.msg.Verbosef("enforcing landlock ruleset %s", rulesetAttr)
if err = landlock.RestrictSelf(rulesetFd, 0); err != nil {
_ = Close(rulesetFd)
return &StartError{
@@ -410,6 +410,7 @@ func (p *Container) Start() error {
}
}
p.msg.Verbose("starting container init")
if err := p.cmd.Start(); err != nil {
return &StartError{
Step: "start container init",
@@ -480,6 +481,7 @@ func (p *Container) Serve() (err error) {
}
case <-done:
p.msg.Verbose("setup payload took", time.Since(t))
return
}
}(p.setup[1])
@@ -489,7 +491,7 @@ func (p *Container) Serve() (err error) {
Getuid(),
Getgid(),
len(p.ExtraFiles),
p.msg.IsVerbose() && !p.Quiet,
p.msg.IsVerbose(),
})
}

View File

@@ -16,7 +16,6 @@ import (
"strings"
"syscall"
"testing"
_ "unsafe" // for go:linkname
"hakurei.app/check"
"hakurei.app/command"
@@ -234,9 +233,6 @@ func earlyMnt(mnt ...*vfs.MountInfoEntry) func(*testing.T, context.Context) []*v
return func(*testing.T, context.Context) []*vfs.MountInfoEntry { return mnt }
}
//go:linkname toHost hakurei.app/container.toHost
func toHost(name string) string
var containerTestCases = []struct {
name string
filter bool
@@ -336,15 +332,13 @@ var containerTestCases = []struct {
func(t *testing.T, ctx context.Context) []*vfs.MountInfoEntry {
return []*vfs.MountInfoEntry{
ent("/", hst.PrivateTmp, "rw", "overlay", "overlay",
"rw"+
",lowerdir+="+
toHost(ctx.Value(testVal("lower0")).(*check.Absolute).String())+
",lowerdir+="+
toHost(ctx.Value(testVal("lower1")).(*check.Absolute).String())+
"rw,lowerdir="+
container.InternalToHostOvlEscape(ctx.Value(testVal("lower0")).(*check.Absolute).String())+":"+
container.InternalToHostOvlEscape(ctx.Value(testVal("lower1")).(*check.Absolute).String())+
",upperdir="+
toHost(ctx.Value(testVal("upper")).(*check.Absolute).String())+
container.InternalToHostOvlEscape(ctx.Value(testVal("upper")).(*check.Absolute).String())+
",workdir="+
toHost(ctx.Value(testVal("work")).(*check.Absolute).String())+
container.InternalToHostOvlEscape(ctx.Value(testVal("work")).(*check.Absolute).String())+
",redirect_dir=nofollow,uuid=on,userxattr"),
}
},
@@ -394,11 +388,9 @@ var containerTestCases = []struct {
func(t *testing.T, ctx context.Context) []*vfs.MountInfoEntry {
return []*vfs.MountInfoEntry{
ent("/", hst.PrivateTmp, "rw", "overlay", "overlay",
"ro"+
",lowerdir+="+
toHost(ctx.Value(testVal("lower0")).(*check.Absolute).String())+
",lowerdir+="+
toHost(ctx.Value(testVal("lower1")).(*check.Absolute).String())+
"ro,lowerdir="+
container.InternalToHostOvlEscape(ctx.Value(testVal("lower0")).(*check.Absolute).String())+":"+
container.InternalToHostOvlEscape(ctx.Value(testVal("lower1")).(*check.Absolute).String())+
",redirect_dir=nofollow,userxattr"),
}
},

View File

@@ -65,8 +65,6 @@ type syscallDispatcher interface {
remount(msg message.Msg, target string, flags uintptr) error
// mountTmpfs provides mountTmpfs.
mountTmpfs(fsname, target string, flags uintptr, size int, perm os.FileMode) error
// mountOverlay provides mountOverlay.
mountOverlay(target string, options [][2]string) error
// ensureFile provides ensureFile.
ensureFile(name string, perm, pperm os.FileMode) error
// mustLoopback provides mustLoopback.
@@ -171,9 +169,6 @@ func (direct) remount(msg message.Msg, target string, flags uintptr) error {
func (k direct) mountTmpfs(fsname, target string, flags uintptr, size int, perm os.FileMode) error {
return mountTmpfs(k, fsname, target, flags, size, perm)
}
func (k direct) mountOverlay(target string, options [][2]string) error {
return mountOverlay(target, options)
}
func (direct) ensureFile(name string, perm, pperm os.FileMode) error {
return ensureFile(name, perm, pperm)
}

View File

@@ -468,14 +468,6 @@ func (k *kstub) mountTmpfs(fsname, target string, flags uintptr, size int, perm
stub.CheckArg(k.Stub, "perm", perm, 4))
}
func (k *kstub) mountOverlay(target string, options [][2]string) error {
k.Helper()
return k.Expects("mountOverlay").Error(
stub.CheckArg(k.Stub, "target", target, 0),
stub.CheckArgReflect(k.Stub, "options", options, 1),
)
}
func (k *kstub) ensureFile(name string, perm, pperm os.FileMode) error {
k.Helper()
return k.Expects("ensureFile").Error(

View File

@@ -118,10 +118,6 @@ func errnoFallback(op, path string, err error) (syscall.Errno, *os.PathError) {
// mount wraps syscall.Mount for error handling.
func mount(source, target, fstype string, flags uintptr, data string) error {
if max(len(source), len(target), len(data))+1 > os.Getpagesize() {
return &MountError{source, target, fstype, flags, data, syscall.ENOMEM}
}
err := syscall.Mount(source, target, fstype, flags, data)
if err == nil {
return nil

View File

@@ -223,13 +223,6 @@ func initEntrypoint(k syscallDispatcher, msg message.Msg) {
state := &setupState{process: make(map[int]WaitStatus), Params: &param.Params, Msg: msg, Context: ctx}
defer cancel()
if err := k.mount(SourceTmpfsRootfs, intermediateHostPath, FstypeTmpfs, MS_NODEV|MS_NOSUID, zeroString); err != nil {
k.fatalf(msg, "cannot mount intermediate root: %v", optionalErrorUnwrap(err))
}
if err := k.chdir(intermediateHostPath); err != nil {
k.fatalf(msg, "cannot enter intermediate host path: %v", err)
}
/* early is called right before pivot_root into intermediate root;
this step is mostly for gathering information that would otherwise be
difficult to obtain via library functions after pivot_root, and
@@ -249,6 +242,13 @@ func initEntrypoint(k syscallDispatcher, msg message.Msg) {
}
}
if err := k.mount(SourceTmpfsRootfs, intermediateHostPath, FstypeTmpfs, MS_NODEV|MS_NOSUID, zeroString); err != nil {
k.fatalf(msg, "cannot mount intermediate root: %v", optionalErrorUnwrap(err))
}
if err := k.chdir(intermediateHostPath); err != nil {
k.fatalf(msg, "cannot enter intermediate host path: %v", err)
}
if err := k.mkdir(sysrootDir, 0755); err != nil {
k.fatalf(msg, "%v", err)
}

View File

@@ -332,8 +332,6 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("fatalf", stub.ExpectArgs{"invalid op at index %d", []any{0}}, nil, nil),
/* end early */
@@ -372,8 +370,6 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("fatalf", stub.ExpectArgs{"invalid op at index %d", []any{0}}, nil, nil),
/* end early */
@@ -412,8 +408,6 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", stub.UniqueError(61)),
call("fatalf", stub.ExpectArgs{"cannot prepare op at index %d: %v", []any{0, stub.UniqueError(61)}}, nil, nil),
@@ -453,8 +447,6 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", &os.PathError{Op: "readlink", Path: "/", Err: stub.UniqueError(60)}),
call("fatal", stub.ExpectArgs{[]any{"cannot readlink /: unique error 60 injected by the test suite"}}, nil, nil),
@@ -494,6 +486,9 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, stub.UniqueError(58)),
call("fatalf", stub.ExpectArgs{"cannot mount intermediate root: %v", []any{stub.UniqueError(58)}}, nil, nil),
},
@@ -531,6 +526,9 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, stub.UniqueError(56)),
call("fatalf", stub.ExpectArgs{"cannot enter intermediate host path: %v", []any{stub.UniqueError(56)}}, nil, nil),
@@ -569,11 +567,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, stub.UniqueError(54)),
call("fatalf", stub.ExpectArgs{"%v", []any{stub.UniqueError(54)}}, nil, nil),
},
@@ -611,11 +609,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, stub.UniqueError(52)),
call("fatalf", stub.ExpectArgs{"cannot bind sysroot: %v", []any{stub.UniqueError(52)}}, nil, nil),
@@ -654,11 +652,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, stub.UniqueError(50)),
@@ -698,11 +696,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -743,11 +741,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -789,11 +787,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -844,11 +842,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -899,11 +897,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -955,11 +953,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1012,11 +1010,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1071,11 +1069,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1131,11 +1129,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1192,11 +1190,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1254,11 +1252,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1317,11 +1315,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1381,11 +1379,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1446,11 +1444,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1512,11 +1510,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1586,11 +1584,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1693,11 +1691,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1801,11 +1799,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -1910,11 +1908,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -2034,11 +2032,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(4), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -2134,11 +2132,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(4), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -2234,11 +2232,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(4), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -2325,11 +2323,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(4), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -2420,11 +2418,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(4), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -2522,11 +2520,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),
@@ -2661,11 +2659,11 @@ func TestInitEntrypoint(t *testing.T) {
call("sethostname", stub.ExpectArgs{[]byte("hakurei-check")}, nil, nil),
call("lastcap", stub.ExpectArgs{}, uintptr(40), nil),
call("mount", stub.ExpectArgs{"", "/", "", uintptr(0x8c000), ""}, nil, nil),
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
/* begin early */
call("evalSymlinks", stub.ExpectArgs{"/"}, "/", nil),
/* end early */
call("mount", stub.ExpectArgs{"rootfs", "/proc/self/fd", "tmpfs", uintptr(6), ""}, nil, nil),
call("chdir", stub.ExpectArgs{"/proc/self/fd"}, nil, nil),
call("mkdir", stub.ExpectArgs{"sysroot", os.FileMode(0755)}, nil, nil),
call("mount", stub.ExpectArgs{"sysroot", "sysroot", "", uintptr(0xd000), ""}, nil, nil),
call("mkdir", stub.ExpectArgs{"host", os.FileMode(0755)}, nil, nil),

View File

@@ -4,9 +4,9 @@ import (
"encoding/gob"
"fmt"
"slices"
"strings"
"hakurei.app/check"
"hakurei.app/ext"
"hakurei.app/fhs"
)
@@ -150,7 +150,7 @@ func (o *MountOverlayOp) early(_ *setupState, k syscallDispatcher) error {
if v, err := k.evalSymlinks(o.Upper.String()); err != nil {
return err
} else {
o.upper = toHost(v)
o.upper = check.EscapeOverlayDataSegment(toHost(v))
}
}
@@ -158,7 +158,7 @@ func (o *MountOverlayOp) early(_ *setupState, k syscallDispatcher) error {
if v, err := k.evalSymlinks(o.Work.String()); err != nil {
return err
} else {
o.work = toHost(v)
o.work = check.EscapeOverlayDataSegment(toHost(v))
}
}
}
@@ -168,39 +168,12 @@ func (o *MountOverlayOp) early(_ *setupState, k syscallDispatcher) error {
if v, err := k.evalSymlinks(a.String()); err != nil {
return err
} else {
o.lower[i] = toHost(v)
o.lower[i] = check.EscapeOverlayDataSegment(toHost(v))
}
}
return nil
}
// mountOverlay sets up an overlay mount via [ext.FS].
func mountOverlay(target string, options [][2]string) error {
fs, err := ext.OpenFS(SourceOverlay, 0)
if err != nil {
return err
}
if err = fs.SetString("source", SourceOverlay); err != nil {
_ = fs.Close()
return err
}
for _, option := range options {
if err = fs.SetString(option[0], option[1]); err != nil {
_ = fs.Close()
return err
}
}
if err = fs.SetFlag(OptionOverlayUserxattr); err != nil {
_ = fs.Close()
return err
}
if err = fs.Mount(target, 0); err != nil {
_ = fs.Close()
return err
}
return fs.Close()
}
func (o *MountOverlayOp) apply(state *setupState, k syscallDispatcher) error {
target := o.Target.String()
if !o.noPrefix {
@@ -221,7 +194,7 @@ func (o *MountOverlayOp) apply(state *setupState, k syscallDispatcher) error {
}
}
options := make([][2]string, 0, 2+len(o.lower))
options := make([]string, 0, 4)
if o.upper == zeroString && o.work == zeroString { // readonly
if len(o.Lower) < 2 {
@@ -232,16 +205,15 @@ func (o *MountOverlayOp) apply(state *setupState, k syscallDispatcher) error {
if len(o.Lower) == 0 {
return &OverlayArgumentError{OverlayEmptyLower, zeroString}
}
options = append(options, [][2]string{
{OptionOverlayUpperdir, o.upper},
{OptionOverlayWorkdir, o.work},
}...)
}
for _, lower := range o.lower {
options = append(options, [2]string{OptionOverlayLowerdir + "+", lower})
options = append(options,
OptionOverlayUpperdir+"="+o.upper,
OptionOverlayWorkdir+"="+o.work)
}
options = append(options,
OptionOverlayLowerdir+"="+strings.Join(o.lower, check.SpecialOverlayPath),
OptionOverlayUserxattr)
return k.mountOverlay(target, options)
return k.mount(SourceOverlay, target, FstypeOverlay, 0, strings.Join(options, check.SpecialOverlayOption))
}
func (o *MountOverlayOp) late(*setupState, syscallDispatcher) error { return nil }

View File

@@ -97,12 +97,13 @@ func TestMountOverlayOp(t *testing.T) {
call("mkdirAll", stub.ExpectArgs{"/sysroot", os.FileMode(0705)}, nil, nil),
call("mkdirTemp", stub.ExpectArgs{"/", "overlay.upper.*"}, "overlay.upper.32768", nil),
call("mkdirTemp", stub.ExpectArgs{"/", "overlay.work.*"}, "overlay.work.32768", nil),
call("mountOverlay", stub.ExpectArgs{"/sysroot", [][2]string{
{"upperdir", "overlay.upper.32768"},
{"workdir", "overlay.work.32768"},
{"lowerdir+", `/host/var/lib/planterette/base/debian:f92c9052`},
{"lowerdir+", `/host/var/lib/planterette/app/org.chromium.Chromium@debian:f92c9052`},
}}, nil, nil),
call("mount", stub.ExpectArgs{"overlay", "/sysroot", "overlay", uintptr(0), "" +
"upperdir=overlay.upper.32768," +
"workdir=overlay.work.32768," +
"lowerdir=" +
`/host/var/lib/planterette/base/debian\:f92c9052:` +
`/host/var/lib/planterette/app/org.chromium.Chromium@debian\:f92c9052,` +
"userxattr"}, nil, nil),
}, nil},
{"short lower ro", &Params{ParentPerm: 0755}, &MountOverlayOp{
@@ -128,10 +129,11 @@ func TestMountOverlayOp(t *testing.T) {
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store0"}, "/mnt-root/nix/.ro-store0", nil),
}, nil, []stub.Call{
call("mkdirAll", stub.ExpectArgs{"/nix/store", os.FileMode(0755)}, nil, nil),
call("mountOverlay", stub.ExpectArgs{"/nix/store", [][2]string{
{"lowerdir+", "/host/mnt-root/nix/.ro-store"},
{"lowerdir+", "/host/mnt-root/nix/.ro-store0"},
}}, nil, nil),
call("mount", stub.ExpectArgs{"overlay", "/nix/store", "overlay", uintptr(0), "" +
"lowerdir=" +
"/host/mnt-root/nix/.ro-store:" +
"/host/mnt-root/nix/.ro-store0," +
"userxattr"}, nil, nil),
}, nil},
{"success ro", &Params{ParentPerm: 0755}, &MountOverlayOp{
@@ -145,10 +147,11 @@ func TestMountOverlayOp(t *testing.T) {
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store0"}, "/mnt-root/nix/.ro-store0", nil),
}, nil, []stub.Call{
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0755)}, nil, nil),
call("mountOverlay", stub.ExpectArgs{"/sysroot/nix/store", [][2]string{
{"lowerdir+", "/host/mnt-root/nix/.ro-store"},
{"lowerdir+", "/host/mnt-root/nix/.ro-store0"},
}}, nil, nil),
call("mount", stub.ExpectArgs{"overlay", "/sysroot/nix/store", "overlay", uintptr(0), "" +
"lowerdir=" +
"/host/mnt-root/nix/.ro-store:" +
"/host/mnt-root/nix/.ro-store0," +
"userxattr"}, nil, nil),
}, nil},
{"nil lower", &Params{ParentPerm: 0700}, &MountOverlayOp{
@@ -216,11 +219,7 @@ func TestMountOverlayOp(t *testing.T) {
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store"}, "/mnt-root/nix/ro-store", nil),
}, nil, []stub.Call{
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0700)}, nil, nil),
call("mountOverlay", stub.ExpectArgs{"/sysroot/nix/store", [][2]string{
{"upperdir", "/host/mnt-root/nix/.rw-store/.upper"},
{"workdir", "/host/mnt-root/nix/.rw-store/.work"},
{"lowerdir+", "/host/mnt-root/nix/ro-store"},
}}, nil, stub.UniqueError(0)),
call("mount", stub.ExpectArgs{"overlay", "/sysroot/nix/store", "overlay", uintptr(0), "upperdir=/host/mnt-root/nix/.rw-store/.upper,workdir=/host/mnt-root/nix/.rw-store/.work,lowerdir=/host/mnt-root/nix/ro-store,userxattr"}, nil, stub.UniqueError(0)),
}, stub.UniqueError(0)},
{"success single layer", &Params{ParentPerm: 0700}, &MountOverlayOp{
@@ -234,11 +233,11 @@ func TestMountOverlayOp(t *testing.T) {
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store"}, "/mnt-root/nix/ro-store", nil),
}, nil, []stub.Call{
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0700)}, nil, nil),
call("mountOverlay", stub.ExpectArgs{"/sysroot/nix/store", [][2]string{
{"upperdir", "/host/mnt-root/nix/.rw-store/.upper"},
{"workdir", "/host/mnt-root/nix/.rw-store/.work"},
{"lowerdir+", "/host/mnt-root/nix/ro-store"},
}}, nil, nil),
call("mount", stub.ExpectArgs{"overlay", "/sysroot/nix/store", "overlay", uintptr(0), "" +
"upperdir=/host/mnt-root/nix/.rw-store/.upper," +
"workdir=/host/mnt-root/nix/.rw-store/.work," +
"lowerdir=/host/mnt-root/nix/ro-store," +
"userxattr"}, nil, nil),
}, nil},
{"success", &Params{ParentPerm: 0700}, &MountOverlayOp{
@@ -262,15 +261,16 @@ func TestMountOverlayOp(t *testing.T) {
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store3"}, "/mnt-root/nix/ro-store3", nil),
}, nil, []stub.Call{
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0700)}, nil, nil),
call("mountOverlay", stub.ExpectArgs{"/sysroot/nix/store", [][2]string{
{"upperdir", "/host/mnt-root/nix/.rw-store/.upper"},
{"workdir", "/host/mnt-root/nix/.rw-store/.work"},
{"lowerdir+", "/host/mnt-root/nix/ro-store"},
{"lowerdir+", "/host/mnt-root/nix/ro-store0"},
{"lowerdir+", "/host/mnt-root/nix/ro-store1"},
{"lowerdir+", "/host/mnt-root/nix/ro-store2"},
{"lowerdir+", "/host/mnt-root/nix/ro-store3"},
}}, nil, nil),
call("mount", stub.ExpectArgs{"overlay", "/sysroot/nix/store", "overlay", uintptr(0), "" +
"upperdir=/host/mnt-root/nix/.rw-store/.upper," +
"workdir=/host/mnt-root/nix/.rw-store/.work," +
"lowerdir=" +
"/host/mnt-root/nix/ro-store:" +
"/host/mnt-root/nix/ro-store0:" +
"/host/mnt-root/nix/ro-store1:" +
"/host/mnt-root/nix/ro-store2:" +
"/host/mnt-root/nix/ro-store3," +
"userxattr"}, nil, nil),
}, nil},
})

View File

@@ -10,6 +10,7 @@ import (
"testing"
"unsafe"
"hakurei.app/check"
"hakurei.app/vfs"
)
@@ -49,6 +50,9 @@ func TestToHost(t *testing.T) {
}
}
// InternalToHostOvlEscape exports toHost passed to [check.EscapeOverlayDataSegment].
func InternalToHostOvlEscape(s string) string { return check.EscapeOverlayDataSegment(toHost(s)) }
func TestCreateFile(t *testing.T) {
t.Run("nonexistent", func(t *testing.T) {
t.Run("mkdir", func(t *testing.T) {

267
ext/fs.go
View File

@@ -1,267 +0,0 @@
// Package ext provides low-level wrappers around the suite of
// file-descriptor-based mount facilities in Linux: fsopen(2), fspick(2),
// fsconfig(2), fsmount(2) and move_mount(2).
package ext

import (
	"os"
	"runtime"
	"syscall"
	"unsafe"
)

// Constant values below mirror include/uapi/linux/mount.h.

/*
 * move_mount() flags.
 */
const (
	MOVE_MOUNT_F_SYMLINKS   = 1 << iota /* Follow symlinks on from path */
	MOVE_MOUNT_F_AUTOMOUNTS             /* Follow automounts on from path */
	MOVE_MOUNT_F_EMPTY_PATH             /* Empty from path permitted */
	_
	MOVE_MOUNT_T_SYMLINKS   /* Follow symlinks on to path */
	MOVE_MOUNT_T_AUTOMOUNTS /* Follow automounts on to path */
	MOVE_MOUNT_T_EMPTY_PATH /* Empty to path permitted */
	_
	MOVE_MOUNT_SET_GROUP /* Set sharing group instead */
	MOVE_MOUNT_BENEATH   /* Mount beneath top mount */
)

/*
 * fsopen() flags.
 */
const (
	FSOPEN_CLOEXEC = 1 << iota
)

/*
 * fspick() flags.
 */
const (
	FSPICK_CLOEXEC = 1 << iota
	FSPICK_SYMLINK_NOFOLLOW
	FSPICK_NO_AUTOMOUNT
	FSPICK_EMPTY_PATH
)

/*
 * The type of fsconfig() call made.
 */
const (
	FSCONFIG_SET_FLAG        = iota /* Set parameter, supplying no value */
	FSCONFIG_SET_STRING             /* Set parameter, supplying a string value */
	FSCONFIG_SET_BINARY             /* Set parameter, supplying a binary blob value */
	FSCONFIG_SET_PATH               /* Set parameter, supplying an object by path */
	FSCONFIG_SET_PATH_EMPTY         /* Set parameter, supplying an object by (empty) path */
	FSCONFIG_SET_FD                 /* Set parameter, supplying an object by fd */
	FSCONFIG_CMD_CREATE             /* Create new or reuse existing superblock */
	FSCONFIG_CMD_RECONFIGURE        /* Invoke superblock reconfiguration */
	FSCONFIG_CMD_CREATE_EXCL        /* Create new superblock, fail if reusing existing superblock */
)

/*
 * fsmount() flags.
 */
const (
	FSMOUNT_CLOEXEC = 1 << iota
)

/*
 * Mount attributes.
 */
const (
	MOUNT_ATTR_RDONLY      = 0x00000001 /* Mount read-only */
	MOUNT_ATTR_NOSUID      = 0x00000002 /* Ignore suid and sgid bits */
	MOUNT_ATTR_NODEV       = 0x00000004 /* Disallow access to device special files */
	MOUNT_ATTR_NOEXEC      = 0x00000008 /* Disallow program execution */
	MOUNT_ATTR__ATIME      = 0x00000070 /* Setting on how atime should be updated */
	MOUNT_ATTR_RELATIME    = 0x00000000 /* - Update atime relative to mtime/ctime. */
	MOUNT_ATTR_NOATIME     = 0x00000010 /* - Do not update access times. */
	MOUNT_ATTR_STRICTATIME = 0x00000020 /* - Always perform atime updates */
	MOUNT_ATTR_NODIRATIME  = 0x00000080 /* Do not update directory access times */
	MOUNT_ATTR_IDMAP       = 0x00100000 /* Idmap mount to @userns_fd in struct mount_attr. */
	MOUNT_ATTR_NOSYMFOLLOW = 0x00200000 /* Do not follow symlinks */
)

// FS provides low-level wrappers around the suite of file-descriptor-based
// mount facilities in Linux.
type FS struct {
	fd uintptr         // filesystem context fd returned by fsopen(2)/fspick(2)
	c  runtime.Cleanup // closes fd if the FS is dropped without Close being called
}
// newFS wraps the given filesystem context fd in an [FS] and registers a
// cleanup that closes the descriptor should the value become unreachable
// without an explicit Close.
func newFS(fd uintptr) *FS {
	fs := &FS{fd: fd}
	fs.c = runtime.AddCleanup(fs, func(fd uintptr) {
		_ = syscall.Close(int(fd))
	}, fd)
	return fs
}
// Close closes the underlying filesystem context.
func (fs *FS) Close() error {
	if fs == nil {
		return syscall.EINVAL
	}
	// The explicit close supersedes the GC-driven cleanup.
	fs.c.Stop()
	return syscall.Close(int(fs.fd))
}
// OpenFS creates a new filesystem context for the filesystem type named by
// fsname via fsopen(2). FSOPEN_CLOEXEC is always set on the resulting fd.
func OpenFS(fsname string, flags int) (*FS, error) {
	name, err := syscall.BytePtrFromString(fsname)
	if err != nil {
		return nil, err
	}
	fd, _, errno := syscall.Syscall(
		SYS_FSOPEN,
		uintptr(unsafe.Pointer(name)),
		uintptr(flags|FSOPEN_CLOEXEC),
		0,
	)
	if errno != 0 {
		return nil, os.NewSyscallError("fsopen", errno)
	}
	return newFS(fd), nil
}
// PickFS selects an existing filesystem for reconfiguration via fspick(2).
// FSPICK_CLOEXEC is always set on the resulting fd.
func PickFS(dirfd int, pathname string, flags int) (*FS, error) {
	path, err := syscall.BytePtrFromString(pathname)
	if err != nil {
		return nil, err
	}
	fd, _, errno := syscall.Syscall(
		SYS_FSPICK,
		uintptr(dirfd),
		uintptr(unsafe.Pointer(path)),
		uintptr(flags|FSPICK_CLOEXEC),
	)
	if errno != 0 {
		return nil, os.NewSyscallError("fspick", errno)
	}
	return newFS(fd), nil
}
// config configures a new or existing filesystem context via fsconfig(2).
// cmd selects the FSCONFIG_* operation; key, value and aux are interpreted
// by the kernel according to cmd and may be nil/zero where unused.
func (fs *FS) config(cmd uint, key *byte, value unsafe.Pointer, aux int) error {
	if _, _, errno := syscall.Syscall6(
		SYS_FSCONFIG,
		fs.fd,
		uintptr(cmd),
		uintptr(unsafe.Pointer(key)),
		uintptr(value),
		uintptr(aux),
		0,
	); errno != 0 {
		return os.NewSyscallError("fsconfig", errno)
	}
	return nil
}
// SetFlag sets the flag parameter named by key. ([FSCONFIG_SET_FLAG])
func (fs *FS) SetFlag(key string) error {
	k, err := syscall.BytePtrFromString(key)
	if err != nil {
		return err
	}
	return fs.config(FSCONFIG_SET_FLAG, k, nil, 0)
}
// SetString sets the string parameter named by key to the value specified by
// value. ([FSCONFIG_SET_STRING])
func (fs *FS) SetString(key, value string) error {
	k, err := syscall.BytePtrFromString(key)
	if err != nil {
		return err
	}
	v, err := syscall.BytePtrFromString(value)
	if err != nil {
		return err
	}
	return fs.config(FSCONFIG_SET_STRING, k, unsafe.Pointer(v), 0)
}
// mount instantiates a mount object from the filesystem context via
// fsmount(2), returning the new mount file descriptor. FSMOUNT_CLOEXEC is
// always set.
func (fs *FS) mount(flags, attrFlags int) (int, error) {
	r, _, errno := syscall.Syscall(
		SYS_FSMOUNT,
		fs.fd,
		uintptr(flags|FSMOUNT_CLOEXEC),
		uintptr(attrFlags),
	)
	if errno != 0 {
		return int(r), os.NewSyscallError("fsmount", errno)
	}
	return int(r), nil
}
// MoveMount moves or attaches a mount object in the filesystem hierarchy via
// move_mount(2). The source and destination are each named by a dirfd and a
// pathname resolved relative to it.
func MoveMount(
	fromDirfd int,
	fromPathname string,
	toDirfd int,
	toPathname string,
	flags int,
) error {
	from, err := syscall.BytePtrFromString(fromPathname)
	if err != nil {
		return err
	}
	to, err := syscall.BytePtrFromString(toPathname)
	if err != nil {
		return err
	}
	if _, _, errno := syscall.Syscall6(
		SYS_MOVE_MOUNT,
		uintptr(fromDirfd),
		uintptr(unsafe.Pointer(from)),
		uintptr(toDirfd),
		uintptr(unsafe.Pointer(to)),
		uintptr(flags),
		0,
	); errno != 0 {
		return os.NewSyscallError("move_mount", errno)
	}
	return nil
}
// Mount attaches the underlying filesystem context to the specified pathname.
func (fs *FS) Mount(pathname string, attrFlags int) error {
if err := fs.config(FSCONFIG_CMD_CREATE_EXCL, nil, nil, 0); err != nil {
return err
}
fd, err := fs.mount(0, attrFlags)
if err != nil {
return err
}
err = MoveMount(
fd, "",
-1, pathname,
MOVE_MOUNT_F_EMPTY_PATH,
)
closeErr := syscall.Close(fd)
if err == nil {
err = closeErr
}
return err
}

View File

@@ -64,6 +64,78 @@ func TestFlatten(t *testing.T) {
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C"), nil},
{"sample cache file", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX": {Mode: 0400, Data: []byte{0}},
"checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq": {Mode: 0400, Data: []byte{0, 0, 0, 0, 0xad, 0xb, 0, 4, 0xfe, 0xfe, 0, 0, 0xfe, 0xca, 0, 0}},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
"identifier/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq")},
"identifier/cafebabecafebabecafebabecafebabecafebabecafebabecafebabecafebabe": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq")},
"identifier/deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq")},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: 0400, Path: "checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq", Data: []byte{0, 0, 0, 0, 0xad, 0xb, 0, 4, 0xfe, 0xfe, 0, 0, 0xfe, 0xca, 0, 0}},
{Mode: 0400, Path: "checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX", Data: []byte{0}},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq", Data: []byte("../checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/cafebabecafebabecafebabecafebabecafebabecafebabecafebabecafebabe", Data: []byte("../checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef", Data: []byte("../checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX", Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("St9rlE-mGZ5gXwiv_hzQ_B8bZP-UUvSNmf4nHUZzCMOumb6hKnheZSe0dmnuc4Q2"), nil},
{"sample http get cure", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU": {Mode: 0400, Data: []byte("\x7f\xe1\x69\xa2\xdd\x63\x96\x26\x83\x79\x61\x8b\xf0\x3f\xd5\x16\x9a\x39\x3a\xdb\xcf\xb1\xbc\x8d\x33\xff\x75\xee\x62\x56\xa9\xf0\x27\xac\x13\x94\x69")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/oM-2pUlk-mOxK1t3aMWZer69UdOQlAXiAgMrpZ1476VoOqpYVP1aGFS9_HYy-D8_": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: 0400, Path: "checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU", Data: []byte("\x7f\xe1\x69\xa2\xdd\x63\x96\x26\x83\x79\x61\x8b\xf0\x3f\xd5\x16\x9a\x39\x3a\xdb\xcf\xb1\xbc\x8d\x33\xff\x75\xee\x62\x56\xa9\xf0\x27\xac\x13\x94\x69")},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/oM-2pUlk-mOxK1t3aMWZer69UdOQlAXiAgMrpZ1476VoOqpYVP1aGFS9_HYy-D8_", Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("L_0RFHpr9JUS4Zp14rz2dESSRvfLzpvqsLhR1-YjQt8hYlmEdVl7vI3_-v8UNPKs"), nil},
{"sample directory step simple", fstest.MapFS{
".": {Mode: fs.ModeDir | 0500},
"check": {Mode: 0400, Data: []byte{0, 0}},
"lib": {Mode: fs.ModeDir | 0700},
"lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"lib/pkgconfig": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0500, Path: "."},
{Mode: 0400, Path: "check", Data: []byte{0, 0}},
{Mode: fs.ModeDir | 0700, Path: "lib"},
{Mode: fs.ModeSymlink | 0777, Path: "lib/libedac.so", Data: []byte("/proc/nonexistent/libedac.so")},
{Mode: fs.ModeDir | 0700, Path: "lib/pkgconfig"},
}, pkg.MustDecode("qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b"), nil},
{"sample directory step garbage", fstest.MapFS{
".": {Mode: fs.ModeDir | 0500},
@@ -79,6 +151,421 @@ func TestFlatten(t *testing.T) {
{Mode: fs.ModeDir | 0500, Path: "lib/pkgconfig"},
}, pkg.MustDecode("CUx-3hSbTWPsbMfDhgalG4Ni_GmR9TnVX8F99tY_P5GtkYvczg9RrF5zO0jX9XYT"), nil},
{"sample directory", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b": {Mode: fs.ModeDir | 0500},
"checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/check": {Mode: 0400, Data: []byte{0, 0}},
"checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/lib": {Mode: fs.ModeDir | 0700},
"checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/lib/pkgconfig": {Mode: fs.ModeDir | 0700},
"checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b")},
"identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b")},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b"},
{Mode: 0400, Path: "checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/check", Data: []byte{0, 0}},
{Mode: fs.ModeDir | 0700, Path: "checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/lib"},
{Mode: fs.ModeSymlink | 0777, Path: "checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/lib/libedac.so", Data: []byte("/proc/nonexistent/libedac.so")},
{Mode: fs.ModeDir | 0700, Path: "checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/lib/pkgconfig"},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY", Data: []byte("../checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa", Data: []byte("../checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b")},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("WVpvsVqVKg9Nsh744x57h51AuWUoUR2nnh8Md-EYBQpk6ziyTuUn6PLtF2e0Eu_d"), nil},
{"sample no assume checksum", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M": {Mode: fs.ModeDir | 0500},
"checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M/check": {Mode: 0400, Data: []byte{}},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/_wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M")},
"identifier/_wEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M")},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M"},
{Mode: 0400, Path: "checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M/check", Data: []byte{}},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_wEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M")},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("OC290t23aimNo2Rp2pPwan5GI2KRLRdOwYxXQMD9jw0QROgHnNXWodoWdV0hwu2w"), nil},
{"sample tar step unpack", fstest.MapFS{
".": {Mode: fs.ModeDir | 0500},
"checksum": {Mode: fs.ModeDir | 0500},
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP": {Mode: fs.ModeDir | 0500},
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/check": {Mode: 0400, Data: []byte{0, 0}},
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib": {Mode: fs.ModeDir | 0500},
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/pkgconfig": {Mode: fs.ModeDir | 0500},
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"identifier": {Mode: fs.ModeDir | 0500},
"identifier/HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
"identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
"work": {Mode: fs.ModeDir | 0500},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0500, Path: "."},
{Mode: fs.ModeDir | 0500, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP"},
{Mode: 0400, Path: "checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/check", Data: []byte{0, 0}},
{Mode: fs.ModeDir | 0500, Path: "checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib"},
{Mode: fs.ModeSymlink | 0777, Path: "checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/libedac.so", Data: []byte("/proc/nonexistent/libedac.so")},
{Mode: fs.ModeDir | 0500, Path: "checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/pkgconfig"},
{Mode: fs.ModeDir | 0500, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY", Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa", Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
{Mode: fs.ModeDir | 0500, Path: "work"},
}, pkg.MustDecode("cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM"), nil},
{"sample tar", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM": {Mode: fs.ModeDir | 0500},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum": {Mode: fs.ModeDir | 0500},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP": {Mode: fs.ModeDir | 0500},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/check": {Mode: 0400, Data: []byte{0, 0}},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib": {Mode: fs.ModeDir | 0500},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/pkgconfig": {Mode: fs.ModeDir | 0500},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/identifier": {Mode: fs.ModeDir | 0500},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/identifier/HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/work": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
"identifier/rg7F1D5hwv6o4xctjD5zDq4i5MD0mArTsUIWfhUbik8xC6Bsyt3mjXXOm3goojTz": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM"},
{Mode: fs.ModeDir | 0500, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP"},
{Mode: 0400, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/check", Data: []byte{0, 0}},
{Mode: fs.ModeDir | 0500, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib"},
{Mode: fs.ModeSymlink | 0777, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/libedac.so", Data: []byte("/proc/nonexistent/libedac.so")},
{Mode: fs.ModeDir | 0500, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/pkgconfig"},
{Mode: fs.ModeDir | 0500, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/identifier/HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY", Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
{Mode: fs.ModeSymlink | 0777, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa", Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
{Mode: fs.ModeDir | 0500, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/work"},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw", Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/rg7F1D5hwv6o4xctjD5zDq4i5MD0mArTsUIWfhUbik8xC6Bsyt3mjXXOm3goojTz", Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("NQTlc466JmSVLIyWklm_u8_g95jEEb98PxJU-kjwxLpfdjwMWJq0G8ze9R4Vo1Vu"), nil},
{"sample tar expand step unpack", fstest.MapFS{
".": {Mode: fs.ModeDir | 0500},
"libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0500, Path: "."},
{Mode: fs.ModeSymlink | 0777, Path: "libedac.so", Data: []byte("/proc/nonexistent/libedac.so")},
}, pkg.MustDecode("CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN"), nil},
{"sample tar expand", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN": {Mode: fs.ModeDir | 0500},
"checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
"identifier/_v1blm2h-_KA-dVaawdpLas6MjHc6rbhhFS8JWwx8iJxZGUu8EBbRrhr5AaZ9PJL": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN"},
{Mode: fs.ModeSymlink | 0777, Path: "checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN/libedac.so", Data: []byte("/proc/nonexistent/libedac.so")},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw", Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_v1blm2h-_KA-dVaawdpLas6MjHc6rbhhFS8JWwx8iJxZGUu8EBbRrhr5AaZ9PJL", Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("hSoSSgCYTNonX3Q8FjvjD1fBl-E-BQyA6OTXro2OadXqbST4tZ-akGXszdeqphRe"), nil},
{"testtool", fstest.MapFS{
".": {Mode: fs.ModeDir | 0500},
"check": {Mode: 0400, Data: []byte{0}},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0500, Path: "."},
{Mode: 0400, Path: "check", Data: []byte{0}},
}, pkg.MustDecode("GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9"), nil},
{"sample exec container", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9": {Mode: fs.ModeDir | 0500},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb": {Mode: 0400, Data: []byte{}},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
"identifier/dztPS6jRjiZtCF4_p8AzfnxGp6obkhrgFVsxdodbKWUoAEVtDz3MykepJB4kI_ks": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9"},
{Mode: 0400, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check", Data: []byte{0}},
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
{Mode: 0400, Path: "checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb", Data: []byte{}},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/dztPS6jRjiZtCF4_p8AzfnxGp6obkhrgFVsxdodbKWUoAEVtDz3MykepJB4kI_ks", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("Q5DluWQCAeohLoiGRImurwFp3vdz9IfQCoj7Fuhh73s4KQPRHpEQEnHTdNHmB8Fx"), nil},
{"testtool net", fstest.MapFS{
".": {Mode: fs.ModeDir | 0500},
"check": {Mode: 0400, Data: []byte("net")},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0500, Path: "."},
{Mode: 0400, Path: "check", Data: []byte("net")},
}, pkg.MustDecode("a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W"), nil},
{"sample exec net container", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb": {Mode: 0400, Data: []byte{}},
"checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W": {Mode: fs.ModeDir | 0500},
"checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W/check": {Mode: 0400, Data: []byte("net")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/G8qPxD9puvvoOVV7lrT80eyDeIl3G_CCFoKw12c8mCjMdG1zF7NEPkwYpNubClK3": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
{Mode: 0400, Path: "checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb", Data: []byte{}},
{Mode: fs.ModeDir | 0500, Path: "checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W"},
{Mode: 0400, Path: "checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W/check", Data: []byte("net")},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/G8qPxD9puvvoOVV7lrT80eyDeIl3G_CCFoKw12c8mCjMdG1zF7NEPkwYpNubClK3", Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("bPYvvqxpfV7xcC1EptqyKNK1klLJgYHMDkzBcoOyK6j_Aj5hb0mXNPwTwPSK5F6Z"), nil},
{"sample exec container overlay root", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9": {Mode: fs.ModeDir | 0500},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/RdMA-mubnrHuu3Ky1wWyxauSYCO0ZH_zCPUj3uDHqkfwv5sGcByoF_g5PjlGiClb": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9"},
{Mode: 0400, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check", Data: []byte{0}},
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/RdMA-mubnrHuu3Ky1wWyxauSYCO0ZH_zCPUj3uDHqkfwv5sGcByoF_g5PjlGiClb", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("PO2DSSCa4yoSgEYRcCSZfQfwow1yRigL3Ry-hI0RDI4aGuFBha-EfXeSJnG_5_Rl"), nil},
{"sample exec container overlay work", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9": {Mode: fs.ModeDir | 0500},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/5hlaukCirnXE4W_RSLJFOZN47Z5RiHnacXzdFp_70cLgiJUGR6cSb_HaFftkzi0-": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9"},
{Mode: 0400, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check", Data: []byte{0}},
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/5hlaukCirnXE4W_RSLJFOZN47Z5RiHnacXzdFp_70cLgiJUGR6cSb_HaFftkzi0-", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("iaRt6l_Wm2n-h5UsDewZxQkCmjZjyL8r7wv32QT2kyV55-Lx09Dq4gfg9BiwPnKs"), nil},
{"sample exec container multiple layers", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9": {Mode: fs.ModeDir | 0500},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb": {Mode: 0400, Data: []byte{}},
"checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK": {Mode: fs.ModeDir | 0500},
"checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK/check": {Mode: 0400, Data: []byte("layers")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
"identifier/B-kc5iJMx8GtlCua4dz6BiJHnDAOUfPjgpbKq4e-QEn0_CZkSYs3fOA1ve06qMs2": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
"identifier/p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9"},
{Mode: 0400, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check", Data: []byte{0}},
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
{Mode: 0400, Path: "checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb", Data: []byte{}},
{Mode: fs.ModeDir | 0500, Path: "checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK"},
{Mode: 0400, Path: "checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK/check", Data: []byte("layers")},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/B-kc5iJMx8GtlCua4dz6BiJHnDAOUfPjgpbKq4e-QEn0_CZkSYs3fOA1ve06qMs2", Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("O2YzyR7IUGU5J2CADy0hUZ3A5NkP_Vwzs4UadEdn2oMZZVWRtH0xZGJ3HXiimTnZ"), nil},
{"sample exec container layer promotion", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9": {Mode: fs.ModeDir | 0500},
"checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/kvJIqZo5DKFOxC2ZQ-8_nPaQzEAz9cIm3p6guO-uLqm-xaiPu7oRkSnsu411jd_U": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/xXTIYcXmgJWNLC91c417RRrNM9cjELwEZHpGvf8Fk_GNP5agRJp_SicD0w9aMeLJ": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: fs.ModeDir | 0500, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9"},
{Mode: 0400, Path: "checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9/check", Data: []byte{0}},
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/kvJIqZo5DKFOxC2ZQ-8_nPaQzEAz9cIm3p6guO-uLqm-xaiPu7oRkSnsu411jd_U", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/xXTIYcXmgJWNLC91c417RRrNM9cjELwEZHpGvf8Fk_GNP5agRJp_SicD0w9aMeLJ", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
{Mode: fs.ModeDir | 0700, Path: "temp"},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("3EaW6WibLi9gl03_UieiFPaFcPy5p4x3JPxrnLJxGaTI-bh3HU9DK9IMx7c3rrNm"), nil},
{"sample file short", fstest.MapFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX": {Mode: 0400, Data: []byte{0}},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/3376ALA7hIUm2LbzH2fDvRezgzod1eTK_G6XjyOgbM2u-6swvkFaF0BOwSl_juBi": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
"work": {Mode: fs.ModeDir | 0700},
}, []pkg.FlatEntry{
{Mode: fs.ModeDir | 0700, Path: "."},
{Mode: fs.ModeDir | 0700, Path: "checksum"},
{Mode: 0400, Path: "checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX", Data: []byte{0}},
{Mode: fs.ModeDir | 0700, Path: "identifier"},
{Mode: fs.ModeSymlink | 0777, Path: "identifier/3376ALA7hIUm2LbzH2fDvRezgzod1eTK_G6XjyOgbM2u-6swvkFaF0BOwSl_juBi", Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
{Mode: fs.ModeDir | 0700, Path: "work"},
}, pkg.MustDecode("iR6H5OIsyOW4EwEgtm9rGzGF6DVtyHLySEtwnFE8bnus9VJcoCbR4JIek7Lw-vwT"), nil},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {

View File

@@ -436,7 +436,6 @@ func (a *execArtifact) makeContainer(
if z.HostNet {
z.Hostname = "cure-net"
}
z.Quiet = flags&CSuppressInit != 0
z.Uid, z.Gid = (1<<10)-1, (1<<10)-1
z.Dir, z.Path, z.Args = a.dir, a.path, a.args
z.Env = slices.Concat(a.env, []string{EnvJobs + "=" + strconv.Itoa(jobs)})

View File

@@ -1,10 +1,11 @@
package pkg_test
//go:generate env CGO_ENABLED=0 go build -tags testtool -o testdata/testtool ./testdata
import (
_ "embed"
"encoding/gob"
"errors"
"io/fs"
"net"
"os"
"os/exec"
@@ -16,26 +17,20 @@ import (
"hakurei.app/hst"
"hakurei.app/internal/pkg"
"hakurei.app/internal/stub"
"hakurei.app/internal/pkg/internal/testtool/expected"
)
// testtoolBin is the container test tool binary made available to the
// execArtifact for testing its curing environment.
//
//go:generate env CGO_ENABLED=0 go build -tags testtool -o internal/testtool ./internal/testtool
//go:embed internal/testtool/testtool
//go:embed testdata/testtool
var testtoolBin []byte
func TestExec(t *testing.T) {
t.Parallel()
wantOffline := expectsFS{
".": {Mode: fs.ModeDir | 0500},
"check": {Mode: 0400, Data: []byte{0}},
}
wantOfflineEncode := pkg.Encode(wantOffline.hash())
wantChecksumOffline := pkg.MustDecode(
"GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9",
)
checkWithCache(t, []cacheTestCase{
{"offline", pkg.CValidateKnown, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
@@ -63,7 +58,7 @@ func TestExec(t *testing.T) {
},
}),
pkg.MustPath("/opt", false, testtool),
), ignorePathname, wantOffline, nil},
), ignorePathname, wantChecksumOffline, nil},
{"error passthrough", pkg.NewExec(
"", nil, 0, true,
@@ -79,7 +74,7 @@ func TestExec(t *testing.T) {
return stub.UniqueError(0xcafe)
},
}),
), nil, nil, &pkg.DependencyCureError{
), nil, pkg.Checksum{}, &pkg.DependencyCureError{
{
Ident: unique.Make(pkg.ID(pkg.MustDecode(
"Sowo6oZRmG6xVtUaxB6bDWZhVsqAJsIJWUp0OPKlE103cY0lodx7dem8J-qQF0Z1",
@@ -96,7 +91,7 @@ func TestExec(t *testing.T) {
[]string{"testtool"},
pkg.ExecPath{},
), nil, nil, pkg.ErrInvalidPaths},
), nil, pkg.Checksum{}, pkg.ErrInvalidPaths},
})
// check init failure passthrough
@@ -113,35 +108,17 @@ func TestExec(t *testing.T) {
}
testtoolDestroy(t, base, c)
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/" + wantOfflineEncode: {Mode: fs.ModeDir | 0500},
"checksum/" + wantOfflineEncode + "/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb": {Mode: 0400, Data: []byte{}},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
"identifier/" + expected.Offline: {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/" + wantOfflineEncode)},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("Q5DluWQCAeohLoiGRImurwFp3vdz9IfQCoj7Fuhh73s4KQPRHpEQEnHTdNHmB8Fx")},
{"net", pkg.CValidateKnown, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
testtool, testtoolDestroy := newTesttool()
wantNet := expectsFS{
".": {Mode: fs.ModeDir | 0500},
"check": {Mode: 0400, Data: []byte("net")},
}
wantChecksum := pkg.MustDecode(
"a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W",
)
cureMany(t, c, []cureStep{
{"container", pkg.NewExec(
"exec-net", new(wantNet.hash()), 0, false,
"exec-net", &wantChecksum, 0, false,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1"},
check.MustAbs("/opt/bin/testtool"),
@@ -161,27 +138,11 @@ func TestExec(t *testing.T) {
},
}),
pkg.MustPath("/opt", false, testtool),
), ignorePathname, wantNet, nil},
), ignorePathname, wantChecksum, nil},
})
testtoolDestroy(t, base, c)
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb": {Mode: 0400, Data: []byte{}},
"checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W": {Mode: fs.ModeDir | 0500},
"checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W/check": {Mode: 0400, Data: []byte("net")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/" + expected.Net: {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("bPYvvqxpfV7xcC1EptqyKNK1klLJgYHMDkzBcoOyK6j_Aj5hb0mXNPwTwPSK5F6Z")},
{"overlay root", pkg.CValidateKnown, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
testtool, testtoolDestroy := newTesttool()
@@ -202,25 +163,11 @@ func TestExec(t *testing.T) {
},
}),
pkg.MustPath("/opt", false, testtool),
), ignorePathname, wantOffline, nil},
), ignorePathname, wantChecksumOffline, nil},
})
testtoolDestroy(t, base, c)
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/" + wantOfflineEncode: {Mode: fs.ModeDir | 0500},
"checksum/" + wantOfflineEncode + "/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/" + expected.OvlRoot: {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/" + wantOfflineEncode)},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("PO2DSSCa4yoSgEYRcCSZfQfwow1yRigL3Ry-hI0RDI4aGuFBha-EfXeSJnG_5_Rl")},
{"overlay work", pkg.CValidateKnown, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
testtool, testtoolDestroy := newTesttool()
@@ -246,25 +193,11 @@ func TestExec(t *testing.T) {
return os.MkdirAll(t.GetWorkDir().String(), 0700)
},
}), pkg.Path(pkg.AbsWork, false /* ignored */, testtool),
), ignorePathname, wantOffline, nil},
), ignorePathname, wantChecksumOffline, nil},
})
testtoolDestroy(t, base, c)
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/" + wantOfflineEncode: {Mode: fs.ModeDir | 0500},
"checksum/" + wantOfflineEncode + "/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/" + expected.Work: {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/" + wantOfflineEncode)},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("iaRt6l_Wm2n-h5UsDewZxQkCmjZjyL8r7wv32QT2kyV55-Lx09Dq4gfg9BiwPnKs")},
{"multiple layers", pkg.CValidateKnown, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
testtool, testtoolDestroy := newTesttool()
@@ -312,30 +245,11 @@ func TestExec(t *testing.T) {
},
}),
pkg.MustPath("/opt", false, testtool),
), ignorePathname, wantOffline, nil},
), ignorePathname, wantChecksumOffline, nil},
})
testtoolDestroy(t, base, c)
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/" + wantOfflineEncode: {Mode: fs.ModeDir | 0500},
"checksum/" + wantOfflineEncode + "/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb": {Mode: 0400, Data: []byte{}},
"checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK": {Mode: fs.ModeDir | 0500},
"checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK/check": {Mode: 0400, Data: []byte("layers")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
"identifier/B-kc5iJMx8GtlCua4dz6BiJHnDAOUfPjgpbKq4e-QEn0_CZkSYs3fOA1ve06qMs2": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
"identifier/" + expected.Layers: {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/" + wantOfflineEncode)},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("O2YzyR7IUGU5J2CADy0hUZ3A5NkP_Vwzs4UadEdn2oMZZVWRtH0xZGJ3HXiimTnZ")},
{"overlay layer promotion", pkg.CValidateKnown, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
testtool, testtoolDestroy := newTesttool()
@@ -362,26 +276,11 @@ func TestExec(t *testing.T) {
},
}),
pkg.MustPath("/opt", false, testtool),
), ignorePathname, wantOffline, nil},
), ignorePathname, wantChecksumOffline, nil},
})
testtoolDestroy(t, base, c)
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/" + wantOfflineEncode: {Mode: fs.ModeDir | 0500},
"checksum/" + wantOfflineEncode + "/check": {Mode: 0400, Data: []byte{0}},
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/kvJIqZo5DKFOxC2ZQ-8_nPaQzEAz9cIm3p6guO-uLqm-xaiPu7oRkSnsu411jd_U": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
"identifier/" + expected.Promote: {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/" + wantOfflineEncode)},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("3EaW6WibLi9gl03_UieiFPaFcPy5p4x3JPxrnLJxGaTI-bh3HU9DK9IMx7c3rrNm")},
})
}

View File

@@ -1,7 +1,6 @@
package pkg_test
import (
"io/fs"
"testing"
"hakurei.app/check"
@@ -11,25 +10,18 @@ import (
func TestFile(t *testing.T) {
t.Parallel()
want := expectsFile{0}
checkWithCache(t, []cacheTestCase{
{"file", pkg.CValidateKnown, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
cureMany(t, c, []cureStep{
{"short", pkg.NewFile("null", []byte{0}), base.Append(
"identifier",
"3376ALA7hIUm2LbzH2fDvRezgzod1eTK_G6XjyOgbM2u-6swvkFaF0BOwSl_juBi",
), want, nil},
), pkg.MustDecode(
"vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX",
), nil},
})
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/" + pkg.Encode(want.hash()): {Mode: 0400, Data: []byte{0}},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/3376ALA7hIUm2LbzH2fDvRezgzod1eTK_G6XjyOgbM2u-6swvkFaF0BOwSl_juBi": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode(
"iR6H5OIsyOW4EwEgtm9rGzGF6DVtyHLySEtwnFE8bnus9VJcoCbR4JIek7Lw-vwT",
)},
})
}

View File

@@ -1,2 +0,0 @@
// Package expected contains expected identifiers for exec artifact tests.
package expected

View File

@@ -1,10 +0,0 @@
package expected
const (
Offline = "dztPS6jRjiZtCF4_p8AzfnxGp6obkhrgFVsxdodbKWUoAEVtDz3MykepJB4kI_ks"
OvlRoot = "RdMA-mubnrHuu3Ky1wWyxauSYCO0ZH_zCPUj3uDHqkfwv5sGcByoF_g5PjlGiClb"
Layers = "p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT"
Net = "G8qPxD9puvvoOVV7lrT80eyDeIl3G_CCFoKw12c8mCjMdG1zF7NEPkwYpNubClK3"
Promote = "xXTIYcXmgJWNLC91c417RRrNM9cjELwEZHpGvf8Fk_GNP5agRJp_SicD0w9aMeLJ"
Work = "5hlaukCirnXE4W_RSLJFOZN47Z5RiHnacXzdFp_70cLgiJUGR6cSb_HaFftkzi0-"
)

View File

@@ -432,12 +432,6 @@ func (e InvalidKindError) Error() string {
// register is not safe for concurrent use. register must not be called after
// the first instance of [Cache] has been opened.
func register(k Kind, f IRReadFunc) {
openMu.Lock()
defer openMu.Unlock()
if opened {
panic("attempting to register after open")
}
if _, ok := irArtifact[k]; ok {
panic("attempting to register " + strconv.Itoa(int(k)) + " twice")
}

View File

@@ -3,7 +3,6 @@ package pkg_test
import (
"bytes"
"io"
"io/fs"
"reflect"
"testing"
@@ -106,12 +105,9 @@ func TestIRRoundtrip(t *testing.T) {
if err := <-done; err != nil {
t.Fatalf("EncodeAll: error = %v", err)
}
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"identifier": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
},
}, pkg.MustDecode(
"E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C",
),
}
}
checkWithCache(t, testCasesCache)

View File

@@ -3,7 +3,6 @@ package pkg_test
import (
"crypto/sha512"
"io"
"io/fs"
"net/http"
"reflect"
"testing"
@@ -86,12 +85,7 @@ func TestHTTPGet(t *testing.T) {
if _, err := f.Cure(r); !reflect.DeepEqual(err, wantErrNotFound) {
t.Fatalf("Cure: error = %#v, want %#v", err, wantErrNotFound)
}
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"identifier": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C")},
{"cure", pkg.CValidateKnown, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
r := newRContext(t, c)
@@ -150,16 +144,6 @@ func TestHTTPGet(t *testing.T) {
if _, _, err := c.Cure(f); !reflect.DeepEqual(err, wantErrNotFound) {
t.Fatalf("Pathname: error = %#v, want %#v", err, wantErrNotFound)
}
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU": {Mode: 0400, Data: []byte("\x7f\xe1\x69\xa2\xdd\x63\x96\x26\x83\x79\x61\x8b\xf0\x3f\xd5\x16\x9a\x39\x3a\xdb\xcf\xb1\xbc\x8d\x33\xff\x75\xee\x62\x56\xa9\xf0\x27\xac\x13\x94\x69")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/oM-2pUlk-mOxK1t3aMWZer69UdOQlAXiAgMrpZ1476VoOqpYVP1aGFS9_HYy-D8_": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("L_0RFHpr9JUS4Zp14rz2dESSRvfLzpvqsLhR1-YjQt8hYlmEdVl7vI3_-v8UNPKs")},
})
}

View File

@@ -18,7 +18,6 @@ import (
"path/filepath"
"runtime"
"slices"
"strconv"
"strings"
"sync"
"sync/atomic"
@@ -71,64 +70,6 @@ func MustDecode(s string) (checksum Checksum) {
return
}
var (
// extension is a string uniquely identifying a set of custom [Artifact]
// implementations registered by calling [Register].
extension string
// openMu synchronises access to global state for initialisation.
openMu sync.Mutex
// opened is false if [Open] was never called.
opened bool
)
// Extension returns a string uniquely identifying the currently registered set
// of custom [Artifact], or the zero value if none was registered.
func Extension() string { return extension }
// ValidExtension returns whether s is valid for use in a call to SetExtension.
func ValidExtension(s string) bool {
if l := len(s); l == 0 || l > 128 {
return false
}
for _, v := range s {
if v < 'a' || v > 'z' {
return false
}
}
return true
}
// ErrInvalidExtension is returned for a variant identification string for which
// [ValidExtension] returns false.
var ErrInvalidExtension = errors.New("invalid extension variant identification string")
// SetExtension sets the extension variant identification string. SetExtension
// must be called before [Open] if custom [Artifact] implementations had been
// recorded by calling [Register].
//
// The variant identification string must be between 1 and 128 bytes long and
// consists of only bytes between 'a' and 'z'.
//
// SetExtension is not safe for concurrent use. SetExtension is called at most
// once and must not be called after the first instance of Cache has been opened.
func SetExtension(s string) {
openMu.Lock()
defer openMu.Unlock()
if opened {
panic("attempting to set extension after open")
}
if extension != "" {
panic("attempting to set extension twice")
}
if !ValidExtension(s) {
panic(ErrInvalidExtension)
}
extension = s
statusHeader = makeStatusHeader(s)
}
// common holds elements and receives methods shared between different contexts.
type common struct {
// Context specific to this [Artifact]. The toplevel context in [Cache] must
@@ -161,27 +102,19 @@ type TContext struct {
common
}
// makeStatusHeader creates the header written to every status file. This should
// not be called directly, its result is stored in statusHeader and will not
// change after the first [Cache] is opened.
func makeStatusHeader(extension string) string {
// statusHeader is the header written to all status files in dirStatus.
var statusHeader = func() string {
s := programName
if v := info.Version(); v != info.FallbackVersion {
s += " " + v
}
if extension != "" {
s += " with " + extension + " extensions"
}
s += " (" + runtime.GOARCH + ")"
if name, err := os.Hostname(); err == nil {
s += " on " + name
}
s += "\n\n"
return s
}
// statusHeader is the header written to all status files in dirStatus.
var statusHeader = makeStatusHeader("")
}()
// prepareStatus initialises the status file once.
func (t *TContext) prepareStatus() error {
@@ -494,9 +427,6 @@ const (
// KindFile is the kind of [Artifact] returned by [NewFile].
KindFile
// _kindEnd is the total number of kinds and does not denote a kind.
_kindEnd
// KindCustomOffset is the first [Kind] value reserved for implementations
// not from this package.
KindCustomOffset = 1 << 31
@@ -511,9 +441,6 @@ const (
// fileLock is the file name appended to Cache.base for guaranteeing
// exclusive access to the cache directory.
fileLock = "lock"
// fileVariant is the file name appended to Cache.base holding the variant
// identification string set by a prior call to [SetExtension].
fileVariant = "variant"
// dirIdentifier is the directory name appended to Cache.base for storing
// artifacts named after their [ID].
@@ -613,14 +540,6 @@ const (
// impurity due to [KindExecNet] being [KnownChecksum]. This flag exists
// to support kernels without Landlock LSM enabled.
CHostAbstract
// CPromoteVariant allows [pkg.Open] to promote an unextended on-disk cache
// to the current extension variant. This is a one-way operation.
CPromoteVariant
// CSuppressInit arranges for verbose output of the container init to be
// suppressed regardless of [message.Msg] state.
CSuppressInit
)
// toplevel holds [context.WithCancel] over caller-supplied context, where all
@@ -2011,20 +1930,6 @@ func (c *Cache) Close() {
c.unlock()
}
// UnsupportedVariantError describes an on-disk cache with an extension variant
// identification string that differs from the value returned by [Extension].
type UnsupportedVariantError string
func (e UnsupportedVariantError) Error() string {
return "unsupported variant " + strconv.Quote(string(e))
}
var (
// ErrWouldPromote is returned by [Open] if the [CPromoteVariant] bit is not
// set and the on-disk cache requires variant promotion.
ErrWouldPromote = errors.New("operation would promote unextended cache")
)
// Open returns the address of a newly opened instance of [Cache].
//
// Concurrent cures of a [FloodArtifact] dependency graph is limited to the
@@ -2056,14 +1961,6 @@ func open(
base *check.Absolute,
lock bool,
) (*Cache, error) {
openMu.Lock()
defer openMu.Unlock()
opened = true
if extension == "" && len(irArtifact) != int(_kindEnd) {
panic("attempting to open cache with incomplete variant setup")
}
if cures < 1 {
cures = runtime.NumCPU()
}
@@ -2077,10 +1974,8 @@ func open(
dirStatus,
dirWork,
} {
if err := os.MkdirAll(
base.Append(name).String(),
0700,
); err != nil && !errors.Is(err, os.ErrExist) {
if err := os.MkdirAll(base.Append(name).String(), 0700); err != nil &&
!errors.Is(err, os.ErrExist) {
return nil, err
}
}
@@ -2118,45 +2013,6 @@ func open(
c.unlock = func() {}
}
variantPath := base.Append(fileVariant).String()
if p, err := os.ReadFile(variantPath); err != nil {
if !errors.Is(err, os.ErrNotExist) {
c.unlock()
return nil, err
}
// nonexistence implies newly created cache, or a cache predating
// variant identification strings, in which case it is silently promoted
if err = os.WriteFile(
variantPath,
[]byte(extension),
0400,
); err != nil {
c.unlock()
return nil, err
}
} else if s := string(p); s == "" {
if extension != "" {
if flags&CPromoteVariant == 0 {
c.unlock()
return nil, ErrWouldPromote
}
if err = os.WriteFile(
variantPath,
[]byte(extension),
0400,
); err != nil {
c.unlock()
return nil, err
}
}
} else if !ValidExtension(s) {
c.unlock()
return nil, ErrInvalidExtension
} else if s != extension {
c.unlock()
return nil, UnsupportedVariantError(s)
}
return &c, nil
}

View File

@@ -16,11 +16,9 @@ import (
"path/filepath"
"reflect"
"strconv"
"strings"
"sync"
"syscall"
"testing"
"testing/fstest"
"unique"
"unsafe"
@@ -43,25 +41,6 @@ func unsafeOpen(
lock bool,
) (*pkg.Cache, error)
var (
// extension is a string uniquely identifying a set of custom [Artifact]
// implementations registered by calling [Register].
//
//go:linkname extension hakurei.app/internal/pkg.extension
extension string
// opened is false if [Open] was never called.
//
//go:linkname opened hakurei.app/internal/pkg.opened
opened bool
// irArtifact refers to artifact IR interpretation functions and must not be
// written to directly.
//
//go:linkname irArtifact hakurei.app/internal/pkg.irArtifact
irArtifact map[pkg.Kind]pkg.IRReadFunc
)
// newRContext returns the address of a new [pkg.RContext] unsafely created for
// the specified [testing.TB].
func newRContext(tb testing.TB, c *pkg.Cache) *pkg.RContext {
@@ -288,99 +267,6 @@ func TestIdent(t *testing.T) {
}
}
// An expectsKnown describes an expected file or directory.
type expectsKnown interface {
// hash returns the checksum of the represented data.
hash() (checksum pkg.Checksum)
}
// An expectsChecksum is a prepared checksum value.
type expectsChecksum pkg.Checksum
// hash returns e.
func (e expectsChecksum) hash() pkg.Checksum { return e }
// An expectsFile is the contents of a file expected by the test suite.
type expectsFile []byte
// hash computes the checksum of e.
func (e expectsFile) hash() (checksum pkg.Checksum) {
h := sha512.New384()
h.Write(e)
h.Sum(checksum[:0])
return
}
// An expectsFS describes the state of a filesystem expected by the test suite.
type expectsFS fstest.MapFS
// hash computes the checksum of e.
func (e expectsFS) hash() (checksum pkg.Checksum) {
if err := pkg.HashFS(&checksum, fstest.MapFS(e), "."); err != nil {
panic(err)
}
return
}
// expectsFrom generates expectsFS for a filesystem directory.
func expectsFrom(pathname string) string {
var buf strings.Builder
buf.WriteString("expectsFS{\n")
if err := filepath.WalkDir(pathname, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
var rel string
if rel, err = filepath.Rel(pathname, path); err != nil {
return err
}
buf.WriteString("\t" + strconv.Quote(rel) + ": {Mode: ")
var fi fs.FileInfo
if fi, err = d.Info(); err != nil {
return err
}
mode := fi.Mode()
switch {
case mode.IsDir():
buf.WriteString("fs.ModeDir | 0" +
strconv.FormatInt(int64(mode&^fs.ModeDir), 8))
case mode&fs.ModeSymlink != 0:
buf.WriteString("fs.ModeSymlink | 0" +
strconv.FormatInt(int64(mode&^fs.ModeSymlink), 8) +
", Data: []byte(")
var linkname string
if linkname, err = os.Readlink(path); err != nil {
return err
}
buf.WriteString(strconv.Quote(linkname))
buf.WriteByte(')')
case mode.IsRegular():
buf.WriteString("0" + strconv.FormatInt(int64(mode), 8))
var p []byte
if p, err = os.ReadFile(path); err != nil {
return err
}
if len(p) > 0 {
buf.WriteString(", Data: []byte(")
buf.WriteString(strconv.Quote(unsafe.String(unsafe.SliceData(p), len(p))))
buf.WriteByte(')')
}
}
buf.WriteString("},\n")
return nil
}); err != nil {
panic(err)
}
buf.WriteString("}")
return buf.String()
}
// cacheTestCase is a test case passed to checkWithCache where a new instance
// of [pkg.Cache] is prepared for the test case, and is validated and removed
// on test completion.
@@ -389,7 +275,7 @@ type cacheTestCase struct {
flags int
early func(t *testing.T, base *check.Absolute)
f func(t *testing.T, base *check.Absolute, c *pkg.Cache)
want expectsFS
want pkg.Checksum
}
// checkWithCache runs a slice of cacheTestCase.
@@ -456,33 +342,24 @@ func checkWithCache(t *testing.T, testCases []cacheTestCase) {
restoreTemp = true
}
// destroy lock and variant file to avoid changing cache checksums
for _, s := range []string{
"lock",
"variant",
} {
pathname := base.Append(s)
if p, err := os.ReadFile(pathname.String()); err != nil {
t.Fatal(err)
} else if len(p) != 0 {
t.Fatalf("file %q: %q", s, string(p))
}
if err := os.Remove(pathname.String()); err != nil {
t.Fatal(err)
}
// destroy lock file to avoid changing cache checksums
if err := os.Remove(base.Append("lock").String()); err != nil {
t.Fatal(err)
}
// destroy non-deterministic status files
if err := os.RemoveAll(base.Append("status").String()); err != nil {
t.Fatal(err)
}
want := tc.want.hash()
var checksum pkg.Checksum
if err := pkg.HashDir(&checksum, base); err != nil {
t.Fatalf("HashDir: error = %v", err)
} else if checksum != want {
t.Fatal(expectsFrom(base.String()))
} else if checksum != tc.want {
t.Fatalf("HashDir: %v", &pkg.ChecksumMismatchError{
Got: checksum,
Want: tc.want,
})
}
if err := scrubFunc(); err != nil {
@@ -501,10 +378,10 @@ func checkWithCache(t *testing.T, testCases []cacheTestCase) {
// validate again to make sure scrub did not condemn anything
if err := pkg.HashDir(&checksum, base); err != nil {
t.Fatalf("HashDir: error = %v", err)
} else if checksum != want {
} else if checksum != tc.want {
t.Fatalf("(scrubbed) HashDir: %v", &pkg.ChecksumMismatchError{
Got: checksum,
Want: want,
Want: tc.want,
})
}
})
@@ -518,7 +395,7 @@ type cureStep struct {
a pkg.Artifact
pathname *check.Absolute
output expectsKnown
checksum pkg.Checksum
err error
}
@@ -542,12 +419,14 @@ func cureMany(t *testing.T, c *pkg.Cache, steps []cureStep) {
t.Fatalf("Cure: error = %v, want %v", err, step.err)
} else if step.pathname != ignorePathname && !pathname.Is(step.pathname) {
t.Fatalf("Cure: pathname = %q, want %q", pathname, step.pathname)
} else if step.output == nil || checksum != makeChecksumH(step.output.hash()) {
if pathname != nil {
t.Fatal(expectsFrom(pathname.String()))
} else if checksum != (unique.Handle[pkg.Checksum]{}) {
t.Fatalf("Cure: unexpected checksum %s", pkg.Encode(checksum.Value()))
} else if checksum != makeChecksumH(step.checksum) {
if checksum == (unique.Handle[pkg.Checksum]{}) {
checksum = unique.Make(pkg.Checksum{})
}
t.Fatalf(
"Cure: checksum = %s, want %s",
pkg.Encode(checksum.Value()), pkg.Encode(step.checksum),
)
} else {
v := any(err)
if err == nil {
@@ -608,12 +487,18 @@ func newWantScrubError(base *check.Absolute) *pkg.ScrubError {
func TestCache(t *testing.T) {
t.Parallel()
testdata := expectsFile("" +
const testdata = "" +
"\x00\x00\x00\x00" +
"\xad\x0b\x00" +
"\x04" +
"\xfe\xfe\x00\x00" +
"\xfe\xca\x00\x00")
"\xfe\xca\x00\x00"
testdataChecksum := func() pkg.Checksum {
h := sha512.New384()
h.Write([]byte(testdata))
return (pkg.Checksum)(h.Sum(nil))
}()
testCases := []cacheTestCase{
{"file", pkg.CValidateKnown | pkg.CAssumeChecksum, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
@@ -636,31 +521,31 @@ func TestCache(t *testing.T) {
{"initial file", newStubFile(
pkg.KindHTTPGet,
identifier,
new(testdata.hash()),
testdata, nil,
), wantPathname, testdata, nil},
&testdataChecksum,
[]byte(testdata), nil,
), wantPathname, testdataChecksum, nil},
{"identical content", newStubFile(
pkg.KindHTTPGet,
identifier0,
new(testdata.hash()),
testdata, nil,
), wantPathname0, testdata, nil},
&testdataChecksum,
[]byte(testdata), nil,
), wantPathname0, testdataChecksum, nil},
{"existing entry", newStubFile(
pkg.KindHTTPGet,
identifier,
new(testdata.hash()),
testdata, nil,
), wantPathname, testdata, nil},
&testdataChecksum,
[]byte(testdata), nil,
), wantPathname, testdataChecksum, nil},
{"checksum mismatch", newStubFile(
pkg.KindHTTPGet,
pkg.ID{0xff, 0},
new(pkg.Checksum),
testdata, nil,
), nil, nil, &pkg.ChecksumMismatchError{
Got: testdata.hash(),
[]byte(testdata), nil,
), nil, pkg.Checksum{}, &pkg.ChecksumMismatchError{
Got: testdataChecksum,
}},
{"store without validation", newStubFile(
@@ -671,7 +556,7 @@ func TestCache(t *testing.T) {
), base.Append(
"identifier",
"vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX",
), expectsChecksum{
), pkg.Checksum{
0xbe, 0xc0, 0x21, 0xb4, 0xf3, 0x68,
0xe3, 0x06, 0x91, 0x34, 0xe0, 0x12,
0xc2, 0xb4, 0x30, 0x70, 0x83, 0xd3,
@@ -685,7 +570,7 @@ func TestCache(t *testing.T) {
{"incomplete implementation", struct{ pkg.Artifact }{&stubArtifact{
kind: pkg.KindExec,
params: []byte("artifact overridden to be incomplete"),
}}, nil, nil, pkg.InvalidArtifactError(pkg.MustDecode(
}}, nil, pkg.Checksum{}, pkg.InvalidArtifactError(pkg.MustDecode(
"E__uZ1sLIvb84vzSm5Uezb03RogsiaeTt1nfIVv8TKnnf4LqwtSi-smdHhlkZrUJ",
))},
@@ -694,18 +579,18 @@ func TestCache(t *testing.T) {
pkg.ID{0xff, 1},
nil,
nil, stub.UniqueError(0xcafe),
), nil, nil, stub.UniqueError(0xcafe)},
), nil, pkg.Checksum{}, stub.UniqueError(0xcafe)},
{"error caching", newStubFile(
pkg.KindHTTPGet,
pkg.ID{0xff, 1},
nil,
nil, nil,
), nil, nil, stub.UniqueError(0xcafe)},
), nil, pkg.Checksum{}, stub.UniqueError(0xcafe)},
{"cache hit bad type", overrideChecksum{testdata.hash(), overrideIdent{pkg.ID{0xff, 2}, &stubArtifact{
{"cache hit bad type", overrideChecksum{testdataChecksum, overrideIdent{pkg.ID{0xff, 2}, &stubArtifact{
kind: pkg.KindTar,
}}}, nil, nil, pkg.InvalidFileModeError(
}}}, nil, pkg.Checksum{}, pkg.InvalidFileModeError(
0400,
)},
@@ -725,7 +610,7 @@ func TestCache(t *testing.T) {
cure: func(f *pkg.FContext) error {
panic("attempting to cure impossible artifact")
},
}, nil, nil, &pkg.DependencyCureError{
}, nil, pkg.Checksum{}, &pkg.DependencyCureError{
{
Ident: unique.Make(pkg.ID{0xff, 3}),
Err: struct {
@@ -747,18 +632,18 @@ func TestCache(t *testing.T) {
cureMany(t, c0, []cureStep{
{"cache hit ident", overrideIdent{
id: identifier,
}, wantPathname, testdata, nil},
}, wantPathname, testdataChecksum, nil},
{"cache miss checksum match", newStubFile(
pkg.KindHTTPGet,
testdata.hash(),
testdataChecksum,
nil,
testdata,
[]byte(testdata),
nil,
), base.Append(
"identifier",
pkg.Encode(testdata.hash()),
), testdata, nil},
pkg.Encode(testdataChecksum),
), testdataChecksum, nil},
})
// cure after close
@@ -771,21 +656,7 @@ func TestCache(t *testing.T) {
t.Fatalf("(closed) Cure: error = %v", err)
}
}
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX": {Mode: 0400, Data: []byte{0}},
"checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq": {Mode: 0400, Data: []byte{0, 0, 0, 0, 0xad, 0xb, 0, 4, 0xfe, 0xfe, 0, 0, 0xfe, 0xca, 0, 0}},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
"identifier/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq")},
"identifier/cafebabecafebabecafebabecafebabecafebabecafebabecafebabecafebabe": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq")},
"identifier/deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/0bSFPu5Tnd-2Jj0Mv6co23PW2t3BmHc7eLFj9TgY3eIBg8zislo7xZYNBqovVLcq")},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("St9rlE-mGZ5gXwiv_hzQ_B8bZP-UUvSNmf4nHUZzCMOumb6hKnheZSe0dmnuc4Q2")},
{"directory", pkg.CAssumeChecksum, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
id := pkg.MustDecode(
@@ -820,16 +691,9 @@ func TestCache(t *testing.T) {
).String(),
)
}
want := expectsFS{
".": {Mode: fs.ModeDir | 0500},
"check": {Mode: 0400, Data: []byte{0, 0}},
"lib": {Mode: fs.ModeDir | 0700},
"lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"lib/pkgconfig": {Mode: fs.ModeDir | 0700},
}
wantChecksum := pkg.MustDecode(
"qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b",
)
wantPathname := base.Append(
"identifier",
pkg.Encode(id),
@@ -883,33 +747,33 @@ func TestCache(t *testing.T) {
}
cureMany(t, c, []cureStep{
{"initial directory", overrideChecksum{want.hash(), overrideIdent{id, &stubArtifact{
{"initial directory", overrideChecksum{wantChecksum, overrideIdent{id, &stubArtifact{
kind: pkg.KindTar,
cure: makeSample,
}}}, wantPathname, want, nil},
}}}, wantPathname, wantChecksum, nil},
{"identical identifier", overrideChecksum{want.hash(), overrideIdent{id, &stubArtifact{
{"identical identifier", overrideChecksum{wantChecksum, overrideIdent{id, &stubArtifact{
kind: pkg.KindTar,
}}}, wantPathname, want, nil},
}}}, wantPathname, wantChecksum, nil},
{"identical checksum", overrideIdent{id0, &stubArtifact{
kind: pkg.KindTar,
cure: makeSample,
}}, wantPathname0, want, nil},
}}, wantPathname0, wantChecksum, nil},
{"cure fault", overrideIdent{pkg.ID{0xff, 0}, &stubArtifact{
kind: pkg.KindTar,
cure: func(t *pkg.TContext) error {
return makeGarbage(t.GetWorkDir(), stub.UniqueError(0xcafe))
},
}}, nil, nil, stub.UniqueError(0xcafe)},
}}, nil, pkg.Checksum{}, stub.UniqueError(0xcafe)},
{"checksum mismatch", overrideChecksum{pkg.Checksum{}, overrideIdent{pkg.ID{0xff, 1}, &stubArtifact{
kind: pkg.KindTar,
cure: func(t *pkg.TContext) error {
return makeGarbage(t.GetWorkDir(), nil)
},
}}}, nil, nil, &pkg.ChecksumMismatchError{
}}}, nil, pkg.Checksum{}, &pkg.ChecksumMismatchError{
Got: pkg.MustDecode(
"CUx-3hSbTWPsbMfDhgalG4Ni_GmR9TnVX8F99tY_P5GtkYvczg9RrF5zO0jX9XYT",
),
@@ -918,27 +782,27 @@ func TestCache(t *testing.T) {
{"cache hit bad type", newStubFile(
pkg.KindHTTPGet,
pkg.ID{0xff, 2},
new(want.hash()),
testdata, nil,
), nil, nil, pkg.InvalidFileModeError(
&wantChecksum,
[]byte(testdata), nil,
), nil, pkg.Checksum{}, pkg.InvalidFileModeError(
fs.ModeDir | 0500,
)},
{"openFile directory", overrideIdent{pkg.ID{0xff, 3}, &stubArtifact{
kind: pkg.KindTar,
cure: func(t *pkg.TContext) error {
r, err := t.Open(overrideChecksumFile{checksum: want.hash()})
r, err := t.Open(overrideChecksumFile{checksum: wantChecksum})
if err != nil {
panic(err)
}
_, err = io.ReadAll(r)
return err
},
}}, nil, nil, &os.PathError{
}}, nil, pkg.Checksum{}, &os.PathError{
Op: "read",
Path: base.Append(
"checksum",
pkg.Encode(want.hash()),
pkg.Encode(wantChecksum),
).String(),
Err: syscall.EISDIR,
}},
@@ -948,14 +812,14 @@ func TestCache(t *testing.T) {
cure: func(t *pkg.TContext) error {
return nil
},
}}, nil, nil, pkg.NoOutputError{}},
}}, nil, pkg.Checksum{}, pkg.NoOutputError{}},
{"file output", overrideIdent{pkg.ID{0xff, 5}, &stubArtifact{
kind: pkg.KindTar,
cure: func(t *pkg.TContext) error {
return os.WriteFile(t.GetWorkDir().String(), []byte{0}, 0400)
},
}}, nil, nil, errors.New("non-file artifact produced regular file")},
}}, nil, pkg.Checksum{}, errors.New("non-file artifact produced regular file")},
{"symlink output", overrideIdent{pkg.ID{0xff, 6}, &stubArtifact{
kind: pkg.KindTar,
@@ -965,26 +829,11 @@ func TestCache(t *testing.T) {
t.GetWorkDir().String(),
)
},
}}, nil, nil, pkg.InvalidFileModeError(
}}, nil, pkg.Checksum{}, pkg.InvalidFileModeError(
fs.ModeSymlink | 0777,
)},
})
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b": {Mode: fs.ModeDir | 0500},
"checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/check": {Mode: 0400, Data: []byte{0, 0}},
"checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/lib": {Mode: fs.ModeDir | 0700},
"checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/lib/pkgconfig": {Mode: fs.ModeDir | 0700},
"checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b/lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b")},
"identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/qRN6in76LndiiOZJheHkwyW8UT1N5-f-bXvHfDvwrMw2fSkOoZdh8pWE1qhLk65b")},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("WVpvsVqVKg9Nsh744x57h51AuWUoUR2nnh8Md-EYBQpk6ziyTuUn6PLtF2e0Eu_d")},
{"pending", pkg.CValidateKnown, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
wantErr := stub.UniqueError(0xcafe)
@@ -1020,7 +869,7 @@ func TestCache(t *testing.T) {
pkg.ID{0xff, 1},
nil,
nil, stub.UniqueError(0xbad),
), nil, nil, stub.UniqueError(0xbad)},
), nil, pkg.Checksum{}, stub.UniqueError(0xbad)},
{"file output", overrideIdent{pkg.ID{0xff, 2}, &stubArtifact{
kind: pkg.KindTar,
@@ -1031,7 +880,7 @@ func TestCache(t *testing.T) {
0400,
)
},
}}, nil, nil, errors.New(
}}, nil, pkg.Checksum{}, errors.New(
"non-file artifact produced regular file",
)},
})
@@ -1053,12 +902,7 @@ func TestCache(t *testing.T) {
for c.Done(unique.Make(pkg.ID{0xff})) != nil {
}
<-wCureDone
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"identifier": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C")},
{"cancel abort block", pkg.CValidateKnown, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
var wg sync.WaitGroup
@@ -1111,12 +955,7 @@ func TestCache(t *testing.T) {
c.Close()
c.Abort()
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"identifier": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C")},
{"no assume checksum", 0, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
makeGarbage := func(work *check.Absolute, wantErr error) error {
@@ -1133,12 +972,10 @@ func TestCache(t *testing.T) {
return wantErr
}
want := expectsChecksum(pkg.MustDecode(
"Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M",
))
wantChecksum := pkg.MustDecode("Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M")
cureMany(t, c, []cureStep{
{"create", overrideChecksum{want.hash(), overrideIdent{pkg.ID{0xff, 0}, &stubArtifact{
{"create", overrideChecksum{wantChecksum, overrideIdent{pkg.ID{0xff, 0}, &stubArtifact{
kind: pkg.KindTar,
cure: func(t *pkg.TContext) error {
return makeGarbage(t.GetWorkDir(), nil)
@@ -1146,16 +983,16 @@ func TestCache(t *testing.T) {
}}}, base.Append(
"identifier",
pkg.Encode(pkg.ID{0xff, 0}),
), want, nil},
), wantChecksum, nil},
{"reject", overrideChecksum{want.hash(), overrideIdent{pkg.ID{0xfe, 1}, &stubArtifact{
{"reject", overrideChecksum{wantChecksum, overrideIdent{pkg.ID{0xfe, 1}, &stubArtifact{
kind: pkg.KindTar,
cure: func(t *pkg.TContext) error {
return makeGarbage(t.GetWorkDir(), stub.UniqueError(0xbad))
},
}}}, nil, nil, stub.UniqueError(0xbad)},
}}}, nil, pkg.Checksum{}, stub.UniqueError(0xbad)},
{"match", overrideChecksum{want.hash(), overrideIdent{pkg.ID{0xff, 1}, &stubArtifact{
{"match", overrideChecksum{wantChecksum, overrideIdent{pkg.ID{0xff, 1}, &stubArtifact{
kind: pkg.KindTar,
cure: func(t *pkg.TContext) error {
return makeGarbage(t.GetWorkDir(), nil)
@@ -1163,21 +1000,9 @@ func TestCache(t *testing.T) {
}}}, base.Append(
"identifier",
pkg.Encode(pkg.ID{0xff, 1}),
), want, nil},
), wantChecksum, nil},
})
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M": {Mode: fs.ModeDir | 0500},
"checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M/check": {Mode: 0400, Data: []byte{}},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/_wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M")},
"identifier/_wEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/Aubi5EG4_Y8DhL9bQ3Q4HFBhLRF7X5gt9D3CNCQfT-TeBtlRXc7Zi_JYZEMoCC7M")},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("OC290t23aimNo2Rp2pPwan5GI2KRLRdOwYxXQMD9jw0QROgHnNXWodoWdV0hwu2w")},
{"scrub", 0, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
cureMany(t, c, []cureStep{
@@ -1189,7 +1014,7 @@ func TestCache(t *testing.T) {
), base.Append(
"identifier",
pkg.Encode(pkg.Checksum{0xfe, 0}),
), expectsChecksum{0xff, 0}, nil},
), pkg.Checksum{0xff, 0}, nil},
})
for _, p := range [][]string{
@@ -1222,12 +1047,7 @@ func TestCache(t *testing.T) {
if err := c.Scrub(1 << 6); !reflect.DeepEqual(err, wantErr) {
t.Fatalf("Scrub: error =\n%s\nwant\n%s", err, wantErr)
}
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"identifier": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C")},
}
checkWithCache(t, testCases)
}
@@ -1281,10 +1101,6 @@ func TestErrors(t *testing.T) {
Want: pkg.IRKindIdent,
Ancillary: 0xcafe,
}, "got invalid kind 48879 IR value (0xcafe) instead of ident"},
{"UnsupportedVariantError", pkg.UnsupportedVariantError(
"rosa",
), `unsupported variant "rosa"`},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
@@ -1493,24 +1309,17 @@ func (a earlyFailureF) Cure(*pkg.FContext) error {
}
func TestDependencyCureErrorEarly(t *testing.T) {
t.Parallel()
checkWithCache(t, []cacheTestCase{
{"early", 0, nil, func(t *testing.T, _ *check.Absolute, c *pkg.Cache) {
_, _, err := c.Cure(earlyFailureF(8))
if !errors.Is(err, stub.UniqueError(0xcafe)) {
t.Fatalf("Cure: error = %v", err)
}
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"identifier": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode("E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C")},
})
}
func TestOpen(t *testing.T) {
func TestNew(t *testing.T) {
t.Parallel()
t.Run("nonexistent", func(t *testing.T) {
@@ -1558,219 +1367,3 @@ func TestOpen(t *testing.T) {
}
})
}
// TestExtensionRegister exercises the package-level extension and variant
// registration state machine: setting the extension, the double-set and
// invalid-value panics, the freeze of SetExtension/Register after Open,
// and creation, promotion and validation of the on-disk "variant" file.
//
// NOTE(review): this test mutates package-level state (extension, opened,
// irArtifact) and so deliberately does not call t.Parallel; its subtests
// run in order and each restores the state it touches via t.Cleanup.
func TestExtensionRegister(t *testing.T) {
	// Save the package-level registration state and restore it on
	// completion so sibling tests observe a clean slate.
	extensionOld := extension
	openedOld := opened
	t.Cleanup(func() { extension = extensionOld; opened = openedOld })
	extension = ""
	opened = false
	// A valid extension can be set once and read back.
	t.Run("set", func(t *testing.T) {
		t.Cleanup(func() { extension = "" })
		const want = "rosa"
		pkg.SetExtension(want)
		if got := pkg.Extension(); got != want {
			t.Fatalf("Extension: %q, want %q", got, want)
		}
	})
	// Setting the extension a second time must panic with a fixed message.
	t.Run("twice", func(t *testing.T) {
		t.Cleanup(func() { extension = "" })
		defer func() {
			const wantPanic = "attempting to set extension twice"
			if r := recover(); r != wantPanic {
				t.Errorf("panic: %#v, want %q", r, wantPanic)
			}
		}()
		pkg.SetExtension("rosa")
		pkg.SetExtension("rosa")
	})
	// A whitespace-only extension must panic with ErrInvalidExtension.
	t.Run("invalid", func(t *testing.T) {
		defer func() {
			var wantPanic = pkg.ErrInvalidExtension
			if r := recover(); r != wantPanic {
				t.Errorf("panic: %#v, want %#v", r, wantPanic)
			}
		}()
		pkg.SetExtension(" ")
	})
	// After a call to Open — even one that fails — SetExtension and
	// Register must refuse further changes; the nested subtests rely on
	// the opened flag set by the failed Open below.
	t.Run("opened", func(t *testing.T) {
		t.Cleanup(func() { opened = false })
		if _, err := pkg.Open(
			t.Context(),
			message.New(log.Default()),
			0, 0, 0,
			check.MustAbs(container.Nonexistent),
		); !errors.Is(err, os.ErrNotExist) {
			t.Fatalf("Open: error = %v", err)
		}
		t.Run("variant", func(t *testing.T) {
			defer func() {
				const wantPanic = "attempting to set extension after open"
				if r := recover(); r != wantPanic {
					t.Errorf("panic: %#v, want %q", r, wantPanic)
				}
			}()
			pkg.SetExtension("rosa")
		})
		t.Run("register", func(t *testing.T) {
			defer func() {
				const wantPanic = "attempting to register after open"
				if r := recover(); r != wantPanic {
					t.Errorf("panic: %#v, want %q", r, wantPanic)
				}
			}()
			pkg.Register(pkg.KindCustomOffset, nil)
		})
	})
	// Registering a kind without completing the variant setup must make
	// Open panic before doing any work.
	t.Run("incomplete", func(t *testing.T) {
		t.Cleanup(func() { delete(irArtifact, pkg.KindCustomOffset) })
		defer func() {
			const wantPanic = "attempting to open cache with incomplete variant setup"
			if r := recover(); r != wantPanic {
				t.Errorf("panic: %#v, want %q", r, wantPanic)
			}
		}()
		pkg.Register(pkg.KindCustomOffset, nil)
		t.Cleanup(func() { opened = false })
		_, _ = pkg.Open(nil, nil, 0, 0, 0, nil)
		panic("unreachable")
	})
	// A successful Open on a fresh directory writes the extension to the
	// "variant" file.
	t.Run("create", func(t *testing.T) {
		t.Cleanup(func() { extension = "" })
		const want = "rosa"
		pkg.SetExtension(want)
		base := check.MustAbs(t.TempDir())
		t.Cleanup(func() { opened = false })
		if c, err := pkg.Open(
			t.Context(), nil,
			0, 0, 0,
			base,
		); err != nil {
			t.Fatal(err)
		} else {
			c.Close()
		}
		if got, err := os.ReadFile(base.Append("variant").String()); err != nil {
			t.Fatal(err)
		} else if string(got) != want {
			t.Fatalf("variant: %q", string(got))
		}
	})
	// An unreadable variant file (mode 0) surfaces as the underlying
	// *os.PathError with EACCES.
	t.Run("access", func(t *testing.T) {
		base := check.MustAbs(t.TempDir())
		t.Cleanup(func() { opened = false })
		if err := os.WriteFile(base.Append("variant").String(), nil, 0); err != nil {
			t.Fatal(err)
		}
		wantErr := &os.PathError{
			Op:   "open",
			Path: base.Append("variant").String(),
			Err:  syscall.EACCES,
		}
		if _, err := pkg.Open(
			t.Context(), nil,
			0, 0, 0,
			base,
		); !reflect.DeepEqual(err, wantErr) {
			t.Fatalf("Open: error = %v, want %v", err, wantErr)
		}
	})
	// An empty variant file requires CPromoteVariant: without the flag
	// Open fails with ErrWouldPromote and must leave the file untouched;
	// with the flag it writes the configured extension.
	t.Run("promote", func(t *testing.T) {
		t.Cleanup(func() { extension = "" })
		const want = "rosa"
		pkg.SetExtension(want)
		base := check.MustAbs(t.TempDir())
		t.Cleanup(func() { opened = false })
		variantPath := base.Append("variant")
		if err := os.WriteFile(variantPath.String(), nil, 0600); err != nil {
			t.Fatal(err)
		}
		if _, err := pkg.Open(
			t.Context(), nil,
			0, 0, 0,
			base,
		); !reflect.DeepEqual(err, pkg.ErrWouldPromote) {
			t.Fatalf("Open: error = %v", err)
		}
		// The rejected open must not have modified the variant file.
		if p, err := os.ReadFile(variantPath.String()); err != nil {
			t.Fatal(err)
		} else if len(p) != 0 {
			t.Fatalf("variant: %q", string(p))
		}
		if c, err := pkg.Open(
			t.Context(), nil,
			pkg.CPromoteVariant, 0, 0,
			base,
		); err != nil {
			t.Fatalf("Open: error = %v", err)
		} else {
			c.Close()
		}
		if p, err := os.ReadFile(variantPath.String()); err != nil {
			t.Fatal(err)
		} else if string(p) != want {
			t.Fatalf("variant: %q, want %q", string(p), want)
		}
	})
	// An over-long (129-byte) variant file is rejected as invalid;
	// presumably the maximum extension length is 128 — confirm against
	// the Open implementation.
	t.Run("open invalid", func(t *testing.T) {
		base := check.MustAbs(t.TempDir())
		t.Cleanup(func() { opened = false })
		variantPath := base.Append("variant")
		if err := os.WriteFile(variantPath.String(), make([]byte, 129), 0400); err != nil {
			t.Fatal(err)
		}
		if _, err := pkg.Open(
			t.Context(), nil,
			0, 0, 0,
			base,
		); !reflect.DeepEqual(err, pkg.ErrInvalidExtension) {
			t.Fatalf("Open: error = %v", err)
		}
	})
	// A variant recorded on disk but not set up in this process is
	// reported via UnsupportedVariantError.
	t.Run("unsupported", func(t *testing.T) {
		base := check.MustAbs(t.TempDir())
		t.Cleanup(func() { opened = false })
		variantPath := base.Append("variant")
		if err := os.WriteFile(variantPath.String(), []byte("rosa"), 0400); err != nil {
			t.Fatal(err)
		}
		if _, err := pkg.Open(
			t.Context(), nil,
			0, 0, 0,
			base,
		); !reflect.DeepEqual(err, pkg.UnsupportedVariantError("rosa")) {
			t.Fatalf("Open: error = %v", err)
		}
	})
}

View File

@@ -20,31 +20,6 @@ import (
func TestTar(t *testing.T) {
t.Parallel()
want := expectsFS{
".": {Mode: fs.ModeDir | 0500},
"checksum": {Mode: fs.ModeDir | 0500},
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP": {Mode: fs.ModeDir | 0500},
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/check": {Mode: 0400, Data: []byte{0, 0}},
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib": {Mode: fs.ModeDir | 0500},
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/pkgconfig": {Mode: fs.ModeDir | 0500},
"checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"identifier": {Mode: fs.ModeDir | 0500},
"identifier/HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
"identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
"work": {Mode: fs.ModeDir | 0500},
}
wantEncode := pkg.Encode(want.hash())
wantExpand := expectsFS{
".": {Mode: fs.ModeDir | 0500},
"libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
}
wantExpandEncode := pkg.Encode(wantExpand.hash())
checkWithCache(t, []cacheTestCase{
{"http", 0, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
checkTarHTTP(t, base, c, fstest.MapFS{
@@ -62,30 +37,10 @@ func TestTar(t *testing.T) {
"identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
"work": {Mode: fs.ModeDir | 0700},
}, want)
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/" + wantEncode: {Mode: fs.ModeDir | 0500},
"checksum/" + wantEncode + "/checksum": {Mode: fs.ModeDir | 0500},
"checksum/" + wantEncode + "/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP": {Mode: fs.ModeDir | 0500},
"checksum/" + wantEncode + "/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/check": {Mode: 0400, Data: []byte{0, 0}},
"checksum/" + wantEncode + "/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib": {Mode: fs.ModeDir | 0500},
"checksum/" + wantEncode + "/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"checksum/" + wantEncode + "/checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP/lib/pkgconfig": {Mode: fs.ModeDir | 0500},
"checksum/" + wantEncode + "/identifier": {Mode: fs.ModeDir | 0500},
"checksum/" + wantEncode + "/identifier/HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
"checksum/" + wantEncode + "/identifier/Zx5ZG9BAwegNT3zQwCySuI2ktCXxNgxirkGLFjW4FW06PtojYVaCdtEw8yuntPLa": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/1TL00Qb8dcqayX7wTO8WNaraHvY6b-KCsctLDTrb64QBCmxj_-byK1HdIUwMaFEP")},
"checksum/" + wantEncode + "/work": {Mode: fs.ModeDir | 0500},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/" + wantEncode)},
"identifier/rg7F1D5hwv6o4xctjD5zDq4i5MD0mArTsUIWfhUbik8xC6Bsyt3mjXXOm3goojTz": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/" + wantEncode)},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode(
"cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM",
))
}, pkg.MustDecode("NQTlc466JmSVLIyWklm_u8_g95jEEb98PxJU-kjwxLpfdjwMWJq0G8ze9R4Vo1Vu")},
{"http expand", 0, nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
checkTarHTTP(t, base, c, fstest.MapFS{
@@ -93,21 +48,10 @@ func TestTar(t *testing.T) {
"lib": {Mode: fs.ModeDir | 0700},
"lib/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
}, wantExpand)
}, expectsFS{
".": {Mode: fs.ModeDir | 0700},
"checksum": {Mode: fs.ModeDir | 0700},
"checksum/" + wantExpandEncode: {Mode: fs.ModeDir | 0500},
"checksum/" + wantExpandEncode + "/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
"identifier": {Mode: fs.ModeDir | 0700},
"identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/" + wantExpandEncode)},
"identifier/_v1blm2h-_KA-dVaawdpLas6MjHc6rbhhFS8JWwx8iJxZGUu8EBbRrhr5AaZ9PJL": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/" + wantExpandEncode)},
"temp": {Mode: fs.ModeDir | 0700},
"work": {Mode: fs.ModeDir | 0700},
}},
}, pkg.MustDecode(
"CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN",
))
}, pkg.MustDecode("hSoSSgCYTNonX3Q8FjvjD1fBl-E-BQyA6OTXro2OadXqbST4tZ-akGXszdeqphRe")},
})
}
@@ -116,7 +60,7 @@ func checkTarHTTP(
base *check.Absolute,
c *pkg.Cache,
testdataFsys fs.FS,
want expectsKnown,
wantChecksum pkg.Checksum,
) {
var testdata string
{
@@ -250,24 +194,24 @@ func checkTarHTTP(
{"file", a, base.Append(
"identifier",
pkg.Encode(wantIdent),
), want, nil},
), wantChecksum, nil},
{"directory", pkg.NewTar(
&tarDir,
pkg.TarGzip,
), ignorePathname, want, nil},
), ignorePathname, wantChecksum, nil},
{"multiple entries", pkg.NewTar(
&tarDirMulti,
pkg.TarGzip,
), nil, nil, errors.New(
), nil, pkg.Checksum{}, errors.New(
"input directory does not contain a single regular file",
)},
{"bad type", pkg.NewTar(
&tarDirType,
pkg.TarGzip,
), nil, nil, errors.New(
), nil, pkg.Checksum{}, errors.New(
"input directory does not contain a single regular file",
)},
@@ -277,6 +221,6 @@ func checkTarHTTP(
cure: func(t *pkg.TContext) error {
return stub.UniqueError(0xcafe)
},
}, pkg.TarGzip), nil, nil, stub.UniqueError(0xcafe)},
}, pkg.TarGzip), nil, pkg.Checksum{}, stub.UniqueError(0xcafe)},
})
}

View File

@@ -14,10 +14,9 @@ import (
"strconv"
"strings"
"hakurei.app/check"
"hakurei.app/fhs"
"hakurei.app/vfs"
"hakurei.app/internal/pkg/internal/testtool/expected"
)
func main() {
@@ -149,40 +148,59 @@ func main() {
}
const checksumEmptyDir = "MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"
ident := expected.Offline
ident := "dztPS6jRjiZtCF4_p8AzfnxGp6obkhrgFVsxdodbKWUoAEVtDz3MykepJB4kI_ks"
log.Println(m)
next := func() { m = m.Next; log.Println(m) }
if overlayRoot {
ident = expected.OvlRoot
ident = "RdMA-mubnrHuu3Ky1wWyxauSYCO0ZH_zCPUj3uDHqkfwv5sGcByoF_g5PjlGiClb"
if m.Root != "/" || m.Target != "/" ||
m.Source != "overlay" || m.FsType != "overlay" {
log.Fatal("unexpected root mount entry")
}
var lowerdir []string
var lowerdir string
for _, o := range strings.Split(m.FsOptstr, ",") {
const lowerdirKey = "lowerdir+="
const lowerdirKey = "lowerdir="
if strings.HasPrefix(o, lowerdirKey) {
lowerdir = append(lowerdir, o[len(lowerdirKey):])
lowerdir = o[len(lowerdirKey):]
}
}
if !layers {
if len(lowerdir) != 1 || filepath.Base(lowerdir[0]) != checksumEmptyDir {
if filepath.Base(lowerdir) != checksumEmptyDir {
log.Fatal("unexpected artifact checksum")
}
} else {
ident = expected.Layers
ident = "p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT"
if len(lowerdir) != 2 ||
filepath.Base(lowerdir[0]) != "MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU" ||
filepath.Base(lowerdir[1]) != "nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK" {
log.Fatalf("unexpected lowerdirs %s", strings.Join(lowerdir, ", "))
lowerdirsEscaped := strings.Split(lowerdir, ":")
lowerdirs := lowerdirsEscaped[:0]
// ignore the option separator since it does not appear in ident
for i, e := range lowerdirsEscaped {
if len(e) > 0 &&
e[len(e)-1] == check.SpecialOverlayEscape[0] &&
(len(e) == 1 || e[len(e)-2] != check.SpecialOverlayEscape[0]) {
// ignore escaped pathname separator since it does not
// appear in ident
e = e[:len(e)-1]
if len(lowerdirsEscaped) != i {
lowerdirsEscaped[i+1] = e + lowerdirsEscaped[i+1]
continue
}
}
lowerdirs = append(lowerdirs, e)
}
if len(lowerdirs) != 2 ||
filepath.Base(lowerdirs[0]) != "MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU" ||
filepath.Base(lowerdirs[1]) != "nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK" {
log.Fatalf("unexpected lowerdirs %s", strings.Join(lowerdirs, ", "))
}
}
} else {
if hostNet {
ident = expected.Net
ident = "G8qPxD9puvvoOVV7lrT80eyDeIl3G_CCFoKw12c8mCjMdG1zF7NEPkwYpNubClK3"
}
if m.Root != "/sysroot" || m.Target != "/" {
@@ -201,14 +219,14 @@ func main() {
}
if promote {
ident = expected.Promote
ident = "xXTIYcXmgJWNLC91c417RRrNM9cjELwEZHpGvf8Fk_GNP5agRJp_SicD0w9aMeLJ"
}
next() // testtool artifact
next()
if overlayWork {
ident = expected.Work
ident = "5hlaukCirnXE4W_RSLJFOZN47Z5RiHnacXzdFp_70cLgiJUGR6cSb_HaFftkzi0-"
if m.Root != "/" || m.Target != "/work" ||
m.Source != "overlay" || m.FsType != "overlay" {
log.Fatal("unexpected work mount entry")

View File

@@ -16,7 +16,9 @@ import (
type PArtifact int
const (
LLVM PArtifact = iota
CompilerRT PArtifact = iota
LLVMRuntimes
Clang
// EarlyInit is the Rosa OS init program.
EarlyInit
@@ -72,15 +74,10 @@ const (
HakureiDist
IPTables
Kmod
LIT
LibX11
LibXau
LibXext
LibXrandr
LibXrender
LibXxf86vm
Libbsd
Libcap
Libclc
Libdrm
Libev
Libexpat
@@ -90,19 +87,16 @@ const (
Libiconv
Libmd
Libmnl
Libnftnl
Libpciaccess
Libpng
Libnftnl
Libpsl
Libseccomp
Libtasn1
Libtool
Libucontext
Libunistring
Libxshmfence
Libxml2
Libxslt
Libxtrans
M4
MPC
MPFR
@@ -135,30 +129,21 @@ const (
PkgConfig
Procps
Python
PythonFlitCore
PythonHatchling
PythonIniConfig
PythonMako
PythonMarkupSafe
PythonPackaging
PythonPathspec
PythonPluggy
PythonPyTest
PythonPyYAML
PythonPycparser
PythonPygments
PythonSetuptools
PythonSetuptoolsSCM
PythonTroveClassifiers
PythonVCSVersioning
PythonWheel
QEMU
Rdfind
Readline
Rsync
Sed
Setuptools
SPIRVHeaders
SPIRVLLVMTranslator
SPIRVTools
SquashfsTools
Strace
@@ -175,7 +160,7 @@ const (
XCBProto
XDGDBusProxy
XZ
XorgProto
Xproto
Zlib
Zstd
@@ -183,25 +168,11 @@ const (
PresetUnexportedStart
llvmSource = iota - 1
// earlyCompilerRT is an early, standalone compiler-rt installation for the
// standalone runtimes build.
//
// earlyCompilerRT must only be loaded by [LLVM].
earlyCompilerRT
// earlyRuntimes is an early, standalone installation of LLVM runtimes to
// work around the cmake build system leaking the system LLVM installation
// when invoking the newly built toolchain.
//
// earlyRuntimes must only be loaded by [LLVM].
earlyRuntimes
buildcatrust
utilMacros
// Musl is a standalone libc that does not depend on the toolchain.
Musl
// muslHeaders is a system installation of [Musl] headers.
muslHeaders
// gcc is a hacked-to-pieces GCC toolchain meant for use in intermediate
// stages only. This preset and its direct output must never be exposed.
@@ -346,29 +317,15 @@ var (
}
// artifactsOnce is for lazy initialisation of artifacts.
artifactsOnce [_toolchainEnd][len(artifactsM)]sync.Once
// presetOpts globally modifies behaviour of presets.
presetOpts int
)
// Preset option flags stored in presetOpts; passed to [DropCaches] and read
// back via [Flags].
const (
	// OptSkipCheck skips running all test suites.
	OptSkipCheck = 1 << iota
	// OptLLVMNoLTO disables LTO in all [LLVM] stages.
	OptLLVMNoLTO
)
// Flags returns the current preset flags: the value last stored in the
// package-level presetOpts (a bitwise combination of OptSkipCheck and
// OptLLVMNoLTO, or zero).
func Flags() int { return presetOpts }
// zero resets the value pointed to by p to the zero value of its type.
func zero[T any](p *T) { *p = *new(T) }
// DropCaches arranges for all cached [pkg.Artifact] to be freed some time after
// it returns. Must not be used concurrently with any other function from this
// package.
func DropCaches(flags int) {
presetOpts = flags
func DropCaches() {
zero(&artifacts)
zero(&artifactsOnce)
}

View File

@@ -20,16 +20,13 @@ func TestLoad(t *testing.T) {
}
func BenchmarkAll(b *testing.B) {
flags := rosa.Flags()
b.Cleanup(func() { rosa.DropCaches(flags) })
for b.Loop() {
for i := range rosa.PresetEnd {
rosa.Std.Load(rosa.PArtifact(i))
}
b.StopTimer()
rosa.DropCaches(0)
rosa.DropCaches()
b.StartTimer()
}
}

View File

@@ -10,8 +10,8 @@ import (
func (t Toolchain) newCMake() (pkg.Artifact, string) {
const (
version = "4.3.2"
checksum = "6QylwRVKletndTSkZTV2YBRwgd_9rUVgav_QW23HpjUgV21AVYZOUOal8tdBDmO7"
version = "4.3.1"
checksum = "RHpzZiM1kJ5bwLjo9CpXSeHJJg3hTtV9QxBYpQoYwKFtRh5YhGWpShrqZCSOzQN6"
)
return t.NewPackage("cmake", version, newFromGitHubRelease(
"Kitware/CMake",
@@ -122,18 +122,11 @@ type CMakeHelper struct {
// Path elements joined with source.
Append []string
// Value of CMAKE_BUILD_TYPE. The zero value is equivalent to "Release".
BuildType string
// CMake CACHE entries.
Cache []KV
// Runs after install.
Script string
// Replaces the default test command.
Test string
// Whether to skip running tests.
SkipTest bool
// Whether to generate Makefile instead.
Make bool
}
@@ -166,30 +159,20 @@ func (*CMakeHelper) wantsDir() string { return "/cure/" }
// script generates the cure script.
func (attr *CMakeHelper) script(name string) string {
if attr == nil {
attr = new(CMakeHelper)
attr = &CMakeHelper{
Cache: []KV{
{"CMAKE_BUILD_TYPE", "Release"},
},
}
}
if len(attr.Cache) == 0 {
panic("CACHE must be non-empty")
}
generate := "Ninja"
test := "ninja " + jobsFlagE + " test"
if attr.Make {
generate = "'Unix Makefiles'"
test = "make " + jobsFlagE + " test"
}
if attr.Test != "" {
test = attr.Test
}
script := attr.Script
if !attr.SkipTest && presetOpts&OptSkipCheck == 0 {
script += "\n" + test
}
cache := make([]KV, 1, 1+len(attr.Cache))
cache[0] = KV{"CMAKE_BUILD_TYPE", "Release"}
if attr.BuildType != "" {
cache[0][1] = attr.BuildType
}
cache = append(cache, attr.Cache...)
return `
cmake -G ` + generate + ` \
@@ -198,7 +181,7 @@ cmake -G ` + generate + ` \
-DCMAKE_ASM_COMPILER_TARGET="${ROSA_TRIPLE}" \
-DCMAKE_INSTALL_LIBDIR=lib \
` + strings.Join(slices.Collect(func(yield func(string) bool) {
for _, v := range cache {
for _, v := range attr.Cache {
if !yield("-D" + v[0] + "=" + v[1]) {
return
}
@@ -208,5 +191,5 @@ cmake -G ` + generate + ` \
'/usr/src/` + name + `/` + filepath.Join(attr.Append...) + `'
cmake --build . --parallel=` + jobsE + `
cmake --install . --prefix=/work/system
` + script
` + attr.Script
}

View File

@@ -9,8 +9,8 @@ import (
func (t Toolchain) newGit() (pkg.Artifact, string) {
const (
version = "2.54.0"
checksum = "7vGKtFOJGqY8DO4e8UMRax7dLgImXKQz5MMalec6MlgYrsarffSJjgOughwRFpSH"
version = "2.53.0"
checksum = "rlqSTeNgSeVKJA7nvzGqddFH8q3eFEPB4qRZft-4zth8wTHnbTbm7J90kp_obHGm"
)
return t.NewPackage("git", version, newTar(
"https://www.kernel.org/pub/software/scm/git/"+
@@ -20,9 +20,6 @@ func (t Toolchain) newGit() (pkg.Artifact, string) {
), &PackageAttr{
ScriptEarly: `
ln -s ../../system/bin/perl /usr/bin/ || true
# test suite assumes apache
rm -f /system/bin/httpd
`,
// uses source tree as scratch space
@@ -41,7 +38,6 @@ function disable_test {
fi
}
disable_test t1800-hook
disable_test t5319-multi-pack-index
disable_test t1305-config-include
disable_test t3900-i18n-commit
@@ -67,9 +63,6 @@ disable_test t2200-add-update
NO_INSTALL_HARDLINKS=1 \
install`,
},
// test suite hangs on mksh
Bash,
Diffutils,
Autoconf,
Gettext,

View File

@@ -17,8 +17,9 @@ func (t Toolchain) newSPIRVHeaders() (pkg.Artifact, string) {
"vulkan-sdk-"+version,
checksum,
), nil, &CMakeHelper{
// upstream has no tests
SkipTest: true,
Cache: []KV{
{"CMAKE_BUILD_TYPE", "Release"},
},
}), version
}
func init() {
@@ -63,6 +64,7 @@ func (t Toolchain) newSPIRVTools() (pkg.Artifact, string) {
checksum,
), nil, &CMakeHelper{
Cache: []KV{
{"CMAKE_BUILD_TYPE", "Release"},
{"SPIRV-Headers_SOURCE_DIR", "/system"},
},
},
@@ -84,8 +86,6 @@ func init() {
},
ID: 14894,
latest: (*Versions).getStable,
}
}
@@ -104,9 +104,11 @@ func (t Toolchain) newGlslang() (pkg.Artifact, string) {
Chmod: true,
}, &CMakeHelper{
Cache: []KV{
{"CMAKE_BUILD_TYPE", "Release"},
{"BUILD_SHARED_LIBS", "ON"},
{"ALLOW_EXTERNAL_SPIRV_TOOLS", "ON"},
},
Script: "ctest",
},
Python,
Bash,
@@ -126,65 +128,3 @@ func init() {
ID: 205796,
}
}
func (t Toolchain) newSPIRVLLVMTranslator() (pkg.Artifact, string) {
const (
version = "22.1.2"
checksum = "JZAaV5ewYcm-35YA_U2BM2IcsQouZtX1BLZR0zh2vSlfEXMsT5OCtY4Gh5RJkcGy"
)
return t.NewPackage("spirv-llvm-translator", version, newFromGitHub(
"KhronosGroup/SPIRV-LLVM-Translator",
"v"+version, checksum,
), &PackageAttr{
Patches: []KV{
{"remove-early-prefix", `diff --git a/CMakeLists.txt b/CMakeLists.txt
index c000a77e..86f79b03 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -172,5 +172,5 @@ install(
FILES
${CMAKE_BINARY_DIR}/LLVMSPIRVLib.pc
DESTINATION
- ${CMAKE_INSTALL_PREFIX}/lib${LLVM_LIBDIR_SUFFIX}/pkgconfig
+ lib${LLVM_LIBDIR_SUFFIX}/pkgconfig
)
`},
},
// litArgs emits shell syntax
ScriptEarly: `
export LIT_OPTS=` + litArgs(true,
// error: line 13: OpTypeCooperativeMatrixKHR Scope is limited to Workgroup and Subgroup
"cooperative_matrix_constant_null.spvasm") + `
`,
}, &CMakeHelper{
Cache: []KV{
{"CMAKE_SKIP_BUILD_RPATH", "ON"},
{"BUILD_SHARED_LIBS", "ON"},
{"LLVM_SPIRV_ENABLE_LIBSPIRV_DIS", "ON"},
{"LLVM_EXTERNAL_SPIRV_HEADERS_SOURCE_DIR", "/system"},
{"LLVM_EXTERNAL_LIT", "/system/bin/lit"},
{"LLVM_INCLUDE_TESTS", "ON"},
},
},
Bash,
LIT,
SPIRVTools,
), version
}
// init registers spirv-llvm-translator in the global artifactsM metadata
// table under the SPIRVLLVMTranslator preset constant, wiring its builder
// function and declared dependency on SPIRVTools.
func init() {
	artifactsM[SPIRVLLVMTranslator] = Metadata{
		f:           Toolchain.newSPIRVLLVMTranslator,
		Name:        "spirv-llvm-translator",
		Description: "bi-directional translation between SPIR-V and LLVM IR",
		Website:     "https://github.com/KhronosGroup/SPIRV-LLVM-Translator",
		Dependencies: P{
			SPIRVTools,
		},
		ID: 227273,
	}
}

View File

@@ -2,47 +2,10 @@ package rosa
import (
"runtime"
"slices"
"strconv"
"strings"
"hakurei.app/internal/pkg"
)
// skipGNUTests generates a string for skipping specific tests by number in a
// GNU test suite. This is nontrivial because the test suite does not support
// excluding tests in any way, so ranges for all but the skipped tests have to
// be specified instead.
//
// For example, to skip test 764, ranges around the skipped test must be
// specified:
//
//	1-763 765-
//
// Tests are numbered starting from 1. The resulting string is unquoted.
// Passing no tests yields "1-" (run everything); duplicate test numbers are
// tolerated.
func skipGNUTests(tests ...int) string {
	// Work on a private, sorted, deduplicated copy so the caller's slice is
	// left untouched and the range logic below can rely on strictly
	// increasing values. (The previous implementation panicked on an empty
	// argument list and emitted overlapping ranges for duplicates.)
	tests = slices.Clone(tests)
	slices.Sort(tests)
	tests = slices.Compact(tests)
	if len(tests) == 0 {
		// Nothing to skip: one open-ended range covers every test.
		return "1-"
	}

	var buf strings.Builder
	if tests[0] != 1 {
		buf.WriteString("1-")
	}
	for i, n := range tests {
		// Close the range ending just before n, unless n is 1 (no tests
		// precede it) or the previously skipped test is directly adjacent.
		if n != 1 && (i == 0 || tests[i-1] != n-1) {
			buf.WriteString(strconv.Itoa(n - 1))
			buf.WriteString(" ")
		}
		// Open the range starting just after n, unless the next skipped
		// test is directly adjacent.
		if i == len(tests)-1 || tests[i+1] != n+1 {
			buf.WriteString(strconv.Itoa(n + 1))
			buf.WriteString("-")
		}
	}
	return buf.String()
}
func (t Toolchain) newM4() (pkg.Artifact, string) {
const (
version = "1.4.21"
@@ -84,15 +47,7 @@ func (t Toolchain) newBison() (pkg.Artifact, string) {
"https://ftpmirror.gnu.org/gnu/bison/bison-"+version+".tar.gz",
checksum,
pkg.TarGzip,
), nil, &MakeHelper{
Check: []string{
"TESTSUITEFLAGS=" + jobsFlagE + "' " + skipGNUTests(
// clang miscompiles (SIGILL)
764,
) + "'",
"check",
},
},
), nil, (*MakeHelper)(nil),
M4,
Diffutils,
Sed,
@@ -112,8 +67,8 @@ func init() {
func (t Toolchain) newSed() (pkg.Artifact, string) {
const (
version = "4.10"
checksum = "TXTRFQJCyflb-bpBRI2S5Y1DpplwvT7-KfXtpqN4AdZgZ5OtI6yStn1-bkhDKx51"
version = "4.9"
checksum = "pe7HWH4PHNYrazOTlUoE1fXmhn2GOPFN_xE62i0llOr3kYGrH1g2_orDz0UtZ9Nt"
)
return t.NewPackage("sed", version, newTar(
"https://ftpmirror.gnu.org/gnu/sed/sed-"+version+".tar.gz",
@@ -121,8 +76,6 @@ func (t Toolchain) newSed() (pkg.Artifact, string) {
pkg.TarGzip,
), nil, (*MakeHelper)(nil),
Diffutils,
KernelHeaders,
), version
}
func init() {
@@ -231,9 +184,6 @@ func (t Toolchain) newLibtool() (pkg.Artifact, string) {
checksum,
pkg.TarGzip,
), nil, &MakeHelper{
// _Z2a2c: symbol not found
SkipCheck: t.isStage0(),
Check: []string{
"TESTSUITEFLAGS=" + jobsFlagE,
"check",
@@ -424,8 +374,8 @@ func init() {
func (t Toolchain) newCoreutils() (pkg.Artifact, string) {
const (
version = "9.11"
checksum = "t8UMed5wpFEoC56aa42_yidfOAaRGzOfj7MRtQkkqgGbpXiskNA8bd-EmVSQkZie"
version = "9.10"
checksum = "o-B9wssRnZySzJUI1ZJAgw-bZtj1RC67R9po2AcM2OjjS8FQIl16IRHpC6IwO30i"
)
return t.NewPackage("coreutils", version, newTar(
"https://ftpmirror.gnu.org/gnu/coreutils/coreutils-"+version+".tar.gz",
@@ -437,13 +387,106 @@ func (t Toolchain) newCoreutils() (pkg.Artifact, string) {
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
test_disable '#!/bin/sh' gnulib-tests/test-c32ispunct.sh
test_disable '#!/bin/sh' tests/split/line-bytes.sh
test_disable '#!/bin/sh' tests/ls/hyperlink.sh
test_disable '#!/bin/sh' tests/misc/user.sh
test_disable 'int main(){return 0;}' gnulib-tests/test-chown.c
test_disable 'int main(){return 0;}' gnulib-tests/test-fchownat.c
test_disable 'int main(){return 0;}' gnulib-tests/test-lchown.c
`,
Patches: []KV{
{"tests-fix-job-control", `From 21d287324aa43aa3a31f39619ade0deac7fd6013 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A1draig=20Brady?= <P@draigBrady.com>
Date: Tue, 24 Feb 2026 15:44:41 +0000
Subject: [PATCH] tests: fix job control triggering test termination
This avoids the test harness being terminated like:
make[1]: *** [Makefile:24419: check-recursive] Hangup
make[3]: *** [Makefile:24668: check-TESTS] Hangup
make: *** [Makefile:24922: check] Hangup
make[2]: *** [Makefile:24920: check-am] Hangup
make[4]: *** [Makefile:24685: tests/misc/usage_vs_refs.log] Error 129
...
This happened sometimes when the tests were being run non interactively.
For example when run like:
setsid make TESTS="tests/timeout/timeout.sh \
tests/tail/overlay-headers.sh" SUBDIRS=. -j2 check
Note the race window can be made bigger by adding a sleep
after tail is stopped in overlay-headers.sh
The race can trigger the kernel to induce its job control
mechanism to prevent stuck processes.
I.e. where it sends SIGHUP + SIGCONT to a process group
when it determines that group may become orphaned,
and there are stopped processes in that group.
* tests/tail/overlay-headers.sh: Use setsid(1) to keep the stopped
tail process in a separate process group, thus avoiding any kernel
job control protection mechanism.
* tests/timeout/timeout.sh: Use setsid(1) to avoid the kernel
checking the main process group when sleep(1) is reparented.
Fixes https://bugs.gnu.org/80477
---
tests/tail/overlay-headers.sh | 8 +++++++-
tests/timeout/timeout.sh | 11 ++++++++---
2 files changed, 15 insertions(+), 4 deletions(-)
diff --git a/tests/tail/overlay-headers.sh b/tests/tail/overlay-headers.sh
index be9b6a7df..1e6da0a3f 100755
--- a/tests/tail/overlay-headers.sh
+++ b/tests/tail/overlay-headers.sh
@@ -20,6 +20,8 @@
. "${srcdir=.}/tests/init.sh"; path_prepend_ ./src
print_ver_ tail sleep
+setsid true || skip_ 'setsid required to control groups'
+
# Function to count number of lines from tail
# while ignoring transient errors due to resource limits
countlines_ ()
@@ -54,7 +56,11 @@ echo start > file2 || framework_failure_
env sleep 60 & sleep=$!
# Note don't use timeout(1) here as it currently
-# does not propagate SIGCONT
+# does not propagate SIGCONT.
+# Note use setsid here to ensure we're in a separate process group
+# as we're going to STOP this tail process, and this can trigger
+# the kernel to send SIGHUP to a group if other tests have
+# processes that are reparented. (See tests/timeout/timeout.sh).
tail $fastpoll --pid=$sleep -f file1 file2 > out & pid=$!
# Ensure tail is running
diff --git a/tests/timeout/timeout.sh b/tests/timeout/timeout.sh
index 9a395416b..fbb043312 100755
--- a/tests/timeout/timeout.sh
+++ b/tests/timeout/timeout.sh
@@ -56,9 +56,14 @@ returns_ 124 timeout --foreground -s0 -k1 .1 sleep 10 && fail=1
) || fail=1
# Don't be confused when starting off with a child (Bug#9098).
-out=$(sleep .1 & exec timeout .5 sh -c 'sleep 2; echo foo')
-status=$?
-test "$out" = "" && test $status = 124 || fail=1
+# Use setsid to avoid sleep being in the test's process group, as
+# upon reparenting it can trigger an orphaned process group SIGHUP
+# (if there were stopped processes in other tests).
+if setsid true; then
+ out=$(setsid sleep .1 & exec timeout .5 sh -c 'sleep 2; echo foo')
+ status=$?
+ test "$out" = "" && test $status = 124 || fail=1
+fi
# Verify --verbose output
cat > exp <<\EOF
--
2.53.0
`},
},
Flag: TEarly,
}, &MakeHelper{
Configure: []KV{
@@ -714,20 +757,15 @@ func init() {
func (t Toolchain) newParallel() (pkg.Artifact, string) {
const (
version = "20260422"
checksum = "eTsepxgqhXpMEhPd55qh-W5y4vjKn0x9TD2mzbJCNZYtFf4lT4Wzoqr74HGJYBEH"
version = "20260322"
checksum = "gHoPmFkOO62ev4xW59HqyMlodhjp8LvTsBOwsVKHUUdfrt7KwB8koXmSVqQ4VOrB"
)
return t.NewPackage("parallel", version, newTar(
"https://ftpmirror.gnu.org/gnu/parallel/parallel-"+version+".tar.bz2",
checksum,
pkg.TarBzip2,
), &PackageAttr{
ScriptEarly: `
ln -s ../system/bin/bash /bin/
`,
}, (*MakeHelper)(nil),
), nil, (*MakeHelper)(nil),
Perl,
Bash,
), version
}
func init() {
@@ -1105,11 +1143,10 @@ func init() {
func (t Toolchain) newMPC() (pkg.Artifact, string) {
const (
version = "1.4.1"
checksum = "ZffaZyWkvIw0iPvRe5EJ7O-VvHtSkbbb3K_7SgPtK810NvGan7nbF0T5-6tozjQN"
checksum = "wdXAhplnS89FjVp20m2nC2CmLFQeyQqLpQAfViTy4vPxFdv2WYOTtfBKeIk5_Rec"
)
return t.NewPackage("mpc", version, newFromGitLab(
"gitlab.inria.fr",
"mpc/mpc",
return t.NewPackage("mpc", version, t.newTagRemote(
"https://gitlab.inria.fr/mpc/mpc.git",
version, checksum,
), &PackageAttr{
// does not find mpc-impl.h otherwise

View File

@@ -1,35 +0,0 @@
package rosa
import (
"slices"
"strconv"
"strings"
"testing"
)
// TestSkipGNUTests verifies the range strings skipGNUTests produces for a set
// of known inputs, including unsorted and leading-run cases.
func TestSkipGNUTests(t *testing.T) {
	t.Parallel()
	for _, tc := range []struct {
		tests []int
		want  string
	}{
		{[]int{764}, "1-763 765-"},
		{[]int{764, 0xcafe, 37, 9}, "1-8 10-36 38-763 765-51965 51967-"},
		{[]int{1, 2, 0xbed}, "3-3052 3054-"},
		{[]int{3, 4}, "1-2 5-"},
	} {
		// Subtest name is the comma-joined list of skipped test numbers.
		name := strings.Join(slices.Collect(func(yield func(string) bool) {
			for _, n := range tc.tests {
				yield(strconv.Itoa(n))
			}
		}), ",")
		t.Run(name, func(t *testing.T) {
			t.Parallel()
			if got := skipGNUTests(tc.tests...); got != tc.want {
				t.Errorf("skipGNUTests: %q, want %q", got, tc.want)
			}
		})
	}
}

View File

@@ -10,9 +10,9 @@ import (
// newGoBootstrap returns the Go bootstrap toolchain.
func (t Toolchain) newGoBootstrap() pkg.Artifact {
const checksum = "8o9JL_ToiQKadCTb04nvBDkp8O1xiWOolAxVEqaTGodieNe4lOFEjlOxN3bwwe23"
return t.New("go1.4-bootstrap", 0, t.AppendPresets(nil,
Bash,
), nil, []string{
return t.New("go1.4-bootstrap", 0, []pkg.Artifact{
t.Load(Bash),
}, nil, []string{
"CGO_ENABLED=0",
}, `
mkdir -p /var/tmp/ /work/system/
@@ -35,9 +35,9 @@ func (t Toolchain) newGo(
script string,
extra ...pkg.Artifact,
) pkg.Artifact {
return t.New("go"+version, 0, t.AppendPresets(extra,
Bash,
), nil, slices.Concat([]string{
return t.New("go"+version, 0, slices.Concat([]pkg.Artifact{
t.Load(Bash),
}, extra), nil, slices.Concat([]string{
"CC=cc",
"GOCACHE=/tmp/gocache",
"GOROOT_BOOTSTRAP=/system/go",
@@ -127,8 +127,8 @@ sed -i \
)
go125 := t.newGo(
"1.25.9",
"gShJb9uOMk5AxqPSwvn53ZO56S6PyP6nfojzrHUiJ3krAvrgjJpYa6-DPA-jxbpN",
"1.25.7",
"fyylHdBVRUobnBjYj3NKBaYPUw3kGmo2mEELiZonOYurPfbarNU1x77B99Fjut7Q",
[]string{"CGO_ENABLED=0"}, `
sed -i \
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
@@ -151,14 +151,9 @@ rm \
sed -i \
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
cmd/link/internal/`+runtime.GOARCH+`/obj.go
sed -i \
's/cpu.X86.HasAVX512VBMI/& \&\& cpu.X86.HasPOPCNT/' \
internal/runtime/gc/scan/scan_amd64.go
rm \
os/root_unix_test.go \
cmd/cgo/internal/testsanitizers/tsan_test.go \
cmd/cgo/internal/testsanitizers/cshared_test.go
os/root_unix_test.go
`, go125,
), version
}

View File

@@ -7,8 +7,9 @@ func (t Toolchain) newHakurei(
withHostname bool,
) pkg.Artifact {
hostname := `
echo 'Building test helper (hostname).'
go build -o /bin/hostname /usr/src/hostname/main.go
echo '# Building test helper (hostname).'
go build -v -o /bin/hostname /usr/src/hostname/main.go
echo
`
if !withHostname {
hostname = ""
@@ -63,9 +64,9 @@ func init() {
return t.newHakurei("", `
mkdir -p /work/system/libexec/hakurei/
echo "Building hakurei for $(go env GOOS)/$(go env GOARCH)."
go generate ./...
go build -trimpath -tags=rosa -o /work/system/libexec/hakurei -ldflags="-s -w
echo '# Building hakurei.'
go generate -v ./...
go build -trimpath -v -tags=rosa -o /work/system/libexec/hakurei -ldflags="-s -w
-buildid=
-linkmode external
-extldflags=-static
@@ -76,7 +77,7 @@ go build -trimpath -tags=rosa -o /work/system/libexec/hakurei -ldflags="-s -w
" ./...
echo
echo '##### Testing hakurei.'
echo '# Testing hakurei.'
go test -ldflags='-buildid= -linkmode external -extldflags=-static' ./...
echo

View File

@@ -4,13 +4,13 @@ package rosa
import "hakurei.app/internal/pkg"
const hakureiVersion = "0.4.1"
const hakureiVersion = "0.4.0"
// hakureiSource is the source code of a hakurei release.
var hakureiSource = newTar(
"https://git.gensokyo.uk/rosa/hakurei/archive/"+
"v"+hakureiVersion+".tar.gz",
"8bHvZcjUQOXUPbKL-qq99pHFTPnn-h7j1fkJudbGs8waLm3OmkI6eHfQev5bug2y",
"wfQ9DqCW0Fw9o91wj-I55waoqzB-UqzzuC0_2h-P-1M78SgZ1WHSPCDJMth6EyC2",
pkg.TarGzip,
)

View File

@@ -2,12 +2,12 @@ package rosa
import "hakurei.app/internal/pkg"
const kernelVersion = "6.12.84"
const kernelVersion = "6.12.81"
var kernelSource = newTar(
"https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/"+
"snapshot/linux-"+kernelVersion+".tar.gz",
"GJLUEu68r3DpLYoTcMl4wA_ThMBs_Zwc0gZsp82ii_3AOfcVxpI639IKfq2jAAY2",
"fBkNwf82DQXh74in6gaF2Jot7Vg-Vlcp9BUtCEipL9mvcM1EXLVFdV7FcrO20Eve",
pkg.TarGzip,
)

View File

@@ -1,16 +1,16 @@
#
# Automatically generated file; DO NOT EDIT.
# Linux/x86 6.12.84 Kernel Configuration
# Linux/x86 6.12.80 Kernel Configuration
#
CONFIG_CC_VERSION_TEXT="clang version 22.1.4"
CONFIG_CC_VERSION_TEXT="clang version 22.1.2"
CONFIG_GCC_VERSION=0
CONFIG_CC_IS_CLANG=y
CONFIG_CLANG_VERSION=220104
CONFIG_CLANG_VERSION=220102
CONFIG_AS_IS_LLVM=y
CONFIG_AS_VERSION=220104
CONFIG_AS_VERSION=220102
CONFIG_LD_VERSION=0
CONFIG_LD_IS_LLD=y
CONFIG_LLD_VERSION=220104
CONFIG_LLD_VERSION=220102
CONFIG_RUSTC_VERSION=0
CONFIG_RUSTC_LLVM_VERSION=0
CONFIG_CC_HAS_ASM_GOTO_OUTPUT=y
@@ -3175,8 +3175,14 @@ CONFIG_PATA_ACPI=y
CONFIG_ATA_GENERIC=y
CONFIG_PATA_LEGACY=m
CONFIG_MD=y
# CONFIG_BLK_DEV_MD is not set
CONFIG_BLK_DEV_MD=m
CONFIG_MD_BITMAP_FILE=y
CONFIG_MD_LINEAR=m
CONFIG_MD_RAID0=m
CONFIG_MD_RAID1=m
CONFIG_MD_RAID10=m
CONFIG_MD_RAID456=m
CONFIG_MD_CLUSTER=m
CONFIG_BCACHE=m
# CONFIG_BCACHE_DEBUG is not set
# CONFIG_BCACHE_ASYNC_REGISTRATION is not set
@@ -3199,7 +3205,7 @@ CONFIG_DM_ERA=m
CONFIG_DM_CLONE=m
CONFIG_DM_MIRROR=m
CONFIG_DM_LOG_USERSPACE=m
# CONFIG_DM_RAID is not set
CONFIG_DM_RAID=m
CONFIG_DM_ZERO=m
CONFIG_DM_MULTIPATH=m
CONFIG_DM_MULTIPATH_QL=m
@@ -11630,7 +11636,10 @@ CONFIG_RANDSTRUCT_NONE=y
CONFIG_XOR_BLOCKS=m
CONFIG_ASYNC_CORE=m
CONFIG_ASYNC_MEMCPY=m
CONFIG_ASYNC_XOR=m
CONFIG_ASYNC_PQ=m
CONFIG_ASYNC_RAID6_RECOV=m
CONFIG_CRYPTO=y
#
@@ -11916,6 +11925,8 @@ CONFIG_BINARY_PRINTF=y
#
# Library routines
#
CONFIG_RAID6_PQ=m
CONFIG_RAID6_PQ_BENCHMARK=y
CONFIG_LINEAR_RANGES=y
CONFIG_PACKING=y
CONFIG_BITREVERSE=y
@@ -12460,6 +12471,7 @@ CONFIG_RUNTIME_TESTING_MENU=y
# CONFIG_INTERVAL_TREE_TEST is not set
# CONFIG_PERCPU_TEST is not set
# CONFIG_ATOMIC64_SELFTEST is not set
# CONFIG_ASYNC_RAID6_TEST is not set
# CONFIG_TEST_HEXDUMP is not set
# CONFIG_TEST_KSTRTOX is not set
# CONFIG_TEST_PRINTF is not set

View File

@@ -1,16 +1,16 @@
#
# Automatically generated file; DO NOT EDIT.
# Linux/arm64 6.12.83 Kernel Configuration
# Linux/arm64 6.12.80 Kernel Configuration
#
CONFIG_CC_VERSION_TEXT="clang version 22.1.4"
CONFIG_CC_VERSION_TEXT="clang version 21.1.8"
CONFIG_GCC_VERSION=0
CONFIG_CC_IS_CLANG=y
CONFIG_CLANG_VERSION=220104
CONFIG_CLANG_VERSION=210108
CONFIG_AS_IS_LLVM=y
CONFIG_AS_VERSION=220104
CONFIG_AS_VERSION=210108
CONFIG_LD_VERSION=0
CONFIG_LD_IS_LLD=y
CONFIG_LLD_VERSION=220104
CONFIG_LLD_VERSION=210108
CONFIG_RUSTC_VERSION=0
CONFIG_RUSTC_LLVM_VERSION=0
CONFIG_CC_HAS_ASM_GOTO_OUTPUT=y
@@ -3253,8 +3253,14 @@ CONFIG_PATA_ACPI=y
CONFIG_ATA_GENERIC=y
CONFIG_PATA_LEGACY=m
CONFIG_MD=y
# CONFIG_BLK_DEV_MD is not set
CONFIG_BLK_DEV_MD=m
CONFIG_MD_BITMAP_FILE=y
CONFIG_MD_LINEAR=m
CONFIG_MD_RAID0=m
CONFIG_MD_RAID1=m
CONFIG_MD_RAID10=m
CONFIG_MD_RAID456=m
CONFIG_MD_CLUSTER=m
CONFIG_BCACHE=m
# CONFIG_BCACHE_DEBUG is not set
# CONFIG_BCACHE_ASYNC_REGISTRATION is not set
@@ -3277,7 +3283,7 @@ CONFIG_DM_ERA=m
CONFIG_DM_CLONE=m
CONFIG_DM_MIRROR=m
CONFIG_DM_LOG_USERSPACE=m
# CONFIG_DM_RAID is not set
CONFIG_DM_RAID=m
CONFIG_DM_ZERO=m
CONFIG_DM_MULTIPATH=m
CONFIG_DM_MULTIPATH_QL=m
@@ -10294,6 +10300,7 @@ CONFIG_ALTERA_MSGDMA=m
# CONFIG_AMBA_PL08X is not set
CONFIG_APPLE_ADMAC=m
CONFIG_AXI_DMAC=m
CONFIG_BCM_SBA_RAID=m
CONFIG_DMA_BCM2835=m
CONFIG_DMA_SUN6I=m
CONFIG_DW_AXI_DMAC=m
@@ -13285,7 +13292,12 @@ CONFIG_RANDSTRUCT_NONE=y
CONFIG_XOR_BLOCKS=m
CONFIG_ASYNC_CORE=m
CONFIG_ASYNC_MEMCPY=m
CONFIG_ASYNC_XOR=m
CONFIG_ASYNC_PQ=m
CONFIG_ASYNC_RAID6_RECOV=m
CONFIG_ASYNC_TX_DISABLE_PQ_VAL_DMA=y
CONFIG_ASYNC_TX_DISABLE_XOR_VAL_DMA=y
CONFIG_CRYPTO=y
#
@@ -13628,6 +13640,8 @@ CONFIG_BINARY_PRINTF=y
#
# Library routines
#
CONFIG_RAID6_PQ=m
CONFIG_RAID6_PQ_BENCHMARK=y
CONFIG_LINEAR_RANGES=y
CONFIG_PACKING=y
CONFIG_BITREVERSE=y
@@ -14158,6 +14172,7 @@ CONFIG_RUNTIME_TESTING_MENU=y
# CONFIG_INTERVAL_TREE_TEST is not set
# CONFIG_PERCPU_TEST is not set
# CONFIG_ATOMIC64_SELFTEST is not set
# CONFIG_ASYNC_RAID6_TEST is not set
# CONFIG_TEST_HEXDUMP is not set
# CONFIG_TEST_KSTRTOX is not set
# CONFIG_TEST_PRINTF is not set

View File

@@ -1,6 +1,6 @@
#
# Automatically generated file; DO NOT EDIT.
# Linux/riscv 6.12.80 Kernel Configuration
# Linux/riscv 6.12.77 Kernel Configuration
#
CONFIG_CC_VERSION_TEXT="clang version 22.1.2"
CONFIG_GCC_VERSION=0
@@ -37,6 +37,11 @@ CONFIG_BUILD_SALT=""
CONFIG_HAVE_KERNEL_GZIP=y
CONFIG_HAVE_KERNEL_ZSTD=y
# CONFIG_KERNEL_GZIP is not set
# CONFIG_KERNEL_BZIP2 is not set
# CONFIG_KERNEL_LZMA is not set
# CONFIG_KERNEL_XZ is not set
# CONFIG_KERNEL_LZO is not set
# CONFIG_KERNEL_LZ4 is not set
CONFIG_KERNEL_ZSTD=y
CONFIG_DEFAULT_INIT=""
CONFIG_DEFAULT_HOSTNAME="rosa-early"
@@ -2843,8 +2848,14 @@ CONFIG_PATA_ACPI=y
CONFIG_ATA_GENERIC=y
CONFIG_PATA_LEGACY=m
CONFIG_MD=y
# CONFIG_BLK_DEV_MD is not set
CONFIG_BLK_DEV_MD=m
CONFIG_MD_BITMAP_FILE=y
CONFIG_MD_LINEAR=m
CONFIG_MD_RAID0=m
CONFIG_MD_RAID1=m
CONFIG_MD_RAID10=m
CONFIG_MD_RAID456=m
CONFIG_MD_CLUSTER=m
CONFIG_BCACHE=m
# CONFIG_BCACHE_DEBUG is not set
# CONFIG_BCACHE_ASYNC_REGISTRATION is not set
@@ -2867,7 +2878,7 @@ CONFIG_DM_ERA=m
CONFIG_DM_CLONE=m
CONFIG_DM_MIRROR=m
CONFIG_DM_LOG_USERSPACE=m
# CONFIG_DM_RAID is not set
CONFIG_DM_RAID=m
CONFIG_DM_ZERO=m
CONFIG_DM_MULTIPATH=m
CONFIG_DM_MULTIPATH_QL=m
@@ -10644,7 +10655,10 @@ CONFIG_RANDSTRUCT_NONE=y
CONFIG_XOR_BLOCKS=m
CONFIG_ASYNC_CORE=m
CONFIG_ASYNC_MEMCPY=m
CONFIG_ASYNC_XOR=m
CONFIG_ASYNC_PQ=m
CONFIG_ASYNC_RAID6_RECOV=m
CONFIG_CRYPTO=y
#
@@ -10904,6 +10918,8 @@ CONFIG_BINARY_PRINTF=y
#
# Library routines
#
CONFIG_RAID6_PQ=m
CONFIG_RAID6_PQ_BENCHMARK=y
CONFIG_LINEAR_RANGES=y
CONFIG_PACKING=y
CONFIG_BITREVERSE=y
@@ -11392,6 +11408,7 @@ CONFIG_RUNTIME_TESTING_MENU=y
# CONFIG_INTERVAL_TREE_TEST is not set
# CONFIG_PERCPU_TEST is not set
# CONFIG_ATOMIC64_SELFTEST is not set
# CONFIG_ASYNC_RAID6_TEST is not set
# CONFIG_TEST_HEXDUMP is not set
# CONFIG_TEST_KSTRTOX is not set
# CONFIG_TEST_PRINTF is not set

View File

@@ -8,8 +8,8 @@ import (
func (t Toolchain) newLibexpat() (pkg.Artifact, string) {
const (
version = "2.8.0"
checksum = "pnwZ_JSif-OfoWIwk2JYXWHagOWMA3Sh-Ea0p-4Rz9U9mDEeAebhyvnfD7OYOMCk"
version = "2.7.5"
checksum = "vTRUjjg-qbHSXUBYKXgzVHkUO7UNyuhrkSYrE7ikApQm0g-OvQ8tspw4w55M-1Tp"
)
return t.NewPackage("libexpat", version, newFromGitHubRelease(
"libexpat/libexpat",

View File

@@ -1,38 +0,0 @@
package rosa
import (
"strings"
"hakurei.app/internal/pkg"
)
// newLibpng builds the libpng package from the sourceforge release tarball,
// returning the artifact and its version string.
func (t Toolchain) newLibpng() (pkg.Artifact, string) {
	const (
		version  = "1.6.58"
		checksum = "m_a5lROJH7vmF3cMjqwTUqURuQLhV1JQx2ySPzcN3VPdgDB9pG3UINsIx_mtkr-t"
	)
	// sourceforge nests releases under a directory named after the joined
	// major and minor version components (e.g. "libpng16" for 1.6.x).
	series := "libpng" + strings.Join(strings.SplitN(version, ".", 3)[:2], "")
	url := "https://downloads.sourceforge.net/project/libpng/" + series +
		"/" + version + "/libpng-" + version + ".tar.gz"
	return t.NewPackage("libpng", version, newTar(
		url,
		checksum,
		pkg.TarGzip,
	), nil, (*MakeHelper)(nil),
		Zlib,
	), version
}
// init registers libpng in the global artifactsM metadata table under the
// Libpng preset constant, wiring its builder function and declared dependency
// on Zlib (which newLibpng also passes as a build input).
func init() {
	artifactsM[Libpng] = Metadata{
		f:           Toolchain.newLibpng,
		Name:        "libpng",
		Description: "the official PNG reference library",
		Website:     "https://www.libpng.org/pub/png/libpng.html",
		Dependencies: P{
			Zlib,
		},
		ID: 1705,
	}
}

View File

@@ -5,11 +5,10 @@ import "hakurei.app/internal/pkg"
func (t Toolchain) newLibxml2() (pkg.Artifact, string) {
const (
version = "2.15.3"
checksum = "oJy74htGlEpf70KPvpW18fYJo0RQQkCXZRwqUz6NoXborS3HCq3Nm4gsyaSeNmUH"
checksum = "oWkNe53c3d4Lt4OzrXPHBcOLHJ3TWqpa0x7B7bh_DyZ-uIMiplpdZjQRgRWVal2h"
)
return t.NewPackage("libxml2", version, newFromGitLab(
"gitlab.gnome.org",
"GNOME/libxml2",
return t.NewPackage("libxml2", version, t.newTagRemote(
"https://gitlab.gnome.org/GNOME/libxml2.git",
"v"+version, checksum,
), &PackageAttr{
// can't create shell.out: Read-only file system

View File

@@ -5,11 +5,10 @@ import "hakurei.app/internal/pkg"
func (t Toolchain) newLibxslt() (pkg.Artifact, string) {
const (
version = "1.1.45"
checksum = "67ks7v8od2oWaEGf23Sst_Xbn_8brQyolQjqxPoO-lK35k_WJhi2Px5JJgbk-nfn"
checksum = "MZc_dyUWpHChkWDKa5iycrECxBsRd4ZMbYfL4VojTbung593mlH2tHGmxYB6NFYT"
)
return t.NewPackage("libxslt", version, newFromGitLab(
"gitlab.gnome.org",
"GNOME/libxslt",
return t.NewPackage("libxslt", version, t.newTagRemote(
"https://gitlab.gnome.org/GNOME/libxslt.git",
"v"+version, checksum,
), nil, &MakeHelper{
Generate: "NOCONFIGURE=1 ./autogen.sh",

View File

@@ -1,61 +1,50 @@
package rosa
import (
"regexp"
"runtime"
"slices"
"strings"
import "hakurei.app/internal/pkg"
"hakurei.app/internal/pkg"
)
func init() {
artifactsM[llvmSource] = Metadata{
f: func(t Toolchain) (pkg.Artifact, string) {
return t.NewPatchedSource("llvm", llvmVersion, newFromGitHub(
"llvm/llvm-project",
"llvmorg-"+llvmVersion,
llvmChecksum,
), true, llvmPatches...), llvmVersion
},
// litArgs returns [LIT] arguments for optional verbosity and check skipping.
func litArgs(verbose bool, skipChecks ...string) string {
args := []string{"-sv"}
if verbose {
args[0] = "--verbose"
Name: "llvm-project",
Description: "LLVM monorepo with Rosa OS patches",
ID: 1830,
}
if len(skipChecks) > 0 {
skipChecks = slices.Clone(skipChecks)
for i, s := range skipChecks {
s = regexp.QuoteMeta(s)
s = strings.ReplaceAll(s, "/", "\\/")
skipChecks[i] = s
}
args = append(args,
"--filter-out='\\''"+strings.Join(skipChecks, "|")+"'\\''")
}
return "'" + strings.Join(args, " ") + "'"
}
func (t Toolchain) newEarlyCompilerRT() (pkg.Artifact, string) {
version := t.Version(llvmSource)
major, _, _ := strings.Cut(version, ".")
return t.NewPackage("early-compiler-rt", version, t.Load(llvmSource), &PackageAttr{
func (t Toolchain) newCompilerRT() (pkg.Artifact, string) {
muslHeaders, _ := t.newMusl(true)
return t.NewPackage("compiler-rt", llvmVersion, t.Load(llvmSource), &PackageAttr{
NonStage0: []pkg.Artifact{
muslHeaders,
},
Env: stage0ExclConcat(t, []string{},
"LDFLAGS="+earlyLDFLAGS(false),
),
Flag: TExclusive,
}, &CMakeHelper{
Append: []string{"compiler-rt"},
Cache: []KV{
{"ENABLE_LINKER_BUILD_ID", "ON"},
{"CMAKE_BUILD_TYPE", "Release"},
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
// libc++ not yet available
{"CMAKE_CXX_COMPILER_TARGET", ""},
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_ENABLE_RTTI", "ON"},
{"LLVM_BUILD_LLVM_DYLIB", "ON"},
{"LLVM_LINK_LLVM_DYLIB", "ON"},
{"LLVM_ENABLE_PER_TARGET_RUNTIME_DIR", "ON"},
{"COMPILER_RT_BUILD_BUILTINS", "ON"},
{"COMPILER_RT_DEFAULT_TARGET_ONLY", "ON"},
{"COMPILER_RT_USE_BUILTINS_LIBRARY", "ON"},
{"COMPILER_RT_DEFAULT_TARGET_ONLY", "OFF"},
{"COMPILER_RT_SANITIZERS_TO_BUILD", "asan"},
{"COMPILER_RT_BUILD_GWP_ASAN", "OFF"},
{"LLVM_ENABLE_PER_TARGET_RUNTIME_DIR", "ON"},
// does not work without libunwind
{"COMPILER_RT_BUILD_CTX_PROFILE", "OFF"},
@@ -64,12 +53,11 @@ func (t Toolchain) newEarlyCompilerRT() (pkg.Artifact, string) {
{"COMPILER_RT_BUILD_PROFILE", "OFF"},
{"COMPILER_RT_BUILD_XRAY", "OFF"},
},
SkipTest: true,
Script: `
mkdir -p "/work/system/lib/clang/` + major + `/lib/"
mkdir -p "/work/system/lib/clang/` + llvmVersionMajor + `/lib/"
ln -s \
"../../../${ROSA_TRIPLE}" \
"/work/system/lib/clang/` + major + `/lib/"
"/work/system/lib/clang/` + llvmVersionMajor + `/lib/"
ln -s \
"clang_rt.crtbegin-` + linuxArch() + `.o" \
@@ -81,16 +69,16 @@ ln -s \
},
Python,
muslHeaders,
KernelHeaders,
), version
), llvmVersion
}
func init() {
artifactsM[earlyCompilerRT] = Metadata{
f: Toolchain.newEarlyCompilerRT,
artifactsM[CompilerRT] = Metadata{
f: Toolchain.newCompilerRT,
Name: "early-compiler-rt",
Description: "early LLVM runtime: compiler-rt",
Name: "compiler-rt",
Description: "LLVM runtime: compiler-rt",
Website: "https://llvm.org/",
Dependencies: P{
Musl,
@@ -98,208 +86,110 @@ func init() {
}
}
func (t Toolchain) newEarlyRuntimes() (pkg.Artifact, string) {
version := t.Version(llvmSource)
return t.NewPackage("early-runtimes", version, t.Load(llvmSource), &PackageAttr{
func (t Toolchain) newLLVMRuntimes() (pkg.Artifact, string) {
return t.NewPackage("llvm-runtimes", llvmVersion, t.Load(llvmSource), &PackageAttr{
NonStage0: t.AppendPresets(nil, CompilerRT),
Env: stage0ExclConcat(t, []string{},
"LDFLAGS="+earlyLDFLAGS(false),
),
Flag: TExclusive,
}, &CMakeHelper{
Append: []string{"runtimes"},
Cache: []KV{
{"ENABLE_LINKER_BUILD_ID", "ON"},
{"CMAKE_BUILD_TYPE", "Release"},
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_ENABLE_RUNTIMES", "'libunwind;libcxx;libcxxabi'"},
{"LIBUNWIND_USE_COMPILER_RT", "ON"},
{"LIBCXX_HAS_MUSL_LIBC", "ON"},
{"LIBCXX_USE_COMPILER_RT", "ON"},
{"LIBCXXABI_USE_COMPILER_RT", "ON"},
{"LIBCXXABI_USE_LLVM_UNWINDER", "ON"},
// libc++ not yet available
{"CMAKE_CXX_COMPILER_WORKS", "ON"},
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_ENABLE_RTTI", "ON"},
{"LLVM_BUILD_LLVM_DYLIB", "ON"},
{"LLVM_LINK_LLVM_DYLIB", "ON"},
{"LLVM_ENABLE_RUNTIMES", "'libunwind;libcxx;libcxxabi'"},
{"LIBUNWIND_ENABLE_ASSERTIONS", "OFF"},
{"LIBUNWIND_USE_COMPILER_RT", "ON"},
{"LIBCXX_HAS_MUSL_LIBC", "ON"},
{"LIBCXX_USE_COMPILER_RT", "ON"},
{"LIBCXX_CXX_ABI", "libcxxabi"},
{"LIBCXX_ENABLE_STATIC_ABI_LIBRARY", "OFF"},
{"LIBCXX_HARDENING_MODE", "fast"},
{"LIBCXX_HAS_ATOMIC_LIB", "OFF"},
{"LIBCXXABI_USE_COMPILER_RT", "ON"},
{"LIBCXXABI_USE_LLVM_UNWINDER", "ON"},
{"LIBCXXABI_ENABLE_STATIC_UNWINDER", "OFF"},
{"LIBCXXABI_HAS_CXA_THREAD_ATEXIT_IMPL", "OFF"},
{"LLVM_ENABLE_ZLIB", "FORCE_ON"},
{"LLVM_ENABLE_ZSTD", "FORCE_ON"},
{"LLVM_ENABLE_ZLIB", "OFF"},
{"LLVM_ENABLE_ZSTD", "OFF"},
{"LLVM_ENABLE_LIBXML2", "OFF"},
},
SkipTest: true,
},
Python,
Zlib,
Zstd,
earlyCompilerRT,
KernelHeaders,
), version
), llvmVersion
}
func init() {
artifactsM[earlyRuntimes] = Metadata{
f: Toolchain.newEarlyRuntimes,
artifactsM[LLVMRuntimes] = Metadata{
f: Toolchain.newLLVMRuntimes,
Name: "early-runtimes",
Description: "early LLVM runtimes: libunwind, libcxx, libcxxabi",
Name: "llvm-runtimes",
Description: "LLVM runtimes: libunwind, libcxx, libcxxabi",
Website: "https://llvm.org/",
Dependencies: P{
earlyCompilerRT,
CompilerRT,
},
}
}
func (t Toolchain) newLLVM() (pkg.Artifact, string) {
var early PArtifact = muslHeaders
func (t Toolchain) newClang() (pkg.Artifact, string) {
target := "'AArch64;RISCV;X86'"
if t.isStage0() {
// The LLVM build system uses the system installation when building with
// LLVM_LINK_LLVM_DYLIB, since it builds runtimes after the fact, using
// the just-built toolchain. This is unacceptable in stage0 due to the
// potential version difference. Later stages bootstrap off of runtimes
// of its previous stage via 3-stage determinism.
early = earlyRuntimes
target = "Native"
}
cache := []KV{
{"ENABLE_LINKER_BUILD_ID", "ON"},
{"COMPILER_RT_USE_BUILTINS_LIBRARY", "ON"},
{"COMPILER_RT_DEFAULT_TARGET_ONLY", "ON"},
{"COMPILER_RT_BUILD_GWP_ASAN", "OFF"},
{"LIBCXX_CXX_ABI", "libcxxabi"},
{"LIBCXX_USE_COMPILER_RT", "ON"},
{"LIBCXX_ENABLE_STATIC_ABI_LIBRARY", "OFF"},
{"LIBCXX_HAS_MUSL_LIBC", "ON"},
{"LIBCXX_HARDENING_MODE", "fast"},
{"LIBCXXABI_USE_LLVM_UNWINDER", "ON"},
{"LIBCXXABI_ENABLE_STATIC_UNWINDER", "OFF"},
{"LIBCXXABI_USE_COMPILER_RT", "ON"},
{"LLVM_INSTALL_BINUTILS_SYMLINKS", "ON"},
{"LLVM_INSTALL_UTILS", "ON"},
{"LLVM_BUILD_LLVM_DYLIB", "ON"},
{"LLVM_LINK_LLVM_DYLIB", "ON"},
{"LLVM_APPEND_VC_REV", "OFF"},
{"LLVM_ENABLE_RTTI", "ON"},
{"LLVM_ENABLE_ZLIB", "FORCE_ON"},
{"LLVM_ENABLE_ZSTD", "FORCE_ON"},
{"LLVM_ENABLE_PER_TARGET_RUNTIME_DIR", "ON"},
{"LLVM_INCLUDE_BENCHMARKS", "OFF"},
{"CLANG_DEFAULT_RTLIB", "compiler-rt"},
{"CLANG_DEFAULT_UNWINDLIB", "libunwind"},
{"CLANG_DEFAULT_CXX_STDLIB", "libc++"},
{"CLANG_CONFIG_FILE_SYSTEM_DIR", "/system/etc/clang"},
{"LLVM_ENABLE_FFI", "OFF"},
{"LLVM_ENABLE_LIBXML2", "OFF"},
{"LLVM_ENABLE_LIBCXX", "ON"},
{"LLVM_ENABLE_LLD", "ON"},
{"LIBUNWIND_ENABLE_ASSERTIONS", "OFF"},
{"LIBUNWIND_USE_COMPILER_RT", "ON"},
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_ENABLE_PROJECTS", "'" + strings.Join([]string{
"clang",
"lld",
}, ";") + "'"},
{"LLVM_ENABLE_RUNTIMES", "'" + strings.Join([]string{
"compiler-rt",
"libcxx",
"libcxxabi",
"libunwind",
"libclc",
}, ";") + "'"},
}
if !t.isStage0() {
skipChecks := []string{
// expensive, pointless to run here
"benchmarks",
// LLVM ERROR: Tried to execute an unknown external function: roundevenf
"ExecutionEngine/Interpreter/intrinsics.ll",
// clang: deadlocks with LLVM_BUILD_LLVM_DYLIB
"crash-recovery-modules",
// clang: fatal error: '__config_site' file not found
"CodeGen/PowerPC/ppc-xmmintrin.c",
"CodeGen/PowerPC/ppc-mmintrin.c",
"CodeGen/PowerPC/ppc-emmintrin.c",
"CodeGen/PowerPC/ppc-pmmintrin.c",
"CodeGen/PowerPC/ppc-tmmintrin.c",
"CodeGen/PowerPC/ppc-smmintrin.c",
"CodeGenCUDA/amdgpu-alias-undef-symbols.cu",
// cxx: fails on musl
"close.dont-get-rid-of-buffer",
"re/re.traits",
"std/time",
"localization/locales",
"localization/locale.categories",
"selftest/dsl/dsl.sh.py",
"input.output/iostream.format",
"locale-specific_form",
// cxx: deadlocks
"std/thread/thread.jthread",
// unwind: fails on musl
"eh_frame_fde_pc_range",
}
switch runtime.GOARCH {
case "arm64":
skipChecks = append(skipChecks,
// LLVM: intermittently crashes
"ExecutionEngine/OrcLazy/multiple-compile-threads-basic.ll",
// unwind: unexpectedly passes
"unwind_leaffunction",
)
}
if presetOpts&OptLLVMNoLTO == 0 {
cache = append(cache, []KV{
// very expensive
{"LLVM_ENABLE_LTO", "Thin"},
}...)
}
cache = append(cache, []KV{
// symbols: clock_gettime, mallopt
{"COMPILER_RT_INCLUDE_TESTS", "OFF"},
{"LLVM_BUILD_TESTS", "ON"},
{"LLVM_LIT_ARGS", litArgs(true, skipChecks...)},
}...)
}
version := t.Version(llvmSource)
return t.NewPackage("llvm", version, t.Load(llvmSource), nil, &CMakeHelper{
return t.NewPackage("clang", llvmVersion, t.Load(llvmSource), &PackageAttr{
NonStage0: t.AppendPresets(nil, LLVMRuntimes),
Env: stage0ExclConcat(t, []string{},
"CFLAGS="+earlyCFLAGS,
"CXXFLAGS="+earlyCXXFLAGS(),
"LDFLAGS="+earlyLDFLAGS(false),
),
Flag: TExclusive,
}, &CMakeHelper{
Append: []string{"llvm"},
Cache: cache,
Cache: []KV{
{"CMAKE_BUILD_TYPE", "Release"},
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
{"LLVM_ENABLE_PROJECTS", "'clang;lld'"},
{"LLVM_ENABLE_LIBCXX", "ON"},
{"LLVM_USE_LINKER", "lld"},
{"LLVM_INSTALL_BINUTILS_SYMLINKS", "ON"},
{"LLVM_INSTALL_CCTOOLS_SYMLINKS", "ON"},
{"LLVM_LIT_ARGS", "'--verbose'"},
{"CLANG_DEFAULT_LINKER", "lld"},
{"CLANG_DEFAULT_CXX_STDLIB", "libc++"},
{"CLANG_DEFAULT_RTLIB", "compiler-rt"},
{"CLANG_DEFAULT_UNWINDLIB", "libunwind"},
{"LLVM_TARGETS_TO_BUILD", target},
{"CMAKE_CROSSCOMPILING", "OFF"},
{"CXX_SUPPORTS_CUSTOM_LINKER", "ON"},
{"LLVM_ENABLE_ZLIB", "OFF"},
{"LLVM_ENABLE_ZSTD", "OFF"},
{"LLVM_ENABLE_LIBXML2", "OFF"},
},
Script: `
ln -s ld.lld /work/system/bin/ld
ln -s clang /work/system/bin/cc
ln -s clang /work/system/bin/cpp
ln -s clang++ /work/system/bin/c++
`,
// LLVM_LINK_LLVM_DYLIB causes llvm test suite to leak system
// installation into test environment, and the tests end up testing the
// system installation instead. Tests are disabled on stage0 and relies
// on 3-stage determinism to test later stages.
SkipTest: t.isStage0(),
Test: `
chmod +w /bin && ln -s \
../system/bin/chmod \
../system/bin/mkdir \
../system/bin/rm \
../system/bin/tr \
../system/bin/awk \
/bin
ninja ` + jobsFlagE + ` check-all
`,
},
@@ -311,44 +201,44 @@ ninja ` + jobsFlagE + ` check-all
Coreutils,
Findutils,
Zlib,
Zstd,
early,
KernelHeaders,
), version
), llvmVersion
}
func init() {
const (
version = "22.1.4"
checksum = "Bk3t-tV5sD5T0bqefFMcLeFuAwXnhFipywZmqst5hAZs97QQWGKB_5XyAFjj5tDB"
)
artifactsM[Clang] = Metadata{
f: Toolchain.newClang,
artifactsM[llvmSource] = Metadata{
f: func(t Toolchain) (pkg.Artifact, string) {
return t.NewPatchedSource("llvm", version, newFromGitHub(
"llvm/llvm-project",
"llvmorg-"+version,
checksum,
), true, llvmPatches...), version
},
Name: "llvm-project",
Description: "LLVM monorepo with Rosa OS patches",
ID: 1830,
}
artifactsM[LLVM] = Metadata{
f: Toolchain.newLLVM,
Name: "llvm",
Description: "a collection of modular and reusable compiler and toolchain technologies",
Website: "https://llvm.org",
Name: "clang",
Description: `an "LLVM native" C/C++/Objective-C compiler`,
Website: "https://llvm.org/",
Dependencies: P{
Zlib,
Zstd,
Musl,
LLVMRuntimes,
},
}
}
// newLibclc builds libclc from the LLVM monorepo source and returns the
// resulting artifact together with the LLVM version string it was built at.
func (t Toolchain) newLibclc() (pkg.Artifact, string) {
	// Standalone CMake build rooted at the libclc subdirectory of the
	// llvm-project source tree.
	helper := &CMakeHelper{
		Append: []string{"libclc"},
		Cache: []KV{
			{"CMAKE_BUILD_TYPE", "Release"},
			{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
			{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
			{"LIBCLC_TARGETS_TO_BUILD", "all"},
		},
		// Post-install step: run the libclc "test" ninja target.
		Script: "ninja " + jobsFlagE + " test",
	}
	return t.NewPackage("libclc", llvmVersion, t.Load(llvmSource), nil, helper), llvmVersion
}
// Registers [Metadata] for the Libclc artifact.
func init() {
	artifactsM[Libclc] = Metadata{
		f:           Toolchain.newLibclc,
		Name:        "libclc",
		Description: "an open source, BSD/MIT dual licensed implementation of the library requirements of the OpenCL C programming language",
		Website:     "https://libclc.llvm.org/",
	}
}

View File

@@ -0,0 +1,9 @@
package rosa
// latest known-good version of LLVM; pinned here so that broken new releases can be temporarily avoided
const (
	// llvmVersionMajor is the major release series of LLVM in use.
	llvmVersionMajor = "22"
	// llvmVersion is the full pinned LLVM release version.
	llvmVersion = llvmVersionMajor + ".1.3"
	// llvmChecksum presumably verifies the fetched llvm-project source
	// archive — confirm encoding/derivation at the use site (not visible
	// in this file).
	llvmChecksum = "CUwnpzua_y28HZ9oI0NmcKL2wClsSjFpgY9do5-7cCZJHI5KNF64vfwGvY0TYyR3"
)

View File

@@ -90,26 +90,11 @@ index 8ac8d4eb9181..e46b04a898ca 100644
addSystemInclude(DriverArgs, CC1Args, ResourceDirInclude);
`},
{"path-system-libraries", `diff --git a/clang/lib/Driver/ToolChains/Gnu.cpp b/clang/lib/Driver/ToolChains/Gnu.cpp
index cb6a9b242421..b8d31690d1af 100644
--- a/clang/lib/Driver/ToolChains/Gnu.cpp
+++ b/clang/lib/Driver/ToolChains/Gnu.cpp
@@ -2314,6 +2314,10 @@ void Generic_GCC::GCCInstallationDetector::AddDefaultGCCPrefixes(
Prefixes.push_back("/opt/rh/devtoolset-2/root/usr");
}
+ if (TargetTriple.getVendor() == llvm::Triple::Rosa) {
+ Prefixes.push_back(concat(SysRoot, "/system"));
+ }
+
// Fall back to /usr which is used by most non-Solaris systems.
Prefixes.push_back(concat(SysRoot, "/usr"));
}
diff --git a/clang/lib/Driver/ToolChains/Linux.cpp b/clang/lib/Driver/ToolChains/Linux.cpp
index d525b417b4ea..2b93f401733e 100644
{"path-system-libraries", `diff --git a/clang/lib/Driver/ToolChains/Linux.cpp b/clang/lib/Driver/ToolChains/Linux.cpp
index 8ac8d4eb9181..f4d1347ab64d 100644
--- a/clang/lib/Driver/ToolChains/Linux.cpp
+++ b/clang/lib/Driver/ToolChains/Linux.cpp
@@ -302,6 +302,7 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
@@ -282,6 +282,7 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
const bool IsHexagon = Arch == llvm::Triple::hexagon;
const bool IsRISCV = Triple.isRISCV();
const bool IsCSKY = Triple.isCSKY();
@@ -117,7 +102,18 @@ index d525b417b4ea..2b93f401733e 100644
if (IsCSKY && !SelectedMultilibs.empty())
SysRoot = SysRoot + SelectedMultilibs.back().osSuffix();
@@ -341,8 +342,12 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
@@ -318,12 +319,23 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
const std::string OSLibDir = std::string(getOSLibDir(Triple, Args));
const std::string MultiarchTriple = getMultiarchTriple(D, Triple, SysRoot);
+ if (IsRosa) {
+ ExtraOpts.push_back("-rpath");
+ ExtraOpts.push_back("/system/lib");
+ ExtraOpts.push_back("-rpath");
+ ExtraOpts.push_back(concat("/system/lib", MultiarchTriple));
+ }
+
// mips32: Debian multilib, we use /libo32, while in other case, /lib is
// used. We need add both libo32 and /lib.
if (Arch == llvm::Triple::mips || Arch == llvm::Triple::mipsel) {
Generic_GCC::AddMultilibPaths(D, SysRoot, "libo32", MultiarchTriple, Paths);
@@ -132,7 +128,7 @@ index d525b417b4ea..2b93f401733e 100644
}
Generic_GCC::AddMultilibPaths(D, SysRoot, OSLibDir, MultiarchTriple, Paths);
@@ -360,18 +365,30 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
@@ -341,18 +353,30 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
Paths);
}
@@ -168,7 +164,7 @@ index d525b417b4ea..2b93f401733e 100644
}
ToolChain::RuntimeLibType Linux::GetDefaultRuntimeLibType() const {
@@ -572,6 +589,9 @@ std::string Linux::getDynamicLinker(const ArgList &Args) const {
@@ -457,6 +481,9 @@ std::string Linux::getDynamicLinker(const ArgList &Args) const {
return Triple.isArch64Bit() ? "/system/bin/linker64" : "/system/bin/linker";
}
if (Triple.isMusl()) {
@@ -178,5 +174,18 @@ index d525b417b4ea..2b93f401733e 100644
std::string ArchName;
bool IsArm = false;
diff --git a/clang/tools/clang-installapi/Options.cpp b/clang/tools/clang-installapi/Options.cpp
index 64324a3f8b01..15ce70b68217 100644
--- a/clang/tools/clang-installapi/Options.cpp
+++ b/clang/tools/clang-installapi/Options.cpp
@@ -515,7 +515,7 @@ bool Options::processFrontendOptions(InputArgList &Args) {
FEOpts.FwkPaths = std::move(FrameworkPaths);
// Add default framework/library paths.
- PathSeq DefaultLibraryPaths = {"/usr/lib", "/usr/local/lib"};
+ PathSeq DefaultLibraryPaths = {"/usr/lib", "/system/lib", "/usr/local/lib"};
PathSeq DefaultFrameworkPaths = {"/Library/Frameworks",
"/System/Library/Frameworks"};
`},
}

View File

@@ -19,7 +19,7 @@ cd "$(mktemp -d)"
--build="${ROSA_TRIPLE}" \
--disable-dependency-tracking
./build.sh
./make DESTDIR=/work install
./make DESTDIR=/work install check
`, pkg.Path(AbsUsrSrc.Append("make"), false, newTar(
"https://ftpmirror.gnu.org/gnu/make/make-"+version+".tar.gz",
checksum,
@@ -194,7 +194,7 @@ make \
}
scriptMake += "\n"
if !attr.SkipCheck && presetOpts&OptSkipCheck == 0 {
if !attr.SkipCheck {
scriptMake += attr.ScriptCheckEarly + `make \
` + jobsFlagE + ` \
`

View File

@@ -30,8 +30,8 @@ func init() {
func (t Toolchain) newLibdrm() (pkg.Artifact, string) {
const (
version = "2.4.133"
checksum = "bfj296NcR9DndO11hqDbSRFPqaweSLMqRk3dlCPZpM6FONX1WZ9J4JdbTDMUd1rU"
version = "2.4.131"
checksum = "riHPSpvTnvCPbR-iT4jt7_X-z4rpwm6oNh9ZN2zP6RBFkFVxBRKmedG4eEXSADIh"
)
return t.NewPackage("libdrm", version, newFromGitLab(
"gitlab.freedesktop.org",

View File

@@ -9,8 +9,8 @@ import (
func (t Toolchain) newMeson() (pkg.Artifact, string) {
const (
version = "1.11.1"
checksum = "uvILRxdopwc6Dy17UbIeClcQr0qHqyTaqyk1M9OqWKN9PwB9N6UVAiyN8kSSz3r2"
version = "1.11.0"
checksum = "QJolMPzypTiS65GReSNPPlkUjHI6b1EDpZ-avIk3n6b6TQ93KfUM57DVUpY97Hf7"
)
return t.NewPackage("meson", version, newFromGitHub(
"mesonbuild/meson",
@@ -22,7 +22,7 @@ func (t Toolchain) newMeson() (pkg.Artifact, string) {
},
}, &PipHelper{
EnterSource: true,
Check: `
Script: `
cd 'test cases'
rm -rf \
'common/32 has header' \
@@ -44,7 +44,7 @@ python3 ./run_project_tests.py \
--backend=ninja
`,
},
PythonSetuptools,
Setuptools,
PkgConfig,
CMake,
Ninja,
@@ -117,7 +117,7 @@ func (attr *MesonHelper) script(name string) string {
}
var scriptTest string
if !attr.SkipTest && presetOpts&OptSkipCheck == 0 {
if !attr.SkipTest {
scriptTest = `
meson test \
--print-errorlogs`
@@ -128,7 +128,7 @@ cd "$(mktemp -d)"
meson setup \
` + strings.Join(slices.Collect(func(yield func(string) bool) {
for _, v := range append([]KV{
{"wrap-mode", "nofallback"},
{"wrap-mode", "nodownload"},
{"prefix", "/system"},
{"buildtype", "release"},
}, attr.Setup...) {

View File

@@ -7,9 +7,9 @@ func (t Toolchain) newMksh() (pkg.Artifact, string) {
version = "59c"
checksum = "0Zj-k4nXEu3IuJY4lvwD2OrC2t27GdZj8SPy4DoaeuBRH1padWb7oREpYgwY8JNq"
)
return t.New("mksh-"+version, 0, t.AppendPresets(nil,
Perl,
Coreutils,
return t.New("mksh-"+version, 0, stage0Concat(t, []pkg.Artifact{},
t.Load(Perl),
t.Load(Coreutils),
), nil, []string{
"LDSTATIC=-static",
"CPPFLAGS=-DMKSH_DEFAULT_PROFILEDIR=\\\"/system/etc\\\"",

View File

@@ -2,7 +2,10 @@ package rosa
import "hakurei.app/internal/pkg"
func (t Toolchain) newMusl(headers bool) (pkg.Artifact, string) {
func (t Toolchain) newMusl(
headers bool,
extra ...pkg.Artifact,
) (pkg.Artifact, string) {
const (
version = "1.2.6"
checksum = "WtWb_OV_XxLDAB5NerOL9loLlHVadV00MmGk65PPBU1evaolagoMHfvpZp_vxEzS"
@@ -33,43 +36,25 @@ rmdir -v /work/lib
helper.Script = ""
}
env := []string{
"LDFLAGS=" + earlyLDFLAGS(false),
}
if t.isStage0() {
env = append(env,
"CC=clang",
"AR=ar",
"RANLIB=ranlib",
)
}
return t.NewPackage(name, version, newTar(
"https://musl.libc.org/releases/musl-"+version+".tar.gz",
checksum,
pkg.TarGzip,
), &PackageAttr{
NonStage0: extra,
// expected to be writable in copies
Chmod: true,
Env: env,
Patches: []KV{
{"ldso-rosa", `diff --git a/ldso/dynlink.c b/ldso/dynlink.c
index 715948f4..c2fece68 100644
--- a/ldso/dynlink.c
+++ b/ldso/dynlink.c
@@ -1157,7 +1157,7 @@ static struct dso *load_library(const char *name, struct dso *needed_by)
sys_path = "";
}
}
- if (!sys_path) sys_path = "/lib:/usr/local/lib:/usr/lib";
+ if (!sys_path) sys_path = "/system/lib:/system/lib/" LDSO_ARCH "-rosa-linux-musl:/lib:/usr/local/lib:/usr/lib";
fd = path_open(name, sys_path, buf, sizeof buf);
}
pathname = buf;
`},
Env: stage0ExclConcat(t, []string{
"CC=clang",
"LIBCC=/system/lib/clang/" + llvmVersionMajor + "/lib/" +
triplet() + "/libclang_rt.builtins.a",
"AR=ar",
"RANLIB=ranlib",
},
"LDFLAGS="+earlyLDFLAGS(false),
),
}, &helper,
Coreutils,
), version
@@ -77,7 +62,7 @@ index 715948f4..c2fece68 100644
func init() {
artifactsM[Musl] = Metadata{
f: func(t Toolchain) (pkg.Artifact, string) {
return t.newMusl(false)
return t.newMusl(false, t.Load(CompilerRT))
},
Name: "musl",
@@ -86,13 +71,4 @@ func init() {
ID: 11688,
}
artifactsM[muslHeaders] = Metadata{
f: func(t Toolchain) (pkg.Artifact, string) {
return t.newMusl(true)
},
Name: "musl-headers",
Description: "system installation of musl headers",
}
}

View File

@@ -8,8 +8,8 @@ import (
func (t Toolchain) newNSS() (pkg.Artifact, string) {
const (
version = "3.123.1"
checksum = "g811Z_fc74ssg-s6BeXRG-ipSfJggD6hrxjVJxrOBIz98CE7piv0OLwzIRLMQpwR"
version = "3.123"
checksum = "pwBz0FO8jmhejPblfzNQLGsqBBGT0DwAw-z9yBJH3V3hVJBMKSc1l0R8GC0_BnzF"
version0 = "4_38_2"
checksum0 = "25x2uJeQnOHIiq_zj17b4sYqKgeoU8-IsySUptoPcdHZ52PohFZfGuIisBreWzx0"
@@ -82,24 +82,30 @@ func init() {
}
}
func init() {
func (t Toolchain) newBuildCATrust() (pkg.Artifact, string) {
const (
version = "0.5.1"
checksum = "oxjnuIrPVMPvD6x8VFLqB7EdbfuhouGQdtPuHDpEHGzoyH5nkxqtYN9UthMY9noA"
)
artifactsM[buildcatrust] = newPythonPackage(
"buildcatrust", 233988,
"transform certificate stores between formats",
"https://github.com/nix-community/buildcatrust",
version, newFromGitHub(
"nix-community/buildcatrust",
"v"+version, checksum,
), &PackageAttr{
ScriptEarly: `
rm buildcatrust/tests/test_nonhermetic.py
`,
}, nil, P{PythonFlitCore},
checksum = "g9AqIksz-hvCUceSR7ZKwfqf8Y_UsJU_3_zLUIdc4IkxFVkgdv9kKVvhFjE4s1-7"
)
return t.newViaPip("buildcatrust", version,
"https://github.com/nix-community/buildcatrust/releases/"+
"download/v"+version+"/buildcatrust-"+version+"-py3-none-any.whl",
checksum), version
}
// Registers [Metadata] for the buildcatrust artifact.
func init() {
	artifactsM[buildcatrust] = Metadata{
		f:           Toolchain.newBuildCATrust,
		Name:        "buildcatrust",
		Description: "transform certificate stores between formats",
		Website:     "https://github.com/nix-community/buildcatrust",
		// Installed as a prebuilt wheel via pip (see newBuildCATrust), so
		// only the interpreter itself is required.
		Dependencies: P{
			Python,
		},
		ID: 233988,
	}
}
func (t Toolchain) newNSSCACert() (pkg.Artifact, string) {

View File

@@ -23,7 +23,7 @@ func (t Toolchain) newPerl() (pkg.Artifact, string) {
ScriptEarly: `
echo 'print STDOUT "1..0 # Skip broken test\n";' > ext/Pod-Html/t/htmldir3.t
chmod +w /system/bin && rm -f /system/bin/ps # perl does not like toybox ps
rm -f /system/bin/ps # perl does not like toybox ps
`,
Flag: TEarly,

View File

@@ -1,6 +1,7 @@
package rosa
import (
"path"
"slices"
"strings"
@@ -22,32 +23,6 @@ func (t Toolchain) newPython() (pkg.Artifact, string) {
Writable: true,
Chmod: true,
Patches: []KV{
{"zipfile-no-default-strict_timestamps", `diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py
index 19aea290b58..51603ba9510 100644
--- a/Lib/zipfile/__init__.py
+++ b/Lib/zipfile/__init__.py
@@ -617,7 +617,7 @@ def _decodeExtra(self, filename_crc):
extra = extra[ln+4:]
@classmethod
- def from_file(cls, filename, arcname=None, *, strict_timestamps=True):
+ def from_file(cls, filename, arcname=None, *, strict_timestamps=False):
"""Construct an appropriate ZipInfo for a file on the filesystem.
filename should be the path to a file or directory on the filesystem.
@@ -1412,7 +1412,7 @@ class ZipFile:
_windows_illegal_name_trans_table = None
def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=True,
- compresslevel=None, *, strict_timestamps=True, metadata_encoding=None):
+ compresslevel=None, *, strict_timestamps=False, metadata_encoding=None):
"""Open the ZIP file with mode read 'r', write 'w', exclusive create 'x',
or append 'a'."""
if mode not in ('r', 'w', 'x', 'a'):
`},
},
Env: []string{
"EXTRATESTOPTS=-j0 -x " + strings.Join([]string{
// requires internet access (http://www.pythontest.net/)
@@ -61,7 +36,7 @@ index 19aea290b58..51603ba9510 100644
"test_os",
"test_subprocess",
// patched out insane strict_timestamps default
// somehow picks up mtime of source code
"test_zipfile",
// requires gcc
@@ -108,31 +83,18 @@ func init() {
// PipHelper is the [Python] pip packaging helper.
type PipHelper struct {
// Path elements joined with source.
Append []string
// Whether to omit --no-build-isolation.
BuildIsolation bool
// Whether to enter source after install.
EnterSource bool
// Whether to install to build environment after install.
Install bool
// Whether to skip running tests.
SkipCheck bool
// Replaces pytest if non-empty.
Check string
// Runs after install.
Script string
}
var _ Helper = new(PipHelper)
// extra returns python, or pytest if defaults are assumed.
func (attr *PipHelper) extra(int) P {
if attr == nil || (!attr.SkipCheck && attr.Check == "") {
return P{PythonPyTest}
}
return P{Python}
}
// extra returns python.
func (*PipHelper) extra(int) P { return P{Python} }
// wantsChmod returns true.
func (*PipHelper) wantsChmod() bool { return true }
@@ -154,7 +116,6 @@ func (attr *PipHelper) script(name string) string {
if attr == nil {
attr = new(PipHelper)
}
sourcePath := AbsUsrSrc.Append(name).Append(attr.Append...)
var extra string
if !attr.BuildIsolation {
@@ -162,56 +123,61 @@ func (attr *PipHelper) script(name string) string {
--no-build-isolation \`
}
var script string
if attr.Install {
script += `pip3 install \
--no-index \
--prefix=/system \
--no-build-isolation \
'` + sourcePath.String() + `'
`
}
script := attr.Script
if attr.EnterSource {
script += "cd '/usr/src/" + name + "'\n"
script = "cd '/usr/src/" + name + "'\n" + script
}
if !attr.SkipCheck {
if attr.Check == "" {
// some test suites fall apart when ran out-of-tree
script += "(cd '" + sourcePath.String() + "' && pytest)\n"
} else {
script += attr.Check
}
}
script += attr.Script
return `
pip3 install \
--no-index \
--prefix=/system \
--root=/work \` + extra + `
'` + sourcePath.String() + `'
'/usr/src/` + name + `'
` + script
}
// newPythonPackage creates [Metadata] for a [Python] package.
func newPythonPackage(
name string, id int, description, website, version string,
source pkg.Artifact, attrP *PackageAttr, attr *PipHelper,
build P, extra ...PArtifact,
// newViaPip creates an artifact that installs a prebuilt pip wheel fetched
// from url and verified against checksum (decoded via mustDecode).
//
// The wheel is staged at /usr/src/<wheel filename> through an HTTP fetch and
// installed offline (--no-index) with /system as the prefix, rooted at
// /work. Python is always included as a preset; extra lists additional
// preset artifacts made available during installation.
func (t Toolchain) newViaPip(
	name, version, url, checksum string,
	extra ...PArtifact,
) pkg.Artifact {
	return t.New(name+"-"+version, 0, t.AppendPresets(nil,
		slices.Concat(P{Python}, extra)...,
	), nil, nil, `
pip3 install \
--no-index \
--prefix=/system \
--root=/work \
'/usr/src/`+path.Base(url)+`'
`, pkg.Path(AbsUsrSrc.Append(path.Base(url)), false, pkg.NewHTTPGet(
		nil, url,
		mustDecode(checksum),
	)))
}
// newPypi creates [Metadata] for a [pypi] package.
//
// [pypi]: https://pypi.org/
func newPypi(
name string, id int,
description, version, interpreter, abi, platform, checksum string,
extra ...PArtifact,
) Metadata {
name = "python-" + name
return Metadata{
f: func(t Toolchain) (pkg.Artifact, string) {
return t.NewPackage(name, version, source, attrP, attr, slices.Concat(
P{Python},
extra,
build,
)...), version
return t.newViaPip(name, version, "https://files.pythonhosted.org/"+path.Join(
"packages",
interpreter,
string(name[0]),
name,
name+"-"+version+"-"+interpreter+"-"+abi+"-"+platform+".whl",
), checksum, extra...), version
},
Name: name,
Name: "python-" + name,
Description: description,
Website: website,
Website: "https://pypi.org/project/" + name + "/",
Dependencies: slices.Concat(P{Python}, extra),
@@ -219,389 +185,100 @@ func newPythonPackage(
}
}
func init() {
const (
version = "0.47.0"
checksum = "HZ-MvkUP8mbbx2YmsRNswj_bbOCIiXckuHqL5Qbvb5NxN5DYfWnqwkGNyS7OrId0"
)
artifactsM[PythonWheel] = newPythonPackage(
"wheel", 11428,
"the official binary distribution format for Python",
"https://peps.python.org/pep-0427/",
version, newFromGitHub(
"pypa/wheel",
version, checksum,
), nil, &PipHelper{
Install: true,
}, P{PythonFlitCore, PythonSetuptools},
)
}
func init() {
func (t Toolchain) newSetuptools() (pkg.Artifact, string) {
const (
version = "82.0.1"
checksum = "nznP46Tj539yqswtOrIM4nQgwLA1h-ApKX7z7ghazROCpyF5swtQGwsZoI93wkhc"
)
artifactsM[PythonSetuptools] = newPythonPackage(
"setuptools", 4021,
"the autotools of the Python ecosystem",
"https://pypi.org/project/setuptools/",
version, newFromGitHub(
"pypa/setuptools",
"v"+version, checksum,
), nil, &PipHelper{
// error: invalid command 'dist_info'
BuildIsolation: true,
// pytest circular dependency
SkipCheck: true,
}, nil)
return t.NewPackage("setuptools", version, newFromGitHub(
"pypa/setuptools",
"v"+version, checksum,
), nil, &PipHelper{
BuildIsolation: true,
}), version
}
func init() {
const (
version = "1.1.1"
checksum = "rXZixTsZcRcIoUC1LvWrjySsiXSv5uhW6ng2P-yXZrbdj7FrSrDeJLCfC2b-ladV"
)
artifactsM[PythonVCSVersioning] = newPythonPackage(
"vcs-versioning", 389421,
"core VCS versioning functionality extracted as a standalone library",
"https://setuptools-scm.readthedocs.io/en/latest/",
version, newFromGitHub(
"pypa/setuptools-scm",
"vcs-versioning-v"+version, checksum,
), &PackageAttr{
Env: []string{
"SETUPTOOLS_SCM_PRETEND_VERSION=" + version,
},
}, &PipHelper{
// upstream is monorepo of two packages (setuptools-scm)
Append: []string{"vcs-versioning"},
// pytest circular dependency
SkipCheck: true,
}, nil,
PythonSetuptools,
PythonPackaging,
)
}
artifactsM[Setuptools] = Metadata{
f: Toolchain.newSetuptools,
func init() {
const (
version = "10.0.5"
checksum = "vTN_TPd-b4Wbsw5WmAcsWjrs-FNXXznOeVTDnb54NtXve9Oy-eb2HPy-RG3FzNqp"
)
artifactsM[PythonSetuptoolsSCM] = newPythonPackage(
"setuptools-scm", 7874,
"extracts Python package versions from Git or Mercurial metadata",
"https://setuptools-scm.readthedocs.io/en/latest/",
version, newFromGitHub(
"pypa/setuptools-scm",
"setuptools-scm-v"+version, checksum,
), &PackageAttr{
Env: []string{
"SETUPTOOLS_SCM_PRETEND_VERSION=" + version,
},
}, &PipHelper{
// upstream is monorepo of two packages
Append: []string{"setuptools-scm"},
// pytest circular dependency
SkipCheck: true,
}, nil,
PythonSetuptools,
PythonVCSVersioning,
)
}
func init() {
const (
version = "3.12.0"
checksum = "VcTsiGiDU1aPLbjSPe38f9OjJDCLcxFz9loObJqUI1ZxDHXAaQMxBpNyLz_G1Rff"
)
artifactsM[PythonFlitCore] = newPythonPackage(
"flit-core", 44841,
"a PEP 517 build backend for packages using Flit",
"https://flit.pypa.io/",
version, newFromGitHub(
"pypa/flit",
version, checksum,
), nil, &PipHelper{
// upstream has other unused packages with many dependencies
Append: []string{"flit_core"},
// pytest circular dependency
SkipCheck: true,
}, nil,
)
}
func init() {
const (
version = "26.2"
checksum = "rdpGa2EkPFbj1mFtLKLnSwIX9gPfELcuneiICjRVDNw6By49szTFVoW8gtMMZ6ZS"
)
artifactsM[PythonPackaging] = newPythonPackage(
"packaging", 60461,
"reusable core utilities for various Python Packaging interoperability specifications",
"https://packaging.pypa.io/",
version, newFromGitHub(
"pypa/packaging",
version, checksum,
), nil, &PipHelper{
// pytest circular dependency
SkipCheck: true,
}, P{PythonFlitCore},
)
}
func init() {
artifactsM[LIT] = Metadata{
f: func(t Toolchain) (pkg.Artifact, string) {
version := t.Version(LLVM)
return t.NewPackage("lit", version, t.Load(llvmSource), nil, &PipHelper{
Append: []string{"llvm", "utils", "lit"},
// already checked during llvm
SkipCheck: true,
},
PythonSetuptools,
), version
},
Name: "lit",
Description: "a portable tool for executing LLVM and Clang style test suites",
Website: "https://llvm.org/docs/CommandGuide/lit.html",
Name: "python-setuptools",
Description: "the autotools of the Python ecosystem",
Website: "https://pypi.org/project/setuptools/",
Dependencies: P{
Python,
},
ID: 4021,
}
}
func init() {
const (
version = "1.1.1"
checksum = "1fVwoal6FoKXczoG3qRUi87TxSWESSGcgvnbEZDYuaOgsO25o36iF3SbAhwkr4Va"
artifactsM[PythonPygments] = newPypi(
"pygments", 3986,
" a syntax highlighting package written in Python",
"2.20.0", "py3", "none", "any",
"qlyqX2YSXcV0Z8XgGaPttc_gkq-xsu_nYs6NFOcYnk-CX7qmcj45gG-h6DpwPIcO",
)
artifactsM[PythonPathspec] = newPythonPackage(
"pathspec", 23424,
"utility library for gitignore style pattern matching of file paths",
"https://github.com/cpburnz/python-pathspec",
version, newFromGitHub(
"cpburnz/python-pathspec",
"v"+version, checksum,
), nil, &PipHelper{
// pytest circular dependency
SkipCheck: true,
}, P{PythonFlitCore},
)
}
func init() {
const (
version = "2026.4.28.13"
checksum = "Z3MbmMXtmWHCM3-EvJehb9MzDqX7Ce_Xg86D5g5nxFRWMKqwHwnQ8R-AlKf-32HU"
)
artifactsM[PythonTroveClassifiers] = newPythonPackage(
"trove-classifiers", 88298,
"canonical source for classifiers on PyPI",
"https://pypi.org/p/trove-classifiers/",
version, newFromGitHub(
"pypa/trove-classifiers",
version, checksum,
), nil, &PipHelper{
// pytest circular dependency
SkipCheck: true,
}, P{PythonSetuptools},
)
}
func init() {
const (
version = "1.6.0"
checksum = "GiUgDkKjF8Xn1cmq6iMhTGXzcPIYeaJrvQpHBSAJapNVx4UyuiTXqd5eVlxSClJu"
)
artifactsM[PythonPluggy] = newPythonPackage(
artifactsM[PythonPluggy] = newPypi(
"pluggy", 7500,
"the core framework used by the pytest, tox, and devpi projects",
"https://pluggy.readthedocs.io/en/latest/",
version, newFromGitHub(
"pytest-dev/pluggy",
version, checksum,
), &PackageAttr{
Env: []string{
"SETUPTOOLS_SCM_PRETEND_VERSION_FOR_PLUGGY=" + version,
},
}, &PipHelper{
// pytest circular dependency
SkipCheck: true,
}, P{PythonSetuptoolsSCM},
"1.6.0", "py3", "none", "any",
"2HWYBaEwM66-y1hSUcWI1MyE7dVVuNNRW24XD6iJBey4YaUdAK8WeXdtFMQGC-4J",
)
}
func init() {
const (
version = "1.16.5"
checksum = "V2eREtqZLZeV85yb4O-bfAJCUluHcQP76Qfs0QH5s7RF_Oc8xIP8jD0jl85qFyWk"
artifactsM[PythonPackaging] = newPypi(
"packaging", 60461,
"reusable core utilities for various Python Packaging interoperability specifications",
"26.1", "py3", "none", "any",
"6WZjBJeRb0eZZavxM8cLPcgD-ch-1FblsHoCFKC_9VUC5XAmd397LwliVhsnQcSN",
)
artifactsM[PythonHatchling] = newPythonPackage(
"hatchling", 16137,
"the extensible, standards compliant build backend used by Hatch",
"https://hatch.pypa.io/latest/",
version, newFromGitHub(
"pypa/hatch",
"hatch-v"+version, checksum,
), nil, &PipHelper{
// upstream has other unused packages with many dependencies
Append: []string{"backend"},
// pytest circular dependency
SkipCheck: true,
}, nil,
PythonPackaging,
PythonPathspec,
PythonTroveClassifiers,
PythonPluggy,
)
}
func init() {
const (
version = "2.20.0"
checksum = "L-2P6vn7c_CNZYliE5CJAWLxO1ziDQVVkf8bnZbHj8aSCQ43oWv11wC9KzU9MeCa"
)
artifactsM[PythonPygments] = newPythonPackage(
"pygments", 3986,
"a syntax highlighting package written in Python",
"https://pygments.org/",
version, newFromGitHub(
"pygments/pygments",
version, checksum,
), nil, &PipHelper{
// pytest circular dependency
SkipCheck: true,
}, P{PythonHatchling},
)
}
func init() {
const (
version = "2.3.0"
checksum = "mH7VBZaXcYatBPE3RQQZvSzz_Ay8IPPek60NpPHZulPq4ReAFUUsA4EPWfiyMknZ"
)
artifactsM[PythonIniConfig] = newPythonPackage(
artifactsM[PythonIniConfig] = newPypi(
"iniconfig", 114778,
"a small and simple INI-file parser module",
"https://github.com/pytest-dev/iniconfig",
version, newFromGitHub(
"pytest-dev/iniconfig",
"v"+version, checksum,
), &PackageAttr{
Env: []string{
"SETUPTOOLS_SCM_PRETEND_VERSION_FOR_INICONFIG=" + version,
},
}, &PipHelper{
// pytest circular dependency
SkipCheck: true,
}, P{PythonSetuptoolsSCM},
"2.3.0", "py3", "none", "any",
"SDgs4S5bXi77aVOeKTPv2TUrS3M9rduiK4DpU0hCmDsSBWqnZcWInq9lsx6INxut",
)
}
func init() {
const (
version = "9.0.3"
checksum = "qfLL_znWhbJCDbNJvrx9H3-orJ86z4ifhaW0bIn21jl2sDP-FVoX_1yieOypArQe"
)
artifactsM[PythonPyTest] = newPythonPackage(
artifactsM[PythonPyTest] = newPypi(
"pytest", 3765,
"the pytest framework",
"https://pytest.org",
version, newFromGitHub(
"pytest-dev/pytest",
version, checksum,
), &PackageAttr{
Env: []string{
"SETUPTOOLS_SCM_PRETEND_VERSION_FOR_PYTEST=" + version,
},
}, &PipHelper{
// many dependencies
SkipCheck: true,
}, P{PythonSetuptoolsSCM},
"9.0.3", "py3", "none", "any",
"57WLrIVOfyoRDjt5qD6LGOaDcDCtzQnKDSTUb7GzHyJDtry_nGHHs4-0tW0tiIJr",
PythonIniConfig,
PythonPackaging,
PythonPluggy,
PythonPygments,
)
}
func init() {
const (
version = "3.0.3"
checksum = "txRGYdWE3his1lHHRI-lZADw0-ILvUg2l5OGdFHtFXIb_QowGxwdxHCUSJIgmjQs"
)
artifactsM[PythonMarkupSafe] = newPythonPackage(
artifactsM[PythonMarkupSafe] = newPypi(
"markupsafe", 3918,
"implements a text object that escapes characters so it is safe to use in HTML and XML",
"https://markupsafe.palletsprojects.com/",
version, newFromGitHub(
"pallets/markupsafe",
version, checksum,
), nil, &PipHelper{
// ModuleNotFoundError: No module named 'markupsafe'
Install: true,
}, P{PythonSetuptools},
"3.0.3", "cp314", "cp314", "musllinux_1_2_"+linuxArch(),
perArch[string]{
"amd64": "E2mo9ig_FKgTpGon_8qqviSEULwhnmxTIqd9vfyNxNpK4yofVYM7eLW_VE-LKbtO",
"arm64": "iG_hqsncOs8fA7bCaAg0x9XenXWlo9sqblyPcSG7yA9sfGLvM9KZznCpwWfOCwFC",
"riscv64": "7DI7U0M3jvr7U4uZml25GLw3m3EvMubCtNukZmss1gkVJ_DVkhV5DgX3Wt_sztbv",
}.unwrap(),
)
}
func init() {
const (
version = "1.3.12"
checksum = "OZbBsQe2MzRuAo5Mr4qRwWHGqU1EEZeBuSprDDIceAtMLIUJtO7SbERlxHIxNhLk"
)
artifactsM[PythonMako] = newPythonPackage(
artifactsM[PythonMako] = newPypi(
"mako", 3915,
"a template library written in Python",
"https://www.makotemplates.org/",
version, newFromGitHub(
"sqlalchemy/mako",
"rel_"+strings.Join(strings.SplitN(version, ".", 3), "_"),
checksum,
), nil, nil, P{PythonSetuptools},
"1.3.11", "py3", "none", "any",
"WJ_hxYI-nNiuDiM6QhfAG84uO5U-M2aneB0JS9AQ2J2Oi6YXAbBxIdOeOEng6CoS",
PythonMarkupSafe,
)
}
func init() {
const (
version = "6.0.3"
checksum = "7wDv0RW9chBdu9l5Q4Hun5F2HHdo105ZSIixwdFPKbEYbftW9YxmsegfL-zafnbJ"
)
artifactsM[PythonPyYAML] = newPythonPackage(
artifactsM[PythonPyYAML] = newPypi(
"pyyaml", 4123,
"a YAML parser and emitter for Python",
"https://pyyaml.org/",
version, newFromGitHub(
"yaml/pyyaml",
version, checksum,
), nil, &PipHelper{
// ModuleNotFoundError: No module named 'yaml'
Install: true,
}, P{PythonSetuptools},
)
}
func init() {
const (
version = "3.00"
checksum = "4qfCMFKp0fLsRsloOAF780tXX_Ce_68RwinCmjNGObAX32WpF_iBafIKW1S1bYlA"
)
artifactsM[PythonPycparser] = newPythonPackage(
"pycparser", 8175,
"complete C99 parser in pure Python",
"https://github.com/eliben/pycparser",
version, newFromGitHub(
"eliben/pycparser",
"release_v"+version, checksum,
), &PackageAttr{
// test case hard codes gcc
ScriptEarly: `
ln -s clang /system/bin/gcc
`,
}, nil, P{PythonSetuptools},
"6.0.3", "cp314", "cp314", "musllinux_1_2_"+linuxArch(),
perArch[string]{
"amd64": "4_jhCFpUNtyrFp2HOMqUisR005u90MHId53eS7rkUbcGXkoaJ7JRsY21dREHEfGN",
"arm64": "sQ818ZYSmC7Vj9prIPx3sEYqSDhZlWvLbgHV9w4GjxsfQ63ZSzappctKM7Lb0Whw",
}.unwrap(),
)
}

View File

@@ -4,8 +4,8 @@ import "hakurei.app/internal/pkg"
func (t Toolchain) newQEMU() (pkg.Artifact, string) {
const (
version = "11.0.0"
checksum = "C64gdi_Tkdg2fTwD9ERxtWGcf8vNn_6UvczW0c-x0KW1NZtd3NbEOIrlDhYGn15n"
version = "10.2.2"
checksum = "uNzRxlrVoLWe-EmZmBp75SezymgE512iE5XN90Bl7wi6CjE_oQGQB-9ocs7E16QG"
)
return t.NewPackage("qemu", version, newTar(
"https://download.qemu.org/qemu-"+version+".tar.bz2",
@@ -73,8 +73,6 @@ EOF
},
Bash,
Python,
PythonSetuptools,
PythonWheel,
Ninja,
PkgConfig,
Diffutils,

View File

@@ -3,6 +3,7 @@ package rosa
import (
"errors"
"log"
"path"
"runtime"
"slices"
@@ -14,12 +15,6 @@ import (
"hakurei.app/internal/pkg"
)
// Extension is the variant identification string of custom artifact
// implementations registered by package rosa.
const Extension = "rosa"
func init() { pkg.SetExtension(Extension) }
const (
// kindEtc is the kind of [pkg.Artifact] of cureEtc.
kindEtc = iota + pkg.KindCustomOffset
@@ -33,7 +28,7 @@ const (
func mustDecode(s string) pkg.Checksum {
var fallback = pkg.Checksum{}
if s == "" {
println(
log.Println(
"falling back to",
pkg.Encode(fallback),
"for unpopulated checksum",
@@ -107,6 +102,21 @@ func earlyLDFLAGS(static bool) string {
return s
}
// earlyCFLAGS is reference CFLAGS for the stage0 toolchain.
const earlyCFLAGS = "-Qunused-arguments " +
"-isystem/system/include"
// earlyCXXFLAGS returns reference CXXFLAGS for the stage0 toolchain
// corresponding to [runtime.GOARCH].
func earlyCXXFLAGS() string {
return "--start-no-unused-arguments " +
"-stdlib=libc++ " +
"--end-no-unused-arguments " +
"-isystem/system/include/c++/v1 " +
"-isystem/system/include/" + triplet() + "/c++/v1 " +
"-isystem/system/include "
}
// Toolchain denotes the infrastructure to compile a [pkg.Artifact] on.
type Toolchain uint32
@@ -176,6 +186,24 @@ func (t Toolchain) isStd() bool {
}
}
// stage0Concat concatenates s and values. If the current toolchain is
// toolchainStage0, stage0Concat returns s as is.
func stage0Concat[S ~[]E, E any](t Toolchain, s S, values ...E) S {
if t.isStage0() {
return s
}
return slices.Concat(s, values)
}
// stage0ExclConcat concatenates s and values. If the current toolchain is not
// toolchainStage0, stage0ExclConcat returns s as is.
func stage0ExclConcat[S ~[]E, E any](t Toolchain, s S, values ...E) S {
if t.isStage0() {
return slices.Concat(s, values)
}
return s
}
// lastIndexFunc is like [strings.LastIndexFunc] but for [slices].
func lastIndexFunc[S ~[]E, E any](s S, f func(E) bool) (i int) {
if i = slices.IndexFunc(s, f); i < 0 {
@@ -265,7 +293,6 @@ func (t Toolchain) New(
case toolchainGentoo, toolchainStage0:
name += "-boot"
support = append(support, extra...)
support = append(support, cureEtc{})
if t == toolchainStage0 {
support = append(support, NewStage0())
@@ -286,6 +313,7 @@ mkdir -vp /work/system/bin
),
)))
}
support = slices.Concat(support, extra)
env = fixupEnviron(env, []string{
EnvTriplet + "=" + triplet(),
lcMessages,
@@ -305,7 +333,7 @@ mkdir -vp /work/system/bin
toybox = toyboxEarly
}
base := LLVM
base := Clang
if flag&TNoToolchain != 0 {
base = Musl
}
@@ -320,6 +348,11 @@ mkdir -vp /work/system/bin
env = fixupEnviron(env, []string{
EnvTriplet + "=" + triplet(),
lcMessages,
"AR=ar",
"RANLIB=ranlib",
"LIBCC=/system/lib/clang/" + llvmVersionMajor + "/lib/" + triplet() +
"/libclang_rt.builtins.a",
}, "/system/bin", "/bin")
default:
@@ -377,8 +410,8 @@ cat /usr/src/` + name + `-patches/* | \
`
aname += "-patched"
}
return t.New(aname, 0, t.AppendPresets(nil,
Patch,
return t.New(aname, 0, stage0Concat(t, []pkg.Artifact{},
t.Load(Patch),
), nil, nil, script, paths...)
}
@@ -425,6 +458,9 @@ type PackageAttr struct {
// Passed to [Toolchain.NewPatchedSource].
Patches []KV
// Dependencies not provided by stage0.
NonStage0 []pkg.Artifact
// Passed through to [Toolchain.New], before source.
Paths []pkg.ExecPath
// Passed through to [Toolchain.New].
@@ -492,8 +528,14 @@ func (t Toolchain) NewPackage(
panic("source must be non-nil")
}
wantsChmod, wantsWrite := helper.wantsChmod(), helper.wantsWrite()
extraRes := make([]pkg.Artifact, 0, 1<<3+len(extra))
{
dc := len(attr.NonStage0)
if !t.isStage0() {
dc += 1<<3 + len(extra)
}
extraRes := make([]pkg.Artifact, 0, dc)
extraRes = append(extraRes, attr.NonStage0...)
if !t.isStage0() {
pv := paGet()
for _, p := range helper.extra(attr.Flag) {
extraRes = t.appendPreset(extraRes, pv, p)

View File

@@ -28,7 +28,6 @@ var (
)
func TestMain(m *testing.M) {
rosa.DropCaches(rosa.OptLLVMNoLTO)
container.TryArgv0(nil)
code := m.Run()
@@ -61,7 +60,7 @@ func getCache(t *testing.T) *pkg.Cache {
msg := message.New(log.New(os.Stderr, "rosa: ", 0))
msg.SwapVerbose(true)
if buildTestCache, err = pkg.Open(ctx, msg, pkg.CSuppressInit, 0, 0, a); err != nil {
if buildTestCache, err = pkg.Open(ctx, msg, 0, 0, 0, a); err != nil {
t.Fatal(err)
}
}
@@ -94,14 +93,11 @@ func TestCureAll(t *testing.T) {
}
func BenchmarkStage3(b *testing.B) {
flags := rosa.Flags()
b.Cleanup(func() { rosa.DropCaches(flags) })
for b.Loop() {
rosa.Std.Load(rosa.LLVM)
rosa.Std.Load(rosa.Clang)
b.StopTimer()
rosa.DropCaches(0)
rosa.DropCaches()
b.StartTimer()
}
}

View File

@@ -4,8 +4,8 @@ import "hakurei.app/internal/pkg"
func (t Toolchain) newRsync() (pkg.Artifact, string) {
const (
version = "3.4.2"
checksum = "t7PxS4WHXzefLMKKc_3hJgxUmlGG6KgHMZ8i4DZvCQAUAizxbclNKwfLyOHyq5BX"
version = "3.4.1"
checksum = "VBlTsBWd9z3r2-ex7GkWeWxkUc5OrlgDzikAC0pK7ufTjAJ0MbmC_N04oSVTGPiv"
)
return t.NewPackage("rsync", version, newTar(
"https://download.samba.org/pub/rsync/src/"+

Some files were not shown because too many files have changed in this diff Show More