Compare commits
3 Commits
master
...
75fd820946
| Author | SHA1 | Date | |
|---|---|---|---|
| 75fd820946 | |||
| 6cedf857ad | |||
| 71852e84c6 |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -7,7 +7,6 @@
|
|||||||
|
|
||||||
# go generate
|
# go generate
|
||||||
/cmd/hakurei/LICENSE
|
/cmd/hakurei/LICENSE
|
||||||
/cmd/mbf/internal/pkgserver/ui/static
|
|
||||||
/internal/pkg/testdata/testtool
|
/internal/pkg/testdata/testtool
|
||||||
/internal/rosa/hakurei_current.tar.gz
|
/internal/rosa/hakurei_current.tar.gz
|
||||||
|
|
||||||
|
|||||||
8
all.sh
8
all.sh
@@ -2,9 +2,5 @@
|
|||||||
|
|
||||||
TOOLCHAIN_VERSION="$(go version)"
|
TOOLCHAIN_VERSION="$(go version)"
|
||||||
cd "$(dirname -- "$0")/"
|
cd "$(dirname -- "$0")/"
|
||||||
echo "Building cmd/dist using ${TOOLCHAIN_VERSION}."
|
echo "# Building cmd/dist using ${TOOLCHAIN_VERSION}."
|
||||||
FLAGS=''
|
go run -v --tags=dist ./cmd/dist
|
||||||
if test -n "$VERBOSE"; then
|
|
||||||
FLAGS="$FLAGS -v"
|
|
||||||
fi
|
|
||||||
go run $FLAGS --tags=dist ./cmd/dist
|
|
||||||
|
|||||||
@@ -4,23 +4,15 @@ import "strings"
|
|||||||
|
|
||||||
const (
|
const (
|
||||||
// SpecialOverlayEscape is the escape string for overlay mount options.
|
// SpecialOverlayEscape is the escape string for overlay mount options.
|
||||||
//
|
|
||||||
// Deprecated: This is no longer used and will be removed in 0.5.
|
|
||||||
SpecialOverlayEscape = `\`
|
SpecialOverlayEscape = `\`
|
||||||
// SpecialOverlayOption is the separator string between overlay mount options.
|
// SpecialOverlayOption is the separator string between overlay mount options.
|
||||||
//
|
|
||||||
// Deprecated: This is no longer used and will be removed in 0.5.
|
|
||||||
SpecialOverlayOption = ","
|
SpecialOverlayOption = ","
|
||||||
// SpecialOverlayPath is the separator string between overlay paths.
|
// SpecialOverlayPath is the separator string between overlay paths.
|
||||||
//
|
|
||||||
// Deprecated: This is no longer used and will be removed in 0.5.
|
|
||||||
SpecialOverlayPath = ":"
|
SpecialOverlayPath = ":"
|
||||||
)
|
)
|
||||||
|
|
||||||
// EscapeOverlayDataSegment escapes a string for formatting into the data
|
// EscapeOverlayDataSegment escapes a string for formatting into the data
|
||||||
// argument of an overlay mount system call.
|
// argument of an overlay mount system call.
|
||||||
//
|
|
||||||
// Deprecated: This is no longer used and will be removed in 0.5.
|
|
||||||
func EscapeOverlayDataSegment(s string) string {
|
func EscapeOverlayDataSegment(s string) string {
|
||||||
if s == "" {
|
if s == "" {
|
||||||
return ""
|
return ""
|
||||||
|
|||||||
27
cmd/dist/main.go
vendored
27
cmd/dist/main.go
vendored
@@ -42,18 +42,14 @@ func mustRun(ctx context.Context, name string, arg ...string) {
|
|||||||
var comp []byte
|
var comp []byte
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
|
fmt.Println()
|
||||||
log.SetFlags(0)
|
log.SetFlags(0)
|
||||||
log.SetPrefix("")
|
log.SetPrefix("# ")
|
||||||
|
|
||||||
verbose := os.Getenv("VERBOSE") != ""
|
|
||||||
version := getenv("HAKUREI_VERSION", "untagged")
|
version := getenv("HAKUREI_VERSION", "untagged")
|
||||||
prefix := getenv("PREFIX", "/usr")
|
prefix := getenv("PREFIX", "/usr")
|
||||||
destdir := getenv("DESTDIR", "dist")
|
destdir := getenv("DESTDIR", "dist")
|
||||||
|
|
||||||
if verbose {
|
|
||||||
log.Println()
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := os.MkdirAll(destdir, 0755); err != nil {
|
if err := os.MkdirAll(destdir, 0755); err != nil {
|
||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
@@ -80,17 +76,12 @@ func main() {
|
|||||||
ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt)
|
ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt)
|
||||||
defer cancel()
|
defer cancel()
|
||||||
|
|
||||||
verboseFlag := "-v"
|
log.Println("Building hakurei.")
|
||||||
if !verbose {
|
|
||||||
verboseFlag = "-buildvcs=false"
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Printf("Building hakurei for %s/%s.", runtime.GOOS, runtime.GOARCH)
|
|
||||||
mustRun(ctx, "go", "generate", "./...")
|
mustRun(ctx, "go", "generate", "./...")
|
||||||
mustRun(
|
mustRun(
|
||||||
ctx, "go", "build",
|
ctx, "go", "build",
|
||||||
"-trimpath",
|
"-trimpath",
|
||||||
verboseFlag, "-o", s,
|
"-v", "-o", s,
|
||||||
"-ldflags=-s -w "+
|
"-ldflags=-s -w "+
|
||||||
"-buildid= -linkmode external -extldflags=-static "+
|
"-buildid= -linkmode external -extldflags=-static "+
|
||||||
"-X hakurei.app/internal/info.buildVersion="+version+" "+
|
"-X hakurei.app/internal/info.buildVersion="+version+" "+
|
||||||
@@ -99,17 +90,17 @@ func main() {
|
|||||||
"-X main.hakureiPath="+prefix+"/bin/hakurei",
|
"-X main.hakureiPath="+prefix+"/bin/hakurei",
|
||||||
"./...",
|
"./...",
|
||||||
)
|
)
|
||||||
log.Println()
|
fmt.Println()
|
||||||
|
|
||||||
log.Println("##### Testing Hakurei.")
|
log.Println("Testing Hakurei.")
|
||||||
mustRun(
|
mustRun(
|
||||||
ctx, "go", "test",
|
ctx, "go", "test",
|
||||||
"-ldflags=-buildid= -linkmode external -extldflags=-static",
|
"-ldflags=-buildid= -linkmode external -extldflags=-static",
|
||||||
"./...",
|
"./...",
|
||||||
)
|
)
|
||||||
log.Println()
|
fmt.Println()
|
||||||
|
|
||||||
log.Println("##### Creating distribution.")
|
log.Println("Creating distribution.")
|
||||||
const suffix = ".tar.gz"
|
const suffix = ".tar.gz"
|
||||||
distName := "hakurei-" + version + "-" + runtime.GOARCH
|
distName := "hakurei-" + version + "-" + runtime.GOARCH
|
||||||
var f *os.File
|
var f *os.File
|
||||||
@@ -130,7 +121,7 @@ func main() {
|
|||||||
}()
|
}()
|
||||||
|
|
||||||
h := sha512.New()
|
h := sha512.New()
|
||||||
gw, _ := gzip.NewWriterLevel(io.MultiWriter(f, h), gzip.BestCompression)
|
gw := gzip.NewWriter(io.MultiWriter(f, h))
|
||||||
tw := tar.NewWriter(gw)
|
tw := tar.NewWriter(gw)
|
||||||
|
|
||||||
mustWriteHeader := func(name string, size int64, mode os.FileMode) {
|
mustWriteHeader := func(name string, size int64, mode os.FileMode) {
|
||||||
|
|||||||
76
cmd/irdump/main.go
Normal file
76
cmd/irdump/main.go
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"hakurei.app/command"
|
||||||
|
"hakurei.app/internal/pkg"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
log.SetFlags(0)
|
||||||
|
log.SetPrefix("irdump: ")
|
||||||
|
|
||||||
|
var (
|
||||||
|
flagOutput string
|
||||||
|
flagReal bool
|
||||||
|
flagHeader bool
|
||||||
|
flagForce bool
|
||||||
|
flagRaw bool
|
||||||
|
)
|
||||||
|
c := command.New(os.Stderr, log.Printf, "irdump", func(args []string) (err error) {
|
||||||
|
var input *os.File
|
||||||
|
if len(args) != 1 {
|
||||||
|
return errors.New("irdump requires 1 argument")
|
||||||
|
}
|
||||||
|
if input, err = os.Open(args[0]); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer input.Close()
|
||||||
|
|
||||||
|
var output *os.File
|
||||||
|
if flagOutput == "" {
|
||||||
|
output = os.Stdout
|
||||||
|
} else {
|
||||||
|
defer output.Close()
|
||||||
|
if output, err = os.Create(flagOutput); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var out string
|
||||||
|
if out, err = pkg.Disassemble(input, flagReal, flagHeader, flagForce, flagRaw); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if _, err = output.WriteString(out); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}).Flag(
|
||||||
|
&flagOutput,
|
||||||
|
"o", command.StringFlag(""),
|
||||||
|
"Output file for asm (leave empty for stdout)",
|
||||||
|
).Flag(
|
||||||
|
&flagReal,
|
||||||
|
"r", command.BoolFlag(false),
|
||||||
|
"skip label generation; idents print real value",
|
||||||
|
).Flag(
|
||||||
|
&flagHeader,
|
||||||
|
"H", command.BoolFlag(false),
|
||||||
|
"display artifact headers",
|
||||||
|
).Flag(
|
||||||
|
&flagForce,
|
||||||
|
"f", command.BoolFlag(false),
|
||||||
|
"force display (skip validations)",
|
||||||
|
).Flag(
|
||||||
|
&flagRaw,
|
||||||
|
"R", command.BoolFlag(false),
|
||||||
|
"don't format output",
|
||||||
|
)
|
||||||
|
|
||||||
|
c.MustParse(os.Args[1:], func(err error) {
|
||||||
|
log.Fatal(err)
|
||||||
|
})
|
||||||
|
}
|
||||||
@@ -31,6 +31,9 @@ func (cache *cache) open() (err error) {
|
|||||||
return os.ErrInvalid
|
return os.ErrInvalid
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if cache.base == "" {
|
||||||
|
cache.base = "cache"
|
||||||
|
}
|
||||||
var base *check.Absolute
|
var base *check.Absolute
|
||||||
if cache.base, err = filepath.Abs(cache.base); err != nil {
|
if cache.base, err = filepath.Abs(cache.base); err != nil {
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -99,9 +99,10 @@ func cancelIdent(
|
|||||||
var ident pkg.ID
|
var ident pkg.ID
|
||||||
if _, err := io.ReadFull(conn, ident[:]); err != nil {
|
if _, err := io.ReadFull(conn, ident[:]); err != nil {
|
||||||
return nil, false, errors.Join(err, conn.Close())
|
return nil, false, errors.Join(err, conn.Close())
|
||||||
|
} else if err = conn.Close(); err != nil {
|
||||||
|
return nil, false, err
|
||||||
}
|
}
|
||||||
ok := cache.Cancel(unique.Make(ident))
|
return &ident, cache.Cancel(unique.Make(ident)), nil
|
||||||
return &ident, ok, conn.Close()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// serve services connections from a [net.UnixListener].
|
// serve services connections from a [net.UnixListener].
|
||||||
@@ -193,11 +194,11 @@ func serve(
|
|||||||
}
|
}
|
||||||
|
|
||||||
case specialAbort:
|
case specialAbort:
|
||||||
log.Println("aborting all pending cures")
|
|
||||||
cm.c.Abort()
|
|
||||||
if _err := conn.Close(); _err != nil {
|
if _err := conn.Close(); _err != nil {
|
||||||
log.Println(_err)
|
log.Println(_err)
|
||||||
}
|
}
|
||||||
|
log.Println("aborting all pending cures")
|
||||||
|
cm.c.Abort()
|
||||||
}
|
}
|
||||||
|
|
||||||
return
|
return
|
||||||
@@ -305,7 +306,6 @@ func cancelRemote(
|
|||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
addr *net.UnixAddr,
|
addr *net.UnixAddr,
|
||||||
a pkg.Artifact,
|
a pkg.Artifact,
|
||||||
wait bool,
|
|
||||||
) error {
|
) error {
|
||||||
done, conn, err := dial(ctx, addr)
|
done, conn, err := dial(ctx, addr)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -324,19 +324,13 @@ func cancelRemote(
|
|||||||
} else if n != len(id) {
|
} else if n != len(id) {
|
||||||
return errors.Join(io.ErrShortWrite, conn.Close())
|
return errors.Join(io.ErrShortWrite, conn.Close())
|
||||||
}
|
}
|
||||||
if wait {
|
return conn.Close()
|
||||||
if _, err = conn.Read(make([]byte, 1)); err == io.EOF {
|
|
||||||
err = nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return errors.Join(err, conn.Close())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// abortRemote aborts all [pkg.Artifact] curing on a daemon.
|
// abortRemote aborts all [pkg.Artifact] curing on a daemon.
|
||||||
func abortRemote(
|
func abortRemote(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
addr *net.UnixAddr,
|
addr *net.UnixAddr,
|
||||||
wait bool,
|
|
||||||
) error {
|
) error {
|
||||||
done, conn, err := dial(ctx, addr)
|
done, conn, err := dial(ctx, addr)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -345,10 +339,5 @@ func abortRemote(
|
|||||||
defer close(done)
|
defer close(done)
|
||||||
|
|
||||||
err = writeSpecialHeader(conn, specialAbort)
|
err = writeSpecialHeader(conn, specialAbort)
|
||||||
if wait && err == nil {
|
|
||||||
if _, err = conn.Read(make([]byte, 1)); err == io.EOF {
|
|
||||||
err = nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return errors.Join(err, conn.Close())
|
return errors.Join(err, conn.Close())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -106,11 +106,11 @@ func TestDaemon(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
|
|
||||||
if err = cancelRemote(ctx, &addr, pkg.NewFile("nonexistent", nil), true); err != nil {
|
if err = cancelRemote(ctx, &addr, pkg.NewFile("nonexistent", nil)); err != nil {
|
||||||
t.Fatalf("cancelRemote: error = %v", err)
|
t.Fatalf("cancelRemote: error = %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err = abortRemote(ctx, &addr, true); err != nil {
|
if err = abortRemote(ctx, &addr); err != nil {
|
||||||
t.Fatalf("abortRemote: error = %v", err)
|
t.Fatalf("abortRemote: error = %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -17,12 +17,25 @@ func commandInfo(
|
|||||||
args []string,
|
args []string,
|
||||||
w io.Writer,
|
w io.Writer,
|
||||||
writeStatus bool,
|
writeStatus bool,
|
||||||
r *rosa.Report,
|
reportPath string,
|
||||||
) (err error) {
|
) (err error) {
|
||||||
if len(args) == 0 {
|
if len(args) == 0 {
|
||||||
return errors.New("info requires at least 1 argument")
|
return errors.New("info requires at least 1 argument")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var r *rosa.Report
|
||||||
|
if reportPath != "" {
|
||||||
|
if r, err = rosa.OpenReport(reportPath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
if closeErr := r.Close(); err == nil {
|
||||||
|
err = closeErr
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
defer r.HandleAccess(&err)()
|
||||||
|
}
|
||||||
|
|
||||||
// recovered by HandleAccess
|
// recovered by HandleAccess
|
||||||
mustPrintln := func(a ...any) {
|
mustPrintln := func(a ...any) {
|
||||||
if _, _err := fmt.Fprintln(w, a...); _err != nil {
|
if _, _err := fmt.Fprintln(w, a...); _err != nil {
|
||||||
|
|||||||
@@ -95,7 +95,7 @@ status : not in report
|
|||||||
var (
|
var (
|
||||||
cm *cache
|
cm *cache
|
||||||
buf strings.Builder
|
buf strings.Builder
|
||||||
r *rosa.Report
|
rp string
|
||||||
)
|
)
|
||||||
|
|
||||||
if tc.status != nil || tc.report != "" {
|
if tc.status != nil || tc.report != "" {
|
||||||
@@ -108,25 +108,14 @@ status : not in report
|
|||||||
}
|
}
|
||||||
|
|
||||||
if tc.report != "" {
|
if tc.report != "" {
|
||||||
pathname := filepath.Join(t.TempDir(), "report")
|
rp = filepath.Join(t.TempDir(), "report")
|
||||||
err := os.WriteFile(
|
if err := os.WriteFile(
|
||||||
pathname,
|
rp,
|
||||||
unsafe.Slice(unsafe.StringData(tc.report), len(tc.report)),
|
unsafe.Slice(unsafe.StringData(tc.report), len(tc.report)),
|
||||||
0400,
|
0400,
|
||||||
)
|
); err != nil {
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
r, err = rosa.OpenReport(pathname)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
defer func() {
|
|
||||||
if err = r.Close(); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if tc.status != nil {
|
if tc.status != nil {
|
||||||
@@ -168,7 +157,7 @@ status : not in report
|
|||||||
tc.args,
|
tc.args,
|
||||||
&buf,
|
&buf,
|
||||||
cm != nil,
|
cm != nil,
|
||||||
r,
|
rp,
|
||||||
); !reflect.DeepEqual(err, wantErr) {
|
); !reflect.DeepEqual(err, wantErr) {
|
||||||
t.Fatalf("commandInfo: error = %v, want %v", err, wantErr)
|
t.Fatalf("commandInfo: error = %v, want %v", err, wantErr)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,202 +0,0 @@
|
|||||||
// Package pkgserver implements the package metadata service backend.
|
|
||||||
package pkgserver
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"encoding/json"
|
|
||||||
"log"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"path"
|
|
||||||
"strconv"
|
|
||||||
"sync"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"hakurei.app/internal/info"
|
|
||||||
"hakurei.app/internal/rosa"
|
|
||||||
)
|
|
||||||
|
|
||||||
// for lazy initialisation of serveInfo
|
|
||||||
var (
|
|
||||||
infoPayload struct {
|
|
||||||
// Current package count.
|
|
||||||
Count int `json:"count"`
|
|
||||||
// Hakurei version, set at link time.
|
|
||||||
HakureiVersion string `json:"hakurei_version"`
|
|
||||||
}
|
|
||||||
infoPayloadOnce sync.Once
|
|
||||||
)
|
|
||||||
|
|
||||||
// handleInfo writes constant system information.
|
|
||||||
func handleInfo(w http.ResponseWriter, _ *http.Request) {
|
|
||||||
infoPayloadOnce.Do(func() {
|
|
||||||
infoPayload.Count = int(rosa.PresetUnexportedStart)
|
|
||||||
infoPayload.HakureiVersion = info.Version()
|
|
||||||
})
|
|
||||||
// TODO(mae): cache entire response if no additional fields are planned
|
|
||||||
writeAPIPayload(w, infoPayload)
|
|
||||||
}
|
|
||||||
|
|
||||||
// newStatusHandler returns a [http.HandlerFunc] that offers status files for
|
|
||||||
// viewing or download, if available.
|
|
||||||
func (index *packageIndex) newStatusHandler(disposition bool) http.HandlerFunc {
|
|
||||||
return func(w http.ResponseWriter, r *http.Request) {
|
|
||||||
m, ok := index.names[path.Base(r.URL.Path)]
|
|
||||||
if !ok || !m.HasReport {
|
|
||||||
http.NotFound(w, r)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
contentType := "text/plain; charset=utf-8"
|
|
||||||
if disposition {
|
|
||||||
contentType = "application/octet-stream"
|
|
||||||
|
|
||||||
// quoting like this is unsound, but okay, because metadata is hardcoded
|
|
||||||
contentDisposition := `attachment; filename="`
|
|
||||||
contentDisposition += m.Name + "-"
|
|
||||||
if m.Version != "" {
|
|
||||||
contentDisposition += m.Version + "-"
|
|
||||||
}
|
|
||||||
contentDisposition += m.ids + `.log"`
|
|
||||||
w.Header().Set("Content-Disposition", contentDisposition)
|
|
||||||
}
|
|
||||||
w.Header().Set("Content-Type", contentType)
|
|
||||||
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
|
||||||
if err := func() (err error) {
|
|
||||||
defer index.handleAccess(&err)()
|
|
||||||
_, err = w.Write(m.status)
|
|
||||||
return
|
|
||||||
}(); err != nil {
|
|
||||||
log.Println(err)
|
|
||||||
http.Error(
|
|
||||||
w, "cannot deliver status, contact maintainers",
|
|
||||||
http.StatusInternalServerError,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// handleGet writes a slice of metadata with specified order.
|
|
||||||
func (index *packageIndex) handleGet(w http.ResponseWriter, r *http.Request) {
|
|
||||||
q := r.URL.Query()
|
|
||||||
limit, err := strconv.Atoi(q.Get("limit"))
|
|
||||||
if err != nil || limit > 100 || limit < 1 {
|
|
||||||
http.Error(
|
|
||||||
w, "limit must be an integer between 1 and 100",
|
|
||||||
http.StatusBadRequest,
|
|
||||||
)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
i, err := strconv.Atoi(q.Get("index"))
|
|
||||||
if err != nil || i >= len(index.sorts[0]) || i < 0 {
|
|
||||||
http.Error(
|
|
||||||
w, "index must be an integer between 0 and "+
|
|
||||||
strconv.Itoa(int(rosa.PresetUnexportedStart-1)),
|
|
||||||
http.StatusBadRequest,
|
|
||||||
)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
sort, err := strconv.Atoi(q.Get("sort"))
|
|
||||||
if err != nil || sort >= len(index.sorts) || sort < 0 {
|
|
||||||
http.Error(
|
|
||||||
w, "sort must be an integer between 0 and "+
|
|
||||||
strconv.Itoa(sortOrderEnd),
|
|
||||||
http.StatusBadRequest,
|
|
||||||
)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
values := index.sorts[sort][i:min(i+limit, len(index.sorts[sort]))]
|
|
||||||
writeAPIPayload(w, &struct {
|
|
||||||
Values []*metadata `json:"values"`
|
|
||||||
}{values})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (index *packageIndex) handleSearch(w http.ResponseWriter, r *http.Request) {
|
|
||||||
q := r.URL.Query()
|
|
||||||
limit, err := strconv.Atoi(q.Get("limit"))
|
|
||||||
if err != nil || limit > 100 || limit < 1 {
|
|
||||||
http.Error(
|
|
||||||
w, "limit must be an integer between 1 and 100",
|
|
||||||
http.StatusBadRequest,
|
|
||||||
)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
i, err := strconv.Atoi(q.Get("index"))
|
|
||||||
if err != nil || i >= len(index.sorts[0]) || i < 0 {
|
|
||||||
http.Error(
|
|
||||||
w, "index must be an integer between 0 and "+
|
|
||||||
strconv.Itoa(int(rosa.PresetUnexportedStart-1)),
|
|
||||||
http.StatusBadRequest,
|
|
||||||
)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
search, err := url.QueryUnescape(q.Get("search"))
|
|
||||||
if len(search) > 100 || err != nil {
|
|
||||||
http.Error(
|
|
||||||
w, "search must be a string between 0 and 100 characters long",
|
|
||||||
http.StatusBadRequest,
|
|
||||||
)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
desc := q.Get("desc") == "true"
|
|
||||||
n, res, err := index.performSearchQuery(limit, i, search, desc)
|
|
||||||
if err != nil {
|
|
||||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
||||||
}
|
|
||||||
writeAPIPayload(w, &struct {
|
|
||||||
Count int `json:"count"`
|
|
||||||
Values []searchResult `json:"values"`
|
|
||||||
}{n, res})
|
|
||||||
}
|
|
||||||
|
|
||||||
// apiVersion is the name of the current API revision, as part of the pattern.
|
|
||||||
const apiVersion = "v1"
|
|
||||||
|
|
||||||
// registerAPI registers API handler functions.
|
|
||||||
func (index *packageIndex) registerAPI(mux *http.ServeMux) {
|
|
||||||
mux.HandleFunc("GET /api/"+apiVersion+"/info", handleInfo)
|
|
||||||
mux.HandleFunc("GET /api/"+apiVersion+"/get", index.handleGet)
|
|
||||||
mux.HandleFunc("GET /api/"+apiVersion+"/search", index.handleSearch)
|
|
||||||
mux.HandleFunc("GET /api/"+apiVersion+"/status/", index.newStatusHandler(false))
|
|
||||||
mux.HandleFunc("GET /status/", index.newStatusHandler(true))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Register arranges for mux to service API requests.
|
|
||||||
func Register(ctx context.Context, mux *http.ServeMux, report *rosa.Report) error {
|
|
||||||
var index packageIndex
|
|
||||||
index.search = make(searchCache)
|
|
||||||
if err := index.populate(report); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
ticker := time.NewTicker(1 * time.Minute)
|
|
||||||
go func() {
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case <-ctx.Done():
|
|
||||||
ticker.Stop()
|
|
||||||
return
|
|
||||||
case <-ticker.C:
|
|
||||||
index.search.clean()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
index.registerAPI(mux)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// writeAPIPayload sets headers common to API responses and encodes payload as
|
|
||||||
// JSON for the response body.
|
|
||||||
func writeAPIPayload(w http.ResponseWriter, payload any) {
|
|
||||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
|
||||||
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
|
||||||
w.Header().Set("Pragma", "no-cache")
|
|
||||||
w.Header().Set("Expires", "0")
|
|
||||||
|
|
||||||
if err := json.NewEncoder(w).Encode(payload); err != nil {
|
|
||||||
log.Println(err)
|
|
||||||
http.Error(
|
|
||||||
w, "cannot encode payload, contact maintainers",
|
|
||||||
http.StatusInternalServerError,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,181 +0,0 @@
|
|||||||
package pkgserver
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
"net/http/httptest"
|
|
||||||
"slices"
|
|
||||||
"strconv"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"hakurei.app/internal/info"
|
|
||||||
"hakurei.app/internal/rosa"
|
|
||||||
)
|
|
||||||
|
|
||||||
// prefix is prepended to every API path.
|
|
||||||
const prefix = "/api/" + apiVersion + "/"
|
|
||||||
|
|
||||||
func TestAPIInfo(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
w := httptest.NewRecorder()
|
|
||||||
handleInfo(w, httptest.NewRequestWithContext(
|
|
||||||
t.Context(),
|
|
||||||
http.MethodGet,
|
|
||||||
prefix+"info",
|
|
||||||
nil,
|
|
||||||
))
|
|
||||||
|
|
||||||
resp := w.Result()
|
|
||||||
checkStatus(t, resp, http.StatusOK)
|
|
||||||
checkAPIHeader(t, w.Header())
|
|
||||||
|
|
||||||
checkPayload(t, resp, struct {
|
|
||||||
Count int `json:"count"`
|
|
||||||
HakureiVersion string `json:"hakurei_version"`
|
|
||||||
}{int(rosa.PresetUnexportedStart), info.Version()})
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestAPIGet(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
const target = prefix + "get"
|
|
||||||
|
|
||||||
index := newIndex(t)
|
|
||||||
newRequest := func(suffix string) *httptest.ResponseRecorder {
|
|
||||||
w := httptest.NewRecorder()
|
|
||||||
index.handleGet(w, httptest.NewRequestWithContext(
|
|
||||||
t.Context(),
|
|
||||||
http.MethodGet,
|
|
||||||
target+suffix,
|
|
||||||
nil,
|
|
||||||
))
|
|
||||||
return w
|
|
||||||
}
|
|
||||||
|
|
||||||
checkValidate := func(t *testing.T, suffix string, vmin, vmax int, wantErr string) {
|
|
||||||
t.Run("invalid", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
w := newRequest("?" + suffix + "=invalid")
|
|
||||||
resp := w.Result()
|
|
||||||
checkError(t, resp, wantErr, http.StatusBadRequest)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("min", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
w := newRequest("?" + suffix + "=" + strconv.Itoa(vmin-1))
|
|
||||||
resp := w.Result()
|
|
||||||
checkError(t, resp, wantErr, http.StatusBadRequest)
|
|
||||||
|
|
||||||
w = newRequest("?" + suffix + "=" + strconv.Itoa(vmin))
|
|
||||||
resp = w.Result()
|
|
||||||
checkStatus(t, resp, http.StatusOK)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("max", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
w := newRequest("?" + suffix + "=" + strconv.Itoa(vmax+1))
|
|
||||||
resp := w.Result()
|
|
||||||
checkError(t, resp, wantErr, http.StatusBadRequest)
|
|
||||||
|
|
||||||
w = newRequest("?" + suffix + "=" + strconv.Itoa(vmax))
|
|
||||||
resp = w.Result()
|
|
||||||
checkStatus(t, resp, http.StatusOK)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Run("limit", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
checkValidate(
|
|
||||||
t, "index=0&sort=0&limit", 1, 100,
|
|
||||||
"limit must be an integer between 1 and 100",
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("index", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
checkValidate(
|
|
||||||
t, "limit=1&sort=0&index", 0, int(rosa.PresetUnexportedStart-1),
|
|
||||||
"index must be an integer between 0 and "+strconv.Itoa(int(rosa.PresetUnexportedStart-1)),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("sort", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
checkValidate(
|
|
||||||
t, "index=0&limit=1&sort", 0, int(sortOrderEnd),
|
|
||||||
"sort must be an integer between 0 and "+strconv.Itoa(int(sortOrderEnd)),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
checkWithSuffix := func(name, suffix string, want []*metadata) {
|
|
||||||
t.Run(name, func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
w := newRequest(suffix)
|
|
||||||
resp := w.Result()
|
|
||||||
checkStatus(t, resp, http.StatusOK)
|
|
||||||
checkAPIHeader(t, w.Header())
|
|
||||||
checkPayloadFunc(t, resp, func(got *struct {
|
|
||||||
Values []*metadata `json:"values"`
|
|
||||||
}) bool {
|
|
||||||
return slices.EqualFunc(got.Values, want, func(a, b *metadata) bool {
|
|
||||||
return (a.Version == b.Version ||
|
|
||||||
a.Version == rosa.Unversioned ||
|
|
||||||
b.Version == rosa.Unversioned) &&
|
|
||||||
a.HasReport == b.HasReport &&
|
|
||||||
a.Name == b.Name &&
|
|
||||||
a.Description == b.Description &&
|
|
||||||
a.Website == b.Website
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
checkWithSuffix("declarationAscending", "?limit=2&index=1&sort=0", []*metadata{
|
|
||||||
{
|
|
||||||
Metadata: rosa.GetMetadata(1),
|
|
||||||
Version: rosa.Std.Version(1),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Metadata: rosa.GetMetadata(2),
|
|
||||||
Version: rosa.Std.Version(2),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
checkWithSuffix("declarationAscending offset", "?limit=3&index=5&sort=0", []*metadata{
|
|
||||||
{
|
|
||||||
Metadata: rosa.GetMetadata(5),
|
|
||||||
Version: rosa.Std.Version(5),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Metadata: rosa.GetMetadata(6),
|
|
||||||
Version: rosa.Std.Version(6),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Metadata: rosa.GetMetadata(7),
|
|
||||||
Version: rosa.Std.Version(7),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
checkWithSuffix("declarationDescending", "?limit=3&index=0&sort=1", []*metadata{
|
|
||||||
{
|
|
||||||
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 1),
|
|
||||||
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 1),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 2),
|
|
||||||
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 2),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 3),
|
|
||||||
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 3),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
checkWithSuffix("declarationDescending offset", "?limit=1&index=37&sort=1", []*metadata{
|
|
||||||
{
|
|
||||||
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 38),
|
|
||||||
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 38),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,106 +0,0 @@
|
|||||||
package pkgserver
|
|
||||||
|
|
||||||
import (
|
|
||||||
"cmp"
|
|
||||||
"errors"
|
|
||||||
"slices"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"hakurei.app/internal/pkg"
|
|
||||||
"hakurei.app/internal/rosa"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
declarationAscending = iota
|
|
||||||
declarationDescending
|
|
||||||
nameAscending
|
|
||||||
nameDescending
|
|
||||||
sizeAscending
|
|
||||||
sizeDescending
|
|
||||||
|
|
||||||
sortOrderEnd = iota - 1
|
|
||||||
)
|
|
||||||
|
|
||||||
// packageIndex refers to metadata by name and various sort orders.
|
|
||||||
type packageIndex struct {
|
|
||||||
sorts [sortOrderEnd + 1][rosa.PresetUnexportedStart]*metadata
|
|
||||||
names map[string]*metadata
|
|
||||||
search searchCache
|
|
||||||
// Taken from [rosa.Report] if available.
|
|
||||||
handleAccess func(*error) func()
|
|
||||||
}
|
|
||||||
|
|
||||||
// metadata holds [rosa.Metadata] extended with additional information.
|
|
||||||
type metadata struct {
|
|
||||||
p rosa.PArtifact
|
|
||||||
*rosa.Metadata
|
|
||||||
|
|
||||||
// Populated via [rosa.Toolchain.Version], [rosa.Unversioned] is equivalent
|
|
||||||
// to the zero value. Otherwise, the zero value is invalid.
|
|
||||||
Version string `json:"version,omitempty"`
|
|
||||||
// Output data size, available if present in report.
|
|
||||||
Size int64 `json:"size,omitempty"`
|
|
||||||
// Whether the underlying [pkg.Artifact] is present in the report.
|
|
||||||
HasReport bool `json:"report"`
|
|
||||||
|
|
||||||
// Ident string encoded ahead of time.
|
|
||||||
ids string
|
|
||||||
// Backed by [rosa.Report], access must be prepared by HandleAccess.
|
|
||||||
status []byte
|
|
||||||
}
|
|
||||||
|
|
||||||
// populate deterministically populates packageIndex, optionally with a report.
|
|
||||||
func (index *packageIndex) populate(report *rosa.Report) (err error) {
|
|
||||||
if report != nil {
|
|
||||||
defer report.HandleAccess(&err)()
|
|
||||||
index.handleAccess = report.HandleAccess
|
|
||||||
}
|
|
||||||
|
|
||||||
var work [rosa.PresetUnexportedStart]*metadata
|
|
||||||
index.names = make(map[string]*metadata)
|
|
||||||
ir := pkg.NewIR()
|
|
||||||
for p := range rosa.PresetUnexportedStart {
|
|
||||||
m := metadata{
|
|
||||||
p: p,
|
|
||||||
|
|
||||||
Metadata: rosa.GetMetadata(p),
|
|
||||||
Version: rosa.Std.Version(p),
|
|
||||||
}
|
|
||||||
if m.Version == "" {
|
|
||||||
return errors.New("invalid version from " + m.Name)
|
|
||||||
}
|
|
||||||
if m.Version == rosa.Unversioned {
|
|
||||||
m.Version = ""
|
|
||||||
}
|
|
||||||
|
|
||||||
if report != nil {
|
|
||||||
id := ir.Ident(rosa.Std.Load(p))
|
|
||||||
m.ids = pkg.Encode(id.Value())
|
|
||||||
m.status, m.Size = report.ArtifactOf(id)
|
|
||||||
m.HasReport = m.Size >= 0
|
|
||||||
}
|
|
||||||
|
|
||||||
work[p] = &m
|
|
||||||
index.names[m.Name] = &m
|
|
||||||
}
|
|
||||||
|
|
||||||
index.sorts[declarationAscending] = work
|
|
||||||
index.sorts[declarationDescending] = work
|
|
||||||
slices.Reverse(index.sorts[declarationDescending][:])
|
|
||||||
|
|
||||||
index.sorts[nameAscending] = work
|
|
||||||
slices.SortFunc(index.sorts[nameAscending][:], func(a, b *metadata) int {
|
|
||||||
return strings.Compare(a.Name, b.Name)
|
|
||||||
})
|
|
||||||
index.sorts[nameDescending] = index.sorts[nameAscending]
|
|
||||||
slices.Reverse(index.sorts[nameDescending][:])
|
|
||||||
|
|
||||||
index.sorts[sizeAscending] = work
|
|
||||||
slices.SortFunc(index.sorts[sizeAscending][:], func(a, b *metadata) int {
|
|
||||||
return cmp.Compare(a.Size, b.Size)
|
|
||||||
})
|
|
||||||
index.sorts[sizeDescending] = index.sorts[sizeAscending]
|
|
||||||
slices.Reverse(index.sorts[sizeDescending][:])
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
@@ -1,96 +0,0 @@
|
|||||||
package pkgserver
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"reflect"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
// newIndex returns the address of a newly populated packageIndex.
|
|
||||||
func newIndex(t *testing.T) *packageIndex {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
var index packageIndex
|
|
||||||
if err := index.populate(nil); err != nil {
|
|
||||||
t.Fatalf("populate: error = %v", err)
|
|
||||||
}
|
|
||||||
return &index
|
|
||||||
}
|
|
||||||
|
|
||||||
// checkStatus checks response status code.
|
|
||||||
func checkStatus(t *testing.T, resp *http.Response, want int) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
if resp.StatusCode != want {
|
|
||||||
t.Errorf(
|
|
||||||
"StatusCode: %s, want %s",
|
|
||||||
http.StatusText(resp.StatusCode),
|
|
||||||
http.StatusText(want),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// checkHeader checks the value of a header entry.
|
|
||||||
func checkHeader(t *testing.T, h http.Header, key, want string) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
if got := h.Get(key); got != want {
|
|
||||||
t.Errorf("%s: %q, want %q", key, got, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// checkAPIHeader checks common entries set for API endpoints.
|
|
||||||
func checkAPIHeader(t *testing.T, h http.Header) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
checkHeader(t, h, "Content-Type", "application/json; charset=utf-8")
|
|
||||||
checkHeader(t, h, "Cache-Control", "no-cache, no-store, must-revalidate")
|
|
||||||
checkHeader(t, h, "Pragma", "no-cache")
|
|
||||||
checkHeader(t, h, "Expires", "0")
|
|
||||||
}
|
|
||||||
|
|
||||||
// checkPayloadFunc checks the JSON response of an API endpoint by passing it to f.
|
|
||||||
func checkPayloadFunc[T any](
|
|
||||||
t *testing.T,
|
|
||||||
resp *http.Response,
|
|
||||||
f func(got *T) bool,
|
|
||||||
) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
var got T
|
|
||||||
r := io.Reader(resp.Body)
|
|
||||||
if testing.Verbose() {
|
|
||||||
var buf bytes.Buffer
|
|
||||||
r = io.TeeReader(r, &buf)
|
|
||||||
defer func() { t.Helper(); t.Log(buf.String()) }()
|
|
||||||
}
|
|
||||||
if err := json.NewDecoder(r).Decode(&got); err != nil {
|
|
||||||
t.Fatalf("Decode: error = %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !f(&got) {
|
|
||||||
t.Errorf("Body: %#v", got)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// checkPayload checks the JSON response of an API endpoint.
|
|
||||||
func checkPayload[T any](t *testing.T, resp *http.Response, want T) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
checkPayloadFunc(t, resp, func(got *T) bool {
|
|
||||||
return reflect.DeepEqual(got, &want)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func checkError(t *testing.T, resp *http.Response, error string, code int) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
checkStatus(t, resp, code)
|
|
||||||
if got, _ := io.ReadAll(resp.Body); string(got) != fmt.Sprintln(error) {
|
|
||||||
t.Errorf("Body: %q, want %q", string(got), error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,81 +0,0 @@
|
|||||||
package pkgserver
|
|
||||||
|
|
||||||
import (
|
|
||||||
"cmp"
|
|
||||||
"maps"
|
|
||||||
"regexp"
|
|
||||||
"slices"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
// searchCache maps a cache key (the regular expression source, suffixed with
// ";withDesc" when descriptions are included) to a cached result set.
type searchCache map[string]searchCacheEntry

// searchResult is one package matched by performSearchQuery.
type searchResult struct {
	// NameIndices holds [start, end) byte offsets of regexp matches in
	// the package name, as returned by Regexp.FindAllIndex.
	NameIndices [][]int `json:"name_matches"`
	// DescIndices holds match offsets in the description; nil when
	// description matching was not requested.
	DescIndices [][]int `json:"desc_matches,omitempty"`
	// Score orders results; higher is a better match.
	Score float64 `json:"score"`
	*metadata
}

// searchCacheEntry is a score-sorted cached result set with an expiry time,
// evicted by searchCache.clean.
type searchCacheEntry struct {
	query   string
	results []searchResult
	expiry  time.Time
}
|
|
||||||
|
|
||||||
func (index *packageIndex) performSearchQuery(limit int, i int, search string, desc bool) (int, []searchResult, error) {
|
|
||||||
query := search
|
|
||||||
if desc {
|
|
||||||
query += ";withDesc"
|
|
||||||
}
|
|
||||||
entry, ok := index.search[query]
|
|
||||||
if ok && len(entry.results) > 0 {
|
|
||||||
return len(entry.results), entry.results[min(i, len(entry.results)-1):min(i+limit, len(entry.results))], nil
|
|
||||||
}
|
|
||||||
|
|
||||||
regex, err := regexp.Compile(search)
|
|
||||||
if err != nil {
|
|
||||||
return 0, make([]searchResult, 0), err
|
|
||||||
}
|
|
||||||
res := make([]searchResult, 0)
|
|
||||||
for p := range maps.Values(index.names) {
|
|
||||||
nameIndices := regex.FindAllIndex([]byte(p.Name), -1)
|
|
||||||
var descIndices [][]int = nil
|
|
||||||
if desc {
|
|
||||||
descIndices = regex.FindAllIndex([]byte(p.Description), -1)
|
|
||||||
}
|
|
||||||
if nameIndices == nil && descIndices == nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
score := float64(indexsum(nameIndices)) / (float64(len(nameIndices)) + 1)
|
|
||||||
if desc {
|
|
||||||
score += float64(indexsum(descIndices)) / (float64(len(descIndices)) + 1) / 10.0
|
|
||||||
}
|
|
||||||
res = append(res, searchResult{
|
|
||||||
NameIndices: nameIndices,
|
|
||||||
DescIndices: descIndices,
|
|
||||||
Score: score,
|
|
||||||
metadata: p,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
slices.SortFunc(res[:], func(a, b searchResult) int { return -cmp.Compare(a.Score, b.Score) })
|
|
||||||
expiry := time.Now().Add(1 * time.Minute)
|
|
||||||
entry = searchCacheEntry{
|
|
||||||
query: search,
|
|
||||||
results: res,
|
|
||||||
expiry: expiry,
|
|
||||||
}
|
|
||||||
index.search[query] = entry
|
|
||||||
|
|
||||||
return len(res), res[i:min(i+limit, len(entry.results))], nil
|
|
||||||
}
|
|
||||||
func (s *searchCache) clean() {
|
|
||||||
maps.DeleteFunc(*s, func(_ string, v searchCacheEntry) bool {
|
|
||||||
return v.expiry.Before(time.Now())
|
|
||||||
})
|
|
||||||
}
|
|
||||||
// indexsum returns the combined length of all [start, end) intervals in in.
func indexsum(in [][]int) int {
	var total int
	for _, pair := range in {
		total += pair[1] - pair[0]
	}
	return total
}
|
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <link rel="stylesheet" href="style.css">
    <title>Hakurei PkgServer</title>
    <!-- Compiled from index.ts; defines main(), invoked at the end of body. -->
    <script src="index.js"></script>
</head>
<body>
<h1>Hakurei PkgServer</h1>
<!-- Controls for the regular listing; hidden while a search is active. -->
<div class="top-controls" id="top-controls-regular">
    <p>Showing entries <span id="entry-counter"></span>.</p>
    <span id="search-bar">
        <label for="search">Search: </label>
        <input type="text" name="search" id="search"/>
        <button onclick="doSearch()">Find</button>
        <label for="include-desc">Include descriptions: </label>
        <input type="checkbox" name="include-desc" id="include-desc" checked/>
    </span>
    <div><label for="count">Entries per page: </label><select name="count" id="count">
        <option value="10">10</option>
        <option value="20">20</option>
        <option value="30">30</option>
        <option value="50">50</option>
    </select></div>
    <!-- Values map to the numeric sort parameter of the /get endpoint. -->
    <div><label for="sort">Sort by: </label><select name="sort" id="sort">
        <option value="0">Definition (ascending)</option>
        <option value="1">Definition (descending)</option>
        <option value="2">Name (ascending)</option>
        <option value="3">Name (descending)</option>
        <option value="4">Size (ascending)</option>
        <option value="5">Size (descending)</option>
    </select></div>
</div>
<!-- Controls shown instead of the above while a search is active. -->
<div class="top-controls" id="search-top-controls" hidden>
    <p>Showing search results <span id="search-entry-counter"></span> for query "<span id="search-query"></span>".</p>
    <button onclick="exitSearch()">Back</button>
    <div><label for="search-count">Entries per page: </label><select name="search-count" id="search-count">
        <option value="10">10</option>
        <option value="20">20</option>
        <option value="30">30</option>
        <option value="50">50</option>
    </select></div>
    <p>Sorted by best match</p>
</div>
<!-- Pagination bars appear both above and below the listing table. -->
<div class="page-controls"><a href="javascript:prevPage()">&laquo; Previous</a> <input type="text" class="page-number" value="1"/> <a href="javascript:nextPage()">Next &raquo;</a></div>
<table id="pkg-list">
    <tr><td>Loading...</td></tr>
</table>
<div class="page-controls"><a href="javascript:prevPage()">&laquo; Previous</a> <input type="text" class="page-number" value="1"/> <a href="javascript:nextPage()">Next &raquo;</a></div>
<footer>
    <p>&copy;<a href="https://hakurei.app/">Hakurei</a> (<span id="hakurei-version">unknown</span>). Licensed under the MIT license.</p>
</footer>
<script>main();</script>
</body>
</html>
|
|
||||||
@@ -1,331 +0,0 @@
|
|||||||
// PackageIndexEntry mirrors one package record returned by the /get API.
interface PackageIndexEntry {
    name: string
    // Artifact size in bytes; rendered only when present and positive.
    size?: number
    description?: string
    website?: string
    version?: string
    // True when an out-of-band report (build log) is available.
    report?: boolean
}
|
|
||||||
|
|
||||||
// entryToHTML renders one listing entry (plain index entry or search result)
// as a table row. Free-form text fields pass through escapeHtml/markMatches
// before interpolation into the markup.
function entryToHTML(entry: PackageIndexEntry | SearchResult): HTMLTableRowElement {
    // Optional fragments: version badge and size line.
    let v = entry.version != null ? `<span>${escapeHtml(entry.version)}</span>` : ""
    let s = entry.size != null && entry.size > 0 ? `<p>Size: ${toByteSizeString(entry.size)} (${entry.size})</p>` : ""
    let n: string
    let d: string
    // Search results carry match offsets; highlight them in the name.
    if ('name_matches' in entry) {
        n = `<h2>${nameMatches(entry as SearchResult)} ${v}</h2>`
    } else {
        n = `<h2>${escapeHtml(entry.name)} ${v}</h2>`
    }
    if ('desc_matches' in entry && STATE.getIncludeDescriptions()) {
        d = descMatches(entry as SearchResult)
    } else {
        d = (entry as PackageIndexEntry).description != null ? `<p>${escapeHtml((entry as PackageIndexEntry).description)}</p>` : ""
    }
    // NOTE(review): encodeURI does not escape every HTML-attribute-significant
    // character (e.g. the single quote); confirm entry.website is trusted
    // server-side data before relying on this for attribute safety.
    let w = entry.website != null ? `<a href="${encodeURI(entry.website)}">Website</a>` : ""
    let r = entry.report ? `Log (<a href=\"${encodeURI('/api/v1/status/' + entry.name)}\">View</a> | <a href=\"${encodeURI('/status/' + entry.name)}\">Download</a>)` : ""
    let row = <HTMLTableRowElement>(document.createElement('tr'))
    row.innerHTML = `<td>
    ${n}
    ${d}
    ${s}
    ${w}
    ${r}
    </td>`
    return row
}
|
|
||||||
|
|
||||||
// nameMatches renders the result's name with its regexp matches highlighted.
function nameMatches(sr: SearchResult): string {
    return markMatches(sr.name, sr.name_matches)
}
|
|
||||||
|
|
||||||
// descMatches renders the result's description with its regexp matches
// highlighted; the description is asserted non-null by the caller's context.
function descMatches(sr: SearchResult): string {
    return markMatches(sr.description!, sr.desc_matches)
}
|
|
||||||
|
|
||||||
// markMatches wraps each [start, end) interval of str named in indices in a
// <mark> element, HTML-escaping every character on the way. Intervals are
// consumed in order and are assumed non-overlapping and sorted (the shape
// produced by Go's Regexp.FindAllIndex).
// NOTE(review): a zero-width interval (start === end) opens a <mark> and
// never advances j, leaving the remainder of the string highlighted —
// confirm whether zero-width regexp matches can reach this code.
function markMatches(str: string, indices: [number, number][]): string {
    if (indices == null) {
        return str
    }
    let out: string = ""
    let j = 0
    for (let i = 0; i < str.length; i++) {
        if (j < indices.length) {
            if (i === indices[j][0]) {
                out += `<mark>${escapeHtmlChar(str[i])}`
                continue
            }
            if (i === indices[j][1]) {
                out += `</mark>${escapeHtmlChar(str[i])}`
                j++
                continue
            }
        }
        out += escapeHtmlChar(str[i])
    }
    // A match that runs to the end of the string never hit the closing
    // branch inside the loop; close it here.
    if (indices[j] !== undefined) {
        out += "</mark>"
    }
    return out
}
|
|
||||||
|
|
||||||
function toByteSizeString(bytes: number): string {
|
|
||||||
if (bytes == null) return `unspecified`
|
|
||||||
if (bytes < 1024) return `${bytes}B`
|
|
||||||
if (bytes < Math.pow(1024, 2)) return `${(bytes / 1024).toFixed(2)}kiB`
|
|
||||||
if (bytes < Math.pow(1024, 3)) return `${(bytes / Math.pow(1024, 2)).toFixed(2)}MiB`
|
|
||||||
if (bytes < Math.pow(1024, 4)) return `${(bytes / Math.pow(1024, 3)).toFixed(2)}GiB`
|
|
||||||
if (bytes < Math.pow(1024, 5)) return `${(bytes / Math.pow(1024, 4)).toFixed(2)}TiB`
|
|
||||||
return "not only is it big, it's large"
|
|
||||||
}
|
|
||||||
|
|
||||||
// API_VERSION selects the server API generation addressed by this frontend.
const API_VERSION = 1
// ENDPOINT is the base path prepended to every API request.
const ENDPOINT = `/api/v${API_VERSION}`

// InfoPayload mirrors the /info API response.
interface InfoPayload {
    // Total number of packages in the index.
    count?: number
    hakurei_version?: string
}
|
|
||||||
|
|
||||||
async function infoRequest(): Promise<InfoPayload> {
|
|
||||||
const res = await fetch(`${ENDPOINT}/info`)
|
|
||||||
const payload = await res.json()
|
|
||||||
return payload as InfoPayload
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetPayload mirrors the /get API response.
interface GetPayload {
    values?: PackageIndexEntry[]
}

// SortOrders enumerates listing sort modes; the numeric values are sent
// verbatim as the /get endpoint's sort parameter.
// NOTE(review): the page's sort selector also offers size-based orders
// (values 4 and 5) that have no member here — confirm against the server's
// accepted values.
enum SortOrders {
    DeclarationAscending,
    DeclarationDescending,
    NameAscending,
    NameDescending
}
|
|
||||||
|
|
||||||
async function getRequest(limit: number, index: number, sort: SortOrders): Promise<GetPayload> {
|
|
||||||
const res = await fetch(`${ENDPOINT}/get?limit=${limit}&index=${index}&sort=${sort.valueOf()}`)
|
|
||||||
const payload = await res.json()
|
|
||||||
return payload as GetPayload
|
|
||||||
}
|
|
||||||
|
|
||||||
// SearchResult extends a package entry with regexp match offsets and a
// ranking score, mirroring the server's search result records.
interface SearchResult extends PackageIndexEntry {
    // [start, end) byte offsets of matches within the name.
    name_matches: [number, number][]
    // [start, end) byte offsets of matches within the description.
    desc_matches: [number, number][]
    score: number
}

// SearchPayload mirrors the /search API response.
interface SearchPayload {
    // Total number of matches across all pages.
    count?: number
    values?: SearchResult[]
}
|
|
||||||
|
|
||||||
async function searchRequest(limit: number, index: number, search: string, desc: boolean): Promise<SearchPayload> {
|
|
||||||
const res = await fetch(`${ENDPOINT}/search?limit=${limit}&index=${index}&search=${encodeURIComponent(search)}&desc=${desc}`)
|
|
||||||
if (!res.ok) {
|
|
||||||
exitSearch()
|
|
||||||
alert("invalid search query!")
|
|
||||||
return Promise.reject(res.statusText)
|
|
||||||
}
|
|
||||||
const payload = await res.json()
|
|
||||||
return payload as SearchPayload
|
|
||||||
}
|
|
||||||
|
|
||||||
// State holds all page-wide UI state: pagination, sort order, and whether
// the search view is active. main() creates the single instance, STATE.
class State {
    // Number of listing rows requested per page.
    entriesPerPage: number = 10
    // Zero-based index of the first entry on the current page.
    entryIndex: number = 0
    // Total entries in the active view (search hits while searching).
    maxTotal: number = 0
    // Total entries in the whole index, independent of any search.
    maxEntries: number = 0
    sort: SortOrders = SortOrders.DeclarationAscending
    // True while the search view is active.
    search: boolean = false

    getEntriesPerPage(): number {
        return this.entriesPerPage
    }

    // setEntriesPerPage changes the page size and snaps the current index
    // to the start of the page that now contains it.
    setEntriesPerPage(entriesPerPage: number) {
        this.entriesPerPage = entriesPerPage
        this.setEntryIndex(Math.floor(this.getEntryIndex() / entriesPerPage) * entriesPerPage)
    }

    getEntryIndex(): number {
        return this.entryIndex
    }

    // setEntryIndex moves to a new first entry and refreshes the page
    // number inputs, the range display, and the listing contents.
    setEntryIndex(entryIndex: number) {
        this.entryIndex = entryIndex
        this.updatePage()
        this.updateRange()
        this.updateListings()
    }

    getMaxTotal(): number {
        return this.maxTotal
    }

    setMaxTotal(max: number) {
        this.maxTotal = max
    }

    getSortOrder(): SortOrders {
        return this.sort
    }

    // setSortOrder switches the sort mode and returns to the first page.
    setSortOrder(sortOrder: SortOrders) {
        this.sort = sortOrder
        this.setEntryIndex(0)
    }

    // updatePage reflects the current (1-based) page number into every
    // .page-number input (top and bottom pagination bars).
    updatePage() {
        let page = Math.ceil(((this.getEntryIndex() + this.getEntriesPerPage()) - 1) / this.getEntriesPerPage())
        for (let e of document.getElementsByClassName("page-number")) {
            (e as HTMLInputElement).value = String(page)
        }
    }

    // updateRange refreshes the "Showing entries X-Y of Z" counters for
    // whichever view (regular or search) is active.
    updateRange() {
        let max = Math.min(this.getEntryIndex() + this.getEntriesPerPage(), this.getMaxTotal())
        document.getElementById("entry-counter")!.textContent = `${this.getEntryIndex() + 1}-${max} of ${this.getMaxTotal()}`
        if (this.search) {
            document.getElementById("search-entry-counter")!.textContent = `${this.getEntryIndex() + 1}-${max} of ${this.maxTotal}/${this.maxEntries}`
            document.getElementById("search-query")!.innerHTML = `<code>${escapeHtml(this.getSearchQuery())}</code>`
        }
    }

    // getSearchQuery reads the current contents of the search input box.
    getSearchQuery(): string {
        let queryString = document.getElementById("search")!;
        return (queryString as HTMLInputElement).value
    }

    // getIncludeDescriptions reads the "include descriptions" checkbox.
    getIncludeDescriptions(): boolean {
        let includeDesc = document.getElementById("include-desc")!;
        return (includeDesc as HTMLInputElement).checked
    }

    // updateListings repopulates the package table from the appropriate
    // API endpoint for the active view.
    // NOTE(review): the search callback uses the STATE global rather than
    // `this`; identical in practice since main() creates exactly one
    // instance, but worth unifying.
    updateListings() {
        if (this.search) {
            searchRequest(this.getEntriesPerPage(), this.getEntryIndex(), this.getSearchQuery(), this.getIncludeDescriptions())
                .then(res => {
                    let table = document.getElementById("pkg-list")!
                    table.innerHTML = ''
                    for (let row of res.values!) {
                        table.appendChild(entryToHTML(row))
                    }
                    STATE.maxTotal = res.count!
                    STATE.updateRange()
                    if(res.count! < 1) {
                        exitSearch()
                        alert("no results found!")
                    }
                })
        } else {
            getRequest(this.getEntriesPerPage(), this.getEntryIndex(), this.getSortOrder())
                .then(res => {
                    let table = document.getElementById("pkg-list")!
                    table.innerHTML = ''
                    for (let row of res.values!) {
                        table.appendChild(entryToHTML(row))
                    }
                })
        }
    }
}
|
|
||||||
|
|
||||||
// STATE is the single page-wide UI state instance, created by main().
let STATE: State
|
|
||||||
|
|
||||||
|
|
||||||
function lastPageIndex(): number {
|
|
||||||
return Math.floor(STATE.getMaxTotal() / STATE.getEntriesPerPage()) * STATE.getEntriesPerPage()
|
|
||||||
}
|
|
||||||
|
|
||||||
function setPage(page: number) {
|
|
||||||
STATE.setEntryIndex(Math.max(0, Math.min(STATE.getEntriesPerPage() * (page - 1), lastPageIndex())))
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
function escapeHtml(str?: string): string {
|
|
||||||
let out: string = ''
|
|
||||||
if (str == undefined) return ""
|
|
||||||
for (let i = 0; i < str.length; i++) {
|
|
||||||
out += escapeHtmlChar(str[i])
|
|
||||||
}
|
|
||||||
return out
|
|
||||||
}
|
|
||||||
|
|
||||||
function escapeHtmlChar(char: string): string {
|
|
||||||
if (char.length != 1) return char
|
|
||||||
switch (char[0]) {
|
|
||||||
case '&':
|
|
||||||
return "&"
|
|
||||||
case '<':
|
|
||||||
return "<"
|
|
||||||
case '>':
|
|
||||||
return ">"
|
|
||||||
case '"':
|
|
||||||
return """
|
|
||||||
case "'":
|
|
||||||
return "'"
|
|
||||||
default:
|
|
||||||
return char
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function firstPage() {
|
|
||||||
STATE.setEntryIndex(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
function prevPage() {
|
|
||||||
let index = STATE.getEntryIndex()
|
|
||||||
STATE.setEntryIndex(Math.max(0, index - STATE.getEntriesPerPage()))
|
|
||||||
}
|
|
||||||
|
|
||||||
function lastPage() {
|
|
||||||
STATE.setEntryIndex(lastPageIndex())
|
|
||||||
}
|
|
||||||
|
|
||||||
function nextPage() {
|
|
||||||
let index = STATE.getEntryIndex()
|
|
||||||
STATE.setEntryIndex(Math.min(lastPageIndex(), index + STATE.getEntriesPerPage()))
|
|
||||||
}
|
|
||||||
|
|
||||||
function doSearch() {
|
|
||||||
document.getElementById("top-controls-regular")!.toggleAttribute("hidden");
|
|
||||||
document.getElementById("search-top-controls")!.toggleAttribute("hidden");
|
|
||||||
STATE.search = true;
|
|
||||||
STATE.setEntryIndex(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
function exitSearch() {
|
|
||||||
document.getElementById("top-controls-regular")!.toggleAttribute("hidden");
|
|
||||||
document.getElementById("search-top-controls")!.toggleAttribute("hidden");
|
|
||||||
STATE.search = false;
|
|
||||||
STATE.setMaxTotal(STATE.maxEntries)
|
|
||||||
STATE.setEntryIndex(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
// main bootstraps the page: fetches index info, renders the first listing,
// and wires up all interactive controls. Invoked from an inline script at
// the end of index.html's body.
function main() {
    STATE = new State()
    infoRequest()
        .then(res => {
            STATE.maxEntries = res.count!
            STATE.setMaxTotal(STATE.maxEntries)
            document.getElementById("hakurei-version")!.textContent = res.hakurei_version!
            STATE.updateRange()
            STATE.updateListings()
        })
    // Both page-number inputs (top and bottom bars) act as page selectors.
    for (let e of document.getElementsByClassName("page-number")) {
        e.addEventListener("change", (_) => {
            setPage(parseInt((e as HTMLInputElement).value))
        })
    }
    document.getElementById("count")?.addEventListener("change", (event) => {
        STATE.setEntriesPerPage(parseInt((event.target as HTMLSelectElement).value))
    })
    document.getElementById("sort")?.addEventListener("change", (event) => {
        STATE.setSortOrder(parseInt((event.target as HTMLSelectElement).value))
    })
    // Pressing Enter in the search box triggers a search.
    document.getElementById("search")?.addEventListener("keyup", (event) => {
        if (event.key === 'Enter') doSearch()
    })
}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
/* Compact page-number input shown in the pagination controls.
   (An identical duplicate of this rule has been removed.) */
.page-number {
    width: 2em;
    text-align: center;
}

/* Follow the user agent's color scheme preference. */
@media (prefers-color-scheme: dark) {
    html {
        background-color: #2c2c2c;
        color: ghostwhite;
    }
}
@media (prefers-color-scheme: light) {
    html {
        background-color: #d3d3d3;
        color: black;
    }
}
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
{
|
|
||||||
"compilerOptions": {
|
|
||||||
"target": "ES2024",
|
|
||||||
"strict": true,
|
|
||||||
"alwaysStrict": true,
|
|
||||||
"outDir": "static"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
// Package ui holds the static web UI.
|
|
||||||
package ui
|
|
||||||
|
|
||||||
import "net/http"
|
|
||||||
|
|
||||||
// Register arranges for mux to serve the embedded frontend.
|
|
||||||
func Register(mux *http.ServeMux) {
|
|
||||||
mux.Handle("GET /", http.FileServer(http.FS(static)))
|
|
||||||
}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
//go:build frontend
|
|
||||||
|
|
||||||
package ui
|
|
||||||
|
|
||||||
import (
|
|
||||||
"embed"
|
|
||||||
"io/fs"
|
|
||||||
)
|
|
||||||
|
|
||||||
//go:generate tsc
//go:generate cp index.html style.css static

// _static embeds the compiled frontend under the "static/" path prefix.
//
//go:embed static
var _static embed.FS

// static exposes the embedded frontend rooted at the directory itself, so
// file names resolve without the "static/" prefix.
var static = func() fs.FS {
	if f, err := fs.Sub(_static, "static"); err != nil {
		// The subtree is embedded above; failure here is a build defect.
		panic(err)
	} else {
		return f
	}
}()
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
//go:build !frontend
|
|
||||||
|
|
||||||
package ui
|
|
||||||
|
|
||||||
import "testing/fstest"
|
|
||||||
|
|
||||||
// static is an empty stand-in for the embedded frontend when the "frontend"
// build tag is not set; Register then serves no files.
var static fstest.MapFS
|
|
||||||
132
cmd/mbf/main.go
132
cmd/mbf/main.go
@@ -20,7 +20,6 @@ import (
|
|||||||
"io"
|
"io"
|
||||||
"log"
|
"log"
|
||||||
"net"
|
"net"
|
||||||
"net/http"
|
|
||||||
"os"
|
"os"
|
||||||
"os/signal"
|
"os/signal"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
@@ -42,9 +41,6 @@ import (
|
|||||||
"hakurei.app/internal/pkg"
|
"hakurei.app/internal/pkg"
|
||||||
"hakurei.app/internal/rosa"
|
"hakurei.app/internal/rosa"
|
||||||
"hakurei.app/message"
|
"hakurei.app/message"
|
||||||
|
|
||||||
"hakurei.app/cmd/mbf/internal/pkgserver"
|
|
||||||
"hakurei.app/cmd/mbf/internal/pkgserver/ui"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
@@ -63,12 +59,17 @@ func main() {
|
|||||||
defer stop()
|
defer stop()
|
||||||
|
|
||||||
var cm cache
|
var cm cache
|
||||||
defer func() { cm.Close() }()
|
defer func() {
|
||||||
|
cm.Close()
|
||||||
|
|
||||||
|
if r := recover(); r != nil {
|
||||||
|
fmt.Println(r)
|
||||||
|
log.Fatal("consider scrubbing the on-disk cache")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
var (
|
var (
|
||||||
flagQuiet bool
|
flagQuiet bool
|
||||||
flagCheck bool
|
|
||||||
flagLTO bool
|
|
||||||
|
|
||||||
addr net.UnixAddr
|
addr net.UnixAddr
|
||||||
)
|
)
|
||||||
@@ -76,38 +77,18 @@ func main() {
|
|||||||
msg.SwapVerbose(!flagQuiet)
|
msg.SwapVerbose(!flagQuiet)
|
||||||
cm.ctx, cm.msg = ctx, msg
|
cm.ctx, cm.msg = ctx, msg
|
||||||
cm.base = os.ExpandEnv(cm.base)
|
cm.base = os.ExpandEnv(cm.base)
|
||||||
if cm.base == "" {
|
|
||||||
cm.base = "cache"
|
|
||||||
}
|
|
||||||
|
|
||||||
addr.Net = "unix"
|
addr.Net = "unix"
|
||||||
addr.Name = os.ExpandEnv(addr.Name)
|
addr.Name = os.ExpandEnv(addr.Name)
|
||||||
if addr.Name == "" {
|
if addr.Name == "" {
|
||||||
addr.Name = filepath.Join(cm.base, "daemon")
|
addr.Name = "daemon"
|
||||||
}
|
}
|
||||||
|
|
||||||
var flags int
|
|
||||||
if !flagCheck {
|
|
||||||
flags |= rosa.OptSkipCheck
|
|
||||||
}
|
|
||||||
if !flagLTO {
|
|
||||||
flags |= rosa.OptLLVMNoLTO
|
|
||||||
}
|
|
||||||
rosa.DropCaches(flags)
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}).Flag(
|
}).Flag(
|
||||||
&flagQuiet,
|
&flagQuiet,
|
||||||
"q", command.BoolFlag(false),
|
"q", command.BoolFlag(false),
|
||||||
"Do not print cure messages",
|
"Do not print cure messages",
|
||||||
).Flag(
|
|
||||||
&flagLTO,
|
|
||||||
"lto", command.BoolFlag(false),
|
|
||||||
"Enable LTO in stage2 and stage3 LLVM toolchains",
|
|
||||||
).Flag(
|
|
||||||
&flagCheck,
|
|
||||||
"check", command.BoolFlag(true),
|
|
||||||
"Run test suites",
|
|
||||||
).Flag(
|
).Flag(
|
||||||
&cm.cures,
|
&cm.cures,
|
||||||
"cures", command.IntFlag(0),
|
"cures", command.IntFlag(0),
|
||||||
@@ -140,17 +121,7 @@ func main() {
|
|||||||
c.NewCommand(
|
c.NewCommand(
|
||||||
"checksum", "Compute checksum of data read from standard input",
|
"checksum", "Compute checksum of data read from standard input",
|
||||||
func([]string) error {
|
func([]string) error {
|
||||||
done := make(chan struct{})
|
go func() { <-ctx.Done(); os.Exit(1) }()
|
||||||
defer close(done)
|
|
||||||
go func() {
|
|
||||||
select {
|
|
||||||
case <-ctx.Done():
|
|
||||||
os.Exit(1)
|
|
||||||
case <-done:
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
h := sha512.New384()
|
h := sha512.New384()
|
||||||
if _, err := io.Copy(h, os.Stdin); err != nil {
|
if _, err := io.Copy(h, os.Stdin); err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -184,7 +155,6 @@ func main() {
|
|||||||
|
|
||||||
{
|
{
|
||||||
var (
|
var (
|
||||||
flagBind string
|
|
||||||
flagStatus bool
|
flagStatus bool
|
||||||
flagReport string
|
flagReport string
|
||||||
)
|
)
|
||||||
@@ -192,52 +162,8 @@ func main() {
|
|||||||
"info",
|
"info",
|
||||||
"Display out-of-band metadata of an artifact",
|
"Display out-of-band metadata of an artifact",
|
||||||
func(args []string) (err error) {
|
func(args []string) (err error) {
|
||||||
const shutdownTimeout = 15 * time.Second
|
return commandInfo(&cm, args, os.Stdout, flagStatus, flagReport)
|
||||||
|
|
||||||
var r *rosa.Report
|
|
||||||
if flagReport != "" {
|
|
||||||
if r, err = rosa.OpenReport(flagReport); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer func() {
|
|
||||||
if closeErr := r.Close(); err == nil {
|
|
||||||
err = closeErr
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
defer r.HandleAccess(&err)()
|
|
||||||
}
|
|
||||||
|
|
||||||
if flagBind == "" {
|
|
||||||
return commandInfo(&cm, args, os.Stdout, flagStatus, r)
|
|
||||||
}
|
|
||||||
|
|
||||||
var mux http.ServeMux
|
|
||||||
ui.Register(&mux)
|
|
||||||
if err = pkgserver.Register(ctx, &mux, r); err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
server := http.Server{Addr: flagBind, Handler: &mux}
|
|
||||||
go func() {
|
|
||||||
<-ctx.Done()
|
|
||||||
cc, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
|
|
||||||
defer cancel()
|
|
||||||
if _err := server.Shutdown(cc); _err != nil {
|
|
||||||
log.Fatal(_err)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
msg.Verbosef("listening on %q", flagBind)
|
|
||||||
err = server.ListenAndServe()
|
|
||||||
if errors.Is(err, http.ErrServerClosed) {
|
|
||||||
err = nil
|
|
||||||
}
|
|
||||||
return
|
|
||||||
},
|
},
|
||||||
).Flag(
|
|
||||||
&flagBind,
|
|
||||||
"bind", command.StringFlag(""),
|
|
||||||
"TCP address for the server to listen on",
|
|
||||||
).Flag(
|
).Flag(
|
||||||
&flagStatus,
|
&flagStatus,
|
||||||
"status", command.BoolFlag(false),
|
"status", command.BoolFlag(false),
|
||||||
@@ -396,7 +322,7 @@ func main() {
|
|||||||
|
|
||||||
if err = cm.Do(func(cache *pkg.Cache) (err error) {
|
if err = cm.Do(func(cache *pkg.Cache) (err error) {
|
||||||
pathname, _, err = cache.Cure(
|
pathname, _, err = cache.Cure(
|
||||||
(t - 2).Load(rosa.LLVM),
|
(t - 2).Load(rosa.Clang),
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
@@ -406,7 +332,7 @@ func main() {
|
|||||||
|
|
||||||
if err = cm.Do(func(cache *pkg.Cache) (err error) {
|
if err = cm.Do(func(cache *pkg.Cache) (err error) {
|
||||||
pathname, checksum[0], err = cache.Cure(
|
pathname, checksum[0], err = cache.Cure(
|
||||||
(t - 1).Load(rosa.LLVM),
|
(t - 1).Load(rosa.Clang),
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
@@ -415,7 +341,7 @@ func main() {
|
|||||||
log.Println("stage2:", pathname)
|
log.Println("stage2:", pathname)
|
||||||
if err = cm.Do(func(cache *pkg.Cache) (err error) {
|
if err = cm.Do(func(cache *pkg.Cache) (err error) {
|
||||||
pathname, checksum[1], err = cache.Cure(
|
pathname, checksum[1], err = cache.Cure(
|
||||||
t.Load(rosa.LLVM),
|
t.Load(rosa.Clang),
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
@@ -471,9 +397,6 @@ func main() {
|
|||||||
flagExport string
|
flagExport string
|
||||||
flagRemote bool
|
flagRemote bool
|
||||||
flagNoReply bool
|
flagNoReply bool
|
||||||
|
|
||||||
flagBoot bool
|
|
||||||
flagStd bool
|
|
||||||
)
|
)
|
||||||
c.NewCommand(
|
c.NewCommand(
|
||||||
"cure",
|
"cure",
|
||||||
@@ -487,18 +410,11 @@ func main() {
|
|||||||
return fmt.Errorf("unknown artifact %q", args[0])
|
return fmt.Errorf("unknown artifact %q", args[0])
|
||||||
}
|
}
|
||||||
|
|
||||||
t := rosa.Std
|
|
||||||
if flagBoot {
|
|
||||||
t -= 2
|
|
||||||
} else if flagStd {
|
|
||||||
t -= 1
|
|
||||||
}
|
|
||||||
|
|
||||||
switch {
|
switch {
|
||||||
default:
|
default:
|
||||||
var pathname *check.Absolute
|
var pathname *check.Absolute
|
||||||
err := cm.Do(func(cache *pkg.Cache) (err error) {
|
err := cm.Do(func(cache *pkg.Cache) (err error) {
|
||||||
pathname, _, err = cache.Cure(t.Load(p))
|
pathname, _, err = cache.Cure(rosa.Std.Load(p))
|
||||||
return
|
return
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -551,7 +467,7 @@ func main() {
|
|||||||
return cm.Do(func(cache *pkg.Cache) error {
|
return cm.Do(func(cache *pkg.Cache) error {
|
||||||
return cache.EnterExec(
|
return cache.EnterExec(
|
||||||
ctx,
|
ctx,
|
||||||
t.Load(p),
|
rosa.Std.Load(p),
|
||||||
true, os.Stdin, os.Stdout, os.Stderr,
|
true, os.Stdin, os.Stdout, os.Stderr,
|
||||||
rosa.AbsSystem.Append("bin", "mksh"),
|
rosa.AbsSystem.Append("bin", "mksh"),
|
||||||
"sh",
|
"sh",
|
||||||
@@ -563,7 +479,7 @@ func main() {
|
|||||||
if flagNoReply {
|
if flagNoReply {
|
||||||
flags |= remoteNoReply
|
flags |= remoteNoReply
|
||||||
}
|
}
|
||||||
a := t.Load(p)
|
a := rosa.Std.Load(p)
|
||||||
pathname, err := cureRemote(ctx, &addr, a, flags)
|
pathname, err := cureRemote(ctx, &addr, a, flags)
|
||||||
if !flagNoReply && err == nil {
|
if !flagNoReply && err == nil {
|
||||||
log.Println(pathname)
|
log.Println(pathname)
|
||||||
@@ -573,7 +489,7 @@ func main() {
|
|||||||
cc, cancel := context.WithDeadline(context.Background(), daemonDeadline())
|
cc, cancel := context.WithDeadline(context.Background(), daemonDeadline())
|
||||||
defer cancel()
|
defer cancel()
|
||||||
|
|
||||||
if _err := cancelRemote(cc, &addr, a, false); _err != nil {
|
if _err := cancelRemote(cc, &addr, a); _err != nil {
|
||||||
log.Println(err)
|
log.Println(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -601,21 +517,13 @@ func main() {
|
|||||||
&flagNoReply,
|
&flagNoReply,
|
||||||
"no-reply", command.BoolFlag(false),
|
"no-reply", command.BoolFlag(false),
|
||||||
"Do not receive a reply from the daemon",
|
"Do not receive a reply from the daemon",
|
||||||
).Flag(
|
|
||||||
&flagBoot,
|
|
||||||
"boot", command.BoolFlag(false),
|
|
||||||
"Build on the stage0 toolchain",
|
|
||||||
).Flag(
|
|
||||||
&flagStd,
|
|
||||||
"std", command.BoolFlag(false),
|
|
||||||
"Build on the intermediate toolchain",
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.NewCommand(
|
c.NewCommand(
|
||||||
"abort",
|
"abort",
|
||||||
"Abort all pending cures on the daemon",
|
"Abort all pending cures on the daemon",
|
||||||
func([]string) error { return abortRemote(ctx, &addr, false) },
|
func([]string) error { return abortRemote(ctx, &addr) },
|
||||||
)
|
)
|
||||||
|
|
||||||
{
|
{
|
||||||
@@ -638,7 +546,7 @@ func main() {
|
|||||||
presets[i] = p
|
presets[i] = p
|
||||||
}
|
}
|
||||||
|
|
||||||
base := rosa.LLVM
|
base := rosa.Clang
|
||||||
if !flagWithToolchain {
|
if !flagWithToolchain {
|
||||||
base = rosa.Musl
|
base = rosa.Musl
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,47 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net"
|
|
||||||
"os"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"hakurei.app/internal/rosa"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestMain(m *testing.M) {
|
|
||||||
rosa.DropCaches(rosa.OptLLVMNoLTO)
|
|
||||||
os.Exit(m.Run())
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCureAll(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
const env = "ROSA_TEST_DAEMON"
|
|
||||||
|
|
||||||
if !testing.Verbose() {
|
|
||||||
t.Skip("verbose flag not set")
|
|
||||||
}
|
|
||||||
|
|
||||||
pathname, ok := os.LookupEnv(env)
|
|
||||||
if !ok {
|
|
||||||
t.Skip(env + " not set")
|
|
||||||
}
|
|
||||||
|
|
||||||
addr := net.UnixAddr{Net: "unix", Name: pathname}
|
|
||||||
t.Cleanup(func() {
|
|
||||||
if t.Failed() {
|
|
||||||
if err := abortRemote(t.Context(), &addr, false); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
for i := range rosa.PresetEnd {
|
|
||||||
p := rosa.PArtifact(i)
|
|
||||||
t.Run(rosa.GetMetadata(p).Name, func(t *testing.T) {
|
|
||||||
_, err := cureRemote(t.Context(), &addr, rosa.Std.Load(p), 0)
|
|
||||||
if err != nil {
|
|
||||||
t.Error(err)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -16,7 +16,6 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"syscall"
|
"syscall"
|
||||||
"testing"
|
"testing"
|
||||||
_ "unsafe" // for go:linkname
|
|
||||||
|
|
||||||
"hakurei.app/check"
|
"hakurei.app/check"
|
||||||
"hakurei.app/command"
|
"hakurei.app/command"
|
||||||
@@ -234,9 +233,6 @@ func earlyMnt(mnt ...*vfs.MountInfoEntry) func(*testing.T, context.Context) []*v
|
|||||||
return func(*testing.T, context.Context) []*vfs.MountInfoEntry { return mnt }
|
return func(*testing.T, context.Context) []*vfs.MountInfoEntry { return mnt }
|
||||||
}
|
}
|
||||||
|
|
||||||
//go:linkname toHost hakurei.app/container.toHost
|
|
||||||
func toHost(name string) string
|
|
||||||
|
|
||||||
var containerTestCases = []struct {
|
var containerTestCases = []struct {
|
||||||
name string
|
name string
|
||||||
filter bool
|
filter bool
|
||||||
@@ -336,15 +332,13 @@ var containerTestCases = []struct {
|
|||||||
func(t *testing.T, ctx context.Context) []*vfs.MountInfoEntry {
|
func(t *testing.T, ctx context.Context) []*vfs.MountInfoEntry {
|
||||||
return []*vfs.MountInfoEntry{
|
return []*vfs.MountInfoEntry{
|
||||||
ent("/", hst.PrivateTmp, "rw", "overlay", "overlay",
|
ent("/", hst.PrivateTmp, "rw", "overlay", "overlay",
|
||||||
"rw"+
|
"rw,lowerdir="+
|
||||||
",lowerdir+="+
|
container.InternalToHostOvlEscape(ctx.Value(testVal("lower0")).(*check.Absolute).String())+":"+
|
||||||
toHost(ctx.Value(testVal("lower0")).(*check.Absolute).String())+
|
container.InternalToHostOvlEscape(ctx.Value(testVal("lower1")).(*check.Absolute).String())+
|
||||||
",lowerdir+="+
|
|
||||||
toHost(ctx.Value(testVal("lower1")).(*check.Absolute).String())+
|
|
||||||
",upperdir="+
|
",upperdir="+
|
||||||
toHost(ctx.Value(testVal("upper")).(*check.Absolute).String())+
|
container.InternalToHostOvlEscape(ctx.Value(testVal("upper")).(*check.Absolute).String())+
|
||||||
",workdir="+
|
",workdir="+
|
||||||
toHost(ctx.Value(testVal("work")).(*check.Absolute).String())+
|
container.InternalToHostOvlEscape(ctx.Value(testVal("work")).(*check.Absolute).String())+
|
||||||
",redirect_dir=nofollow,uuid=on,userxattr"),
|
",redirect_dir=nofollow,uuid=on,userxattr"),
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -394,11 +388,9 @@ var containerTestCases = []struct {
|
|||||||
func(t *testing.T, ctx context.Context) []*vfs.MountInfoEntry {
|
func(t *testing.T, ctx context.Context) []*vfs.MountInfoEntry {
|
||||||
return []*vfs.MountInfoEntry{
|
return []*vfs.MountInfoEntry{
|
||||||
ent("/", hst.PrivateTmp, "rw", "overlay", "overlay",
|
ent("/", hst.PrivateTmp, "rw", "overlay", "overlay",
|
||||||
"ro"+
|
"ro,lowerdir="+
|
||||||
",lowerdir+="+
|
container.InternalToHostOvlEscape(ctx.Value(testVal("lower0")).(*check.Absolute).String())+":"+
|
||||||
toHost(ctx.Value(testVal("lower0")).(*check.Absolute).String())+
|
container.InternalToHostOvlEscape(ctx.Value(testVal("lower1")).(*check.Absolute).String())+
|
||||||
",lowerdir+="+
|
|
||||||
toHost(ctx.Value(testVal("lower1")).(*check.Absolute).String())+
|
|
||||||
",redirect_dir=nofollow,userxattr"),
|
",redirect_dir=nofollow,userxattr"),
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -65,8 +65,6 @@ type syscallDispatcher interface {
|
|||||||
remount(msg message.Msg, target string, flags uintptr) error
|
remount(msg message.Msg, target string, flags uintptr) error
|
||||||
// mountTmpfs provides mountTmpfs.
|
// mountTmpfs provides mountTmpfs.
|
||||||
mountTmpfs(fsname, target string, flags uintptr, size int, perm os.FileMode) error
|
mountTmpfs(fsname, target string, flags uintptr, size int, perm os.FileMode) error
|
||||||
// mountOverlay provides mountOverlay.
|
|
||||||
mountOverlay(target string, options [][2]string) error
|
|
||||||
// ensureFile provides ensureFile.
|
// ensureFile provides ensureFile.
|
||||||
ensureFile(name string, perm, pperm os.FileMode) error
|
ensureFile(name string, perm, pperm os.FileMode) error
|
||||||
// mustLoopback provides mustLoopback.
|
// mustLoopback provides mustLoopback.
|
||||||
@@ -171,9 +169,6 @@ func (direct) remount(msg message.Msg, target string, flags uintptr) error {
|
|||||||
func (k direct) mountTmpfs(fsname, target string, flags uintptr, size int, perm os.FileMode) error {
|
func (k direct) mountTmpfs(fsname, target string, flags uintptr, size int, perm os.FileMode) error {
|
||||||
return mountTmpfs(k, fsname, target, flags, size, perm)
|
return mountTmpfs(k, fsname, target, flags, size, perm)
|
||||||
}
|
}
|
||||||
func (k direct) mountOverlay(target string, options [][2]string) error {
|
|
||||||
return mountOverlay(target, options)
|
|
||||||
}
|
|
||||||
func (direct) ensureFile(name string, perm, pperm os.FileMode) error {
|
func (direct) ensureFile(name string, perm, pperm os.FileMode) error {
|
||||||
return ensureFile(name, perm, pperm)
|
return ensureFile(name, perm, pperm)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -468,14 +468,6 @@ func (k *kstub) mountTmpfs(fsname, target string, flags uintptr, size int, perm
|
|||||||
stub.CheckArg(k.Stub, "perm", perm, 4))
|
stub.CheckArg(k.Stub, "perm", perm, 4))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (k *kstub) mountOverlay(target string, options [][2]string) error {
|
|
||||||
k.Helper()
|
|
||||||
return k.Expects("mountOverlay").Error(
|
|
||||||
stub.CheckArg(k.Stub, "target", target, 0),
|
|
||||||
stub.CheckArgReflect(k.Stub, "options", options, 1),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (k *kstub) ensureFile(name string, perm, pperm os.FileMode) error {
|
func (k *kstub) ensureFile(name string, perm, pperm os.FileMode) error {
|
||||||
k.Helper()
|
k.Helper()
|
||||||
return k.Expects("ensureFile").Error(
|
return k.Expects("ensureFile").Error(
|
||||||
|
|||||||
@@ -118,10 +118,6 @@ func errnoFallback(op, path string, err error) (syscall.Errno, *os.PathError) {
|
|||||||
|
|
||||||
// mount wraps syscall.Mount for error handling.
|
// mount wraps syscall.Mount for error handling.
|
||||||
func mount(source, target, fstype string, flags uintptr, data string) error {
|
func mount(source, target, fstype string, flags uintptr, data string) error {
|
||||||
if max(len(source), len(target), len(data))+1 > os.Getpagesize() {
|
|
||||||
return &MountError{source, target, fstype, flags, data, syscall.ENOMEM}
|
|
||||||
}
|
|
||||||
|
|
||||||
err := syscall.Mount(source, target, fstype, flags, data)
|
err := syscall.Mount(source, target, fstype, flags, data)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -4,9 +4,9 @@ import (
|
|||||||
"encoding/gob"
|
"encoding/gob"
|
||||||
"fmt"
|
"fmt"
|
||||||
"slices"
|
"slices"
|
||||||
|
"strings"
|
||||||
|
|
||||||
"hakurei.app/check"
|
"hakurei.app/check"
|
||||||
"hakurei.app/ext"
|
|
||||||
"hakurei.app/fhs"
|
"hakurei.app/fhs"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -150,7 +150,7 @@ func (o *MountOverlayOp) early(_ *setupState, k syscallDispatcher) error {
|
|||||||
if v, err := k.evalSymlinks(o.Upper.String()); err != nil {
|
if v, err := k.evalSymlinks(o.Upper.String()); err != nil {
|
||||||
return err
|
return err
|
||||||
} else {
|
} else {
|
||||||
o.upper = toHost(v)
|
o.upper = check.EscapeOverlayDataSegment(toHost(v))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -158,7 +158,7 @@ func (o *MountOverlayOp) early(_ *setupState, k syscallDispatcher) error {
|
|||||||
if v, err := k.evalSymlinks(o.Work.String()); err != nil {
|
if v, err := k.evalSymlinks(o.Work.String()); err != nil {
|
||||||
return err
|
return err
|
||||||
} else {
|
} else {
|
||||||
o.work = toHost(v)
|
o.work = check.EscapeOverlayDataSegment(toHost(v))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -168,39 +168,12 @@ func (o *MountOverlayOp) early(_ *setupState, k syscallDispatcher) error {
|
|||||||
if v, err := k.evalSymlinks(a.String()); err != nil {
|
if v, err := k.evalSymlinks(a.String()); err != nil {
|
||||||
return err
|
return err
|
||||||
} else {
|
} else {
|
||||||
o.lower[i] = toHost(v)
|
o.lower[i] = check.EscapeOverlayDataSegment(toHost(v))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// mountOverlay sets up an overlay mount via [ext.FS].
|
|
||||||
func mountOverlay(target string, options [][2]string) error {
|
|
||||||
fs, err := ext.OpenFS(SourceOverlay, 0)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err = fs.SetString("source", SourceOverlay); err != nil {
|
|
||||||
_ = fs.Close()
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, option := range options {
|
|
||||||
if err = fs.SetString(option[0], option[1]); err != nil {
|
|
||||||
_ = fs.Close()
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err = fs.SetFlag(OptionOverlayUserxattr); err != nil {
|
|
||||||
_ = fs.Close()
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err = fs.Mount(target, 0); err != nil {
|
|
||||||
_ = fs.Close()
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return fs.Close()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *MountOverlayOp) apply(state *setupState, k syscallDispatcher) error {
|
func (o *MountOverlayOp) apply(state *setupState, k syscallDispatcher) error {
|
||||||
target := o.Target.String()
|
target := o.Target.String()
|
||||||
if !o.noPrefix {
|
if !o.noPrefix {
|
||||||
@@ -221,7 +194,7 @@ func (o *MountOverlayOp) apply(state *setupState, k syscallDispatcher) error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
options := make([][2]string, 0, 2+len(o.lower))
|
options := make([]string, 0, 4)
|
||||||
|
|
||||||
if o.upper == zeroString && o.work == zeroString { // readonly
|
if o.upper == zeroString && o.work == zeroString { // readonly
|
||||||
if len(o.Lower) < 2 {
|
if len(o.Lower) < 2 {
|
||||||
@@ -232,16 +205,15 @@ func (o *MountOverlayOp) apply(state *setupState, k syscallDispatcher) error {
|
|||||||
if len(o.Lower) == 0 {
|
if len(o.Lower) == 0 {
|
||||||
return &OverlayArgumentError{OverlayEmptyLower, zeroString}
|
return &OverlayArgumentError{OverlayEmptyLower, zeroString}
|
||||||
}
|
}
|
||||||
options = append(options, [][2]string{
|
options = append(options,
|
||||||
{OptionOverlayUpperdir, o.upper},
|
OptionOverlayUpperdir+"="+o.upper,
|
||||||
{OptionOverlayWorkdir, o.work},
|
OptionOverlayWorkdir+"="+o.work)
|
||||||
}...)
|
|
||||||
}
|
|
||||||
for _, lower := range o.lower {
|
|
||||||
options = append(options, [2]string{OptionOverlayLowerdir + "+", lower})
|
|
||||||
}
|
}
|
||||||
|
options = append(options,
|
||||||
|
OptionOverlayLowerdir+"="+strings.Join(o.lower, check.SpecialOverlayPath),
|
||||||
|
OptionOverlayUserxattr)
|
||||||
|
|
||||||
return k.mountOverlay(target, options)
|
return k.mount(SourceOverlay, target, FstypeOverlay, 0, strings.Join(options, check.SpecialOverlayOption))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (o *MountOverlayOp) late(*setupState, syscallDispatcher) error { return nil }
|
func (o *MountOverlayOp) late(*setupState, syscallDispatcher) error { return nil }
|
||||||
|
|||||||
@@ -97,12 +97,13 @@ func TestMountOverlayOp(t *testing.T) {
|
|||||||
call("mkdirAll", stub.ExpectArgs{"/sysroot", os.FileMode(0705)}, nil, nil),
|
call("mkdirAll", stub.ExpectArgs{"/sysroot", os.FileMode(0705)}, nil, nil),
|
||||||
call("mkdirTemp", stub.ExpectArgs{"/", "overlay.upper.*"}, "overlay.upper.32768", nil),
|
call("mkdirTemp", stub.ExpectArgs{"/", "overlay.upper.*"}, "overlay.upper.32768", nil),
|
||||||
call("mkdirTemp", stub.ExpectArgs{"/", "overlay.work.*"}, "overlay.work.32768", nil),
|
call("mkdirTemp", stub.ExpectArgs{"/", "overlay.work.*"}, "overlay.work.32768", nil),
|
||||||
call("mountOverlay", stub.ExpectArgs{"/sysroot", [][2]string{
|
call("mount", stub.ExpectArgs{"overlay", "/sysroot", "overlay", uintptr(0), "" +
|
||||||
{"upperdir", "overlay.upper.32768"},
|
"upperdir=overlay.upper.32768," +
|
||||||
{"workdir", "overlay.work.32768"},
|
"workdir=overlay.work.32768," +
|
||||||
{"lowerdir+", `/host/var/lib/planterette/base/debian:f92c9052`},
|
"lowerdir=" +
|
||||||
{"lowerdir+", `/host/var/lib/planterette/app/org.chromium.Chromium@debian:f92c9052`},
|
`/host/var/lib/planterette/base/debian\:f92c9052:` +
|
||||||
}}, nil, nil),
|
`/host/var/lib/planterette/app/org.chromium.Chromium@debian\:f92c9052,` +
|
||||||
|
"userxattr"}, nil, nil),
|
||||||
}, nil},
|
}, nil},
|
||||||
|
|
||||||
{"short lower ro", &Params{ParentPerm: 0755}, &MountOverlayOp{
|
{"short lower ro", &Params{ParentPerm: 0755}, &MountOverlayOp{
|
||||||
@@ -128,10 +129,11 @@ func TestMountOverlayOp(t *testing.T) {
|
|||||||
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store0"}, "/mnt-root/nix/.ro-store0", nil),
|
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store0"}, "/mnt-root/nix/.ro-store0", nil),
|
||||||
}, nil, []stub.Call{
|
}, nil, []stub.Call{
|
||||||
call("mkdirAll", stub.ExpectArgs{"/nix/store", os.FileMode(0755)}, nil, nil),
|
call("mkdirAll", stub.ExpectArgs{"/nix/store", os.FileMode(0755)}, nil, nil),
|
||||||
call("mountOverlay", stub.ExpectArgs{"/nix/store", [][2]string{
|
call("mount", stub.ExpectArgs{"overlay", "/nix/store", "overlay", uintptr(0), "" +
|
||||||
{"lowerdir+", "/host/mnt-root/nix/.ro-store"},
|
"lowerdir=" +
|
||||||
{"lowerdir+", "/host/mnt-root/nix/.ro-store0"},
|
"/host/mnt-root/nix/.ro-store:" +
|
||||||
}}, nil, nil),
|
"/host/mnt-root/nix/.ro-store0," +
|
||||||
|
"userxattr"}, nil, nil),
|
||||||
}, nil},
|
}, nil},
|
||||||
|
|
||||||
{"success ro", &Params{ParentPerm: 0755}, &MountOverlayOp{
|
{"success ro", &Params{ParentPerm: 0755}, &MountOverlayOp{
|
||||||
@@ -145,10 +147,11 @@ func TestMountOverlayOp(t *testing.T) {
|
|||||||
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store0"}, "/mnt-root/nix/.ro-store0", nil),
|
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store0"}, "/mnt-root/nix/.ro-store0", nil),
|
||||||
}, nil, []stub.Call{
|
}, nil, []stub.Call{
|
||||||
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0755)}, nil, nil),
|
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0755)}, nil, nil),
|
||||||
call("mountOverlay", stub.ExpectArgs{"/sysroot/nix/store", [][2]string{
|
call("mount", stub.ExpectArgs{"overlay", "/sysroot/nix/store", "overlay", uintptr(0), "" +
|
||||||
{"lowerdir+", "/host/mnt-root/nix/.ro-store"},
|
"lowerdir=" +
|
||||||
{"lowerdir+", "/host/mnt-root/nix/.ro-store0"},
|
"/host/mnt-root/nix/.ro-store:" +
|
||||||
}}, nil, nil),
|
"/host/mnt-root/nix/.ro-store0," +
|
||||||
|
"userxattr"}, nil, nil),
|
||||||
}, nil},
|
}, nil},
|
||||||
|
|
||||||
{"nil lower", &Params{ParentPerm: 0700}, &MountOverlayOp{
|
{"nil lower", &Params{ParentPerm: 0700}, &MountOverlayOp{
|
||||||
@@ -216,11 +219,7 @@ func TestMountOverlayOp(t *testing.T) {
|
|||||||
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store"}, "/mnt-root/nix/ro-store", nil),
|
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store"}, "/mnt-root/nix/ro-store", nil),
|
||||||
}, nil, []stub.Call{
|
}, nil, []stub.Call{
|
||||||
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0700)}, nil, nil),
|
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0700)}, nil, nil),
|
||||||
call("mountOverlay", stub.ExpectArgs{"/sysroot/nix/store", [][2]string{
|
call("mount", stub.ExpectArgs{"overlay", "/sysroot/nix/store", "overlay", uintptr(0), "upperdir=/host/mnt-root/nix/.rw-store/.upper,workdir=/host/mnt-root/nix/.rw-store/.work,lowerdir=/host/mnt-root/nix/ro-store,userxattr"}, nil, stub.UniqueError(0)),
|
||||||
{"upperdir", "/host/mnt-root/nix/.rw-store/.upper"},
|
|
||||||
{"workdir", "/host/mnt-root/nix/.rw-store/.work"},
|
|
||||||
{"lowerdir+", "/host/mnt-root/nix/ro-store"},
|
|
||||||
}}, nil, stub.UniqueError(0)),
|
|
||||||
}, stub.UniqueError(0)},
|
}, stub.UniqueError(0)},
|
||||||
|
|
||||||
{"success single layer", &Params{ParentPerm: 0700}, &MountOverlayOp{
|
{"success single layer", &Params{ParentPerm: 0700}, &MountOverlayOp{
|
||||||
@@ -234,11 +233,11 @@ func TestMountOverlayOp(t *testing.T) {
|
|||||||
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store"}, "/mnt-root/nix/ro-store", nil),
|
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store"}, "/mnt-root/nix/ro-store", nil),
|
||||||
}, nil, []stub.Call{
|
}, nil, []stub.Call{
|
||||||
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0700)}, nil, nil),
|
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0700)}, nil, nil),
|
||||||
call("mountOverlay", stub.ExpectArgs{"/sysroot/nix/store", [][2]string{
|
call("mount", stub.ExpectArgs{"overlay", "/sysroot/nix/store", "overlay", uintptr(0), "" +
|
||||||
{"upperdir", "/host/mnt-root/nix/.rw-store/.upper"},
|
"upperdir=/host/mnt-root/nix/.rw-store/.upper," +
|
||||||
{"workdir", "/host/mnt-root/nix/.rw-store/.work"},
|
"workdir=/host/mnt-root/nix/.rw-store/.work," +
|
||||||
{"lowerdir+", "/host/mnt-root/nix/ro-store"},
|
"lowerdir=/host/mnt-root/nix/ro-store," +
|
||||||
}}, nil, nil),
|
"userxattr"}, nil, nil),
|
||||||
}, nil},
|
}, nil},
|
||||||
|
|
||||||
{"success", &Params{ParentPerm: 0700}, &MountOverlayOp{
|
{"success", &Params{ParentPerm: 0700}, &MountOverlayOp{
|
||||||
@@ -262,15 +261,16 @@ func TestMountOverlayOp(t *testing.T) {
|
|||||||
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store3"}, "/mnt-root/nix/ro-store3", nil),
|
call("evalSymlinks", stub.ExpectArgs{"/mnt-root/nix/.ro-store3"}, "/mnt-root/nix/ro-store3", nil),
|
||||||
}, nil, []stub.Call{
|
}, nil, []stub.Call{
|
||||||
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0700)}, nil, nil),
|
call("mkdirAll", stub.ExpectArgs{"/sysroot/nix/store", os.FileMode(0700)}, nil, nil),
|
||||||
call("mountOverlay", stub.ExpectArgs{"/sysroot/nix/store", [][2]string{
|
call("mount", stub.ExpectArgs{"overlay", "/sysroot/nix/store", "overlay", uintptr(0), "" +
|
||||||
{"upperdir", "/host/mnt-root/nix/.rw-store/.upper"},
|
"upperdir=/host/mnt-root/nix/.rw-store/.upper," +
|
||||||
{"workdir", "/host/mnt-root/nix/.rw-store/.work"},
|
"workdir=/host/mnt-root/nix/.rw-store/.work," +
|
||||||
{"lowerdir+", "/host/mnt-root/nix/ro-store"},
|
"lowerdir=" +
|
||||||
{"lowerdir+", "/host/mnt-root/nix/ro-store0"},
|
"/host/mnt-root/nix/ro-store:" +
|
||||||
{"lowerdir+", "/host/mnt-root/nix/ro-store1"},
|
"/host/mnt-root/nix/ro-store0:" +
|
||||||
{"lowerdir+", "/host/mnt-root/nix/ro-store2"},
|
"/host/mnt-root/nix/ro-store1:" +
|
||||||
{"lowerdir+", "/host/mnt-root/nix/ro-store3"},
|
"/host/mnt-root/nix/ro-store2:" +
|
||||||
}}, nil, nil),
|
"/host/mnt-root/nix/ro-store3," +
|
||||||
|
"userxattr"}, nil, nil),
|
||||||
}, nil},
|
}, nil},
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
"unsafe"
|
"unsafe"
|
||||||
|
|
||||||
|
"hakurei.app/check"
|
||||||
"hakurei.app/vfs"
|
"hakurei.app/vfs"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -49,6 +50,9 @@ func TestToHost(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// InternalToHostOvlEscape exports toHost passed to [check.EscapeOverlayDataSegment].
|
||||||
|
func InternalToHostOvlEscape(s string) string { return check.EscapeOverlayDataSegment(toHost(s)) }
|
||||||
|
|
||||||
func TestCreateFile(t *testing.T) {
|
func TestCreateFile(t *testing.T) {
|
||||||
t.Run("nonexistent", func(t *testing.T) {
|
t.Run("nonexistent", func(t *testing.T) {
|
||||||
t.Run("mkdir", func(t *testing.T) {
|
t.Run("mkdir", func(t *testing.T) {
|
||||||
|
|||||||
267
ext/fs.go
267
ext/fs.go
@@ -1,267 +0,0 @@
|
|||||||
package ext
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"runtime"
|
|
||||||
"syscall"
|
|
||||||
"unsafe"
|
|
||||||
)
|
|
||||||
|
|
||||||
// include/uapi/linux/mount.h
|
|
||||||
|
|
||||||
/*
|
|
||||||
* move_mount() flags.
|
|
||||||
*/
|
|
||||||
const (
|
|
||||||
MOVE_MOUNT_F_SYMLINKS = 1 << iota /* Follow symlinks on from path */
|
|
||||||
MOVE_MOUNT_F_AUTOMOUNTS /* Follow automounts on from path */
|
|
||||||
MOVE_MOUNT_F_EMPTY_PATH /* Empty from path permitted */
|
|
||||||
_
|
|
||||||
MOVE_MOUNT_T_SYMLINKS /* Follow symlinks on to path */
|
|
||||||
MOVE_MOUNT_T_AUTOMOUNTS /* Follow automounts on to path */
|
|
||||||
MOVE_MOUNT_T_EMPTY_PATH /* Empty to path permitted */
|
|
||||||
_
|
|
||||||
MOVE_MOUNT_SET_GROUP /* Set sharing group instead */
|
|
||||||
MOVE_MOUNT_BENEATH /* Mount beneath top mount */
|
|
||||||
)
|
|
||||||
|
|
||||||
/*
|
|
||||||
* fsopen() flags.
|
|
||||||
*/
|
|
||||||
const (
|
|
||||||
FSOPEN_CLOEXEC = 1 << iota
|
|
||||||
)
|
|
||||||
|
|
||||||
/*
|
|
||||||
* fspick() flags.
|
|
||||||
*/
|
|
||||||
const (
|
|
||||||
FSPICK_CLOEXEC = 1 << iota
|
|
||||||
FSPICK_SYMLINK_NOFOLLOW
|
|
||||||
FSPICK_NO_AUTOMOUNT
|
|
||||||
FSPICK_EMPTY_PATH
|
|
||||||
)
|
|
||||||
|
|
||||||
/*
|
|
||||||
* The type of fsconfig() call made.
|
|
||||||
*/
|
|
||||||
const (
|
|
||||||
FSCONFIG_SET_FLAG = iota /* Set parameter, supplying no value */
|
|
||||||
FSCONFIG_SET_STRING /* Set parameter, supplying a string value */
|
|
||||||
FSCONFIG_SET_BINARY /* Set parameter, supplying a binary blob value */
|
|
||||||
FSCONFIG_SET_PATH /* Set parameter, supplying an object by path */
|
|
||||||
FSCONFIG_SET_PATH_EMPTY /* Set parameter, supplying an object by (empty) path */
|
|
||||||
FSCONFIG_SET_FD /* Set parameter, supplying an object by fd */
|
|
||||||
FSCONFIG_CMD_CREATE /* Create new or reuse existing superblock */
|
|
||||||
FSCONFIG_CMD_RECONFIGURE /* Invoke superblock reconfiguration */
|
|
||||||
FSCONFIG_CMD_CREATE_EXCL /* Create new superblock, fail if reusing existing superblock */
|
|
||||||
)
|
|
||||||
|
|
||||||
/*
|
|
||||||
* fsmount() flags.
|
|
||||||
*/
|
|
||||||
const (
|
|
||||||
FSMOUNT_CLOEXEC = 1 << iota
|
|
||||||
)
|
|
||||||
|
|
||||||
/*
|
|
||||||
* Mount attributes.
|
|
||||||
*/
|
|
||||||
const (
|
|
||||||
MOUNT_ATTR_RDONLY = 0x00000001 /* Mount read-only */
|
|
||||||
MOUNT_ATTR_NOSUID = 0x00000002 /* Ignore suid and sgid bits */
|
|
||||||
MOUNT_ATTR_NODEV = 0x00000004 /* Disallow access to device special files */
|
|
||||||
MOUNT_ATTR_NOEXEC = 0x00000008 /* Disallow program execution */
|
|
||||||
MOUNT_ATTR__ATIME = 0x00000070 /* Setting on how atime should be updated */
|
|
||||||
MOUNT_ATTR_RELATIME = 0x00000000 /* - Update atime relative to mtime/ctime. */
|
|
||||||
MOUNT_ATTR_NOATIME = 0x00000010 /* - Do not update access times. */
|
|
||||||
MOUNT_ATTR_STRICTATIME = 0x00000020 /* - Always perform atime updates */
|
|
||||||
MOUNT_ATTR_NODIRATIME = 0x00000080 /* Do not update directory access times */
|
|
||||||
MOUNT_ATTR_IDMAP = 0x00100000 /* Idmap mount to @userns_fd in struct mount_attr. */
|
|
||||||
MOUNT_ATTR_NOSYMFOLLOW = 0x00200000 /* Do not follow symlinks */
|
|
||||||
)
|
|
||||||
|
|
||||||
// FS provides low-level wrappers around the suite of file-descriptor-based
|
|
||||||
// mount facilities in Linux.
|
|
||||||
type FS struct {
|
|
||||||
fd uintptr
|
|
||||||
c runtime.Cleanup
|
|
||||||
}
|
|
||||||
|
|
||||||
// newFS allocates a new [FS] for the specified fd.
|
|
||||||
func newFS(fd uintptr) *FS {
|
|
||||||
fs := FS{fd: fd}
|
|
||||||
fs.c = runtime.AddCleanup(&fs, func(fd uintptr) {
|
|
||||||
_ = syscall.Close(int(fd))
|
|
||||||
}, fd)
|
|
||||||
return &fs
|
|
||||||
}
|
|
||||||
|
|
||||||
// Close closes the underlying filesystem context.
|
|
||||||
func (fs *FS) Close() error {
|
|
||||||
if fs == nil {
|
|
||||||
return syscall.EINVAL
|
|
||||||
}
|
|
||||||
err := syscall.Close(int(fs.fd))
|
|
||||||
fs.c.Stop()
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// OpenFS creates a new filesystem context.
|
|
||||||
func OpenFS(fsname string, flags int) (fs *FS, err error) {
|
|
||||||
var s *byte
|
|
||||||
s, err = syscall.BytePtrFromString(fsname)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
fd, _, errno := syscall.Syscall(
|
|
||||||
SYS_FSOPEN,
|
|
||||||
uintptr(unsafe.Pointer(s)),
|
|
||||||
uintptr(flags|FSOPEN_CLOEXEC),
|
|
||||||
0,
|
|
||||||
)
|
|
||||||
if errno != 0 {
|
|
||||||
err = os.NewSyscallError("fsopen", errno)
|
|
||||||
} else {
|
|
||||||
fs = newFS(fd)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// PickFS selects filesystem for reconfiguration.
|
|
||||||
func PickFS(dirfd int, pathname string, flags int) (fs *FS, err error) {
|
|
||||||
var s *byte
|
|
||||||
s, err = syscall.BytePtrFromString(pathname)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
fd, _, errno := syscall.Syscall(
|
|
||||||
SYS_FSPICK,
|
|
||||||
uintptr(dirfd),
|
|
||||||
uintptr(unsafe.Pointer(s)),
|
|
||||||
uintptr(flags|FSPICK_CLOEXEC),
|
|
||||||
)
|
|
||||||
if errno != 0 {
|
|
||||||
err = os.NewSyscallError("fspick", errno)
|
|
||||||
} else {
|
|
||||||
fs = newFS(fd)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// config configures new or existing filesystem context.
|
|
||||||
func (fs *FS) config(cmd uint, key *byte, value unsafe.Pointer, aux int) (err error) {
|
|
||||||
_, _, errno := syscall.Syscall6(
|
|
||||||
SYS_FSCONFIG,
|
|
||||||
fs.fd,
|
|
||||||
uintptr(cmd),
|
|
||||||
uintptr(unsafe.Pointer(key)),
|
|
||||||
uintptr(value),
|
|
||||||
uintptr(aux),
|
|
||||||
0,
|
|
||||||
)
|
|
||||||
if errno != 0 {
|
|
||||||
err = os.NewSyscallError("fsconfig", errno)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetFlag sets the flag parameter named by key. ([FSCONFIG_SET_FLAG])
|
|
||||||
func (fs *FS) SetFlag(key string) (err error) {
|
|
||||||
var s *byte
|
|
||||||
s, err = syscall.BytePtrFromString(key)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
return fs.config(FSCONFIG_SET_FLAG, s, nil, 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetString sets the string parameter named by key to the value specified by
|
|
||||||
// value. ([FSCONFIG_SET_STRING])
|
|
||||||
func (fs *FS) SetString(key, value string) (err error) {
|
|
||||||
var s0 *byte
|
|
||||||
s0, err = syscall.BytePtrFromString(key)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var s1 *byte
|
|
||||||
s1, err = syscall.BytePtrFromString(value)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
return fs.config(FSCONFIG_SET_STRING, s0, unsafe.Pointer(s1), 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
// mount instantiates mount object from filesystem context.
|
|
||||||
func (fs *FS) mount(flags, attrFlags int) (fsfd int, err error) {
|
|
||||||
r, _, errno := syscall.Syscall(
|
|
||||||
SYS_FSMOUNT,
|
|
||||||
fs.fd,
|
|
||||||
uintptr(flags|FSMOUNT_CLOEXEC),
|
|
||||||
uintptr(attrFlags),
|
|
||||||
)
|
|
||||||
fsfd = int(r)
|
|
||||||
if errno != 0 {
|
|
||||||
err = os.NewSyscallError("fsmount", errno)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// MoveMount moves or attaches mount object to filesystem.
|
|
||||||
func MoveMount(
|
|
||||||
fromDirfd int,
|
|
||||||
fromPathname string,
|
|
||||||
toDirfd int,
|
|
||||||
toPathname string,
|
|
||||||
flags int,
|
|
||||||
) (err error) {
|
|
||||||
var s0 *byte
|
|
||||||
s0, err = syscall.BytePtrFromString(fromPathname)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var s1 *byte
|
|
||||||
s1, err = syscall.BytePtrFromString(toPathname)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
_, _, errno := syscall.Syscall6(
|
|
||||||
SYS_MOVE_MOUNT,
|
|
||||||
uintptr(fromDirfd),
|
|
||||||
uintptr(unsafe.Pointer(s0)),
|
|
||||||
uintptr(toDirfd),
|
|
||||||
uintptr(unsafe.Pointer(s1)),
|
|
||||||
uintptr(flags),
|
|
||||||
0,
|
|
||||||
)
|
|
||||||
if errno != 0 {
|
|
||||||
err = os.NewSyscallError("move_mount", errno)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Mount attaches the underlying filesystem context to the specified pathname.
|
|
||||||
func (fs *FS) Mount(pathname string, attrFlags int) error {
|
|
||||||
if err := fs.config(FSCONFIG_CMD_CREATE_EXCL, nil, nil, 0); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
fd, err := fs.mount(0, attrFlags)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
err = MoveMount(
|
|
||||||
fd, "",
|
|
||||||
-1, pathname,
|
|
||||||
MOVE_MOUNT_F_EMPTY_PATH,
|
|
||||||
)
|
|
||||||
closeErr := syscall.Close(fd)
|
|
||||||
if err == nil {
|
|
||||||
err = closeErr
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
216
internal/pkg/asm.go
Normal file
216
internal/pkg/asm.go
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
package pkg
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/binary"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
type asmOutLine struct {
|
||||||
|
pos int
|
||||||
|
word int
|
||||||
|
kindData int64
|
||||||
|
valueData []byte
|
||||||
|
indent int
|
||||||
|
kind string
|
||||||
|
value string
|
||||||
|
}
|
||||||
|
|
||||||
|
var spacingLine = asmOutLine{
|
||||||
|
pos: -1,
|
||||||
|
kindData: -1,
|
||||||
|
valueData: nil,
|
||||||
|
indent: 0,
|
||||||
|
kind: "",
|
||||||
|
value: "",
|
||||||
|
}
|
||||||
|
|
||||||
|
func Disassemble(r io.Reader, real bool, showHeader bool, force bool, raw bool) (s string, err error) {
|
||||||
|
var lines []asmOutLine
|
||||||
|
sb := new(strings.Builder)
|
||||||
|
header := true
|
||||||
|
pos := new(int)
|
||||||
|
|
||||||
|
for err == nil {
|
||||||
|
if header {
|
||||||
|
var kind uint64
|
||||||
|
var size uint64
|
||||||
|
var bsize []byte
|
||||||
|
p := *pos
|
||||||
|
if _, kind, err = nextUint64(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if bsize, size, err = nextUint64(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if showHeader {
|
||||||
|
lines = append(lines, asmOutLine{p, 8, int64(kind), bsize, 0, "head " + intToKind(kind), ""})
|
||||||
|
}
|
||||||
|
for i := 0; uint64(i) < size; i++ {
|
||||||
|
var did Checksum
|
||||||
|
var dkind uint64
|
||||||
|
p := *pos
|
||||||
|
if _, dkind, err = nextUint64(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if _, did, err = nextIdent(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if showHeader {
|
||||||
|
lines = append(lines, asmOutLine{p, 8, int64(dkind), nil, 1, intToKind(dkind), Encode(did)})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
header = false
|
||||||
|
}
|
||||||
|
var k uint32
|
||||||
|
p := *pos
|
||||||
|
if _, k, err = nextUint32(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
kind := IRValueKind(k)
|
||||||
|
switch kind {
|
||||||
|
case IRKindEnd:
|
||||||
|
var a uint32
|
||||||
|
var ba []byte
|
||||||
|
if ba, a, err = nextUint32(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if a&1 != 0 {
|
||||||
|
var sum Checksum
|
||||||
|
if _, sum, err = nextIdent(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
lines = append(lines, asmOutLine{p, 4, int64(kind), ba, 1, "end ", Encode(sum)})
|
||||||
|
} else {
|
||||||
|
lines = append(lines, asmOutLine{p, 4, int64(kind), []byte{0, 0, 0, 0}, 1, "end ", ""})
|
||||||
|
}
|
||||||
|
lines = append(lines, spacingLine)
|
||||||
|
header = true
|
||||||
|
continue
|
||||||
|
|
||||||
|
case IRKindIdent:
|
||||||
|
var a []byte
|
||||||
|
// discard ancillary
|
||||||
|
if a, _, err = nextUint32(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
var sum Checksum
|
||||||
|
if _, sum, err = nextIdent(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
lines = append(lines, asmOutLine{p, 4, int64(kind), a, 1, "id ", Encode(sum)})
|
||||||
|
continue
|
||||||
|
case IRKindUint32:
|
||||||
|
var i uint32
|
||||||
|
var bi []byte
|
||||||
|
if bi, i, err = nextUint32(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
lines = append(lines, asmOutLine{p, 4, int64(kind), bi, 1, "int ", strconv.FormatUint(uint64(i), 10)})
|
||||||
|
case IRKindString:
|
||||||
|
var l uint32
|
||||||
|
var bl []byte
|
||||||
|
if bl, l, err = nextUint32(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
s := make([]byte, l+(wordSize-(l)%wordSize)%wordSize)
|
||||||
|
var n int
|
||||||
|
if n, err = r.Read(s); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
*pos = *pos + n
|
||||||
|
|
||||||
|
lines = append(lines, asmOutLine{p, 4, int64(kind), bl, 1, "str ", strconv.Quote(string(s[:l]))})
|
||||||
|
continue
|
||||||
|
default:
|
||||||
|
var bi []byte
|
||||||
|
if bi, _, err = nextUint32(r, pos); err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
lines = append(lines, asmOutLine{p, 4, int64(kind), bi, 1, "????", ""})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err != io.EOF {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
err = nil
|
||||||
|
for _, line := range lines {
|
||||||
|
if raw {
|
||||||
|
if line.pos != -1 {
|
||||||
|
sb.WriteString(fmt.Sprintf("%s\t%s\n", line.kind, line.value))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if line.pos == -1 {
|
||||||
|
sb.WriteString("\n")
|
||||||
|
} else if line.word == 4 {
|
||||||
|
sb.WriteString(fmt.Sprintf("%06x: %04x %04x%s %s %s\n", line.pos, binary.LittleEndian.AppendUint32(nil, uint32(line.kindData)), line.valueData, headerSpacing(showHeader), line.kind, line.value))
|
||||||
|
} else {
|
||||||
|
kind := binary.LittleEndian.AppendUint64(nil, uint64(line.kindData))
|
||||||
|
value := line.valueData
|
||||||
|
if len(value) == 8 {
|
||||||
|
sb.WriteString(fmt.Sprintf("%06x: %04x %04x %04x %04x %s %s\n", line.pos, kind[:4], kind[4:], value[:4], value[4:], line.kind, line.value))
|
||||||
|
} else {
|
||||||
|
sb.WriteString(fmt.Sprintf("%06x: %04x %04x %s %s\n", line.pos, kind[:4], kind[4:], line.kind, line.value))
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
return sb.String(), err
|
||||||
|
}
|
||||||
|
func nextUint32(r io.Reader, pos *int) ([]byte, uint32, error) {
|
||||||
|
i := make([]byte, 4)
|
||||||
|
_, err := r.Read(i)
|
||||||
|
if err != nil {
|
||||||
|
return i, 0, err
|
||||||
|
}
|
||||||
|
p := *pos + 4
|
||||||
|
*pos = p
|
||||||
|
return i, binary.LittleEndian.Uint32(i), nil
|
||||||
|
}
|
||||||
|
func nextUint64(r io.Reader, pos *int) ([]byte, uint64, error) {
|
||||||
|
i := make([]byte, 8)
|
||||||
|
_, err := r.Read(i)
|
||||||
|
if err != nil {
|
||||||
|
return i, 0, err
|
||||||
|
}
|
||||||
|
p := *pos + 8
|
||||||
|
*pos = p
|
||||||
|
return i, binary.LittleEndian.Uint64(i), nil
|
||||||
|
}
|
||||||
|
func nextIdent(r io.Reader, pos *int) ([]byte, Checksum, error) {
|
||||||
|
i := make([]byte, 48)
|
||||||
|
if _, err := r.Read(i); err != nil {
|
||||||
|
return i, Checksum{}, err
|
||||||
|
}
|
||||||
|
p := *pos + 48
|
||||||
|
*pos = p
|
||||||
|
return i, Checksum(i), nil
|
||||||
|
}
|
||||||
|
func intToKind(i uint64) string {
|
||||||
|
switch Kind(i) {
|
||||||
|
case KindHTTPGet:
|
||||||
|
return "http"
|
||||||
|
case KindTar:
|
||||||
|
return "tar "
|
||||||
|
case KindExec:
|
||||||
|
return "exec"
|
||||||
|
case KindExecNet:
|
||||||
|
return "exen"
|
||||||
|
case KindFile:
|
||||||
|
return "file"
|
||||||
|
default:
|
||||||
|
return fmt.Sprintf("$%d ", i-KindCustomOffset)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
func headerSpacing(showHeader bool) string {
|
||||||
|
if showHeader {
|
||||||
|
return " "
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
@@ -432,12 +432,6 @@ func (e InvalidKindError) Error() string {
|
|||||||
// register is not safe for concurrent use. register must not be called after
|
// register is not safe for concurrent use. register must not be called after
|
||||||
// the first instance of [Cache] has been opened.
|
// the first instance of [Cache] has been opened.
|
||||||
func register(k Kind, f IRReadFunc) {
|
func register(k Kind, f IRReadFunc) {
|
||||||
openMu.Lock()
|
|
||||||
defer openMu.Unlock()
|
|
||||||
|
|
||||||
if opened {
|
|
||||||
panic("attempting to register after open")
|
|
||||||
}
|
|
||||||
if _, ok := irArtifact[k]; ok {
|
if _, ok := irArtifact[k]; ok {
|
||||||
panic("attempting to register " + strconv.Itoa(int(k)) + " twice")
|
panic("attempting to register " + strconv.Itoa(int(k)) + " twice")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -18,7 +18,6 @@ import (
|
|||||||
"path/filepath"
|
"path/filepath"
|
||||||
"runtime"
|
"runtime"
|
||||||
"slices"
|
"slices"
|
||||||
"strconv"
|
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
@@ -71,64 +70,6 @@ func MustDecode(s string) (checksum Checksum) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
|
||||||
// extension is a string uniquely identifying a set of custom [Artifact]
|
|
||||||
// implementations registered by calling [Register].
|
|
||||||
extension string
|
|
||||||
|
|
||||||
// openMu synchronises access to global state for initialisation.
|
|
||||||
openMu sync.Mutex
|
|
||||||
// opened is false if [Open] was never called.
|
|
||||||
opened bool
|
|
||||||
)
|
|
||||||
|
|
||||||
// Extension returns a string uniquely identifying the currently registered set
|
|
||||||
// of custom [Artifact], or the zero value if none was registered.
|
|
||||||
func Extension() string { return extension }
|
|
||||||
|
|
||||||
// ValidExtension returns whether s is valid for use in a call to SetExtension.
|
|
||||||
func ValidExtension(s string) bool {
|
|
||||||
if l := len(s); l == 0 || l > 128 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for _, v := range s {
|
|
||||||
if v < 'a' || v > 'z' {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// ErrInvalidExtension is returned for a variant identification string for which
|
|
||||||
// [ValidExtension] returns false.
|
|
||||||
var ErrInvalidExtension = errors.New("invalid extension variant identification string")
|
|
||||||
|
|
||||||
// SetExtension sets the extension variant identification string. SetExtension
|
|
||||||
// must be called before [Open] if custom [Artifact] implementations had been
|
|
||||||
// recorded by calling [Register].
|
|
||||||
//
|
|
||||||
// The variant identification string must be between 1 and 128 bytes long and
|
|
||||||
// consists of only bytes between 'a' and 'z'.
|
|
||||||
//
|
|
||||||
// SetExtension is not safe for concurrent use. SetExtension is called at most
|
|
||||||
// once and must not be called after the first instance of Cache has been opened.
|
|
||||||
func SetExtension(s string) {
|
|
||||||
openMu.Lock()
|
|
||||||
defer openMu.Unlock()
|
|
||||||
|
|
||||||
if opened {
|
|
||||||
panic("attempting to set extension after open")
|
|
||||||
}
|
|
||||||
if extension != "" {
|
|
||||||
panic("attempting to set extension twice")
|
|
||||||
}
|
|
||||||
if !ValidExtension(s) {
|
|
||||||
panic(ErrInvalidExtension)
|
|
||||||
}
|
|
||||||
extension = s
|
|
||||||
statusHeader = makeStatusHeader(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
// common holds elements and receives methods shared between different contexts.
|
// common holds elements and receives methods shared between different contexts.
|
||||||
type common struct {
|
type common struct {
|
||||||
// Context specific to this [Artifact]. The toplevel context in [Cache] must
|
// Context specific to this [Artifact]. The toplevel context in [Cache] must
|
||||||
@@ -161,27 +102,19 @@ type TContext struct {
|
|||||||
common
|
common
|
||||||
}
|
}
|
||||||
|
|
||||||
// makeStatusHeader creates the header written to every status file. This should
|
// statusHeader is the header written to all status files in dirStatus.
|
||||||
// not be called directly, its result is stored in statusHeader and will not
|
var statusHeader = func() string {
|
||||||
// change after the first [Cache] is opened.
|
|
||||||
func makeStatusHeader(extension string) string {
|
|
||||||
s := programName
|
s := programName
|
||||||
if v := info.Version(); v != info.FallbackVersion {
|
if v := info.Version(); v != info.FallbackVersion {
|
||||||
s += " " + v
|
s += " " + v
|
||||||
}
|
}
|
||||||
if extension != "" {
|
|
||||||
s += " with " + extension + " extensions"
|
|
||||||
}
|
|
||||||
s += " (" + runtime.GOARCH + ")"
|
s += " (" + runtime.GOARCH + ")"
|
||||||
if name, err := os.Hostname(); err == nil {
|
if name, err := os.Hostname(); err == nil {
|
||||||
s += " on " + name
|
s += " on " + name
|
||||||
}
|
}
|
||||||
s += "\n\n"
|
s += "\n\n"
|
||||||
return s
|
return s
|
||||||
}
|
}()
|
||||||
|
|
||||||
// statusHeader is the header written to all status files in dirStatus.
|
|
||||||
var statusHeader = makeStatusHeader("")
|
|
||||||
|
|
||||||
// prepareStatus initialises the status file once.
|
// prepareStatus initialises the status file once.
|
||||||
func (t *TContext) prepareStatus() error {
|
func (t *TContext) prepareStatus() error {
|
||||||
@@ -494,9 +427,6 @@ const (
|
|||||||
// KindFile is the kind of [Artifact] returned by [NewFile].
|
// KindFile is the kind of [Artifact] returned by [NewFile].
|
||||||
KindFile
|
KindFile
|
||||||
|
|
||||||
// _kindEnd is the total number of kinds and does not denote a kind.
|
|
||||||
_kindEnd
|
|
||||||
|
|
||||||
// KindCustomOffset is the first [Kind] value reserved for implementations
|
// KindCustomOffset is the first [Kind] value reserved for implementations
|
||||||
// not from this package.
|
// not from this package.
|
||||||
KindCustomOffset = 1 << 31
|
KindCustomOffset = 1 << 31
|
||||||
@@ -511,9 +441,6 @@ const (
|
|||||||
// fileLock is the file name appended to Cache.base for guaranteeing
|
// fileLock is the file name appended to Cache.base for guaranteeing
|
||||||
// exclusive access to the cache directory.
|
// exclusive access to the cache directory.
|
||||||
fileLock = "lock"
|
fileLock = "lock"
|
||||||
// fileVariant is the file name appended to Cache.base holding the variant
|
|
||||||
// identification string set by a prior call to [SetExtension].
|
|
||||||
fileVariant = "variant"
|
|
||||||
|
|
||||||
// dirIdentifier is the directory name appended to Cache.base for storing
|
// dirIdentifier is the directory name appended to Cache.base for storing
|
||||||
// artifacts named after their [ID].
|
// artifacts named after their [ID].
|
||||||
@@ -613,10 +540,6 @@ const (
|
|||||||
// impurity due to [KindExecNet] being [KnownChecksum]. This flag exists
|
// impurity due to [KindExecNet] being [KnownChecksum]. This flag exists
|
||||||
// to support kernels without Landlock LSM enabled.
|
// to support kernels without Landlock LSM enabled.
|
||||||
CHostAbstract
|
CHostAbstract
|
||||||
|
|
||||||
// CPromoteVariant allows [pkg.Open] to promote an unextended on-disk cache
|
|
||||||
// to the current extension variant. This is a one-way operation.
|
|
||||||
CPromoteVariant
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// toplevel holds [context.WithCancel] over caller-supplied context, where all
|
// toplevel holds [context.WithCancel] over caller-supplied context, where all
|
||||||
@@ -2007,20 +1930,6 @@ func (c *Cache) Close() {
|
|||||||
c.unlock()
|
c.unlock()
|
||||||
}
|
}
|
||||||
|
|
||||||
// UnsupportedVariantError describes an on-disk cache with an extension variant
|
|
||||||
// identification string that differs from the value returned by [Extension].
|
|
||||||
type UnsupportedVariantError string
|
|
||||||
|
|
||||||
func (e UnsupportedVariantError) Error() string {
|
|
||||||
return "unsupported variant " + strconv.Quote(string(e))
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
// ErrWouldPromote is returned by [Open] if the [CPromoteVariant] bit is not
|
|
||||||
// set and the on-disk cache requires variant promotion.
|
|
||||||
ErrWouldPromote = errors.New("operation would promote unextended cache")
|
|
||||||
)
|
|
||||||
|
|
||||||
// Open returns the address of a newly opened instance of [Cache].
|
// Open returns the address of a newly opened instance of [Cache].
|
||||||
//
|
//
|
||||||
// Concurrent cures of a [FloodArtifact] dependency graph is limited to the
|
// Concurrent cures of a [FloodArtifact] dependency graph is limited to the
|
||||||
@@ -2052,14 +1961,6 @@ func open(
|
|||||||
base *check.Absolute,
|
base *check.Absolute,
|
||||||
lock bool,
|
lock bool,
|
||||||
) (*Cache, error) {
|
) (*Cache, error) {
|
||||||
openMu.Lock()
|
|
||||||
defer openMu.Unlock()
|
|
||||||
opened = true
|
|
||||||
|
|
||||||
if extension == "" && len(irArtifact) != int(_kindEnd) {
|
|
||||||
panic("attempting to open cache with incomplete variant setup")
|
|
||||||
}
|
|
||||||
|
|
||||||
if cures < 1 {
|
if cures < 1 {
|
||||||
cures = runtime.NumCPU()
|
cures = runtime.NumCPU()
|
||||||
}
|
}
|
||||||
@@ -2073,10 +1974,8 @@ func open(
|
|||||||
dirStatus,
|
dirStatus,
|
||||||
dirWork,
|
dirWork,
|
||||||
} {
|
} {
|
||||||
if err := os.MkdirAll(
|
if err := os.MkdirAll(base.Append(name).String(), 0700); err != nil &&
|
||||||
base.Append(name).String(),
|
!errors.Is(err, os.ErrExist) {
|
||||||
0700,
|
|
||||||
); err != nil && !errors.Is(err, os.ErrExist) {
|
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -2114,45 +2013,6 @@ func open(
|
|||||||
c.unlock = func() {}
|
c.unlock = func() {}
|
||||||
}
|
}
|
||||||
|
|
||||||
variantPath := base.Append(fileVariant).String()
|
|
||||||
if p, err := os.ReadFile(variantPath); err != nil {
|
|
||||||
if !errors.Is(err, os.ErrNotExist) {
|
|
||||||
c.unlock()
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// nonexistence implies newly created cache, or a cache predating
|
|
||||||
// variant identification strings, in which case it is silently promoted
|
|
||||||
if err = os.WriteFile(
|
|
||||||
variantPath,
|
|
||||||
[]byte(extension),
|
|
||||||
0400,
|
|
||||||
); err != nil {
|
|
||||||
c.unlock()
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
} else if s := string(p); s == "" {
|
|
||||||
if extension != "" {
|
|
||||||
if flags&CPromoteVariant == 0 {
|
|
||||||
c.unlock()
|
|
||||||
return nil, ErrWouldPromote
|
|
||||||
}
|
|
||||||
if err = os.WriteFile(
|
|
||||||
variantPath,
|
|
||||||
[]byte(extension),
|
|
||||||
0400,
|
|
||||||
); err != nil {
|
|
||||||
c.unlock()
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else if !ValidExtension(s) {
|
|
||||||
c.unlock()
|
|
||||||
return nil, ErrInvalidExtension
|
|
||||||
} else if s != extension {
|
|
||||||
c.unlock()
|
|
||||||
return nil, UnsupportedVariantError(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
return &c, nil
|
return &c, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -41,25 +41,6 @@ func unsafeOpen(
|
|||||||
lock bool,
|
lock bool,
|
||||||
) (*pkg.Cache, error)
|
) (*pkg.Cache, error)
|
||||||
|
|
||||||
var (
|
|
||||||
// extension is a string uniquely identifying a set of custom [Artifact]
|
|
||||||
// implementations registered by calling [Register].
|
|
||||||
//
|
|
||||||
//go:linkname extension hakurei.app/internal/pkg.extension
|
|
||||||
extension string
|
|
||||||
|
|
||||||
// opened is false if [Open] was never called.
|
|
||||||
//
|
|
||||||
//go:linkname opened hakurei.app/internal/pkg.opened
|
|
||||||
opened bool
|
|
||||||
|
|
||||||
// irArtifact refers to artifact IR interpretation functions and must not be
|
|
||||||
// written to directly.
|
|
||||||
//
|
|
||||||
//go:linkname irArtifact hakurei.app/internal/pkg.irArtifact
|
|
||||||
irArtifact map[pkg.Kind]pkg.IRReadFunc
|
|
||||||
)
|
|
||||||
|
|
||||||
// newRContext returns the address of a new [pkg.RContext] unsafely created for
|
// newRContext returns the address of a new [pkg.RContext] unsafely created for
|
||||||
// the specified [testing.TB].
|
// the specified [testing.TB].
|
||||||
func newRContext(tb testing.TB, c *pkg.Cache) *pkg.RContext {
|
func newRContext(tb testing.TB, c *pkg.Cache) *pkg.RContext {
|
||||||
@@ -361,20 +342,9 @@ func checkWithCache(t *testing.T, testCases []cacheTestCase) {
|
|||||||
restoreTemp = true
|
restoreTemp = true
|
||||||
}
|
}
|
||||||
|
|
||||||
// destroy lock and variant file to avoid changing cache checksums
|
// destroy lock file to avoid changing cache checksums
|
||||||
for _, s := range []string{
|
if err := os.Remove(base.Append("lock").String()); err != nil {
|
||||||
"lock",
|
|
||||||
"variant",
|
|
||||||
} {
|
|
||||||
pathname := base.Append(s)
|
|
||||||
if p, err := os.ReadFile(pathname.String()); err != nil {
|
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
} else if len(p) != 0 {
|
|
||||||
t.Fatalf("file %q: %q", s, string(p))
|
|
||||||
}
|
|
||||||
if err := os.Remove(pathname.String()); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// destroy non-deterministic status files
|
// destroy non-deterministic status files
|
||||||
@@ -1131,10 +1101,6 @@ func TestErrors(t *testing.T) {
|
|||||||
Want: pkg.IRKindIdent,
|
Want: pkg.IRKindIdent,
|
||||||
Ancillary: 0xcafe,
|
Ancillary: 0xcafe,
|
||||||
}, "got invalid kind 48879 IR value (0xcafe) instead of ident"},
|
}, "got invalid kind 48879 IR value (0xcafe) instead of ident"},
|
||||||
|
|
||||||
{"UnsupportedVariantError", pkg.UnsupportedVariantError(
|
|
||||||
"rosa",
|
|
||||||
), `unsupported variant "rosa"`},
|
|
||||||
}
|
}
|
||||||
for _, tc := range testCases {
|
for _, tc := range testCases {
|
||||||
t.Run(tc.name, func(t *testing.T) {
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
@@ -1343,8 +1309,6 @@ func (a earlyFailureF) Cure(*pkg.FContext) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestDependencyCureErrorEarly(t *testing.T) {
|
func TestDependencyCureErrorEarly(t *testing.T) {
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
checkWithCache(t, []cacheTestCase{
|
checkWithCache(t, []cacheTestCase{
|
||||||
{"early", 0, nil, func(t *testing.T, _ *check.Absolute, c *pkg.Cache) {
|
{"early", 0, nil, func(t *testing.T, _ *check.Absolute, c *pkg.Cache) {
|
||||||
_, _, err := c.Cure(earlyFailureF(8))
|
_, _, err := c.Cure(earlyFailureF(8))
|
||||||
@@ -1355,7 +1319,7 @@ func TestDependencyCureErrorEarly(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestOpen(t *testing.T) {
|
func TestNew(t *testing.T) {
|
||||||
t.Parallel()
|
t.Parallel()
|
||||||
|
|
||||||
t.Run("nonexistent", func(t *testing.T) {
|
t.Run("nonexistent", func(t *testing.T) {
|
||||||
@@ -1403,219 +1367,3 @@ func TestOpen(t *testing.T) {
|
|||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExtensionRegister(t *testing.T) {
|
|
||||||
extensionOld := extension
|
|
||||||
openedOld := opened
|
|
||||||
t.Cleanup(func() { extension = extensionOld; opened = openedOld })
|
|
||||||
extension = ""
|
|
||||||
opened = false
|
|
||||||
|
|
||||||
t.Run("set", func(t *testing.T) {
|
|
||||||
t.Cleanup(func() { extension = "" })
|
|
||||||
|
|
||||||
const want = "rosa"
|
|
||||||
pkg.SetExtension(want)
|
|
||||||
if got := pkg.Extension(); got != want {
|
|
||||||
t.Fatalf("Extension: %q, want %q", got, want)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("twice", func(t *testing.T) {
|
|
||||||
t.Cleanup(func() { extension = "" })
|
|
||||||
|
|
||||||
defer func() {
|
|
||||||
const wantPanic = "attempting to set extension twice"
|
|
||||||
if r := recover(); r != wantPanic {
|
|
||||||
t.Errorf("panic: %#v, want %q", r, wantPanic)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
pkg.SetExtension("rosa")
|
|
||||||
pkg.SetExtension("rosa")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("invalid", func(t *testing.T) {
|
|
||||||
defer func() {
|
|
||||||
var wantPanic = pkg.ErrInvalidExtension
|
|
||||||
if r := recover(); r != wantPanic {
|
|
||||||
t.Errorf("panic: %#v, want %#v", r, wantPanic)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
pkg.SetExtension(" ")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("opened", func(t *testing.T) {
|
|
||||||
t.Cleanup(func() { opened = false })
|
|
||||||
|
|
||||||
if _, err := pkg.Open(
|
|
||||||
t.Context(),
|
|
||||||
message.New(log.Default()),
|
|
||||||
0, 0, 0,
|
|
||||||
check.MustAbs(container.Nonexistent),
|
|
||||||
); !errors.Is(err, os.ErrNotExist) {
|
|
||||||
t.Fatalf("Open: error = %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Run("variant", func(t *testing.T) {
|
|
||||||
defer func() {
|
|
||||||
const wantPanic = "attempting to set extension after open"
|
|
||||||
if r := recover(); r != wantPanic {
|
|
||||||
t.Errorf("panic: %#v, want %q", r, wantPanic)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
pkg.SetExtension("rosa")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("register", func(t *testing.T) {
|
|
||||||
defer func() {
|
|
||||||
const wantPanic = "attempting to register after open"
|
|
||||||
if r := recover(); r != wantPanic {
|
|
||||||
t.Errorf("panic: %#v, want %q", r, wantPanic)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
pkg.Register(pkg.KindCustomOffset, nil)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("incomplete", func(t *testing.T) {
|
|
||||||
t.Cleanup(func() { delete(irArtifact, pkg.KindCustomOffset) })
|
|
||||||
|
|
||||||
defer func() {
|
|
||||||
const wantPanic = "attempting to open cache with incomplete variant setup"
|
|
||||||
if r := recover(); r != wantPanic {
|
|
||||||
t.Errorf("panic: %#v, want %q", r, wantPanic)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
pkg.Register(pkg.KindCustomOffset, nil)
|
|
||||||
|
|
||||||
t.Cleanup(func() { opened = false })
|
|
||||||
_, _ = pkg.Open(nil, nil, 0, 0, 0, nil)
|
|
||||||
panic("unreachable")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("create", func(t *testing.T) {
|
|
||||||
t.Cleanup(func() { extension = "" })
|
|
||||||
const want = "rosa"
|
|
||||||
pkg.SetExtension(want)
|
|
||||||
|
|
||||||
base := check.MustAbs(t.TempDir())
|
|
||||||
t.Cleanup(func() { opened = false })
|
|
||||||
if c, err := pkg.Open(
|
|
||||||
t.Context(), nil,
|
|
||||||
0, 0, 0,
|
|
||||||
base,
|
|
||||||
); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
} else {
|
|
||||||
c.Close()
|
|
||||||
}
|
|
||||||
|
|
||||||
if got, err := os.ReadFile(base.Append("variant").String()); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
} else if string(got) != want {
|
|
||||||
t.Fatalf("variant: %q", string(got))
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("access", func(t *testing.T) {
|
|
||||||
base := check.MustAbs(t.TempDir())
|
|
||||||
t.Cleanup(func() { opened = false })
|
|
||||||
|
|
||||||
if err := os.WriteFile(base.Append("variant").String(), nil, 0); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
wantErr := &os.PathError{
|
|
||||||
Op: "open",
|
|
||||||
Path: base.Append("variant").String(),
|
|
||||||
Err: syscall.EACCES,
|
|
||||||
}
|
|
||||||
if _, err := pkg.Open(
|
|
||||||
t.Context(), nil,
|
|
||||||
0, 0, 0,
|
|
||||||
base,
|
|
||||||
); !reflect.DeepEqual(err, wantErr) {
|
|
||||||
t.Fatalf("Open: error = %v, want %v", err, wantErr)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("promote", func(t *testing.T) {
|
|
||||||
t.Cleanup(func() { extension = "" })
|
|
||||||
const want = "rosa"
|
|
||||||
pkg.SetExtension(want)
|
|
||||||
|
|
||||||
base := check.MustAbs(t.TempDir())
|
|
||||||
t.Cleanup(func() { opened = false })
|
|
||||||
|
|
||||||
variantPath := base.Append("variant")
|
|
||||||
if err := os.WriteFile(variantPath.String(), nil, 0600); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, err := pkg.Open(
|
|
||||||
t.Context(), nil,
|
|
||||||
0, 0, 0,
|
|
||||||
base,
|
|
||||||
); !reflect.DeepEqual(err, pkg.ErrWouldPromote) {
|
|
||||||
t.Fatalf("Open: error = %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if p, err := os.ReadFile(variantPath.String()); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
} else if len(p) != 0 {
|
|
||||||
t.Fatalf("variant: %q", string(p))
|
|
||||||
}
|
|
||||||
|
|
||||||
if c, err := pkg.Open(
|
|
||||||
t.Context(), nil,
|
|
||||||
pkg.CPromoteVariant, 0, 0,
|
|
||||||
base,
|
|
||||||
); err != nil {
|
|
||||||
t.Fatalf("Open: error = %v", err)
|
|
||||||
} else {
|
|
||||||
c.Close()
|
|
||||||
}
|
|
||||||
|
|
||||||
if p, err := os.ReadFile(variantPath.String()); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
} else if string(p) != want {
|
|
||||||
t.Fatalf("variant: %q, want %q", string(p), want)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("open invalid", func(t *testing.T) {
|
|
||||||
base := check.MustAbs(t.TempDir())
|
|
||||||
t.Cleanup(func() { opened = false })
|
|
||||||
|
|
||||||
variantPath := base.Append("variant")
|
|
||||||
if err := os.WriteFile(variantPath.String(), make([]byte, 129), 0400); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, err := pkg.Open(
|
|
||||||
t.Context(), nil,
|
|
||||||
0, 0, 0,
|
|
||||||
base,
|
|
||||||
); !reflect.DeepEqual(err, pkg.ErrInvalidExtension) {
|
|
||||||
t.Fatalf("Open: error = %v", err)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("unsupported", func(t *testing.T) {
|
|
||||||
base := check.MustAbs(t.TempDir())
|
|
||||||
t.Cleanup(func() { opened = false })
|
|
||||||
|
|
||||||
variantPath := base.Append("variant")
|
|
||||||
if err := os.WriteFile(variantPath.String(), []byte("rosa"), 0400); err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, err := pkg.Open(
|
|
||||||
t.Context(), nil,
|
|
||||||
0, 0, 0,
|
|
||||||
base,
|
|
||||||
); !reflect.DeepEqual(err, pkg.UnsupportedVariantError("rosa")) {
|
|
||||||
t.Fatalf("Open: error = %v", err)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|||||||
36
internal/pkg/testdata/main.go
vendored
36
internal/pkg/testdata/main.go
vendored
@@ -14,6 +14,7 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"hakurei.app/check"
|
||||||
"hakurei.app/fhs"
|
"hakurei.app/fhs"
|
||||||
"hakurei.app/vfs"
|
"hakurei.app/vfs"
|
||||||
)
|
)
|
||||||
@@ -158,24 +159,43 @@ func main() {
|
|||||||
m.Source != "overlay" || m.FsType != "overlay" {
|
m.Source != "overlay" || m.FsType != "overlay" {
|
||||||
log.Fatal("unexpected root mount entry")
|
log.Fatal("unexpected root mount entry")
|
||||||
}
|
}
|
||||||
var lowerdir []string
|
var lowerdir string
|
||||||
for _, o := range strings.Split(m.FsOptstr, ",") {
|
for _, o := range strings.Split(m.FsOptstr, ",") {
|
||||||
const lowerdirKey = "lowerdir+="
|
const lowerdirKey = "lowerdir="
|
||||||
if strings.HasPrefix(o, lowerdirKey) {
|
if strings.HasPrefix(o, lowerdirKey) {
|
||||||
lowerdir = append(lowerdir, o[len(lowerdirKey):])
|
lowerdir = o[len(lowerdirKey):]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !layers {
|
if !layers {
|
||||||
if len(lowerdir) != 1 || filepath.Base(lowerdir[0]) != checksumEmptyDir {
|
if filepath.Base(lowerdir) != checksumEmptyDir {
|
||||||
log.Fatal("unexpected artifact checksum")
|
log.Fatal("unexpected artifact checksum")
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
ident = "p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT"
|
ident = "p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT"
|
||||||
|
|
||||||
if len(lowerdir) != 2 ||
|
lowerdirsEscaped := strings.Split(lowerdir, ":")
|
||||||
filepath.Base(lowerdir[0]) != "MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU" ||
|
lowerdirs := lowerdirsEscaped[:0]
|
||||||
filepath.Base(lowerdir[1]) != "nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK" {
|
// ignore the option separator since it does not appear in ident
|
||||||
log.Fatalf("unexpected lowerdirs %s", strings.Join(lowerdir, ", "))
|
for i, e := range lowerdirsEscaped {
|
||||||
|
if len(e) > 0 &&
|
||||||
|
e[len(e)-1] == check.SpecialOverlayEscape[0] &&
|
||||||
|
(len(e) == 1 || e[len(e)-2] != check.SpecialOverlayEscape[0]) {
|
||||||
|
// ignore escaped pathname separator since it does not
|
||||||
|
// appear in ident
|
||||||
|
|
||||||
|
e = e[:len(e)-1]
|
||||||
|
if len(lowerdirsEscaped) != i {
|
||||||
|
lowerdirsEscaped[i+1] = e + lowerdirsEscaped[i+1]
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
lowerdirs = append(lowerdirs, e)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(lowerdirs) != 2 ||
|
||||||
|
filepath.Base(lowerdirs[0]) != "MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU" ||
|
||||||
|
filepath.Base(lowerdirs[1]) != "nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK" {
|
||||||
|
log.Fatalf("unexpected lowerdirs %s", strings.Join(lowerdirs, ", "))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -16,7 +16,9 @@ import (
|
|||||||
type PArtifact int
|
type PArtifact int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
LLVM PArtifact = iota
|
CompilerRT PArtifact = iota
|
||||||
|
LLVMRuntimes
|
||||||
|
Clang
|
||||||
|
|
||||||
// EarlyInit is the Rosa OS init program.
|
// EarlyInit is the Rosa OS init program.
|
||||||
EarlyInit
|
EarlyInit
|
||||||
@@ -72,12 +74,10 @@ const (
|
|||||||
HakureiDist
|
HakureiDist
|
||||||
IPTables
|
IPTables
|
||||||
Kmod
|
Kmod
|
||||||
LIT
|
|
||||||
LibX11
|
|
||||||
LibXau
|
LibXau
|
||||||
LibXext
|
|
||||||
Libbsd
|
Libbsd
|
||||||
Libcap
|
Libcap
|
||||||
|
Libclc
|
||||||
Libdrm
|
Libdrm
|
||||||
Libev
|
Libev
|
||||||
Libexpat
|
Libexpat
|
||||||
@@ -95,10 +95,8 @@ const (
|
|||||||
Libtool
|
Libtool
|
||||||
Libucontext
|
Libucontext
|
||||||
Libunistring
|
Libunistring
|
||||||
Libxshmfence
|
|
||||||
Libxml2
|
Libxml2
|
||||||
Libxslt
|
Libxslt
|
||||||
Libxtrans
|
|
||||||
M4
|
M4
|
||||||
MPC
|
MPC
|
||||||
MPFR
|
MPFR
|
||||||
@@ -131,29 +129,21 @@ const (
|
|||||||
PkgConfig
|
PkgConfig
|
||||||
Procps
|
Procps
|
||||||
Python
|
Python
|
||||||
PythonFlitCore
|
|
||||||
PythonHatchling
|
|
||||||
PythonIniConfig
|
PythonIniConfig
|
||||||
PythonMako
|
PythonMako
|
||||||
PythonMarkupSafe
|
PythonMarkupSafe
|
||||||
PythonPackaging
|
PythonPackaging
|
||||||
PythonPathspec
|
|
||||||
PythonPluggy
|
PythonPluggy
|
||||||
PythonPyTest
|
PythonPyTest
|
||||||
PythonPyYAML
|
PythonPyYAML
|
||||||
PythonPygments
|
PythonPygments
|
||||||
PythonSetuptools
|
|
||||||
PythonSetuptoolsSCM
|
|
||||||
PythonTroveClassifiers
|
|
||||||
PythonVCSVersioning
|
|
||||||
PythonWheel
|
|
||||||
QEMU
|
QEMU
|
||||||
Rdfind
|
Rdfind
|
||||||
Readline
|
Readline
|
||||||
Rsync
|
Rsync
|
||||||
Sed
|
Sed
|
||||||
|
Setuptools
|
||||||
SPIRVHeaders
|
SPIRVHeaders
|
||||||
SPIRVLLVMTranslator
|
|
||||||
SPIRVTools
|
SPIRVTools
|
||||||
SquashfsTools
|
SquashfsTools
|
||||||
Strace
|
Strace
|
||||||
@@ -170,7 +160,7 @@ const (
|
|||||||
XCBProto
|
XCBProto
|
||||||
XDGDBusProxy
|
XDGDBusProxy
|
||||||
XZ
|
XZ
|
||||||
XorgProto
|
Xproto
|
||||||
Zlib
|
Zlib
|
||||||
Zstd
|
Zstd
|
||||||
|
|
||||||
@@ -178,25 +168,11 @@ const (
|
|||||||
PresetUnexportedStart
|
PresetUnexportedStart
|
||||||
|
|
||||||
llvmSource = iota - 1
|
llvmSource = iota - 1
|
||||||
// earlyCompilerRT is an early, standalone compiler-rt installation for the
|
|
||||||
// standalone runtimes build.
|
|
||||||
//
|
|
||||||
// earlyCompilerRT must only be loaded by [LLVM].
|
|
||||||
earlyCompilerRT
|
|
||||||
// earlyRuntimes is an early, standalone installation of LLVM runtimes to
|
|
||||||
// work around the cmake build system leaking the system LLVM installation
|
|
||||||
// when invoking the newly built toolchain.
|
|
||||||
//
|
|
||||||
// earlyRuntimes must only be loaded by [LLVM].
|
|
||||||
earlyRuntimes
|
|
||||||
|
|
||||||
buildcatrust
|
buildcatrust
|
||||||
utilMacros
|
utilMacros
|
||||||
|
|
||||||
// Musl is a standalone libc that does not depend on the toolchain.
|
// Musl is a standalone libc that does not depend on the toolchain.
|
||||||
Musl
|
Musl
|
||||||
// muslHeaders is a system installation of [Musl] headers.
|
|
||||||
muslHeaders
|
|
||||||
|
|
||||||
// gcc is a hacked-to-pieces GCC toolchain meant for use in intermediate
|
// gcc is a hacked-to-pieces GCC toolchain meant for use in intermediate
|
||||||
// stages only. This preset and its direct output must never be exposed.
|
// stages only. This preset and its direct output must never be exposed.
|
||||||
@@ -341,29 +317,15 @@ var (
|
|||||||
}
|
}
|
||||||
// artifactsOnce is for lazy initialisation of artifacts.
|
// artifactsOnce is for lazy initialisation of artifacts.
|
||||||
artifactsOnce [_toolchainEnd][len(artifactsM)]sync.Once
|
artifactsOnce [_toolchainEnd][len(artifactsM)]sync.Once
|
||||||
|
|
||||||
// presetOpts globally modifies behaviour of presets.
|
|
||||||
presetOpts int
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
|
||||||
// OptSkipCheck skips running all test suites.
|
|
||||||
OptSkipCheck = 1 << iota
|
|
||||||
// OptLLVMNoLTO disables LTO in all [LLVM] stages.
|
|
||||||
OptLLVMNoLTO
|
|
||||||
)
|
|
||||||
|
|
||||||
// Flags returns the current preset flags
|
|
||||||
func Flags() int { return presetOpts }
|
|
||||||
|
|
||||||
// zero zeros the value pointed to by p.
|
// zero zeros the value pointed to by p.
|
||||||
func zero[T any](p *T) { var v T; *p = v }
|
func zero[T any](p *T) { var v T; *p = v }
|
||||||
|
|
||||||
// DropCaches arranges for all cached [pkg.Artifact] to be freed some time after
|
// DropCaches arranges for all cached [pkg.Artifact] to be freed some time after
|
||||||
// it returns. Must not be used concurrently with any other function from this
|
// it returns. Must not be used concurrently with any other function from this
|
||||||
// package.
|
// package.
|
||||||
func DropCaches(flags int) {
|
func DropCaches() {
|
||||||
presetOpts = flags
|
|
||||||
zero(&artifacts)
|
zero(&artifacts)
|
||||||
zero(&artifactsOnce)
|
zero(&artifactsOnce)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -20,16 +20,13 @@ func TestLoad(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func BenchmarkAll(b *testing.B) {
|
func BenchmarkAll(b *testing.B) {
|
||||||
flags := rosa.Flags()
|
|
||||||
b.Cleanup(func() { rosa.DropCaches(flags) })
|
|
||||||
|
|
||||||
for b.Loop() {
|
for b.Loop() {
|
||||||
for i := range rosa.PresetEnd {
|
for i := range rosa.PresetEnd {
|
||||||
rosa.Std.Load(rosa.PArtifact(i))
|
rosa.Std.Load(rosa.PArtifact(i))
|
||||||
}
|
}
|
||||||
|
|
||||||
b.StopTimer()
|
b.StopTimer()
|
||||||
rosa.DropCaches(0)
|
rosa.DropCaches()
|
||||||
b.StartTimer()
|
b.StartTimer()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,8 +10,8 @@ import (
|
|||||||
|
|
||||||
func (t Toolchain) newCMake() (pkg.Artifact, string) {
|
func (t Toolchain) newCMake() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "4.3.2"
|
version = "4.3.1"
|
||||||
checksum = "6QylwRVKletndTSkZTV2YBRwgd_9rUVgav_QW23HpjUgV21AVYZOUOal8tdBDmO7"
|
checksum = "RHpzZiM1kJ5bwLjo9CpXSeHJJg3hTtV9QxBYpQoYwKFtRh5YhGWpShrqZCSOzQN6"
|
||||||
)
|
)
|
||||||
return t.NewPackage("cmake", version, newFromGitHubRelease(
|
return t.NewPackage("cmake", version, newFromGitHubRelease(
|
||||||
"Kitware/CMake",
|
"Kitware/CMake",
|
||||||
@@ -122,18 +122,11 @@ type CMakeHelper struct {
|
|||||||
// Path elements joined with source.
|
// Path elements joined with source.
|
||||||
Append []string
|
Append []string
|
||||||
|
|
||||||
// Value of CMAKE_BUILD_TYPE. The zero value is equivalent to "Release".
|
|
||||||
BuildType string
|
|
||||||
// CMake CACHE entries.
|
// CMake CACHE entries.
|
||||||
Cache []KV
|
Cache []KV
|
||||||
// Runs after install.
|
// Runs after install.
|
||||||
Script string
|
Script string
|
||||||
|
|
||||||
// Replaces the default test command.
|
|
||||||
Test string
|
|
||||||
// Whether to skip running tests.
|
|
||||||
SkipTest bool
|
|
||||||
|
|
||||||
// Whether to generate Makefile instead.
|
// Whether to generate Makefile instead.
|
||||||
Make bool
|
Make bool
|
||||||
}
|
}
|
||||||
@@ -166,30 +159,20 @@ func (*CMakeHelper) wantsDir() string { return "/cure/" }
|
|||||||
// script generates the cure script.
|
// script generates the cure script.
|
||||||
func (attr *CMakeHelper) script(name string) string {
|
func (attr *CMakeHelper) script(name string) string {
|
||||||
if attr == nil {
|
if attr == nil {
|
||||||
attr = new(CMakeHelper)
|
attr = &CMakeHelper{
|
||||||
|
Cache: []KV{
|
||||||
|
{"CMAKE_BUILD_TYPE", "Release"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(attr.Cache) == 0 {
|
||||||
|
panic("CACHE must be non-empty")
|
||||||
}
|
}
|
||||||
|
|
||||||
generate := "Ninja"
|
generate := "Ninja"
|
||||||
test := "ninja " + jobsFlagE + " test"
|
|
||||||
if attr.Make {
|
if attr.Make {
|
||||||
generate = "'Unix Makefiles'"
|
generate = "'Unix Makefiles'"
|
||||||
test = "make " + jobsFlagE + " test"
|
|
||||||
}
|
}
|
||||||
if attr.Test != "" {
|
|
||||||
test = attr.Test
|
|
||||||
}
|
|
||||||
|
|
||||||
script := attr.Script
|
|
||||||
if !attr.SkipTest && presetOpts&OptSkipCheck == 0 {
|
|
||||||
script += "\n" + test
|
|
||||||
}
|
|
||||||
|
|
||||||
cache := make([]KV, 1, 1+len(attr.Cache))
|
|
||||||
cache[0] = KV{"CMAKE_BUILD_TYPE", "Release"}
|
|
||||||
if attr.BuildType != "" {
|
|
||||||
cache[0][1] = attr.BuildType
|
|
||||||
}
|
|
||||||
cache = append(cache, attr.Cache...)
|
|
||||||
|
|
||||||
return `
|
return `
|
||||||
cmake -G ` + generate + ` \
|
cmake -G ` + generate + ` \
|
||||||
@@ -198,7 +181,7 @@ cmake -G ` + generate + ` \
|
|||||||
-DCMAKE_ASM_COMPILER_TARGET="${ROSA_TRIPLE}" \
|
-DCMAKE_ASM_COMPILER_TARGET="${ROSA_TRIPLE}" \
|
||||||
-DCMAKE_INSTALL_LIBDIR=lib \
|
-DCMAKE_INSTALL_LIBDIR=lib \
|
||||||
` + strings.Join(slices.Collect(func(yield func(string) bool) {
|
` + strings.Join(slices.Collect(func(yield func(string) bool) {
|
||||||
for _, v := range cache {
|
for _, v := range attr.Cache {
|
||||||
if !yield("-D" + v[0] + "=" + v[1]) {
|
if !yield("-D" + v[0] + "=" + v[1]) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -208,5 +191,5 @@ cmake -G ` + generate + ` \
|
|||||||
'/usr/src/` + name + `/` + filepath.Join(attr.Append...) + `'
|
'/usr/src/` + name + `/` + filepath.Join(attr.Append...) + `'
|
||||||
cmake --build . --parallel=` + jobsE + `
|
cmake --build . --parallel=` + jobsE + `
|
||||||
cmake --install . --prefix=/work/system
|
cmake --install . --prefix=/work/system
|
||||||
` + script
|
` + attr.Script
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,8 +9,8 @@ import (
|
|||||||
|
|
||||||
func (t Toolchain) newGit() (pkg.Artifact, string) {
|
func (t Toolchain) newGit() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "2.54.0"
|
version = "2.53.0"
|
||||||
checksum = "7vGKtFOJGqY8DO4e8UMRax7dLgImXKQz5MMalec6MlgYrsarffSJjgOughwRFpSH"
|
checksum = "rlqSTeNgSeVKJA7nvzGqddFH8q3eFEPB4qRZft-4zth8wTHnbTbm7J90kp_obHGm"
|
||||||
)
|
)
|
||||||
return t.NewPackage("git", version, newTar(
|
return t.NewPackage("git", version, newTar(
|
||||||
"https://www.kernel.org/pub/software/scm/git/"+
|
"https://www.kernel.org/pub/software/scm/git/"+
|
||||||
@@ -20,9 +20,6 @@ func (t Toolchain) newGit() (pkg.Artifact, string) {
|
|||||||
), &PackageAttr{
|
), &PackageAttr{
|
||||||
ScriptEarly: `
|
ScriptEarly: `
|
||||||
ln -s ../../system/bin/perl /usr/bin/ || true
|
ln -s ../../system/bin/perl /usr/bin/ || true
|
||||||
|
|
||||||
# test suite assumes apache
|
|
||||||
rm -f /system/bin/httpd
|
|
||||||
`,
|
`,
|
||||||
|
|
||||||
// uses source tree as scratch space
|
// uses source tree as scratch space
|
||||||
@@ -41,7 +38,6 @@ function disable_test {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
disable_test t1800-hook
|
|
||||||
disable_test t5319-multi-pack-index
|
disable_test t5319-multi-pack-index
|
||||||
disable_test t1305-config-include
|
disable_test t1305-config-include
|
||||||
disable_test t3900-i18n-commit
|
disable_test t3900-i18n-commit
|
||||||
@@ -67,9 +63,6 @@ disable_test t2200-add-update
|
|||||||
NO_INSTALL_HARDLINKS=1 \
|
NO_INSTALL_HARDLINKS=1 \
|
||||||
install`,
|
install`,
|
||||||
},
|
},
|
||||||
// test suite hangs on mksh
|
|
||||||
Bash,
|
|
||||||
|
|
||||||
Diffutils,
|
Diffutils,
|
||||||
Autoconf,
|
Autoconf,
|
||||||
Gettext,
|
Gettext,
|
||||||
|
|||||||
@@ -17,8 +17,9 @@ func (t Toolchain) newSPIRVHeaders() (pkg.Artifact, string) {
|
|||||||
"vulkan-sdk-"+version,
|
"vulkan-sdk-"+version,
|
||||||
checksum,
|
checksum,
|
||||||
), nil, &CMakeHelper{
|
), nil, &CMakeHelper{
|
||||||
// upstream has no tests
|
Cache: []KV{
|
||||||
SkipTest: true,
|
{"CMAKE_BUILD_TYPE", "Release"},
|
||||||
|
},
|
||||||
}), version
|
}), version
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
@@ -63,6 +64,7 @@ func (t Toolchain) newSPIRVTools() (pkg.Artifact, string) {
|
|||||||
checksum,
|
checksum,
|
||||||
), nil, &CMakeHelper{
|
), nil, &CMakeHelper{
|
||||||
Cache: []KV{
|
Cache: []KV{
|
||||||
|
{"CMAKE_BUILD_TYPE", "Release"},
|
||||||
{"SPIRV-Headers_SOURCE_DIR", "/system"},
|
{"SPIRV-Headers_SOURCE_DIR", "/system"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -84,8 +86,6 @@ func init() {
|
|||||||
},
|
},
|
||||||
|
|
||||||
ID: 14894,
|
ID: 14894,
|
||||||
|
|
||||||
latest: (*Versions).getStable,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -104,9 +104,11 @@ func (t Toolchain) newGlslang() (pkg.Artifact, string) {
|
|||||||
Chmod: true,
|
Chmod: true,
|
||||||
}, &CMakeHelper{
|
}, &CMakeHelper{
|
||||||
Cache: []KV{
|
Cache: []KV{
|
||||||
|
{"CMAKE_BUILD_TYPE", "Release"},
|
||||||
{"BUILD_SHARED_LIBS", "ON"},
|
{"BUILD_SHARED_LIBS", "ON"},
|
||||||
{"ALLOW_EXTERNAL_SPIRV_TOOLS", "ON"},
|
{"ALLOW_EXTERNAL_SPIRV_TOOLS", "ON"},
|
||||||
},
|
},
|
||||||
|
Script: "ctest",
|
||||||
},
|
},
|
||||||
Python,
|
Python,
|
||||||
Bash,
|
Bash,
|
||||||
@@ -126,65 +128,3 @@ func init() {
|
|||||||
ID: 205796,
|
ID: 205796,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t Toolchain) newSPIRVLLVMTranslator() (pkg.Artifact, string) {
|
|
||||||
const (
|
|
||||||
version = "22.1.2"
|
|
||||||
checksum = "JZAaV5ewYcm-35YA_U2BM2IcsQouZtX1BLZR0zh2vSlfEXMsT5OCtY4Gh5RJkcGy"
|
|
||||||
)
|
|
||||||
return t.NewPackage("spirv-llvm-translator", version, newFromGitHub(
|
|
||||||
"KhronosGroup/SPIRV-LLVM-Translator",
|
|
||||||
"v"+version, checksum,
|
|
||||||
), &PackageAttr{
|
|
||||||
Patches: []KV{
|
|
||||||
{"remove-early-prefix", `diff --git a/CMakeLists.txt b/CMakeLists.txt
|
|
||||||
index c000a77e..86f79b03 100644
|
|
||||||
--- a/CMakeLists.txt
|
|
||||||
+++ b/CMakeLists.txt
|
|
||||||
@@ -172,5 +172,5 @@ install(
|
|
||||||
FILES
|
|
||||||
${CMAKE_BINARY_DIR}/LLVMSPIRVLib.pc
|
|
||||||
DESTINATION
|
|
||||||
- ${CMAKE_INSTALL_PREFIX}/lib${LLVM_LIBDIR_SUFFIX}/pkgconfig
|
|
||||||
+ lib${LLVM_LIBDIR_SUFFIX}/pkgconfig
|
|
||||||
)
|
|
||||||
`},
|
|
||||||
},
|
|
||||||
|
|
||||||
// litArgs emits shell syntax
|
|
||||||
ScriptEarly: `
|
|
||||||
export LIT_OPTS=` + litArgs(true,
|
|
||||||
// error: line 13: OpTypeCooperativeMatrixKHR Scope is limited to Workgroup and Subgroup
|
|
||||||
"cooperative_matrix_constant_null.spvasm") + `
|
|
||||||
`,
|
|
||||||
}, &CMakeHelper{
|
|
||||||
Cache: []KV{
|
|
||||||
{"CMAKE_SKIP_BUILD_RPATH", "ON"},
|
|
||||||
{"BUILD_SHARED_LIBS", "ON"},
|
|
||||||
{"LLVM_SPIRV_ENABLE_LIBSPIRV_DIS", "ON"},
|
|
||||||
{"LLVM_EXTERNAL_SPIRV_HEADERS_SOURCE_DIR", "/system"},
|
|
||||||
{"LLVM_EXTERNAL_LIT", "/system/bin/lit"},
|
|
||||||
{"LLVM_INCLUDE_TESTS", "ON"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Bash,
|
|
||||||
LIT,
|
|
||||||
|
|
||||||
SPIRVTools,
|
|
||||||
), version
|
|
||||||
}
|
|
||||||
func init() {
|
|
||||||
artifactsM[SPIRVLLVMTranslator] = Metadata{
|
|
||||||
f: Toolchain.newSPIRVLLVMTranslator,
|
|
||||||
|
|
||||||
Name: "spirv-llvm-translator",
|
|
||||||
Description: "bi-directional translation between SPIR-V and LLVM IR",
|
|
||||||
Website: "https://github.com/KhronosGroup/SPIRV-LLVM-Translator",
|
|
||||||
|
|
||||||
Dependencies: P{
|
|
||||||
SPIRVTools,
|
|
||||||
},
|
|
||||||
|
|
||||||
ID: 227273,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -2,47 +2,10 @@ package rosa
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"runtime"
|
"runtime"
|
||||||
"slices"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"hakurei.app/internal/pkg"
|
"hakurei.app/internal/pkg"
|
||||||
)
|
)
|
||||||
|
|
||||||
// skipGNUTests generates a string for skipping specific tests by number in a
|
|
||||||
// GNU test suite. This is nontrivial because the test suite does not support
|
|
||||||
// excluding tests in any way, so ranges for all but the skipped tests have to
|
|
||||||
// be specified instead.
|
|
||||||
//
|
|
||||||
// For example, to skip test 764, ranges around the skipped test must be
|
|
||||||
// specified:
|
|
||||||
//
|
|
||||||
// 1-763 765-
|
|
||||||
//
|
|
||||||
// Tests are numbered starting from 1. The resulting string is unquoted.
|
|
||||||
func skipGNUTests(tests ...int) string {
|
|
||||||
tests = slices.Clone(tests)
|
|
||||||
slices.Sort(tests)
|
|
||||||
|
|
||||||
var buf strings.Builder
|
|
||||||
|
|
||||||
if tests[0] != 1 {
|
|
||||||
buf.WriteString("1-")
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, n := range tests {
|
|
||||||
if n != 1 && (i == 0 || tests[i-1] != n-1) {
|
|
||||||
buf.WriteString(strconv.Itoa(n - 1))
|
|
||||||
buf.WriteString(" ")
|
|
||||||
}
|
|
||||||
if i == len(tests)-1 || tests[i+1] != n+1 {
|
|
||||||
buf.WriteString(strconv.Itoa(n + 1))
|
|
||||||
buf.WriteString("-")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return buf.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Toolchain) newM4() (pkg.Artifact, string) {
|
func (t Toolchain) newM4() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "1.4.21"
|
version = "1.4.21"
|
||||||
@@ -84,15 +47,7 @@ func (t Toolchain) newBison() (pkg.Artifact, string) {
|
|||||||
"https://ftpmirror.gnu.org/gnu/bison/bison-"+version+".tar.gz",
|
"https://ftpmirror.gnu.org/gnu/bison/bison-"+version+".tar.gz",
|
||||||
checksum,
|
checksum,
|
||||||
pkg.TarGzip,
|
pkg.TarGzip,
|
||||||
), nil, &MakeHelper{
|
), nil, (*MakeHelper)(nil),
|
||||||
Check: []string{
|
|
||||||
"TESTSUITEFLAGS=" + jobsFlagE + "' " + skipGNUTests(
|
|
||||||
// clang miscompiles (SIGILL)
|
|
||||||
764,
|
|
||||||
) + "'",
|
|
||||||
"check",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
M4,
|
M4,
|
||||||
Diffutils,
|
Diffutils,
|
||||||
Sed,
|
Sed,
|
||||||
@@ -112,8 +67,8 @@ func init() {
|
|||||||
|
|
||||||
func (t Toolchain) newSed() (pkg.Artifact, string) {
|
func (t Toolchain) newSed() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "4.10"
|
version = "4.9"
|
||||||
checksum = "TXTRFQJCyflb-bpBRI2S5Y1DpplwvT7-KfXtpqN4AdZgZ5OtI6yStn1-bkhDKx51"
|
checksum = "pe7HWH4PHNYrazOTlUoE1fXmhn2GOPFN_xE62i0llOr3kYGrH1g2_orDz0UtZ9Nt"
|
||||||
)
|
)
|
||||||
return t.NewPackage("sed", version, newTar(
|
return t.NewPackage("sed", version, newTar(
|
||||||
"https://ftpmirror.gnu.org/gnu/sed/sed-"+version+".tar.gz",
|
"https://ftpmirror.gnu.org/gnu/sed/sed-"+version+".tar.gz",
|
||||||
@@ -121,8 +76,6 @@ func (t Toolchain) newSed() (pkg.Artifact, string) {
|
|||||||
pkg.TarGzip,
|
pkg.TarGzip,
|
||||||
), nil, (*MakeHelper)(nil),
|
), nil, (*MakeHelper)(nil),
|
||||||
Diffutils,
|
Diffutils,
|
||||||
|
|
||||||
KernelHeaders,
|
|
||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
@@ -231,9 +184,6 @@ func (t Toolchain) newLibtool() (pkg.Artifact, string) {
|
|||||||
checksum,
|
checksum,
|
||||||
pkg.TarGzip,
|
pkg.TarGzip,
|
||||||
), nil, &MakeHelper{
|
), nil, &MakeHelper{
|
||||||
// _Z2a2c: symbol not found
|
|
||||||
SkipCheck: t.isStage0(),
|
|
||||||
|
|
||||||
Check: []string{
|
Check: []string{
|
||||||
"TESTSUITEFLAGS=" + jobsFlagE,
|
"TESTSUITEFLAGS=" + jobsFlagE,
|
||||||
"check",
|
"check",
|
||||||
@@ -424,8 +374,8 @@ func init() {
|
|||||||
|
|
||||||
func (t Toolchain) newCoreutils() (pkg.Artifact, string) {
|
func (t Toolchain) newCoreutils() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "9.11"
|
version = "9.10"
|
||||||
checksum = "t8UMed5wpFEoC56aa42_yidfOAaRGzOfj7MRtQkkqgGbpXiskNA8bd-EmVSQkZie"
|
checksum = "o-B9wssRnZySzJUI1ZJAgw-bZtj1RC67R9po2AcM2OjjS8FQIl16IRHpC6IwO30i"
|
||||||
)
|
)
|
||||||
return t.NewPackage("coreutils", version, newTar(
|
return t.NewPackage("coreutils", version, newTar(
|
||||||
"https://ftpmirror.gnu.org/gnu/coreutils/coreutils-"+version+".tar.gz",
|
"https://ftpmirror.gnu.org/gnu/coreutils/coreutils-"+version+".tar.gz",
|
||||||
@@ -437,13 +387,106 @@ func (t Toolchain) newCoreutils() (pkg.Artifact, string) {
|
|||||||
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
|
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
|
||||||
|
|
||||||
test_disable '#!/bin/sh' gnulib-tests/test-c32ispunct.sh
|
test_disable '#!/bin/sh' gnulib-tests/test-c32ispunct.sh
|
||||||
|
test_disable '#!/bin/sh' tests/split/line-bytes.sh
|
||||||
test_disable '#!/bin/sh' tests/ls/hyperlink.sh
|
test_disable '#!/bin/sh' tests/ls/hyperlink.sh
|
||||||
test_disable '#!/bin/sh' tests/misc/user.sh
|
|
||||||
test_disable 'int main(){return 0;}' gnulib-tests/test-chown.c
|
test_disable 'int main(){return 0;}' gnulib-tests/test-chown.c
|
||||||
test_disable 'int main(){return 0;}' gnulib-tests/test-fchownat.c
|
test_disable 'int main(){return 0;}' gnulib-tests/test-fchownat.c
|
||||||
test_disable 'int main(){return 0;}' gnulib-tests/test-lchown.c
|
test_disable 'int main(){return 0;}' gnulib-tests/test-lchown.c
|
||||||
`,
|
`,
|
||||||
|
|
||||||
|
Patches: []KV{
|
||||||
|
{"tests-fix-job-control", `From 21d287324aa43aa3a31f39619ade0deac7fd6013 Mon Sep 17 00:00:00 2001
|
||||||
|
From: =?UTF-8?q?P=C3=A1draig=20Brady?= <P@draigBrady.com>
|
||||||
|
Date: Tue, 24 Feb 2026 15:44:41 +0000
|
||||||
|
Subject: [PATCH] tests: fix job control triggering test termination
|
||||||
|
|
||||||
|
This avoids the test harness being terminated like:
|
||||||
|
make[1]: *** [Makefile:24419: check-recursive] Hangup
|
||||||
|
make[3]: *** [Makefile:24668: check-TESTS] Hangup
|
||||||
|
make: *** [Makefile:24922: check] Hangup
|
||||||
|
make[2]: *** [Makefile:24920: check-am] Hangup
|
||||||
|
make[4]: *** [Makefile:24685: tests/misc/usage_vs_refs.log] Error 129
|
||||||
|
...
|
||||||
|
|
||||||
|
This happened sometimes when the tests were being run non interactively.
|
||||||
|
For example when run like:
|
||||||
|
|
||||||
|
setsid make TESTS="tests/timeout/timeout.sh \
|
||||||
|
tests/tail/overlay-headers.sh" SUBDIRS=. -j2 check
|
||||||
|
|
||||||
|
Note the race window can be made bigger by adding a sleep
|
||||||
|
after tail is stopped in overlay-headers.sh
|
||||||
|
|
||||||
|
The race can trigger the kernel to induce its job control
|
||||||
|
mechanism to prevent stuck processes.
|
||||||
|
I.e. where it sends SIGHUP + SIGCONT to a process group
|
||||||
|
when it determines that group may become orphaned,
|
||||||
|
and there are stopped processes in that group.
|
||||||
|
|
||||||
|
* tests/tail/overlay-headers.sh: Use setsid(1) to keep the stopped
|
||||||
|
tail process in a separate process group, thus avoiding any kernel
|
||||||
|
job control protection mechanism.
|
||||||
|
* tests/timeout/timeout.sh: Use setsid(1) to avoid the kernel
|
||||||
|
checking the main process group when sleep(1) is reparented.
|
||||||
|
Fixes https://bugs.gnu.org/80477
|
||||||
|
---
|
||||||
|
tests/tail/overlay-headers.sh | 8 +++++++-
|
||||||
|
tests/timeout/timeout.sh | 11 ++++++++---
|
||||||
|
2 files changed, 15 insertions(+), 4 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/tests/tail/overlay-headers.sh b/tests/tail/overlay-headers.sh
|
||||||
|
index be9b6a7df..1e6da0a3f 100755
|
||||||
|
--- a/tests/tail/overlay-headers.sh
|
||||||
|
+++ b/tests/tail/overlay-headers.sh
|
||||||
|
@@ -20,6 +20,8 @@
|
||||||
|
. "${srcdir=.}/tests/init.sh"; path_prepend_ ./src
|
||||||
|
print_ver_ tail sleep
|
||||||
|
|
||||||
|
+setsid true || skip_ 'setsid required to control groups'
|
||||||
|
+
|
||||||
|
# Function to count number of lines from tail
|
||||||
|
# while ignoring transient errors due to resource limits
|
||||||
|
countlines_ ()
|
||||||
|
@@ -54,7 +56,11 @@ echo start > file2 || framework_failure_
|
||||||
|
env sleep 60 & sleep=$!
|
||||||
|
|
||||||
|
# Note don't use timeout(1) here as it currently
|
||||||
|
-# does not propagate SIGCONT
|
||||||
|
+# does not propagate SIGCONT.
|
||||||
|
+# Note use setsid here to ensure we're in a separate process group
|
||||||
|
+# as we're going to STOP this tail process, and this can trigger
|
||||||
|
+# the kernel to send SIGHUP to a group if other tests have
|
||||||
|
+# processes that are reparented. (See tests/timeout/timeout.sh).
|
||||||
|
tail $fastpoll --pid=$sleep -f file1 file2 > out & pid=$!
|
||||||
|
|
||||||
|
# Ensure tail is running
|
||||||
|
diff --git a/tests/timeout/timeout.sh b/tests/timeout/timeout.sh
|
||||||
|
index 9a395416b..fbb043312 100755
|
||||||
|
--- a/tests/timeout/timeout.sh
|
||||||
|
+++ b/tests/timeout/timeout.sh
|
||||||
|
@@ -56,9 +56,14 @@ returns_ 124 timeout --foreground -s0 -k1 .1 sleep 10 && fail=1
|
||||||
|
) || fail=1
|
||||||
|
|
||||||
|
# Don't be confused when starting off with a child (Bug#9098).
|
||||||
|
-out=$(sleep .1 & exec timeout .5 sh -c 'sleep 2; echo foo')
|
||||||
|
-status=$?
|
||||||
|
-test "$out" = "" && test $status = 124 || fail=1
|
||||||
|
+# Use setsid to avoid sleep being in the test's process group, as
|
||||||
|
+# upon reparenting it can trigger an orphaned process group SIGHUP
|
||||||
|
+# (if there were stopped processes in other tests).
|
||||||
|
+if setsid true; then
|
||||||
|
+ out=$(setsid sleep .1 & exec timeout .5 sh -c 'sleep 2; echo foo')
|
||||||
|
+ status=$?
|
||||||
|
+ test "$out" = "" && test $status = 124 || fail=1
|
||||||
|
+fi
|
||||||
|
|
||||||
|
# Verify --verbose output
|
||||||
|
cat > exp <<\EOF
|
||||||
|
--
|
||||||
|
2.53.0
|
||||||
|
`},
|
||||||
|
},
|
||||||
|
|
||||||
Flag: TEarly,
|
Flag: TEarly,
|
||||||
}, &MakeHelper{
|
}, &MakeHelper{
|
||||||
Configure: []KV{
|
Configure: []KV{
|
||||||
@@ -714,20 +757,15 @@ func init() {
|
|||||||
|
|
||||||
func (t Toolchain) newParallel() (pkg.Artifact, string) {
|
func (t Toolchain) newParallel() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "20260422"
|
version = "20260322"
|
||||||
checksum = "eTsepxgqhXpMEhPd55qh-W5y4vjKn0x9TD2mzbJCNZYtFf4lT4Wzoqr74HGJYBEH"
|
checksum = "gHoPmFkOO62ev4xW59HqyMlodhjp8LvTsBOwsVKHUUdfrt7KwB8koXmSVqQ4VOrB"
|
||||||
)
|
)
|
||||||
return t.NewPackage("parallel", version, newTar(
|
return t.NewPackage("parallel", version, newTar(
|
||||||
"https://ftpmirror.gnu.org/gnu/parallel/parallel-"+version+".tar.bz2",
|
"https://ftpmirror.gnu.org/gnu/parallel/parallel-"+version+".tar.bz2",
|
||||||
checksum,
|
checksum,
|
||||||
pkg.TarBzip2,
|
pkg.TarBzip2,
|
||||||
), &PackageAttr{
|
), nil, (*MakeHelper)(nil),
|
||||||
ScriptEarly: `
|
|
||||||
ln -s ../system/bin/bash /bin/
|
|
||||||
`,
|
|
||||||
}, (*MakeHelper)(nil),
|
|
||||||
Perl,
|
Perl,
|
||||||
Bash,
|
|
||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
@@ -1105,11 +1143,10 @@ func init() {
|
|||||||
func (t Toolchain) newMPC() (pkg.Artifact, string) {
|
func (t Toolchain) newMPC() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "1.4.1"
|
version = "1.4.1"
|
||||||
checksum = "ZffaZyWkvIw0iPvRe5EJ7O-VvHtSkbbb3K_7SgPtK810NvGan7nbF0T5-6tozjQN"
|
checksum = "wdXAhplnS89FjVp20m2nC2CmLFQeyQqLpQAfViTy4vPxFdv2WYOTtfBKeIk5_Rec"
|
||||||
)
|
)
|
||||||
return t.NewPackage("mpc", version, newFromGitLab(
|
return t.NewPackage("mpc", version, t.newTagRemote(
|
||||||
"gitlab.inria.fr",
|
"https://gitlab.inria.fr/mpc/mpc.git",
|
||||||
"mpc/mpc",
|
|
||||||
version, checksum,
|
version, checksum,
|
||||||
), &PackageAttr{
|
), &PackageAttr{
|
||||||
// does not find mpc-impl.h otherwise
|
// does not find mpc-impl.h otherwise
|
||||||
|
|||||||
@@ -1,35 +0,0 @@
|
|||||||
package rosa
|
|
||||||
|
|
||||||
import (
|
|
||||||
"slices"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestSkipGNUTests(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
testCases := []struct {
|
|
||||||
tests []int
|
|
||||||
want string
|
|
||||||
}{
|
|
||||||
{[]int{764}, "1-763 765-"},
|
|
||||||
{[]int{764, 0xcafe, 37, 9}, "1-8 10-36 38-763 765-51965 51967-"},
|
|
||||||
{[]int{1, 2, 0xbed}, "3-3052 3054-"},
|
|
||||||
{[]int{3, 4}, "1-2 5-"},
|
|
||||||
}
|
|
||||||
for _, tc := range testCases {
|
|
||||||
t.Run(strings.Join(slices.Collect(func(yield func(string) bool) {
|
|
||||||
for _, n := range tc.tests {
|
|
||||||
yield(strconv.Itoa(n))
|
|
||||||
}
|
|
||||||
}), ","), func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
if got := skipGNUTests(tc.tests...); got != tc.want {
|
|
||||||
t.Errorf("skipGNUTests: %q, want %q", got, tc.want)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -10,9 +10,9 @@ import (
|
|||||||
// newGoBootstrap returns the Go bootstrap toolchain.
|
// newGoBootstrap returns the Go bootstrap toolchain.
|
||||||
func (t Toolchain) newGoBootstrap() pkg.Artifact {
|
func (t Toolchain) newGoBootstrap() pkg.Artifact {
|
||||||
const checksum = "8o9JL_ToiQKadCTb04nvBDkp8O1xiWOolAxVEqaTGodieNe4lOFEjlOxN3bwwe23"
|
const checksum = "8o9JL_ToiQKadCTb04nvBDkp8O1xiWOolAxVEqaTGodieNe4lOFEjlOxN3bwwe23"
|
||||||
return t.New("go1.4-bootstrap", 0, t.AppendPresets(nil,
|
return t.New("go1.4-bootstrap", 0, []pkg.Artifact{
|
||||||
Bash,
|
t.Load(Bash),
|
||||||
), nil, []string{
|
}, nil, []string{
|
||||||
"CGO_ENABLED=0",
|
"CGO_ENABLED=0",
|
||||||
}, `
|
}, `
|
||||||
mkdir -p /var/tmp/ /work/system/
|
mkdir -p /var/tmp/ /work/system/
|
||||||
@@ -35,9 +35,9 @@ func (t Toolchain) newGo(
|
|||||||
script string,
|
script string,
|
||||||
extra ...pkg.Artifact,
|
extra ...pkg.Artifact,
|
||||||
) pkg.Artifact {
|
) pkg.Artifact {
|
||||||
return t.New("go"+version, 0, t.AppendPresets(extra,
|
return t.New("go"+version, 0, slices.Concat([]pkg.Artifact{
|
||||||
Bash,
|
t.Load(Bash),
|
||||||
), nil, slices.Concat([]string{
|
}, extra), nil, slices.Concat([]string{
|
||||||
"CC=cc",
|
"CC=cc",
|
||||||
"GOCACHE=/tmp/gocache",
|
"GOCACHE=/tmp/gocache",
|
||||||
"GOROOT_BOOTSTRAP=/system/go",
|
"GOROOT_BOOTSTRAP=/system/go",
|
||||||
@@ -127,8 +127,8 @@ sed -i \
|
|||||||
)
|
)
|
||||||
|
|
||||||
go125 := t.newGo(
|
go125 := t.newGo(
|
||||||
"1.25.9",
|
"1.25.7",
|
||||||
"gShJb9uOMk5AxqPSwvn53ZO56S6PyP6nfojzrHUiJ3krAvrgjJpYa6-DPA-jxbpN",
|
"fyylHdBVRUobnBjYj3NKBaYPUw3kGmo2mEELiZonOYurPfbarNU1x77B99Fjut7Q",
|
||||||
[]string{"CGO_ENABLED=0"}, `
|
[]string{"CGO_ENABLED=0"}, `
|
||||||
sed -i \
|
sed -i \
|
||||||
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
|
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
|
||||||
@@ -151,14 +151,9 @@ rm \
|
|||||||
sed -i \
|
sed -i \
|
||||||
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
|
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
|
||||||
cmd/link/internal/`+runtime.GOARCH+`/obj.go
|
cmd/link/internal/`+runtime.GOARCH+`/obj.go
|
||||||
sed -i \
|
|
||||||
's/cpu.X86.HasAVX512VBMI/& \&\& cpu.X86.HasPOPCNT/' \
|
|
||||||
internal/runtime/gc/scan/scan_amd64.go
|
|
||||||
|
|
||||||
rm \
|
rm \
|
||||||
os/root_unix_test.go \
|
os/root_unix_test.go
|
||||||
cmd/cgo/internal/testsanitizers/tsan_test.go \
|
|
||||||
cmd/cgo/internal/testsanitizers/cshared_test.go
|
|
||||||
`, go125,
|
`, go125,
|
||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,8 +7,9 @@ func (t Toolchain) newHakurei(
|
|||||||
withHostname bool,
|
withHostname bool,
|
||||||
) pkg.Artifact {
|
) pkg.Artifact {
|
||||||
hostname := `
|
hostname := `
|
||||||
echo 'Building test helper (hostname).'
|
echo '# Building test helper (hostname).'
|
||||||
go build -o /bin/hostname /usr/src/hostname/main.go
|
go build -v -o /bin/hostname /usr/src/hostname/main.go
|
||||||
|
echo
|
||||||
`
|
`
|
||||||
if !withHostname {
|
if !withHostname {
|
||||||
hostname = ""
|
hostname = ""
|
||||||
|
|||||||
@@ -2,12 +2,12 @@ package rosa
|
|||||||
|
|
||||||
import "hakurei.app/internal/pkg"
|
import "hakurei.app/internal/pkg"
|
||||||
|
|
||||||
const kernelVersion = "6.12.84"
|
const kernelVersion = "6.12.81"
|
||||||
|
|
||||||
var kernelSource = newTar(
|
var kernelSource = newTar(
|
||||||
"https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/"+
|
"https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/"+
|
||||||
"snapshot/linux-"+kernelVersion+".tar.gz",
|
"snapshot/linux-"+kernelVersion+".tar.gz",
|
||||||
"GJLUEu68r3DpLYoTcMl4wA_ThMBs_Zwc0gZsp82ii_3AOfcVxpI639IKfq2jAAY2",
|
"fBkNwf82DQXh74in6gaF2Jot7Vg-Vlcp9BUtCEipL9mvcM1EXLVFdV7FcrO20Eve",
|
||||||
pkg.TarGzip,
|
pkg.TarGzip,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -1,16 +1,16 @@
|
|||||||
#
|
#
|
||||||
# Automatically generated file; DO NOT EDIT.
|
# Automatically generated file; DO NOT EDIT.
|
||||||
# Linux/x86 6.12.84 Kernel Configuration
|
# Linux/x86 6.12.80 Kernel Configuration
|
||||||
#
|
#
|
||||||
CONFIG_CC_VERSION_TEXT="clang version 22.1.4"
|
CONFIG_CC_VERSION_TEXT="clang version 22.1.2"
|
||||||
CONFIG_GCC_VERSION=0
|
CONFIG_GCC_VERSION=0
|
||||||
CONFIG_CC_IS_CLANG=y
|
CONFIG_CC_IS_CLANG=y
|
||||||
CONFIG_CLANG_VERSION=220104
|
CONFIG_CLANG_VERSION=220102
|
||||||
CONFIG_AS_IS_LLVM=y
|
CONFIG_AS_IS_LLVM=y
|
||||||
CONFIG_AS_VERSION=220104
|
CONFIG_AS_VERSION=220102
|
||||||
CONFIG_LD_VERSION=0
|
CONFIG_LD_VERSION=0
|
||||||
CONFIG_LD_IS_LLD=y
|
CONFIG_LD_IS_LLD=y
|
||||||
CONFIG_LLD_VERSION=220104
|
CONFIG_LLD_VERSION=220102
|
||||||
CONFIG_RUSTC_VERSION=0
|
CONFIG_RUSTC_VERSION=0
|
||||||
CONFIG_RUSTC_LLVM_VERSION=0
|
CONFIG_RUSTC_LLVM_VERSION=0
|
||||||
CONFIG_CC_HAS_ASM_GOTO_OUTPUT=y
|
CONFIG_CC_HAS_ASM_GOTO_OUTPUT=y
|
||||||
@@ -3175,8 +3175,14 @@ CONFIG_PATA_ACPI=y
|
|||||||
CONFIG_ATA_GENERIC=y
|
CONFIG_ATA_GENERIC=y
|
||||||
CONFIG_PATA_LEGACY=m
|
CONFIG_PATA_LEGACY=m
|
||||||
CONFIG_MD=y
|
CONFIG_MD=y
|
||||||
# CONFIG_BLK_DEV_MD is not set
|
CONFIG_BLK_DEV_MD=m
|
||||||
CONFIG_MD_BITMAP_FILE=y
|
CONFIG_MD_BITMAP_FILE=y
|
||||||
|
CONFIG_MD_LINEAR=m
|
||||||
|
CONFIG_MD_RAID0=m
|
||||||
|
CONFIG_MD_RAID1=m
|
||||||
|
CONFIG_MD_RAID10=m
|
||||||
|
CONFIG_MD_RAID456=m
|
||||||
|
CONFIG_MD_CLUSTER=m
|
||||||
CONFIG_BCACHE=m
|
CONFIG_BCACHE=m
|
||||||
# CONFIG_BCACHE_DEBUG is not set
|
# CONFIG_BCACHE_DEBUG is not set
|
||||||
# CONFIG_BCACHE_ASYNC_REGISTRATION is not set
|
# CONFIG_BCACHE_ASYNC_REGISTRATION is not set
|
||||||
@@ -3199,7 +3205,7 @@ CONFIG_DM_ERA=m
|
|||||||
CONFIG_DM_CLONE=m
|
CONFIG_DM_CLONE=m
|
||||||
CONFIG_DM_MIRROR=m
|
CONFIG_DM_MIRROR=m
|
||||||
CONFIG_DM_LOG_USERSPACE=m
|
CONFIG_DM_LOG_USERSPACE=m
|
||||||
# CONFIG_DM_RAID is not set
|
CONFIG_DM_RAID=m
|
||||||
CONFIG_DM_ZERO=m
|
CONFIG_DM_ZERO=m
|
||||||
CONFIG_DM_MULTIPATH=m
|
CONFIG_DM_MULTIPATH=m
|
||||||
CONFIG_DM_MULTIPATH_QL=m
|
CONFIG_DM_MULTIPATH_QL=m
|
||||||
@@ -11630,7 +11636,10 @@ CONFIG_RANDSTRUCT_NONE=y
|
|||||||
|
|
||||||
CONFIG_XOR_BLOCKS=m
|
CONFIG_XOR_BLOCKS=m
|
||||||
CONFIG_ASYNC_CORE=m
|
CONFIG_ASYNC_CORE=m
|
||||||
|
CONFIG_ASYNC_MEMCPY=m
|
||||||
CONFIG_ASYNC_XOR=m
|
CONFIG_ASYNC_XOR=m
|
||||||
|
CONFIG_ASYNC_PQ=m
|
||||||
|
CONFIG_ASYNC_RAID6_RECOV=m
|
||||||
CONFIG_CRYPTO=y
|
CONFIG_CRYPTO=y
|
||||||
|
|
||||||
#
|
#
|
||||||
@@ -11916,6 +11925,8 @@ CONFIG_BINARY_PRINTF=y
|
|||||||
#
|
#
|
||||||
# Library routines
|
# Library routines
|
||||||
#
|
#
|
||||||
|
CONFIG_RAID6_PQ=m
|
||||||
|
CONFIG_RAID6_PQ_BENCHMARK=y
|
||||||
CONFIG_LINEAR_RANGES=y
|
CONFIG_LINEAR_RANGES=y
|
||||||
CONFIG_PACKING=y
|
CONFIG_PACKING=y
|
||||||
CONFIG_BITREVERSE=y
|
CONFIG_BITREVERSE=y
|
||||||
@@ -12460,6 +12471,7 @@ CONFIG_RUNTIME_TESTING_MENU=y
|
|||||||
# CONFIG_INTERVAL_TREE_TEST is not set
|
# CONFIG_INTERVAL_TREE_TEST is not set
|
||||||
# CONFIG_PERCPU_TEST is not set
|
# CONFIG_PERCPU_TEST is not set
|
||||||
# CONFIG_ATOMIC64_SELFTEST is not set
|
# CONFIG_ATOMIC64_SELFTEST is not set
|
||||||
|
# CONFIG_ASYNC_RAID6_TEST is not set
|
||||||
# CONFIG_TEST_HEXDUMP is not set
|
# CONFIG_TEST_HEXDUMP is not set
|
||||||
# CONFIG_TEST_KSTRTOX is not set
|
# CONFIG_TEST_KSTRTOX is not set
|
||||||
# CONFIG_TEST_PRINTF is not set
|
# CONFIG_TEST_PRINTF is not set
|
||||||
|
|||||||
@@ -1,16 +1,16 @@
|
|||||||
#
|
#
|
||||||
# Automatically generated file; DO NOT EDIT.
|
# Automatically generated file; DO NOT EDIT.
|
||||||
# Linux/arm64 6.12.83 Kernel Configuration
|
# Linux/arm64 6.12.80 Kernel Configuration
|
||||||
#
|
#
|
||||||
CONFIG_CC_VERSION_TEXT="clang version 22.1.4"
|
CONFIG_CC_VERSION_TEXT="clang version 21.1.8"
|
||||||
CONFIG_GCC_VERSION=0
|
CONFIG_GCC_VERSION=0
|
||||||
CONFIG_CC_IS_CLANG=y
|
CONFIG_CC_IS_CLANG=y
|
||||||
CONFIG_CLANG_VERSION=220104
|
CONFIG_CLANG_VERSION=210108
|
||||||
CONFIG_AS_IS_LLVM=y
|
CONFIG_AS_IS_LLVM=y
|
||||||
CONFIG_AS_VERSION=220104
|
CONFIG_AS_VERSION=210108
|
||||||
CONFIG_LD_VERSION=0
|
CONFIG_LD_VERSION=0
|
||||||
CONFIG_LD_IS_LLD=y
|
CONFIG_LD_IS_LLD=y
|
||||||
CONFIG_LLD_VERSION=220104
|
CONFIG_LLD_VERSION=210108
|
||||||
CONFIG_RUSTC_VERSION=0
|
CONFIG_RUSTC_VERSION=0
|
||||||
CONFIG_RUSTC_LLVM_VERSION=0
|
CONFIG_RUSTC_LLVM_VERSION=0
|
||||||
CONFIG_CC_HAS_ASM_GOTO_OUTPUT=y
|
CONFIG_CC_HAS_ASM_GOTO_OUTPUT=y
|
||||||
@@ -3253,8 +3253,14 @@ CONFIG_PATA_ACPI=y
|
|||||||
CONFIG_ATA_GENERIC=y
|
CONFIG_ATA_GENERIC=y
|
||||||
CONFIG_PATA_LEGACY=m
|
CONFIG_PATA_LEGACY=m
|
||||||
CONFIG_MD=y
|
CONFIG_MD=y
|
||||||
# CONFIG_BLK_DEV_MD is not set
|
CONFIG_BLK_DEV_MD=m
|
||||||
CONFIG_MD_BITMAP_FILE=y
|
CONFIG_MD_BITMAP_FILE=y
|
||||||
|
CONFIG_MD_LINEAR=m
|
||||||
|
CONFIG_MD_RAID0=m
|
||||||
|
CONFIG_MD_RAID1=m
|
||||||
|
CONFIG_MD_RAID10=m
|
||||||
|
CONFIG_MD_RAID456=m
|
||||||
|
CONFIG_MD_CLUSTER=m
|
||||||
CONFIG_BCACHE=m
|
CONFIG_BCACHE=m
|
||||||
# CONFIG_BCACHE_DEBUG is not set
|
# CONFIG_BCACHE_DEBUG is not set
|
||||||
# CONFIG_BCACHE_ASYNC_REGISTRATION is not set
|
# CONFIG_BCACHE_ASYNC_REGISTRATION is not set
|
||||||
@@ -3277,7 +3283,7 @@ CONFIG_DM_ERA=m
|
|||||||
CONFIG_DM_CLONE=m
|
CONFIG_DM_CLONE=m
|
||||||
CONFIG_DM_MIRROR=m
|
CONFIG_DM_MIRROR=m
|
||||||
CONFIG_DM_LOG_USERSPACE=m
|
CONFIG_DM_LOG_USERSPACE=m
|
||||||
# CONFIG_DM_RAID is not set
|
CONFIG_DM_RAID=m
|
||||||
CONFIG_DM_ZERO=m
|
CONFIG_DM_ZERO=m
|
||||||
CONFIG_DM_MULTIPATH=m
|
CONFIG_DM_MULTIPATH=m
|
||||||
CONFIG_DM_MULTIPATH_QL=m
|
CONFIG_DM_MULTIPATH_QL=m
|
||||||
@@ -10294,6 +10300,7 @@ CONFIG_ALTERA_MSGDMA=m
|
|||||||
# CONFIG_AMBA_PL08X is not set
|
# CONFIG_AMBA_PL08X is not set
|
||||||
CONFIG_APPLE_ADMAC=m
|
CONFIG_APPLE_ADMAC=m
|
||||||
CONFIG_AXI_DMAC=m
|
CONFIG_AXI_DMAC=m
|
||||||
|
CONFIG_BCM_SBA_RAID=m
|
||||||
CONFIG_DMA_BCM2835=m
|
CONFIG_DMA_BCM2835=m
|
||||||
CONFIG_DMA_SUN6I=m
|
CONFIG_DMA_SUN6I=m
|
||||||
CONFIG_DW_AXI_DMAC=m
|
CONFIG_DW_AXI_DMAC=m
|
||||||
@@ -13285,7 +13292,12 @@ CONFIG_RANDSTRUCT_NONE=y
|
|||||||
|
|
||||||
CONFIG_XOR_BLOCKS=m
|
CONFIG_XOR_BLOCKS=m
|
||||||
CONFIG_ASYNC_CORE=m
|
CONFIG_ASYNC_CORE=m
|
||||||
|
CONFIG_ASYNC_MEMCPY=m
|
||||||
CONFIG_ASYNC_XOR=m
|
CONFIG_ASYNC_XOR=m
|
||||||
|
CONFIG_ASYNC_PQ=m
|
||||||
|
CONFIG_ASYNC_RAID6_RECOV=m
|
||||||
|
CONFIG_ASYNC_TX_DISABLE_PQ_VAL_DMA=y
|
||||||
|
CONFIG_ASYNC_TX_DISABLE_XOR_VAL_DMA=y
|
||||||
CONFIG_CRYPTO=y
|
CONFIG_CRYPTO=y
|
||||||
|
|
||||||
#
|
#
|
||||||
@@ -13628,6 +13640,8 @@ CONFIG_BINARY_PRINTF=y
|
|||||||
#
|
#
|
||||||
# Library routines
|
# Library routines
|
||||||
#
|
#
|
||||||
|
CONFIG_RAID6_PQ=m
|
||||||
|
CONFIG_RAID6_PQ_BENCHMARK=y
|
||||||
CONFIG_LINEAR_RANGES=y
|
CONFIG_LINEAR_RANGES=y
|
||||||
CONFIG_PACKING=y
|
CONFIG_PACKING=y
|
||||||
CONFIG_BITREVERSE=y
|
CONFIG_BITREVERSE=y
|
||||||
@@ -14158,6 +14172,7 @@ CONFIG_RUNTIME_TESTING_MENU=y
|
|||||||
# CONFIG_INTERVAL_TREE_TEST is not set
|
# CONFIG_INTERVAL_TREE_TEST is not set
|
||||||
# CONFIG_PERCPU_TEST is not set
|
# CONFIG_PERCPU_TEST is not set
|
||||||
# CONFIG_ATOMIC64_SELFTEST is not set
|
# CONFIG_ATOMIC64_SELFTEST is not set
|
||||||
|
# CONFIG_ASYNC_RAID6_TEST is not set
|
||||||
# CONFIG_TEST_HEXDUMP is not set
|
# CONFIG_TEST_HEXDUMP is not set
|
||||||
# CONFIG_TEST_KSTRTOX is not set
|
# CONFIG_TEST_KSTRTOX is not set
|
||||||
# CONFIG_TEST_PRINTF is not set
|
# CONFIG_TEST_PRINTF is not set
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#
|
#
|
||||||
# Automatically generated file; DO NOT EDIT.
|
# Automatically generated file; DO NOT EDIT.
|
||||||
# Linux/riscv 6.12.80 Kernel Configuration
|
# Linux/riscv 6.12.77 Kernel Configuration
|
||||||
#
|
#
|
||||||
CONFIG_CC_VERSION_TEXT="clang version 22.1.2"
|
CONFIG_CC_VERSION_TEXT="clang version 22.1.2"
|
||||||
CONFIG_GCC_VERSION=0
|
CONFIG_GCC_VERSION=0
|
||||||
@@ -37,6 +37,11 @@ CONFIG_BUILD_SALT=""
|
|||||||
CONFIG_HAVE_KERNEL_GZIP=y
|
CONFIG_HAVE_KERNEL_GZIP=y
|
||||||
CONFIG_HAVE_KERNEL_ZSTD=y
|
CONFIG_HAVE_KERNEL_ZSTD=y
|
||||||
# CONFIG_KERNEL_GZIP is not set
|
# CONFIG_KERNEL_GZIP is not set
|
||||||
|
# CONFIG_KERNEL_BZIP2 is not set
|
||||||
|
# CONFIG_KERNEL_LZMA is not set
|
||||||
|
# CONFIG_KERNEL_XZ is not set
|
||||||
|
# CONFIG_KERNEL_LZO is not set
|
||||||
|
# CONFIG_KERNEL_LZ4 is not set
|
||||||
CONFIG_KERNEL_ZSTD=y
|
CONFIG_KERNEL_ZSTD=y
|
||||||
CONFIG_DEFAULT_INIT=""
|
CONFIG_DEFAULT_INIT=""
|
||||||
CONFIG_DEFAULT_HOSTNAME="rosa-early"
|
CONFIG_DEFAULT_HOSTNAME="rosa-early"
|
||||||
@@ -2843,8 +2848,14 @@ CONFIG_PATA_ACPI=y
|
|||||||
CONFIG_ATA_GENERIC=y
|
CONFIG_ATA_GENERIC=y
|
||||||
CONFIG_PATA_LEGACY=m
|
CONFIG_PATA_LEGACY=m
|
||||||
CONFIG_MD=y
|
CONFIG_MD=y
|
||||||
# CONFIG_BLK_DEV_MD is not set
|
CONFIG_BLK_DEV_MD=m
|
||||||
CONFIG_MD_BITMAP_FILE=y
|
CONFIG_MD_BITMAP_FILE=y
|
||||||
|
CONFIG_MD_LINEAR=m
|
||||||
|
CONFIG_MD_RAID0=m
|
||||||
|
CONFIG_MD_RAID1=m
|
||||||
|
CONFIG_MD_RAID10=m
|
||||||
|
CONFIG_MD_RAID456=m
|
||||||
|
CONFIG_MD_CLUSTER=m
|
||||||
CONFIG_BCACHE=m
|
CONFIG_BCACHE=m
|
||||||
# CONFIG_BCACHE_DEBUG is not set
|
# CONFIG_BCACHE_DEBUG is not set
|
||||||
# CONFIG_BCACHE_ASYNC_REGISTRATION is not set
|
# CONFIG_BCACHE_ASYNC_REGISTRATION is not set
|
||||||
@@ -2867,7 +2878,7 @@ CONFIG_DM_ERA=m
|
|||||||
CONFIG_DM_CLONE=m
|
CONFIG_DM_CLONE=m
|
||||||
CONFIG_DM_MIRROR=m
|
CONFIG_DM_MIRROR=m
|
||||||
CONFIG_DM_LOG_USERSPACE=m
|
CONFIG_DM_LOG_USERSPACE=m
|
||||||
# CONFIG_DM_RAID is not set
|
CONFIG_DM_RAID=m
|
||||||
CONFIG_DM_ZERO=m
|
CONFIG_DM_ZERO=m
|
||||||
CONFIG_DM_MULTIPATH=m
|
CONFIG_DM_MULTIPATH=m
|
||||||
CONFIG_DM_MULTIPATH_QL=m
|
CONFIG_DM_MULTIPATH_QL=m
|
||||||
@@ -10644,7 +10655,10 @@ CONFIG_RANDSTRUCT_NONE=y
|
|||||||
|
|
||||||
CONFIG_XOR_BLOCKS=m
|
CONFIG_XOR_BLOCKS=m
|
||||||
CONFIG_ASYNC_CORE=m
|
CONFIG_ASYNC_CORE=m
|
||||||
|
CONFIG_ASYNC_MEMCPY=m
|
||||||
CONFIG_ASYNC_XOR=m
|
CONFIG_ASYNC_XOR=m
|
||||||
|
CONFIG_ASYNC_PQ=m
|
||||||
|
CONFIG_ASYNC_RAID6_RECOV=m
|
||||||
CONFIG_CRYPTO=y
|
CONFIG_CRYPTO=y
|
||||||
|
|
||||||
#
|
#
|
||||||
@@ -10904,6 +10918,8 @@ CONFIG_BINARY_PRINTF=y
|
|||||||
#
|
#
|
||||||
# Library routines
|
# Library routines
|
||||||
#
|
#
|
||||||
|
CONFIG_RAID6_PQ=m
|
||||||
|
CONFIG_RAID6_PQ_BENCHMARK=y
|
||||||
CONFIG_LINEAR_RANGES=y
|
CONFIG_LINEAR_RANGES=y
|
||||||
CONFIG_PACKING=y
|
CONFIG_PACKING=y
|
||||||
CONFIG_BITREVERSE=y
|
CONFIG_BITREVERSE=y
|
||||||
@@ -11392,6 +11408,7 @@ CONFIG_RUNTIME_TESTING_MENU=y
|
|||||||
# CONFIG_INTERVAL_TREE_TEST is not set
|
# CONFIG_INTERVAL_TREE_TEST is not set
|
||||||
# CONFIG_PERCPU_TEST is not set
|
# CONFIG_PERCPU_TEST is not set
|
||||||
# CONFIG_ATOMIC64_SELFTEST is not set
|
# CONFIG_ATOMIC64_SELFTEST is not set
|
||||||
|
# CONFIG_ASYNC_RAID6_TEST is not set
|
||||||
# CONFIG_TEST_HEXDUMP is not set
|
# CONFIG_TEST_HEXDUMP is not set
|
||||||
# CONFIG_TEST_KSTRTOX is not set
|
# CONFIG_TEST_KSTRTOX is not set
|
||||||
# CONFIG_TEST_PRINTF is not set
|
# CONFIG_TEST_PRINTF is not set
|
||||||
|
|||||||
@@ -8,8 +8,8 @@ import (
|
|||||||
|
|
||||||
func (t Toolchain) newLibexpat() (pkg.Artifact, string) {
|
func (t Toolchain) newLibexpat() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "2.8.0"
|
version = "2.7.5"
|
||||||
checksum = "pnwZ_JSif-OfoWIwk2JYXWHagOWMA3Sh-Ea0p-4Rz9U9mDEeAebhyvnfD7OYOMCk"
|
checksum = "vTRUjjg-qbHSXUBYKXgzVHkUO7UNyuhrkSYrE7ikApQm0g-OvQ8tspw4w55M-1Tp"
|
||||||
)
|
)
|
||||||
return t.NewPackage("libexpat", version, newFromGitHubRelease(
|
return t.NewPackage("libexpat", version, newFromGitHubRelease(
|
||||||
"libexpat/libexpat",
|
"libexpat/libexpat",
|
||||||
|
|||||||
@@ -5,11 +5,10 @@ import "hakurei.app/internal/pkg"
|
|||||||
func (t Toolchain) newLibxml2() (pkg.Artifact, string) {
|
func (t Toolchain) newLibxml2() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "2.15.3"
|
version = "2.15.3"
|
||||||
checksum = "oJy74htGlEpf70KPvpW18fYJo0RQQkCXZRwqUz6NoXborS3HCq3Nm4gsyaSeNmUH"
|
checksum = "oWkNe53c3d4Lt4OzrXPHBcOLHJ3TWqpa0x7B7bh_DyZ-uIMiplpdZjQRgRWVal2h"
|
||||||
)
|
)
|
||||||
return t.NewPackage("libxml2", version, newFromGitLab(
|
return t.NewPackage("libxml2", version, t.newTagRemote(
|
||||||
"gitlab.gnome.org",
|
"https://gitlab.gnome.org/GNOME/libxml2.git",
|
||||||
"GNOME/libxml2",
|
|
||||||
"v"+version, checksum,
|
"v"+version, checksum,
|
||||||
), &PackageAttr{
|
), &PackageAttr{
|
||||||
// can't create shell.out: Read-only file system
|
// can't create shell.out: Read-only file system
|
||||||
|
|||||||
@@ -5,11 +5,10 @@ import "hakurei.app/internal/pkg"
|
|||||||
func (t Toolchain) newLibxslt() (pkg.Artifact, string) {
|
func (t Toolchain) newLibxslt() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "1.1.45"
|
version = "1.1.45"
|
||||||
checksum = "67ks7v8od2oWaEGf23Sst_Xbn_8brQyolQjqxPoO-lK35k_WJhi2Px5JJgbk-nfn"
|
checksum = "MZc_dyUWpHChkWDKa5iycrECxBsRd4ZMbYfL4VojTbung593mlH2tHGmxYB6NFYT"
|
||||||
)
|
)
|
||||||
return t.NewPackage("libxslt", version, newFromGitLab(
|
return t.NewPackage("libxslt", version, t.newTagRemote(
|
||||||
"gitlab.gnome.org",
|
"https://gitlab.gnome.org/GNOME/libxslt.git",
|
||||||
"GNOME/libxslt",
|
|
||||||
"v"+version, checksum,
|
"v"+version, checksum,
|
||||||
), nil, &MakeHelper{
|
), nil, &MakeHelper{
|
||||||
Generate: "NOCONFIGURE=1 ./autogen.sh",
|
Generate: "NOCONFIGURE=1 ./autogen.sh",
|
||||||
|
|||||||
@@ -1,54 +1,50 @@
|
|||||||
package rosa
|
package rosa
|
||||||
|
|
||||||
import (
|
import "hakurei.app/internal/pkg"
|
||||||
"regexp"
|
|
||||||
"runtime"
|
|
||||||
"slices"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"hakurei.app/internal/pkg"
|
func init() {
|
||||||
)
|
artifactsM[llvmSource] = Metadata{
|
||||||
|
f: func(t Toolchain) (pkg.Artifact, string) {
|
||||||
|
return t.NewPatchedSource("llvm", llvmVersion, newFromGitHub(
|
||||||
|
"llvm/llvm-project",
|
||||||
|
"llvmorg-"+llvmVersion,
|
||||||
|
llvmChecksum,
|
||||||
|
), true, llvmPatches...), llvmVersion
|
||||||
|
},
|
||||||
|
|
||||||
// litArgs returns [LIT] arguments for optional verbosity and check skipping.
|
Name: "llvm-project",
|
||||||
func litArgs(verbose bool, skipChecks ...string) string {
|
Description: "LLVM monorepo with Rosa OS patches",
|
||||||
args := []string{"-sv"}
|
|
||||||
if verbose {
|
ID: 1830,
|
||||||
args[0] = "--verbose"
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(skipChecks) > 0 {
|
func (t Toolchain) newCompilerRT() (pkg.Artifact, string) {
|
||||||
skipChecks = slices.Clone(skipChecks)
|
muslHeaders, _ := t.newMusl(true)
|
||||||
for i, s := range skipChecks {
|
return t.NewPackage("compiler-rt", llvmVersion, t.Load(llvmSource), &PackageAttr{
|
||||||
s = regexp.QuoteMeta(s)
|
NonStage0: []pkg.Artifact{
|
||||||
s = strings.ReplaceAll(s, "/", "\\/")
|
muslHeaders,
|
||||||
skipChecks[i] = s
|
},
|
||||||
}
|
Env: stage0ExclConcat(t, []string{},
|
||||||
args = append(args,
|
"LDFLAGS="+earlyLDFLAGS(false),
|
||||||
"--filter-out='\\''"+strings.Join(skipChecks, "|")+"'\\''")
|
),
|
||||||
}
|
|
||||||
|
|
||||||
return "'" + strings.Join(args, " ") + "'"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Toolchain) newEarlyCompilerRT() (pkg.Artifact, string) {
|
|
||||||
version := t.Version(llvmSource)
|
|
||||||
major, _, _ := strings.Cut(version, ".")
|
|
||||||
return t.NewPackage("early-compiler-rt", version, t.Load(llvmSource), &PackageAttr{
|
|
||||||
Flag: TExclusive,
|
Flag: TExclusive,
|
||||||
}, &CMakeHelper{
|
}, &CMakeHelper{
|
||||||
Append: []string{"compiler-rt"},
|
Append: []string{"compiler-rt"},
|
||||||
|
|
||||||
Cache: []KV{
|
Cache: []KV{
|
||||||
// libc++ not yet available
|
{"CMAKE_BUILD_TYPE", "Release"},
|
||||||
{"CMAKE_CXX_COMPILER_TARGET", ""},
|
|
||||||
|
|
||||||
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
|
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
|
||||||
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
|
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
|
||||||
{"LLVM_ENABLE_PER_TARGET_RUNTIME_DIR", "ON"},
|
|
||||||
|
// libc++ not yet available
|
||||||
|
{"CMAKE_CXX_COMPILER_TARGET", ""},
|
||||||
|
|
||||||
{"COMPILER_RT_BUILD_BUILTINS", "ON"},
|
{"COMPILER_RT_BUILD_BUILTINS", "ON"},
|
||||||
{"COMPILER_RT_DEFAULT_TARGET_ONLY", "OFF"},
|
{"COMPILER_RT_DEFAULT_TARGET_ONLY", "OFF"},
|
||||||
{"COMPILER_RT_SANITIZERS_TO_BUILD", "asan"},
|
{"COMPILER_RT_SANITIZERS_TO_BUILD", "asan"},
|
||||||
|
{"LLVM_ENABLE_PER_TARGET_RUNTIME_DIR", "ON"},
|
||||||
|
|
||||||
// does not work without libunwind
|
// does not work without libunwind
|
||||||
{"COMPILER_RT_BUILD_CTX_PROFILE", "OFF"},
|
{"COMPILER_RT_BUILD_CTX_PROFILE", "OFF"},
|
||||||
@@ -57,12 +53,11 @@ func (t Toolchain) newEarlyCompilerRT() (pkg.Artifact, string) {
|
|||||||
{"COMPILER_RT_BUILD_PROFILE", "OFF"},
|
{"COMPILER_RT_BUILD_PROFILE", "OFF"},
|
||||||
{"COMPILER_RT_BUILD_XRAY", "OFF"},
|
{"COMPILER_RT_BUILD_XRAY", "OFF"},
|
||||||
},
|
},
|
||||||
SkipTest: true,
|
|
||||||
Script: `
|
Script: `
|
||||||
mkdir -p "/work/system/lib/clang/` + major + `/lib/"
|
mkdir -p "/work/system/lib/clang/` + llvmVersionMajor + `/lib/"
|
||||||
ln -s \
|
ln -s \
|
||||||
"../../../${ROSA_TRIPLE}" \
|
"../../../${ROSA_TRIPLE}" \
|
||||||
"/work/system/lib/clang/` + major + `/lib/"
|
"/work/system/lib/clang/` + llvmVersionMajor + `/lib/"
|
||||||
|
|
||||||
ln -s \
|
ln -s \
|
||||||
"clang_rt.crtbegin-` + linuxArch() + `.o" \
|
"clang_rt.crtbegin-` + linuxArch() + `.o" \
|
||||||
@@ -74,16 +69,16 @@ ln -s \
|
|||||||
},
|
},
|
||||||
Python,
|
Python,
|
||||||
|
|
||||||
muslHeaders,
|
|
||||||
KernelHeaders,
|
KernelHeaders,
|
||||||
), version
|
), llvmVersion
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
artifactsM[earlyCompilerRT] = Metadata{
|
artifactsM[CompilerRT] = Metadata{
|
||||||
f: Toolchain.newEarlyCompilerRT,
|
f: Toolchain.newCompilerRT,
|
||||||
|
|
||||||
Name: "early-compiler-rt",
|
Name: "compiler-rt",
|
||||||
Description: "early LLVM runtime: compiler-rt",
|
Description: "LLVM runtime: compiler-rt",
|
||||||
|
Website: "https://llvm.org/",
|
||||||
|
|
||||||
Dependencies: P{
|
Dependencies: P{
|
||||||
Musl,
|
Musl,
|
||||||
@@ -91,16 +86,18 @@ func init() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t Toolchain) newEarlyRuntimes() (pkg.Artifact, string) {
|
func (t Toolchain) newLLVMRuntimes() (pkg.Artifact, string) {
|
||||||
version := t.Version(llvmSource)
|
return t.NewPackage("llvm-runtimes", llvmVersion, t.Load(llvmSource), &PackageAttr{
|
||||||
return t.NewPackage("early-runtimes", version, t.Load(llvmSource), &PackageAttr{
|
NonStage0: t.AppendPresets(nil, CompilerRT),
|
||||||
|
Env: stage0ExclConcat(t, []string{},
|
||||||
|
"LDFLAGS="+earlyLDFLAGS(false),
|
||||||
|
),
|
||||||
Flag: TExclusive,
|
Flag: TExclusive,
|
||||||
}, &CMakeHelper{
|
}, &CMakeHelper{
|
||||||
Append: []string{"runtimes"},
|
Append: []string{"runtimes"},
|
||||||
|
|
||||||
Cache: []KV{
|
Cache: []KV{
|
||||||
// libc++ not yet available
|
{"CMAKE_BUILD_TYPE", "Release"},
|
||||||
{"CMAKE_CXX_COMPILER_WORKS", "ON"},
|
|
||||||
|
|
||||||
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
|
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
|
||||||
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
|
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
|
||||||
@@ -109,179 +106,90 @@ func (t Toolchain) newEarlyRuntimes() (pkg.Artifact, string) {
|
|||||||
{"LIBUNWIND_USE_COMPILER_RT", "ON"},
|
{"LIBUNWIND_USE_COMPILER_RT", "ON"},
|
||||||
{"LIBCXX_HAS_MUSL_LIBC", "ON"},
|
{"LIBCXX_HAS_MUSL_LIBC", "ON"},
|
||||||
{"LIBCXX_USE_COMPILER_RT", "ON"},
|
{"LIBCXX_USE_COMPILER_RT", "ON"},
|
||||||
{"LIBCXX_HAS_ATOMIC_LIB", "OFF"},
|
|
||||||
{"LIBCXXABI_USE_COMPILER_RT", "ON"},
|
{"LIBCXXABI_USE_COMPILER_RT", "ON"},
|
||||||
{"LIBCXXABI_USE_LLVM_UNWINDER", "ON"},
|
{"LIBCXXABI_USE_LLVM_UNWINDER", "ON"},
|
||||||
|
|
||||||
|
// libc++ not yet available
|
||||||
|
{"CMAKE_CXX_COMPILER_WORKS", "ON"},
|
||||||
|
|
||||||
|
{"LIBCXX_HAS_ATOMIC_LIB", "OFF"},
|
||||||
{"LIBCXXABI_HAS_CXA_THREAD_ATEXIT_IMPL", "OFF"},
|
{"LIBCXXABI_HAS_CXA_THREAD_ATEXIT_IMPL", "OFF"},
|
||||||
|
|
||||||
{"LLVM_ENABLE_ZLIB", "FORCE_ON"},
|
{"LLVM_ENABLE_ZLIB", "OFF"},
|
||||||
{"LLVM_ENABLE_ZSTD", "FORCE_ON"},
|
{"LLVM_ENABLE_ZSTD", "OFF"},
|
||||||
{"LLVM_ENABLE_LIBXML2", "OFF"},
|
{"LLVM_ENABLE_LIBXML2", "OFF"},
|
||||||
},
|
},
|
||||||
SkipTest: true,
|
|
||||||
},
|
},
|
||||||
Python,
|
Python,
|
||||||
|
|
||||||
Zlib,
|
|
||||||
Zstd,
|
|
||||||
earlyCompilerRT,
|
|
||||||
KernelHeaders,
|
KernelHeaders,
|
||||||
), version
|
), llvmVersion
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
artifactsM[earlyRuntimes] = Metadata{
|
artifactsM[LLVMRuntimes] = Metadata{
|
||||||
f: Toolchain.newEarlyRuntimes,
|
f: Toolchain.newLLVMRuntimes,
|
||||||
|
|
||||||
Name: "early-runtimes",
|
Name: "llvm-runtimes",
|
||||||
Description: "early LLVM runtimes: libunwind, libcxx, libcxxabi",
|
Description: "LLVM runtimes: libunwind, libcxx, libcxxabi",
|
||||||
|
Website: "https://llvm.org/",
|
||||||
|
|
||||||
Dependencies: P{
|
Dependencies: P{
|
||||||
earlyCompilerRT,
|
CompilerRT,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t Toolchain) newLLVM() (pkg.Artifact, string) {
|
func (t Toolchain) newClang() (pkg.Artifact, string) {
|
||||||
var early PArtifact = muslHeaders
|
target := "'AArch64;RISCV;X86'"
|
||||||
if t.isStage0() {
|
if t.isStage0() {
|
||||||
// The LLVM build system is buggy around LLVM_LINK_LLVM_DYLIB and leaks
|
target = "Native"
|
||||||
// the system installation when invoking the newly built toolchain. This
|
|
||||||
// is worked around in stage0 by providing standalone builds of
|
|
||||||
// runtimes. Later stages rely on 3-stage determinism and allows the
|
|
||||||
// system installation from its previous stage to leak through.
|
|
||||||
early = earlyRuntimes
|
|
||||||
}
|
}
|
||||||
|
|
||||||
cache := []KV{
|
return t.NewPackage("clang", llvmVersion, t.Load(llvmSource), &PackageAttr{
|
||||||
{"ENABLE_LINKER_BUILD_ID", "ON"},
|
NonStage0: t.AppendPresets(nil, LLVMRuntimes),
|
||||||
{"COMPILER_RT_USE_BUILTINS_LIBRARY", "ON"},
|
Env: stage0ExclConcat(t, []string{},
|
||||||
{"COMPILER_RT_DEFAULT_TARGET_ONLY", "ON"},
|
"CFLAGS="+earlyCFLAGS,
|
||||||
{"COMPILER_RT_BUILD_GWP_ASAN", "OFF"},
|
"CXXFLAGS="+earlyCXXFLAGS(),
|
||||||
{"LIBCXX_CXX_ABI", "libcxxabi"},
|
"LDFLAGS="+earlyLDFLAGS(false),
|
||||||
{"LIBCXX_USE_COMPILER_RT", "ON"},
|
),
|
||||||
{"LIBCXX_ENABLE_STATIC_ABI_LIBRARY", "OFF"},
|
Flag: TExclusive,
|
||||||
{"LIBCXX_HAS_MUSL_LIBC", "ON"},
|
}, &CMakeHelper{
|
||||||
{"LIBCXX_HARDENING_MODE", "fast"},
|
Append: []string{"llvm"},
|
||||||
{"LIBCXXABI_USE_LLVM_UNWINDER", "ON"},
|
|
||||||
{"LIBCXXABI_ENABLE_STATIC_UNWINDER", "OFF"},
|
Cache: []KV{
|
||||||
{"LIBCXXABI_USE_COMPILER_RT", "ON"},
|
{"CMAKE_BUILD_TYPE", "Release"},
|
||||||
{"LLVM_INSTALL_BINUTILS_SYMLINKS", "ON"},
|
|
||||||
{"LLVM_INSTALL_UTILS", "ON"},
|
|
||||||
{"LLVM_BUILD_LLVM_DYLIB", "ON"},
|
|
||||||
{"LLVM_LINK_LLVM_DYLIB", "ON"},
|
|
||||||
{"LLVM_APPEND_VC_REV", "OFF"},
|
|
||||||
{"LLVM_ENABLE_RTTI", "ON"},
|
|
||||||
{"LLVM_ENABLE_ZLIB", "FORCE_ON"},
|
|
||||||
{"LLVM_ENABLE_ZSTD", "FORCE_ON"},
|
|
||||||
{"LLVM_ENABLE_PER_TARGET_RUNTIME_DIR", "ON"},
|
|
||||||
{"CLANG_DEFAULT_RTLIB", "compiler-rt"},
|
|
||||||
{"CLANG_DEFAULT_UNWINDLIB", "libunwind"},
|
|
||||||
{"CLANG_DEFAULT_CXX_STDLIB", "libc++"},
|
|
||||||
{"CLANG_CONFIG_FILE_SYSTEM_DIR", "/system/etc/clang"},
|
|
||||||
{"LLVM_ENABLE_FFI", "OFF"},
|
|
||||||
{"LLVM_ENABLE_LIBXML2", "OFF"},
|
|
||||||
{"LLVM_ENABLE_LIBCXX", "ON"},
|
|
||||||
{"LLVM_ENABLE_LLD", "ON"},
|
|
||||||
{"LIBUNWIND_ENABLE_ASSERTIONS", "OFF"},
|
|
||||||
{"LIBUNWIND_USE_COMPILER_RT", "ON"},
|
|
||||||
|
|
||||||
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
|
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
|
||||||
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
|
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
|
||||||
{"LLVM_ENABLE_PROJECTS", "'" + strings.Join([]string{
|
{"LLVM_ENABLE_PROJECTS", "'clang;lld'"},
|
||||||
"clang",
|
|
||||||
"lld",
|
|
||||||
}, ";") + "'"},
|
|
||||||
{"LLVM_ENABLE_RUNTIMES", "'" + strings.Join([]string{
|
|
||||||
"compiler-rt",
|
|
||||||
"libcxx",
|
|
||||||
"libcxxabi",
|
|
||||||
"libunwind",
|
|
||||||
"libclc",
|
|
||||||
}, ";") + "'"},
|
|
||||||
}
|
|
||||||
|
|
||||||
if !t.isStage0() {
|
{"LLVM_ENABLE_LIBCXX", "ON"},
|
||||||
skipChecks := []string{
|
{"LLVM_USE_LINKER", "lld"},
|
||||||
// expensive, pointless to run here
|
|
||||||
"benchmarks",
|
|
||||||
// LLVM ERROR: Tried to execute an unknown external function: roundevenf
|
|
||||||
"ExecutionEngine/Interpreter/intrinsics.ll",
|
|
||||||
// clang: deadlocks with LLVM_BUILD_LLVM_DYLIB
|
|
||||||
"crash-recovery-modules",
|
|
||||||
// clang: fatal error: '__config_site' file not found
|
|
||||||
"CodeGen/PowerPC/ppc-xmmintrin.c",
|
|
||||||
"CodeGen/PowerPC/ppc-mmintrin.c",
|
|
||||||
"CodeGen/PowerPC/ppc-emmintrin.c",
|
|
||||||
"CodeGen/PowerPC/ppc-pmmintrin.c",
|
|
||||||
"CodeGen/PowerPC/ppc-tmmintrin.c",
|
|
||||||
"CodeGen/PowerPC/ppc-smmintrin.c",
|
|
||||||
"CodeGenCUDA/amdgpu-alias-undef-symbols.cu",
|
|
||||||
// cxx: fails on musl
|
|
||||||
"close.dont-get-rid-of-buffer",
|
|
||||||
"re/re.traits",
|
|
||||||
"std/time",
|
|
||||||
"localization/locales",
|
|
||||||
"localization/locale.categories",
|
|
||||||
"selftest/dsl/dsl.sh.py",
|
|
||||||
"input.output/iostream.format",
|
|
||||||
"locale-specific_form",
|
|
||||||
// cxx: deadlocks
|
|
||||||
"std/thread/thread.jthread",
|
|
||||||
// unwind: fails on musl
|
|
||||||
"eh_frame_fde_pc_range",
|
|
||||||
}
|
|
||||||
switch runtime.GOARCH {
|
|
||||||
case "arm64":
|
|
||||||
skipChecks = append(skipChecks,
|
|
||||||
// LLVM: intermittently crashes
|
|
||||||
"ExecutionEngine/OrcLazy/multiple-compile-threads-basic.ll",
|
|
||||||
// unwind: unexpectedly passes
|
|
||||||
"unwind_leaffunction",
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if presetOpts&OptLLVMNoLTO == 0 {
|
{"LLVM_INSTALL_BINUTILS_SYMLINKS", "ON"},
|
||||||
cache = append(cache, []KV{
|
{"LLVM_INSTALL_CCTOOLS_SYMLINKS", "ON"},
|
||||||
// very expensive
|
|
||||||
{"LLVM_ENABLE_LTO", "Thin"},
|
|
||||||
}...)
|
|
||||||
}
|
|
||||||
|
|
||||||
cache = append(cache, []KV{
|
{"LLVM_LIT_ARGS", "'--verbose'"},
|
||||||
// symbols: clock_gettime, mallopt
|
|
||||||
{"COMPILER_RT_INCLUDE_TESTS", "OFF"},
|
|
||||||
|
|
||||||
{"LLVM_BUILD_TESTS", "ON"},
|
{"CLANG_DEFAULT_LINKER", "lld"},
|
||||||
{"LLVM_LIT_ARGS", litArgs(true, skipChecks...)},
|
{"CLANG_DEFAULT_CXX_STDLIB", "libc++"},
|
||||||
}...)
|
{"CLANG_DEFAULT_RTLIB", "compiler-rt"},
|
||||||
}
|
{"CLANG_DEFAULT_UNWINDLIB", "libunwind"},
|
||||||
|
|
||||||
version := t.Version(llvmSource)
|
{"LLVM_TARGETS_TO_BUILD", target},
|
||||||
return t.NewPackage("llvm", version, t.Load(llvmSource), nil, &CMakeHelper{
|
{"CMAKE_CROSSCOMPILING", "OFF"},
|
||||||
Append: []string{"llvm"},
|
{"CXX_SUPPORTS_CUSTOM_LINKER", "ON"},
|
||||||
|
|
||||||
Cache: cache,
|
{"LLVM_ENABLE_ZLIB", "OFF"},
|
||||||
|
{"LLVM_ENABLE_ZSTD", "OFF"},
|
||||||
|
{"LLVM_ENABLE_LIBXML2", "OFF"},
|
||||||
|
},
|
||||||
Script: `
|
Script: `
|
||||||
ln -s ld.lld /work/system/bin/ld
|
ln -s ld.lld /work/system/bin/ld
|
||||||
|
|
||||||
ln -s clang /work/system/bin/cc
|
ln -s clang /work/system/bin/cc
|
||||||
ln -s clang /work/system/bin/cpp
|
|
||||||
ln -s clang++ /work/system/bin/c++
|
ln -s clang++ /work/system/bin/c++
|
||||||
`,
|
|
||||||
|
|
||||||
// LLVM_LINK_LLVM_DYLIB causes llvm test suite to leak system
|
|
||||||
// installation into test environment, and the tests end up testing the
|
|
||||||
// system installation instead. Tests are disabled on stage0 and relies
|
|
||||||
// on 3-stage determinism to test later stages.
|
|
||||||
SkipTest: t.isStage0(),
|
|
||||||
|
|
||||||
Test: `
|
|
||||||
chmod +w /bin && ln -s \
|
|
||||||
../system/bin/chmod \
|
|
||||||
../system/bin/mkdir \
|
|
||||||
../system/bin/rm \
|
|
||||||
../system/bin/tr \
|
|
||||||
../system/bin/awk \
|
|
||||||
/bin
|
|
||||||
ninja ` + jobsFlagE + ` check-all
|
ninja ` + jobsFlagE + ` check-all
|
||||||
`,
|
`,
|
||||||
},
|
},
|
||||||
@@ -293,44 +201,44 @@ ninja ` + jobsFlagE + ` check-all
|
|||||||
Coreutils,
|
Coreutils,
|
||||||
Findutils,
|
Findutils,
|
||||||
|
|
||||||
Zlib,
|
|
||||||
Zstd,
|
|
||||||
early,
|
|
||||||
KernelHeaders,
|
KernelHeaders,
|
||||||
), version
|
), llvmVersion
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
const (
|
artifactsM[Clang] = Metadata{
|
||||||
version = "22.1.4"
|
f: Toolchain.newClang,
|
||||||
checksum = "Bk3t-tV5sD5T0bqefFMcLeFuAwXnhFipywZmqst5hAZs97QQWGKB_5XyAFjj5tDB"
|
|
||||||
)
|
|
||||||
|
|
||||||
artifactsM[llvmSource] = Metadata{
|
Name: "clang",
|
||||||
f: func(t Toolchain) (pkg.Artifact, string) {
|
Description: `an "LLVM native" C/C++/Objective-C compiler`,
|
||||||
return t.NewPatchedSource("llvm", version, newFromGitHub(
|
Website: "https://llvm.org/",
|
||||||
"llvm/llvm-project",
|
|
||||||
"llvmorg-"+version,
|
|
||||||
checksum,
|
|
||||||
), true, llvmPatches...), version
|
|
||||||
},
|
|
||||||
|
|
||||||
Name: "llvm-project",
|
|
||||||
Description: "LLVM monorepo with Rosa OS patches",
|
|
||||||
|
|
||||||
ID: 1830,
|
|
||||||
}
|
|
||||||
|
|
||||||
artifactsM[LLVM] = Metadata{
|
|
||||||
f: Toolchain.newLLVM,
|
|
||||||
|
|
||||||
Name: "llvm",
|
|
||||||
Description: "a collection of modular and reusable compiler and toolchain technologies",
|
|
||||||
Website: "https://llvm.org",
|
|
||||||
|
|
||||||
Dependencies: P{
|
Dependencies: P{
|
||||||
Zlib,
|
LLVMRuntimes,
|
||||||
Zstd,
|
|
||||||
Musl,
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (t Toolchain) newLibclc() (pkg.Artifact, string) {
|
||||||
|
return t.NewPackage("libclc", llvmVersion, t.Load(llvmSource), nil, &CMakeHelper{
|
||||||
|
Append: []string{"libclc"},
|
||||||
|
|
||||||
|
Cache: []KV{
|
||||||
|
{"CMAKE_BUILD_TYPE", "Release"},
|
||||||
|
|
||||||
|
{"LLVM_HOST_TRIPLE", `"${ROSA_TRIPLE}"`},
|
||||||
|
{"LLVM_DEFAULT_TARGET_TRIPLE", `"${ROSA_TRIPLE}"`},
|
||||||
|
|
||||||
|
{"LIBCLC_TARGETS_TO_BUILD", "all"},
|
||||||
|
},
|
||||||
|
Script: "ninja " + jobsFlagE + " test",
|
||||||
|
}), llvmVersion
|
||||||
|
}
|
||||||
|
func init() {
|
||||||
|
artifactsM[Libclc] = Metadata{
|
||||||
|
f: Toolchain.newLibclc,
|
||||||
|
|
||||||
|
Name: "libclc",
|
||||||
|
Description: "an open source, BSD/MIT dual licensed implementation of the library requirements of the OpenCL C programming language",
|
||||||
|
Website: "https://libclc.llvm.org/",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
9
internal/rosa/llvm_latest.go
Normal file
9
internal/rosa/llvm_latest.go
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
package rosa
|
||||||
|
|
||||||
|
// latest version of LLVM, conditional to temporarily avoid broken new releases
|
||||||
|
const (
|
||||||
|
llvmVersionMajor = "22"
|
||||||
|
llvmVersion = llvmVersionMajor + ".1.3"
|
||||||
|
|
||||||
|
llvmChecksum = "CUwnpzua_y28HZ9oI0NmcKL2wClsSjFpgY9do5-7cCZJHI5KNF64vfwGvY0TYyR3"
|
||||||
|
)
|
||||||
@@ -91,10 +91,10 @@ index 8ac8d4eb9181..e46b04a898ca 100644
|
|||||||
`},
|
`},
|
||||||
|
|
||||||
{"path-system-libraries", `diff --git a/clang/lib/Driver/ToolChains/Linux.cpp b/clang/lib/Driver/ToolChains/Linux.cpp
|
{"path-system-libraries", `diff --git a/clang/lib/Driver/ToolChains/Linux.cpp b/clang/lib/Driver/ToolChains/Linux.cpp
|
||||||
index d525b417b4ea..fdc411f2239c 100644
|
index 8ac8d4eb9181..f4d1347ab64d 100644
|
||||||
--- a/clang/lib/Driver/ToolChains/Linux.cpp
|
--- a/clang/lib/Driver/ToolChains/Linux.cpp
|
||||||
+++ b/clang/lib/Driver/ToolChains/Linux.cpp
|
+++ b/clang/lib/Driver/ToolChains/Linux.cpp
|
||||||
@@ -302,6 +302,7 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
|
@@ -282,6 +282,7 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
|
||||||
const bool IsHexagon = Arch == llvm::Triple::hexagon;
|
const bool IsHexagon = Arch == llvm::Triple::hexagon;
|
||||||
const bool IsRISCV = Triple.isRISCV();
|
const bool IsRISCV = Triple.isRISCV();
|
||||||
const bool IsCSKY = Triple.isCSKY();
|
const bool IsCSKY = Triple.isCSKY();
|
||||||
@@ -102,7 +102,7 @@ index d525b417b4ea..fdc411f2239c 100644
|
|||||||
|
|
||||||
if (IsCSKY && !SelectedMultilibs.empty())
|
if (IsCSKY && !SelectedMultilibs.empty())
|
||||||
SysRoot = SysRoot + SelectedMultilibs.back().osSuffix();
|
SysRoot = SysRoot + SelectedMultilibs.back().osSuffix();
|
||||||
@@ -337,12 +338,23 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
|
@@ -318,12 +319,23 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
|
||||||
const std::string OSLibDir = std::string(getOSLibDir(Triple, Args));
|
const std::string OSLibDir = std::string(getOSLibDir(Triple, Args));
|
||||||
const std::string MultiarchTriple = getMultiarchTriple(D, Triple, SysRoot);
|
const std::string MultiarchTriple = getMultiarchTriple(D, Triple, SysRoot);
|
||||||
|
|
||||||
@@ -110,7 +110,7 @@ index d525b417b4ea..fdc411f2239c 100644
|
|||||||
+ ExtraOpts.push_back("-rpath");
|
+ ExtraOpts.push_back("-rpath");
|
||||||
+ ExtraOpts.push_back("/system/lib");
|
+ ExtraOpts.push_back("/system/lib");
|
||||||
+ ExtraOpts.push_back("-rpath");
|
+ ExtraOpts.push_back("-rpath");
|
||||||
+ ExtraOpts.push_back(concat("/system/lib", Triple.str()));
|
+ ExtraOpts.push_back(concat("/system/lib", MultiarchTriple));
|
||||||
+ }
|
+ }
|
||||||
+
|
+
|
||||||
// mips32: Debian multilib, we use /libo32, while in other case, /lib is
|
// mips32: Debian multilib, we use /libo32, while in other case, /lib is
|
||||||
@@ -128,7 +128,7 @@ index d525b417b4ea..fdc411f2239c 100644
|
|||||||
}
|
}
|
||||||
Generic_GCC::AddMultilibPaths(D, SysRoot, OSLibDir, MultiarchTriple, Paths);
|
Generic_GCC::AddMultilibPaths(D, SysRoot, OSLibDir, MultiarchTriple, Paths);
|
||||||
|
|
||||||
@@ -360,18 +372,30 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
|
@@ -341,18 +353,30 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
|
||||||
Paths);
|
Paths);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -164,7 +164,7 @@ index d525b417b4ea..fdc411f2239c 100644
|
|||||||
}
|
}
|
||||||
|
|
||||||
ToolChain::RuntimeLibType Linux::GetDefaultRuntimeLibType() const {
|
ToolChain::RuntimeLibType Linux::GetDefaultRuntimeLibType() const {
|
||||||
@@ -572,6 +596,9 @@ std::string Linux::getDynamicLinker(const ArgList &Args) const {
|
@@ -457,6 +481,9 @@ std::string Linux::getDynamicLinker(const ArgList &Args) const {
|
||||||
return Triple.isArch64Bit() ? "/system/bin/linker64" : "/system/bin/linker";
|
return Triple.isArch64Bit() ? "/system/bin/linker64" : "/system/bin/linker";
|
||||||
}
|
}
|
||||||
if (Triple.isMusl()) {
|
if (Triple.isMusl()) {
|
||||||
@@ -175,10 +175,10 @@ index d525b417b4ea..fdc411f2239c 100644
|
|||||||
bool IsArm = false;
|
bool IsArm = false;
|
||||||
|
|
||||||
diff --git a/clang/tools/clang-installapi/Options.cpp b/clang/tools/clang-installapi/Options.cpp
|
diff --git a/clang/tools/clang-installapi/Options.cpp b/clang/tools/clang-installapi/Options.cpp
|
||||||
index f484d6f33ad8..dca55e72d67e 100644
|
index 64324a3f8b01..15ce70b68217 100644
|
||||||
--- a/clang/tools/clang-installapi/Options.cpp
|
--- a/clang/tools/clang-installapi/Options.cpp
|
||||||
+++ b/clang/tools/clang-installapi/Options.cpp
|
+++ b/clang/tools/clang-installapi/Options.cpp
|
||||||
@@ -514,7 +514,7 @@ bool Options::processFrontendOptions(InputArgList &Args) {
|
@@ -515,7 +515,7 @@ bool Options::processFrontendOptions(InputArgList &Args) {
|
||||||
FEOpts.FwkPaths = std::move(FrameworkPaths);
|
FEOpts.FwkPaths = std::move(FrameworkPaths);
|
||||||
|
|
||||||
// Add default framework/library paths.
|
// Add default framework/library paths.
|
||||||
@@ -19,7 +19,7 @@ cd "$(mktemp -d)"
|
|||||||
--build="${ROSA_TRIPLE}" \
|
--build="${ROSA_TRIPLE}" \
|
||||||
--disable-dependency-tracking
|
--disable-dependency-tracking
|
||||||
./build.sh
|
./build.sh
|
||||||
./make DESTDIR=/work install
|
./make DESTDIR=/work install check
|
||||||
`, pkg.Path(AbsUsrSrc.Append("make"), false, newTar(
|
`, pkg.Path(AbsUsrSrc.Append("make"), false, newTar(
|
||||||
"https://ftpmirror.gnu.org/gnu/make/make-"+version+".tar.gz",
|
"https://ftpmirror.gnu.org/gnu/make/make-"+version+".tar.gz",
|
||||||
checksum,
|
checksum,
|
||||||
@@ -194,7 +194,7 @@ make \
|
|||||||
}
|
}
|
||||||
scriptMake += "\n"
|
scriptMake += "\n"
|
||||||
|
|
||||||
if !attr.SkipCheck && presetOpts&OptSkipCheck == 0 {
|
if !attr.SkipCheck {
|
||||||
scriptMake += attr.ScriptCheckEarly + `make \
|
scriptMake += attr.ScriptCheckEarly + `make \
|
||||||
` + jobsFlagE + ` \
|
` + jobsFlagE + ` \
|
||||||
`
|
`
|
||||||
|
|||||||
@@ -30,8 +30,8 @@ func init() {
|
|||||||
|
|
||||||
func (t Toolchain) newLibdrm() (pkg.Artifact, string) {
|
func (t Toolchain) newLibdrm() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "2.4.133"
|
version = "2.4.131"
|
||||||
checksum = "bfj296NcR9DndO11hqDbSRFPqaweSLMqRk3dlCPZpM6FONX1WZ9J4JdbTDMUd1rU"
|
checksum = "riHPSpvTnvCPbR-iT4jt7_X-z4rpwm6oNh9ZN2zP6RBFkFVxBRKmedG4eEXSADIh"
|
||||||
)
|
)
|
||||||
return t.NewPackage("libdrm", version, newFromGitLab(
|
return t.NewPackage("libdrm", version, newFromGitLab(
|
||||||
"gitlab.freedesktop.org",
|
"gitlab.freedesktop.org",
|
||||||
|
|||||||
@@ -9,8 +9,8 @@ import (
|
|||||||
|
|
||||||
func (t Toolchain) newMeson() (pkg.Artifact, string) {
|
func (t Toolchain) newMeson() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "1.11.1"
|
version = "1.11.0"
|
||||||
checksum = "uvILRxdopwc6Dy17UbIeClcQr0qHqyTaqyk1M9OqWKN9PwB9N6UVAiyN8kSSz3r2"
|
checksum = "QJolMPzypTiS65GReSNPPlkUjHI6b1EDpZ-avIk3n6b6TQ93KfUM57DVUpY97Hf7"
|
||||||
)
|
)
|
||||||
return t.NewPackage("meson", version, newFromGitHub(
|
return t.NewPackage("meson", version, newFromGitHub(
|
||||||
"mesonbuild/meson",
|
"mesonbuild/meson",
|
||||||
@@ -22,7 +22,7 @@ func (t Toolchain) newMeson() (pkg.Artifact, string) {
|
|||||||
},
|
},
|
||||||
}, &PipHelper{
|
}, &PipHelper{
|
||||||
EnterSource: true,
|
EnterSource: true,
|
||||||
Check: `
|
Script: `
|
||||||
cd 'test cases'
|
cd 'test cases'
|
||||||
rm -rf \
|
rm -rf \
|
||||||
'common/32 has header' \
|
'common/32 has header' \
|
||||||
@@ -44,7 +44,7 @@ python3 ./run_project_tests.py \
|
|||||||
--backend=ninja
|
--backend=ninja
|
||||||
`,
|
`,
|
||||||
},
|
},
|
||||||
PythonSetuptools,
|
Setuptools,
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
CMake,
|
CMake,
|
||||||
Ninja,
|
Ninja,
|
||||||
@@ -117,7 +117,7 @@ func (attr *MesonHelper) script(name string) string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var scriptTest string
|
var scriptTest string
|
||||||
if !attr.SkipTest && presetOpts&OptSkipCheck == 0 {
|
if !attr.SkipTest {
|
||||||
scriptTest = `
|
scriptTest = `
|
||||||
meson test \
|
meson test \
|
||||||
--print-errorlogs`
|
--print-errorlogs`
|
||||||
|
|||||||
@@ -7,9 +7,9 @@ func (t Toolchain) newMksh() (pkg.Artifact, string) {
|
|||||||
version = "59c"
|
version = "59c"
|
||||||
checksum = "0Zj-k4nXEu3IuJY4lvwD2OrC2t27GdZj8SPy4DoaeuBRH1padWb7oREpYgwY8JNq"
|
checksum = "0Zj-k4nXEu3IuJY4lvwD2OrC2t27GdZj8SPy4DoaeuBRH1padWb7oREpYgwY8JNq"
|
||||||
)
|
)
|
||||||
return t.New("mksh-"+version, 0, t.AppendPresets(nil,
|
return t.New("mksh-"+version, 0, stage0Concat(t, []pkg.Artifact{},
|
||||||
Perl,
|
t.Load(Perl),
|
||||||
Coreutils,
|
t.Load(Coreutils),
|
||||||
), nil, []string{
|
), nil, []string{
|
||||||
"LDSTATIC=-static",
|
"LDSTATIC=-static",
|
||||||
"CPPFLAGS=-DMKSH_DEFAULT_PROFILEDIR=\\\"/system/etc\\\"",
|
"CPPFLAGS=-DMKSH_DEFAULT_PROFILEDIR=\\\"/system/etc\\\"",
|
||||||
|
|||||||
@@ -2,7 +2,10 @@ package rosa
|
|||||||
|
|
||||||
import "hakurei.app/internal/pkg"
|
import "hakurei.app/internal/pkg"
|
||||||
|
|
||||||
func (t Toolchain) newMusl(headers bool) (pkg.Artifact, string) {
|
func (t Toolchain) newMusl(
|
||||||
|
headers bool,
|
||||||
|
extra ...pkg.Artifact,
|
||||||
|
) (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "1.2.6"
|
version = "1.2.6"
|
||||||
checksum = "WtWb_OV_XxLDAB5NerOL9loLlHVadV00MmGk65PPBU1evaolagoMHfvpZp_vxEzS"
|
checksum = "WtWb_OV_XxLDAB5NerOL9loLlHVadV00MmGk65PPBU1evaolagoMHfvpZp_vxEzS"
|
||||||
@@ -33,26 +36,25 @@ rmdir -v /work/lib
|
|||||||
helper.Script = ""
|
helper.Script = ""
|
||||||
}
|
}
|
||||||
|
|
||||||
env := []string{
|
|
||||||
"LDFLAGS=" + earlyLDFLAGS(false),
|
|
||||||
}
|
|
||||||
if t.isStage0() {
|
|
||||||
env = append(env,
|
|
||||||
"CC=clang",
|
|
||||||
"AR=ar",
|
|
||||||
"RANLIB=ranlib",
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return t.NewPackage(name, version, newTar(
|
return t.NewPackage(name, version, newTar(
|
||||||
"https://musl.libc.org/releases/musl-"+version+".tar.gz",
|
"https://musl.libc.org/releases/musl-"+version+".tar.gz",
|
||||||
checksum,
|
checksum,
|
||||||
pkg.TarGzip,
|
pkg.TarGzip,
|
||||||
), &PackageAttr{
|
), &PackageAttr{
|
||||||
|
NonStage0: extra,
|
||||||
|
|
||||||
// expected to be writable in copies
|
// expected to be writable in copies
|
||||||
Chmod: true,
|
Chmod: true,
|
||||||
|
|
||||||
Env: env,
|
Env: stage0ExclConcat(t, []string{
|
||||||
|
"CC=clang",
|
||||||
|
"LIBCC=/system/lib/clang/" + llvmVersionMajor + "/lib/" +
|
||||||
|
triplet() + "/libclang_rt.builtins.a",
|
||||||
|
"AR=ar",
|
||||||
|
"RANLIB=ranlib",
|
||||||
|
},
|
||||||
|
"LDFLAGS="+earlyLDFLAGS(false),
|
||||||
|
),
|
||||||
}, &helper,
|
}, &helper,
|
||||||
Coreutils,
|
Coreutils,
|
||||||
), version
|
), version
|
||||||
@@ -60,7 +62,7 @@ rmdir -v /work/lib
|
|||||||
func init() {
|
func init() {
|
||||||
artifactsM[Musl] = Metadata{
|
artifactsM[Musl] = Metadata{
|
||||||
f: func(t Toolchain) (pkg.Artifact, string) {
|
f: func(t Toolchain) (pkg.Artifact, string) {
|
||||||
return t.newMusl(false)
|
return t.newMusl(false, t.Load(CompilerRT))
|
||||||
},
|
},
|
||||||
|
|
||||||
Name: "musl",
|
Name: "musl",
|
||||||
@@ -69,13 +71,4 @@ func init() {
|
|||||||
|
|
||||||
ID: 11688,
|
ID: 11688,
|
||||||
}
|
}
|
||||||
|
|
||||||
artifactsM[muslHeaders] = Metadata{
|
|
||||||
f: func(t Toolchain) (pkg.Artifact, string) {
|
|
||||||
return t.newMusl(true)
|
|
||||||
},
|
|
||||||
|
|
||||||
Name: "musl-headers",
|
|
||||||
Description: "system installation of musl headers",
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,8 +8,8 @@ import (
|
|||||||
|
|
||||||
func (t Toolchain) newNSS() (pkg.Artifact, string) {
|
func (t Toolchain) newNSS() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "3.123.1"
|
version = "3.123"
|
||||||
checksum = "g811Z_fc74ssg-s6BeXRG-ipSfJggD6hrxjVJxrOBIz98CE7piv0OLwzIRLMQpwR"
|
checksum = "pwBz0FO8jmhejPblfzNQLGsqBBGT0DwAw-z9yBJH3V3hVJBMKSc1l0R8GC0_BnzF"
|
||||||
|
|
||||||
version0 = "4_38_2"
|
version0 = "4_38_2"
|
||||||
checksum0 = "25x2uJeQnOHIiq_zj17b4sYqKgeoU8-IsySUptoPcdHZ52PohFZfGuIisBreWzx0"
|
checksum0 = "25x2uJeQnOHIiq_zj17b4sYqKgeoU8-IsySUptoPcdHZ52PohFZfGuIisBreWzx0"
|
||||||
@@ -82,24 +82,30 @@ func init() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func (t Toolchain) newBuildCATrust() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "0.5.1"
|
version = "0.5.1"
|
||||||
checksum = "oxjnuIrPVMPvD6x8VFLqB7EdbfuhouGQdtPuHDpEHGzoyH5nkxqtYN9UthMY9noA"
|
checksum = "g9AqIksz-hvCUceSR7ZKwfqf8Y_UsJU_3_zLUIdc4IkxFVkgdv9kKVvhFjE4s1-7"
|
||||||
)
|
|
||||||
artifactsM[buildcatrust] = newPythonPackage(
|
|
||||||
"buildcatrust", 233988,
|
|
||||||
"transform certificate stores between formats",
|
|
||||||
"https://github.com/nix-community/buildcatrust",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"nix-community/buildcatrust",
|
|
||||||
"v"+version, checksum,
|
|
||||||
), &PackageAttr{
|
|
||||||
ScriptEarly: `
|
|
||||||
rm buildcatrust/tests/test_nonhermetic.py
|
|
||||||
`,
|
|
||||||
}, nil, P{PythonFlitCore},
|
|
||||||
)
|
)
|
||||||
|
return t.newViaPip("buildcatrust", version,
|
||||||
|
"https://github.com/nix-community/buildcatrust/releases/"+
|
||||||
|
"download/v"+version+"/buildcatrust-"+version+"-py3-none-any.whl",
|
||||||
|
checksum), version
|
||||||
|
}
|
||||||
|
func init() {
|
||||||
|
artifactsM[buildcatrust] = Metadata{
|
||||||
|
f: Toolchain.newBuildCATrust,
|
||||||
|
|
||||||
|
Name: "buildcatrust",
|
||||||
|
Description: "transform certificate stores between formats",
|
||||||
|
Website: "https://github.com/nix-community/buildcatrust",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Python,
|
||||||
|
},
|
||||||
|
|
||||||
|
ID: 233988,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t Toolchain) newNSSCACert() (pkg.Artifact, string) {
|
func (t Toolchain) newNSSCACert() (pkg.Artifact, string) {
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ func (t Toolchain) newPerl() (pkg.Artifact, string) {
|
|||||||
|
|
||||||
ScriptEarly: `
|
ScriptEarly: `
|
||||||
echo 'print STDOUT "1..0 # Skip broken test\n";' > ext/Pod-Html/t/htmldir3.t
|
echo 'print STDOUT "1..0 # Skip broken test\n";' > ext/Pod-Html/t/htmldir3.t
|
||||||
chmod +w /system/bin && rm -f /system/bin/ps # perl does not like toybox ps
|
rm -f /system/bin/ps # perl does not like toybox ps
|
||||||
`,
|
`,
|
||||||
|
|
||||||
Flag: TEarly,
|
Flag: TEarly,
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package rosa
|
package rosa
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"path"
|
||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
@@ -22,32 +23,6 @@ func (t Toolchain) newPython() (pkg.Artifact, string) {
|
|||||||
Writable: true,
|
Writable: true,
|
||||||
Chmod: true,
|
Chmod: true,
|
||||||
|
|
||||||
Patches: []KV{
|
|
||||||
{"zipfile-no-default-strict_timestamps", `diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py
|
|
||||||
index 19aea290b58..51603ba9510 100644
|
|
||||||
--- a/Lib/zipfile/__init__.py
|
|
||||||
+++ b/Lib/zipfile/__init__.py
|
|
||||||
@@ -617,7 +617,7 @@ def _decodeExtra(self, filename_crc):
|
|
||||||
extra = extra[ln+4:]
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
- def from_file(cls, filename, arcname=None, *, strict_timestamps=True):
|
|
||||||
+ def from_file(cls, filename, arcname=None, *, strict_timestamps=False):
|
|
||||||
"""Construct an appropriate ZipInfo for a file on the filesystem.
|
|
||||||
|
|
||||||
filename should be the path to a file or directory on the filesystem.
|
|
||||||
@@ -1412,7 +1412,7 @@ class ZipFile:
|
|
||||||
_windows_illegal_name_trans_table = None
|
|
||||||
|
|
||||||
def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=True,
|
|
||||||
- compresslevel=None, *, strict_timestamps=True, metadata_encoding=None):
|
|
||||||
+ compresslevel=None, *, strict_timestamps=False, metadata_encoding=None):
|
|
||||||
"""Open the ZIP file with mode read 'r', write 'w', exclusive create 'x',
|
|
||||||
or append 'a'."""
|
|
||||||
if mode not in ('r', 'w', 'x', 'a'):
|
|
||||||
`},
|
|
||||||
},
|
|
||||||
|
|
||||||
Env: []string{
|
Env: []string{
|
||||||
"EXTRATESTOPTS=-j0 -x " + strings.Join([]string{
|
"EXTRATESTOPTS=-j0 -x " + strings.Join([]string{
|
||||||
// requires internet access (http://www.pythontest.net/)
|
// requires internet access (http://www.pythontest.net/)
|
||||||
@@ -61,7 +36,7 @@ index 19aea290b58..51603ba9510 100644
|
|||||||
"test_os",
|
"test_os",
|
||||||
"test_subprocess",
|
"test_subprocess",
|
||||||
|
|
||||||
// patched out insane strict_timestamps default
|
// somehow picks up mtime of source code
|
||||||
"test_zipfile",
|
"test_zipfile",
|
||||||
|
|
||||||
// requires gcc
|
// requires gcc
|
||||||
@@ -108,31 +83,18 @@ func init() {
|
|||||||
|
|
||||||
// PipHelper is the [Python] pip packaging helper.
|
// PipHelper is the [Python] pip packaging helper.
|
||||||
type PipHelper struct {
|
type PipHelper struct {
|
||||||
// Path elements joined with source.
|
|
||||||
Append []string
|
|
||||||
// Whether to omit --no-build-isolation.
|
// Whether to omit --no-build-isolation.
|
||||||
BuildIsolation bool
|
BuildIsolation bool
|
||||||
// Whether to enter source after install.
|
// Whether to enter source after install.
|
||||||
EnterSource bool
|
EnterSource bool
|
||||||
// Whether to install to build environment after install.
|
|
||||||
Install bool
|
|
||||||
// Whether to skip running tests.
|
|
||||||
SkipCheck bool
|
|
||||||
// Replaces pytest if non-empty.
|
|
||||||
Check string
|
|
||||||
// Runs after install.
|
// Runs after install.
|
||||||
Script string
|
Script string
|
||||||
}
|
}
|
||||||
|
|
||||||
var _ Helper = new(PipHelper)
|
var _ Helper = new(PipHelper)
|
||||||
|
|
||||||
// extra returns python, or pytest if defaults are assumed.
|
// extra returns python.
|
||||||
func (attr *PipHelper) extra(int) P {
|
func (*PipHelper) extra(int) P { return P{Python} }
|
||||||
if attr == nil || (!attr.SkipCheck && attr.Check == "") {
|
|
||||||
return P{PythonPyTest}
|
|
||||||
}
|
|
||||||
return P{Python}
|
|
||||||
}
|
|
||||||
|
|
||||||
// wantsChmod returns true.
|
// wantsChmod returns true.
|
||||||
func (*PipHelper) wantsChmod() bool { return true }
|
func (*PipHelper) wantsChmod() bool { return true }
|
||||||
@@ -154,7 +116,6 @@ func (attr *PipHelper) script(name string) string {
|
|||||||
if attr == nil {
|
if attr == nil {
|
||||||
attr = new(PipHelper)
|
attr = new(PipHelper)
|
||||||
}
|
}
|
||||||
sourcePath := AbsUsrSrc.Append(name).Append(attr.Append...)
|
|
||||||
|
|
||||||
var extra string
|
var extra string
|
||||||
if !attr.BuildIsolation {
|
if !attr.BuildIsolation {
|
||||||
@@ -162,56 +123,61 @@ func (attr *PipHelper) script(name string) string {
|
|||||||
--no-build-isolation \`
|
--no-build-isolation \`
|
||||||
}
|
}
|
||||||
|
|
||||||
var script string
|
script := attr.Script
|
||||||
if attr.Install {
|
|
||||||
script += `pip3 install \
|
|
||||||
--no-index \
|
|
||||||
--prefix=/system \
|
|
||||||
--no-build-isolation \
|
|
||||||
'` + sourcePath.String() + `'
|
|
||||||
`
|
|
||||||
}
|
|
||||||
if attr.EnterSource {
|
if attr.EnterSource {
|
||||||
script += "cd '/usr/src/" + name + "'\n"
|
script = "cd '/usr/src/" + name + "'\n" + script
|
||||||
}
|
}
|
||||||
if !attr.SkipCheck {
|
|
||||||
if attr.Check == "" {
|
|
||||||
// some test suites fall apart when ran out-of-tree
|
|
||||||
script += "(cd '" + sourcePath.String() + "' && pytest)\n"
|
|
||||||
} else {
|
|
||||||
script += attr.Check
|
|
||||||
}
|
|
||||||
}
|
|
||||||
script += attr.Script
|
|
||||||
|
|
||||||
return `
|
return `
|
||||||
pip3 install \
|
pip3 install \
|
||||||
--no-index \
|
--no-index \
|
||||||
--prefix=/system \
|
--prefix=/system \
|
||||||
--root=/work \` + extra + `
|
--root=/work \` + extra + `
|
||||||
'` + sourcePath.String() + `'
|
'/usr/src/` + name + `'
|
||||||
` + script
|
` + script
|
||||||
}
|
}
|
||||||
|
|
||||||
// newPythonPackage creates [Metadata] for a [Python] package.
|
// newViaPip installs a pip wheel from a url.
|
||||||
func newPythonPackage(
|
func (t Toolchain) newViaPip(
|
||||||
name string, id int, description, website, version string,
|
name, version, url, checksum string,
|
||||||
source pkg.Artifact, attrP *PackageAttr, attr *PipHelper,
|
extra ...PArtifact,
|
||||||
build P, extra ...PArtifact,
|
) pkg.Artifact {
|
||||||
|
return t.New(name+"-"+version, 0, t.AppendPresets(nil,
|
||||||
|
slices.Concat(P{Python}, extra)...,
|
||||||
|
), nil, nil, `
|
||||||
|
pip3 install \
|
||||||
|
--no-index \
|
||||||
|
--prefix=/system \
|
||||||
|
--root=/work \
|
||||||
|
'/usr/src/`+path.Base(url)+`'
|
||||||
|
`, pkg.Path(AbsUsrSrc.Append(path.Base(url)), false, pkg.NewHTTPGet(
|
||||||
|
nil, url,
|
||||||
|
mustDecode(checksum),
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// newPypi creates [Metadata] for a [pypi] package.
|
||||||
|
//
|
||||||
|
// [pypi]: https://pypi.org/
|
||||||
|
func newPypi(
|
||||||
|
name string, id int,
|
||||||
|
description, version, interpreter, abi, platform, checksum string,
|
||||||
|
extra ...PArtifact,
|
||||||
) Metadata {
|
) Metadata {
|
||||||
name = "python-" + name
|
|
||||||
return Metadata{
|
return Metadata{
|
||||||
f: func(t Toolchain) (pkg.Artifact, string) {
|
f: func(t Toolchain) (pkg.Artifact, string) {
|
||||||
return t.NewPackage(name, version, source, attrP, attr, slices.Concat(
|
return t.newViaPip(name, version, "https://files.pythonhosted.org/"+path.Join(
|
||||||
P{Python},
|
"packages",
|
||||||
extra,
|
interpreter,
|
||||||
build,
|
string(name[0]),
|
||||||
)...), version
|
name,
|
||||||
|
name+"-"+version+"-"+interpreter+"-"+abi+"-"+platform+".whl",
|
||||||
|
), checksum, extra...), version
|
||||||
},
|
},
|
||||||
|
|
||||||
Name: name,
|
Name: "python-" + name,
|
||||||
Description: description,
|
Description: description,
|
||||||
Website: website,
|
Website: "https://pypi.org/project/" + name + "/",
|
||||||
|
|
||||||
Dependencies: slices.Concat(P{Python}, extra),
|
Dependencies: slices.Concat(P{Python}, extra),
|
||||||
|
|
||||||
@@ -219,368 +185,100 @@ func newPythonPackage(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func (t Toolchain) newSetuptools() (pkg.Artifact, string) {
|
||||||
const (
|
|
||||||
version = "0.47.0"
|
|
||||||
checksum = "HZ-MvkUP8mbbx2YmsRNswj_bbOCIiXckuHqL5Qbvb5NxN5DYfWnqwkGNyS7OrId0"
|
|
||||||
)
|
|
||||||
artifactsM[PythonWheel] = newPythonPackage(
|
|
||||||
"wheel", 11428,
|
|
||||||
"the official binary distribution format for Python",
|
|
||||||
"https://peps.python.org/pep-0427/",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"pypa/wheel",
|
|
||||||
version, checksum,
|
|
||||||
), nil, &PipHelper{
|
|
||||||
Install: true,
|
|
||||||
}, P{PythonFlitCore, PythonSetuptools},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
const (
|
const (
|
||||||
version = "82.0.1"
|
version = "82.0.1"
|
||||||
checksum = "nznP46Tj539yqswtOrIM4nQgwLA1h-ApKX7z7ghazROCpyF5swtQGwsZoI93wkhc"
|
checksum = "nznP46Tj539yqswtOrIM4nQgwLA1h-ApKX7z7ghazROCpyF5swtQGwsZoI93wkhc"
|
||||||
)
|
)
|
||||||
artifactsM[PythonSetuptools] = newPythonPackage(
|
return t.NewPackage("setuptools", version, newFromGitHub(
|
||||||
"setuptools", 4021,
|
|
||||||
"the autotools of the Python ecosystem",
|
|
||||||
"https://pypi.org/project/setuptools/",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"pypa/setuptools",
|
"pypa/setuptools",
|
||||||
"v"+version, checksum,
|
"v"+version, checksum,
|
||||||
), nil, &PipHelper{
|
), nil, &PipHelper{
|
||||||
// error: invalid command 'dist_info'
|
|
||||||
BuildIsolation: true,
|
BuildIsolation: true,
|
||||||
// pytest circular dependency
|
}), version
|
||||||
SkipCheck: true,
|
|
||||||
}, nil)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
const (
|
artifactsM[Setuptools] = Metadata{
|
||||||
version = "1.1.1"
|
f: Toolchain.newSetuptools,
|
||||||
checksum = "rXZixTsZcRcIoUC1LvWrjySsiXSv5uhW6ng2P-yXZrbdj7FrSrDeJLCfC2b-ladV"
|
|
||||||
)
|
|
||||||
artifactsM[PythonVCSVersioning] = newPythonPackage(
|
|
||||||
"vcs-versioning", 389421,
|
|
||||||
"core VCS versioning functionality extracted as a standalone library",
|
|
||||||
"https://setuptools-scm.readthedocs.io/en/latest/",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"pypa/setuptools-scm",
|
|
||||||
"vcs-versioning-v"+version, checksum,
|
|
||||||
), &PackageAttr{
|
|
||||||
Env: []string{
|
|
||||||
"SETUPTOOLS_SCM_PRETEND_VERSION=" + version,
|
|
||||||
},
|
|
||||||
}, &PipHelper{
|
|
||||||
// upstream is monorepo of two packages (setuptools-scm)
|
|
||||||
Append: []string{"vcs-versioning"},
|
|
||||||
// pytest circular dependency
|
|
||||||
SkipCheck: true,
|
|
||||||
}, nil,
|
|
||||||
PythonSetuptools,
|
|
||||||
PythonPackaging,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
Name: "python-setuptools",
|
||||||
const (
|
Description: "the autotools of the Python ecosystem",
|
||||||
version = "10.0.5"
|
Website: "https://pypi.org/project/setuptools/",
|
||||||
checksum = "vTN_TPd-b4Wbsw5WmAcsWjrs-FNXXznOeVTDnb54NtXve9Oy-eb2HPy-RG3FzNqp"
|
|
||||||
)
|
|
||||||
artifactsM[PythonSetuptoolsSCM] = newPythonPackage(
|
|
||||||
"setuptools-scm", 7874,
|
|
||||||
"extracts Python package versions from Git or Mercurial metadata",
|
|
||||||
"https://setuptools-scm.readthedocs.io/en/latest/",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"pypa/setuptools-scm",
|
|
||||||
"setuptools-scm-v"+version, checksum,
|
|
||||||
), &PackageAttr{
|
|
||||||
Env: []string{
|
|
||||||
"SETUPTOOLS_SCM_PRETEND_VERSION=" + version,
|
|
||||||
},
|
|
||||||
}, &PipHelper{
|
|
||||||
// upstream is monorepo of two packages
|
|
||||||
Append: []string{"setuptools-scm"},
|
|
||||||
// pytest circular dependency
|
|
||||||
SkipCheck: true,
|
|
||||||
}, nil,
|
|
||||||
PythonSetuptools,
|
|
||||||
PythonVCSVersioning,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
const (
|
|
||||||
version = "3.12.0"
|
|
||||||
checksum = "VcTsiGiDU1aPLbjSPe38f9OjJDCLcxFz9loObJqUI1ZxDHXAaQMxBpNyLz_G1Rff"
|
|
||||||
)
|
|
||||||
artifactsM[PythonFlitCore] = newPythonPackage(
|
|
||||||
"flit-core", 44841,
|
|
||||||
"a PEP 517 build backend for packages using Flit",
|
|
||||||
"https://flit.pypa.io/",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"pypa/flit",
|
|
||||||
version, checksum,
|
|
||||||
), nil, &PipHelper{
|
|
||||||
// upstream has other unused packages with many dependencies
|
|
||||||
Append: []string{"flit_core"},
|
|
||||||
// pytest circular dependency
|
|
||||||
SkipCheck: true,
|
|
||||||
}, nil,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
const (
|
|
||||||
version = "26.2"
|
|
||||||
checksum = "rdpGa2EkPFbj1mFtLKLnSwIX9gPfELcuneiICjRVDNw6By49szTFVoW8gtMMZ6ZS"
|
|
||||||
)
|
|
||||||
artifactsM[PythonPackaging] = newPythonPackage(
|
|
||||||
"packaging", 60461,
|
|
||||||
"reusable core utilities for various Python Packaging interoperability specifications",
|
|
||||||
"https://packaging.pypa.io/",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"pypa/packaging",
|
|
||||||
version, checksum,
|
|
||||||
), nil, &PipHelper{
|
|
||||||
// pytest circular dependency
|
|
||||||
SkipCheck: true,
|
|
||||||
}, P{PythonFlitCore},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
artifactsM[LIT] = Metadata{
|
|
||||||
f: func(t Toolchain) (pkg.Artifact, string) {
|
|
||||||
version := t.Version(LLVM)
|
|
||||||
return t.NewPackage("lit", version, t.Load(llvmSource), nil, &PipHelper{
|
|
||||||
Append: []string{"llvm", "utils", "lit"},
|
|
||||||
// already checked during llvm
|
|
||||||
SkipCheck: true,
|
|
||||||
},
|
|
||||||
PythonSetuptools,
|
|
||||||
), version
|
|
||||||
},
|
|
||||||
|
|
||||||
Name: "lit",
|
|
||||||
Description: "a portable tool for executing LLVM and Clang style test suites",
|
|
||||||
Website: "https://llvm.org/docs/CommandGuide/lit.html",
|
|
||||||
|
|
||||||
Dependencies: P{
|
Dependencies: P{
|
||||||
Python,
|
Python,
|
||||||
},
|
},
|
||||||
|
|
||||||
|
ID: 4021,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
const (
|
artifactsM[PythonPygments] = newPypi(
|
||||||
version = "1.1.1"
|
|
||||||
checksum = "1fVwoal6FoKXczoG3qRUi87TxSWESSGcgvnbEZDYuaOgsO25o36iF3SbAhwkr4Va"
|
|
||||||
)
|
|
||||||
artifactsM[PythonPathspec] = newPythonPackage(
|
|
||||||
"pathspec", 23424,
|
|
||||||
"utility library for gitignore style pattern matching of file paths",
|
|
||||||
"https://github.com/cpburnz/python-pathspec",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"cpburnz/python-pathspec",
|
|
||||||
"v"+version, checksum,
|
|
||||||
), nil, &PipHelper{
|
|
||||||
// pytest circular dependency
|
|
||||||
SkipCheck: true,
|
|
||||||
}, P{PythonFlitCore},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
const (
|
|
||||||
version = "2026.4.28.13"
|
|
||||||
checksum = "Z3MbmMXtmWHCM3-EvJehb9MzDqX7Ce_Xg86D5g5nxFRWMKqwHwnQ8R-AlKf-32HU"
|
|
||||||
)
|
|
||||||
artifactsM[PythonTroveClassifiers] = newPythonPackage(
|
|
||||||
"trove-classifiers", 88298,
|
|
||||||
"canonical source for classifiers on PyPI",
|
|
||||||
"https://pypi.org/p/trove-classifiers/",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"pypa/trove-classifiers",
|
|
||||||
version, checksum,
|
|
||||||
), nil, &PipHelper{
|
|
||||||
// pytest circular dependency
|
|
||||||
SkipCheck: true,
|
|
||||||
}, P{PythonSetuptools},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
const (
|
|
||||||
version = "1.6.0"
|
|
||||||
checksum = "GiUgDkKjF8Xn1cmq6iMhTGXzcPIYeaJrvQpHBSAJapNVx4UyuiTXqd5eVlxSClJu"
|
|
||||||
)
|
|
||||||
artifactsM[PythonPluggy] = newPythonPackage(
|
|
||||||
"pluggy", 7500,
|
|
||||||
"the core framework used by the pytest, tox, and devpi projects",
|
|
||||||
"https://pluggy.readthedocs.io/en/latest/",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"pytest-dev/pluggy",
|
|
||||||
version, checksum,
|
|
||||||
), &PackageAttr{
|
|
||||||
Env: []string{
|
|
||||||
"SETUPTOOLS_SCM_PRETEND_VERSION_FOR_PLUGGY=" + version,
|
|
||||||
},
|
|
||||||
}, &PipHelper{
|
|
||||||
// pytest circular dependency
|
|
||||||
SkipCheck: true,
|
|
||||||
}, P{PythonSetuptoolsSCM},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
const (
|
|
||||||
version = "1.16.5"
|
|
||||||
checksum = "V2eREtqZLZeV85yb4O-bfAJCUluHcQP76Qfs0QH5s7RF_Oc8xIP8jD0jl85qFyWk"
|
|
||||||
)
|
|
||||||
artifactsM[PythonHatchling] = newPythonPackage(
|
|
||||||
"hatchling", 16137,
|
|
||||||
"the extensible, standards compliant build backend used by Hatch",
|
|
||||||
"https://hatch.pypa.io/latest/",
|
|
||||||
version, newFromGitHub(
|
|
||||||
"pypa/hatch",
|
|
||||||
"hatch-v"+version, checksum,
|
|
||||||
), nil, &PipHelper{
|
|
||||||
// upstream has other unused packages with many dependencies
|
|
||||||
Append: []string{"backend"},
|
|
||||||
// pytest circular dependency
|
|
||||||
SkipCheck: true,
|
|
||||||
}, nil,
|
|
||||||
PythonPackaging,
|
|
||||||
PythonPathspec,
|
|
||||||
PythonTroveClassifiers,
|
|
||||||
PythonPluggy,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
const (
|
|
||||||
version = "2.20.0"
|
|
||||||
checksum = "L-2P6vn7c_CNZYliE5CJAWLxO1ziDQVVkf8bnZbHj8aSCQ43oWv11wC9KzU9MeCa"
|
|
||||||
)
|
|
||||||
artifactsM[PythonPygments] = newPythonPackage(
|
|
||||||
"pygments", 3986,
|
"pygments", 3986,
|
||||||
" a syntax highlighting package written in Python",
|
" a syntax highlighting package written in Python",
|
||||||
"https://pygments.org/",
|
"2.20.0", "py3", "none", "any",
|
||||||
version, newFromGitHub(
|
"qlyqX2YSXcV0Z8XgGaPttc_gkq-xsu_nYs6NFOcYnk-CX7qmcj45gG-h6DpwPIcO",
|
||||||
"pygments/pygments",
|
|
||||||
version, checksum,
|
|
||||||
), nil, &PipHelper{
|
|
||||||
// pytest circular dependency
|
|
||||||
SkipCheck: true,
|
|
||||||
}, P{PythonHatchling},
|
|
||||||
)
|
)
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
artifactsM[PythonPluggy] = newPypi(
|
||||||
const (
|
"pluggy", 7500,
|
||||||
version = "2.3.0"
|
"the core framework used by the pytest, tox, and devpi projects",
|
||||||
checksum = "mH7VBZaXcYatBPE3RQQZvSzz_Ay8IPPek60NpPHZulPq4ReAFUUsA4EPWfiyMknZ"
|
"1.6.0", "py3", "none", "any",
|
||||||
|
"2HWYBaEwM66-y1hSUcWI1MyE7dVVuNNRW24XD6iJBey4YaUdAK8WeXdtFMQGC-4J",
|
||||||
)
|
)
|
||||||
artifactsM[PythonIniConfig] = newPythonPackage(
|
|
||||||
|
artifactsM[PythonPackaging] = newPypi(
|
||||||
|
"packaging", 60461,
|
||||||
|
"reusable core utilities for various Python Packaging interoperability specifications",
|
||||||
|
"26.1", "py3", "none", "any",
|
||||||
|
"6WZjBJeRb0eZZavxM8cLPcgD-ch-1FblsHoCFKC_9VUC5XAmd397LwliVhsnQcSN",
|
||||||
|
)
|
||||||
|
|
||||||
|
artifactsM[PythonIniConfig] = newPypi(
|
||||||
"iniconfig", 114778,
|
"iniconfig", 114778,
|
||||||
"a small and simple INI-file parser module",
|
"a small and simple INI-file parser module",
|
||||||
"https://github.com/pytest-dev/iniconfig",
|
"2.3.0", "py3", "none", "any",
|
||||||
version, newFromGitHub(
|
"SDgs4S5bXi77aVOeKTPv2TUrS3M9rduiK4DpU0hCmDsSBWqnZcWInq9lsx6INxut",
|
||||||
"pytest-dev/iniconfig",
|
|
||||||
"v"+version, checksum,
|
|
||||||
), &PackageAttr{
|
|
||||||
Env: []string{
|
|
||||||
"SETUPTOOLS_SCM_PRETEND_VERSION_FOR_INICONFIG=" + version,
|
|
||||||
},
|
|
||||||
}, &PipHelper{
|
|
||||||
// pytest circular dependency
|
|
||||||
SkipCheck: true,
|
|
||||||
}, P{PythonSetuptoolsSCM},
|
|
||||||
)
|
)
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
artifactsM[PythonPyTest] = newPypi(
|
||||||
const (
|
|
||||||
version = "9.0.3"
|
|
||||||
checksum = "qfLL_znWhbJCDbNJvrx9H3-orJ86z4ifhaW0bIn21jl2sDP-FVoX_1yieOypArQe"
|
|
||||||
)
|
|
||||||
artifactsM[PythonPyTest] = newPythonPackage(
|
|
||||||
"pytest", 3765,
|
"pytest", 3765,
|
||||||
"the pytest framework",
|
"the pytest framework",
|
||||||
"https://pytest.org",
|
"9.0.3", "py3", "none", "any",
|
||||||
version, newFromGitHub(
|
"57WLrIVOfyoRDjt5qD6LGOaDcDCtzQnKDSTUb7GzHyJDtry_nGHHs4-0tW0tiIJr",
|
||||||
"pytest-dev/pytest",
|
|
||||||
version, checksum,
|
|
||||||
), &PackageAttr{
|
|
||||||
Env: []string{
|
|
||||||
"SETUPTOOLS_SCM_PRETEND_VERSION_FOR_PYTEST=" + version,
|
|
||||||
},
|
|
||||||
}, &PipHelper{
|
|
||||||
// many dependencies
|
|
||||||
SkipCheck: true,
|
|
||||||
}, P{PythonSetuptoolsSCM},
|
|
||||||
PythonIniConfig,
|
PythonIniConfig,
|
||||||
PythonPackaging,
|
PythonPackaging,
|
||||||
PythonPluggy,
|
PythonPluggy,
|
||||||
PythonPygments,
|
PythonPygments,
|
||||||
)
|
)
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
artifactsM[PythonMarkupSafe] = newPypi(
|
||||||
const (
|
|
||||||
version = "3.0.3"
|
|
||||||
checksum = "txRGYdWE3his1lHHRI-lZADw0-ILvUg2l5OGdFHtFXIb_QowGxwdxHCUSJIgmjQs"
|
|
||||||
)
|
|
||||||
artifactsM[PythonMarkupSafe] = newPythonPackage(
|
|
||||||
"markupsafe", 3918,
|
"markupsafe", 3918,
|
||||||
"implements a text object that escapes characters so it is safe to use in HTML and XML",
|
"implements a text object that escapes characters so it is safe to use in HTML and XML",
|
||||||
"https://markupsafe.palletsprojects.com/",
|
"3.0.3", "cp314", "cp314", "musllinux_1_2_"+linuxArch(),
|
||||||
version, newFromGitHub(
|
perArch[string]{
|
||||||
"pallets/markupsafe",
|
"amd64": "E2mo9ig_FKgTpGon_8qqviSEULwhnmxTIqd9vfyNxNpK4yofVYM7eLW_VE-LKbtO",
|
||||||
version, checksum,
|
"arm64": "iG_hqsncOs8fA7bCaAg0x9XenXWlo9sqblyPcSG7yA9sfGLvM9KZznCpwWfOCwFC",
|
||||||
), nil, &PipHelper{
|
"riscv64": "7DI7U0M3jvr7U4uZml25GLw3m3EvMubCtNukZmss1gkVJ_DVkhV5DgX3Wt_sztbv",
|
||||||
// ModuleNotFoundError: No module named 'markupsafe'
|
}.unwrap(),
|
||||||
Install: true,
|
|
||||||
}, P{PythonSetuptools},
|
|
||||||
)
|
)
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
artifactsM[PythonMako] = newPypi(
|
||||||
const (
|
|
||||||
version = "1.3.12"
|
|
||||||
checksum = "OZbBsQe2MzRuAo5Mr4qRwWHGqU1EEZeBuSprDDIceAtMLIUJtO7SbERlxHIxNhLk"
|
|
||||||
)
|
|
||||||
artifactsM[PythonMako] = newPythonPackage(
|
|
||||||
"mako", 3915,
|
"mako", 3915,
|
||||||
"a template library written in Python",
|
"a template library written in Python",
|
||||||
"https://www.makotemplates.org/",
|
"1.3.11", "py3", "none", "any",
|
||||||
version, newFromGitHub(
|
"WJ_hxYI-nNiuDiM6QhfAG84uO5U-M2aneB0JS9AQ2J2Oi6YXAbBxIdOeOEng6CoS",
|
||||||
"sqlalchemy/mako",
|
|
||||||
"rel_"+strings.Join(strings.SplitN(version, ".", 3), "_"),
|
|
||||||
checksum,
|
|
||||||
), nil, nil, P{PythonSetuptools},
|
|
||||||
PythonMarkupSafe,
|
PythonMarkupSafe,
|
||||||
)
|
)
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
artifactsM[PythonPyYAML] = newPypi(
|
||||||
const (
|
|
||||||
version = "6.0.3"
|
|
||||||
checksum = "7wDv0RW9chBdu9l5Q4Hun5F2HHdo105ZSIixwdFPKbEYbftW9YxmsegfL-zafnbJ"
|
|
||||||
)
|
|
||||||
artifactsM[PythonPyYAML] = newPythonPackage(
|
|
||||||
"pyyaml", 4123,
|
"pyyaml", 4123,
|
||||||
"a YAML parser and emitter for Python",
|
"a YAML parser and emitter for Python",
|
||||||
"https://pyyaml.org/",
|
"6.0.3", "cp314", "cp314", "musllinux_1_2_"+linuxArch(),
|
||||||
version, newFromGitHub(
|
perArch[string]{
|
||||||
"yaml/pyyaml",
|
"amd64": "4_jhCFpUNtyrFp2HOMqUisR005u90MHId53eS7rkUbcGXkoaJ7JRsY21dREHEfGN",
|
||||||
version, checksum,
|
"arm64": "sQ818ZYSmC7Vj9prIPx3sEYqSDhZlWvLbgHV9w4GjxsfQ63ZSzappctKM7Lb0Whw",
|
||||||
), nil, &PipHelper{
|
}.unwrap(),
|
||||||
// ModuleNotFoundError: No module named 'yaml'
|
|
||||||
Install: true,
|
|
||||||
}, P{PythonSetuptools},
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ import "hakurei.app/internal/pkg"
|
|||||||
|
|
||||||
func (t Toolchain) newQEMU() (pkg.Artifact, string) {
|
func (t Toolchain) newQEMU() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "11.0.0"
|
version = "10.2.2"
|
||||||
checksum = "C64gdi_Tkdg2fTwD9ERxtWGcf8vNn_6UvczW0c-x0KW1NZtd3NbEOIrlDhYGn15n"
|
checksum = "uNzRxlrVoLWe-EmZmBp75SezymgE512iE5XN90Bl7wi6CjE_oQGQB-9ocs7E16QG"
|
||||||
)
|
)
|
||||||
return t.NewPackage("qemu", version, newTar(
|
return t.NewPackage("qemu", version, newTar(
|
||||||
"https://download.qemu.org/qemu-"+version+".tar.bz2",
|
"https://download.qemu.org/qemu-"+version+".tar.bz2",
|
||||||
@@ -73,8 +73,6 @@ EOF
|
|||||||
},
|
},
|
||||||
Bash,
|
Bash,
|
||||||
Python,
|
Python,
|
||||||
PythonSetuptools,
|
|
||||||
PythonWheel,
|
|
||||||
Ninja,
|
Ninja,
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
Diffutils,
|
Diffutils,
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package rosa
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"errors"
|
"errors"
|
||||||
|
"log"
|
||||||
"path"
|
"path"
|
||||||
"runtime"
|
"runtime"
|
||||||
"slices"
|
"slices"
|
||||||
@@ -14,12 +15,6 @@ import (
|
|||||||
"hakurei.app/internal/pkg"
|
"hakurei.app/internal/pkg"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Extension is the variant identification string of custom artifact
|
|
||||||
// implementations registered by package rosa.
|
|
||||||
const Extension = "rosa"
|
|
||||||
|
|
||||||
func init() { pkg.SetExtension(Extension) }
|
|
||||||
|
|
||||||
const (
|
const (
|
||||||
// kindEtc is the kind of [pkg.Artifact] of cureEtc.
|
// kindEtc is the kind of [pkg.Artifact] of cureEtc.
|
||||||
kindEtc = iota + pkg.KindCustomOffset
|
kindEtc = iota + pkg.KindCustomOffset
|
||||||
@@ -33,7 +28,7 @@ const (
|
|||||||
func mustDecode(s string) pkg.Checksum {
|
func mustDecode(s string) pkg.Checksum {
|
||||||
var fallback = pkg.Checksum{}
|
var fallback = pkg.Checksum{}
|
||||||
if s == "" {
|
if s == "" {
|
||||||
println(
|
log.Println(
|
||||||
"falling back to",
|
"falling back to",
|
||||||
pkg.Encode(fallback),
|
pkg.Encode(fallback),
|
||||||
"for unpopulated checksum",
|
"for unpopulated checksum",
|
||||||
@@ -107,6 +102,21 @@ func earlyLDFLAGS(static bool) string {
|
|||||||
return s
|
return s
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// earlyCFLAGS is reference CFLAGS for the stage0 toolchain.
|
||||||
|
const earlyCFLAGS = "-Qunused-arguments " +
|
||||||
|
"-isystem/system/include"
|
||||||
|
|
||||||
|
// earlyCXXFLAGS returns reference CXXFLAGS for the stage0 toolchain
|
||||||
|
// corresponding to [runtime.GOARCH].
|
||||||
|
func earlyCXXFLAGS() string {
|
||||||
|
return "--start-no-unused-arguments " +
|
||||||
|
"-stdlib=libc++ " +
|
||||||
|
"--end-no-unused-arguments " +
|
||||||
|
"-isystem/system/include/c++/v1 " +
|
||||||
|
"-isystem/system/include/" + triplet() + "/c++/v1 " +
|
||||||
|
"-isystem/system/include "
|
||||||
|
}
|
||||||
|
|
||||||
// Toolchain denotes the infrastructure to compile a [pkg.Artifact] on.
|
// Toolchain denotes the infrastructure to compile a [pkg.Artifact] on.
|
||||||
type Toolchain uint32
|
type Toolchain uint32
|
||||||
|
|
||||||
@@ -176,6 +186,24 @@ func (t Toolchain) isStd() bool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// stage0Concat concatenates s and values. If the current toolchain is
|
||||||
|
// toolchainStage0, stage0Concat returns s as is.
|
||||||
|
func stage0Concat[S ~[]E, E any](t Toolchain, s S, values ...E) S {
|
||||||
|
if t.isStage0() {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
return slices.Concat(s, values)
|
||||||
|
}
|
||||||
|
|
||||||
|
// stage0ExclConcat concatenates s and values. If the current toolchain is not
|
||||||
|
// toolchainStage0, stage0ExclConcat returns s as is.
|
||||||
|
func stage0ExclConcat[S ~[]E, E any](t Toolchain, s S, values ...E) S {
|
||||||
|
if t.isStage0() {
|
||||||
|
return slices.Concat(s, values)
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
// lastIndexFunc is like [strings.LastIndexFunc] but for [slices].
|
// lastIndexFunc is like [strings.LastIndexFunc] but for [slices].
|
||||||
func lastIndexFunc[S ~[]E, E any](s S, f func(E) bool) (i int) {
|
func lastIndexFunc[S ~[]E, E any](s S, f func(E) bool) (i int) {
|
||||||
if i = slices.IndexFunc(s, f); i < 0 {
|
if i = slices.IndexFunc(s, f); i < 0 {
|
||||||
@@ -265,7 +293,6 @@ func (t Toolchain) New(
|
|||||||
|
|
||||||
case toolchainGentoo, toolchainStage0:
|
case toolchainGentoo, toolchainStage0:
|
||||||
name += "-boot"
|
name += "-boot"
|
||||||
support = append(support, extra...)
|
|
||||||
support = append(support, cureEtc{})
|
support = append(support, cureEtc{})
|
||||||
if t == toolchainStage0 {
|
if t == toolchainStage0 {
|
||||||
support = append(support, NewStage0())
|
support = append(support, NewStage0())
|
||||||
@@ -286,6 +313,7 @@ mkdir -vp /work/system/bin
|
|||||||
),
|
),
|
||||||
)))
|
)))
|
||||||
}
|
}
|
||||||
|
support = slices.Concat(support, extra)
|
||||||
env = fixupEnviron(env, []string{
|
env = fixupEnviron(env, []string{
|
||||||
EnvTriplet + "=" + triplet(),
|
EnvTriplet + "=" + triplet(),
|
||||||
lcMessages,
|
lcMessages,
|
||||||
@@ -305,7 +333,7 @@ mkdir -vp /work/system/bin
|
|||||||
toybox = toyboxEarly
|
toybox = toyboxEarly
|
||||||
}
|
}
|
||||||
|
|
||||||
base := LLVM
|
base := Clang
|
||||||
if flag&TNoToolchain != 0 {
|
if flag&TNoToolchain != 0 {
|
||||||
base = Musl
|
base = Musl
|
||||||
}
|
}
|
||||||
@@ -320,6 +348,11 @@ mkdir -vp /work/system/bin
|
|||||||
env = fixupEnviron(env, []string{
|
env = fixupEnviron(env, []string{
|
||||||
EnvTriplet + "=" + triplet(),
|
EnvTriplet + "=" + triplet(),
|
||||||
lcMessages,
|
lcMessages,
|
||||||
|
|
||||||
|
"AR=ar",
|
||||||
|
"RANLIB=ranlib",
|
||||||
|
"LIBCC=/system/lib/clang/" + llvmVersionMajor + "/lib/" + triplet() +
|
||||||
|
"/libclang_rt.builtins.a",
|
||||||
}, "/system/bin", "/bin")
|
}, "/system/bin", "/bin")
|
||||||
|
|
||||||
default:
|
default:
|
||||||
@@ -377,8 +410,8 @@ cat /usr/src/` + name + `-patches/* | \
|
|||||||
`
|
`
|
||||||
aname += "-patched"
|
aname += "-patched"
|
||||||
}
|
}
|
||||||
return t.New(aname, 0, t.AppendPresets(nil,
|
return t.New(aname, 0, stage0Concat(t, []pkg.Artifact{},
|
||||||
Patch,
|
t.Load(Patch),
|
||||||
), nil, nil, script, paths...)
|
), nil, nil, script, paths...)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -425,6 +458,9 @@ type PackageAttr struct {
|
|||||||
// Passed to [Toolchain.NewPatchedSource].
|
// Passed to [Toolchain.NewPatchedSource].
|
||||||
Patches []KV
|
Patches []KV
|
||||||
|
|
||||||
|
// Dependencies not provided by stage0.
|
||||||
|
NonStage0 []pkg.Artifact
|
||||||
|
|
||||||
// Passed through to [Toolchain.New], before source.
|
// Passed through to [Toolchain.New], before source.
|
||||||
Paths []pkg.ExecPath
|
Paths []pkg.ExecPath
|
||||||
// Passed through to [Toolchain.New].
|
// Passed through to [Toolchain.New].
|
||||||
@@ -492,8 +528,14 @@ func (t Toolchain) NewPackage(
|
|||||||
panic("source must be non-nil")
|
panic("source must be non-nil")
|
||||||
}
|
}
|
||||||
wantsChmod, wantsWrite := helper.wantsChmod(), helper.wantsWrite()
|
wantsChmod, wantsWrite := helper.wantsChmod(), helper.wantsWrite()
|
||||||
extraRes := make([]pkg.Artifact, 0, 1<<3+len(extra))
|
dc := len(attr.NonStage0)
|
||||||
{
|
if !t.isStage0() {
|
||||||
|
dc += 1<<3 + len(extra)
|
||||||
|
}
|
||||||
|
|
||||||
|
extraRes := make([]pkg.Artifact, 0, dc)
|
||||||
|
extraRes = append(extraRes, attr.NonStage0...)
|
||||||
|
if !t.isStage0() {
|
||||||
pv := paGet()
|
pv := paGet()
|
||||||
for _, p := range helper.extra(attr.Flag) {
|
for _, p := range helper.extra(attr.Flag) {
|
||||||
extraRes = t.appendPreset(extraRes, pv, p)
|
extraRes = t.appendPreset(extraRes, pv, p)
|
||||||
|
|||||||
@@ -28,7 +28,6 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func TestMain(m *testing.M) {
|
func TestMain(m *testing.M) {
|
||||||
rosa.DropCaches(rosa.OptLLVMNoLTO)
|
|
||||||
container.TryArgv0(nil)
|
container.TryArgv0(nil)
|
||||||
|
|
||||||
code := m.Run()
|
code := m.Run()
|
||||||
@@ -94,14 +93,11 @@ func TestCureAll(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func BenchmarkStage3(b *testing.B) {
|
func BenchmarkStage3(b *testing.B) {
|
||||||
flags := rosa.Flags()
|
|
||||||
b.Cleanup(func() { rosa.DropCaches(flags) })
|
|
||||||
|
|
||||||
for b.Loop() {
|
for b.Loop() {
|
||||||
rosa.Std.Load(rosa.LLVM)
|
rosa.Std.Load(rosa.Clang)
|
||||||
|
|
||||||
b.StopTimer()
|
b.StopTimer()
|
||||||
rosa.DropCaches(0)
|
rosa.DropCaches()
|
||||||
b.StartTimer()
|
b.StartTimer()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ import "hakurei.app/internal/pkg"
|
|||||||
|
|
||||||
func (t Toolchain) newRsync() (pkg.Artifact, string) {
|
func (t Toolchain) newRsync() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "3.4.2"
|
version = "3.4.1"
|
||||||
checksum = "t7PxS4WHXzefLMKKc_3hJgxUmlGG6KgHMZ8i4DZvCQAUAizxbclNKwfLyOHyq5BX"
|
checksum = "VBlTsBWd9z3r2-ex7GkWeWxkUc5OrlgDzikAC0pK7ufTjAJ0MbmC_N04oSVTGPiv"
|
||||||
)
|
)
|
||||||
return t.NewPackage("rsync", version, newTar(
|
return t.NewPackage("rsync", version, newTar(
|
||||||
"https://download.samba.org/pub/rsync/src/"+
|
"https://download.samba.org/pub/rsync/src/"+
|
||||||
|
|||||||
@@ -3,25 +3,42 @@ package rosa
|
|||||||
import (
|
import (
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
"hakurei.app/fhs"
|
|
||||||
"hakurei.app/internal/pkg"
|
"hakurei.app/internal/pkg"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (t Toolchain) newStage0() (pkg.Artifact, string) {
|
func (t Toolchain) newStage0() (pkg.Artifact, string) {
|
||||||
return t.New("rosa-stage0", 0, t.AppendPresets(nil,
|
return t.New("rosa-stage0", 0, []pkg.Artifact{
|
||||||
Bzip2,
|
t.Load(Musl),
|
||||||
), nil, nil, `
|
t.Load(CompilerRT),
|
||||||
|
t.Load(LLVMRuntimes),
|
||||||
|
t.Load(Clang),
|
||||||
|
|
||||||
|
t.Load(Zlib),
|
||||||
|
t.Load(Bzip2),
|
||||||
|
|
||||||
|
t.Load(Patch),
|
||||||
|
t.Load(Make),
|
||||||
|
t.Load(CMake),
|
||||||
|
t.Load(Ninja),
|
||||||
|
|
||||||
|
t.Load(Libffi),
|
||||||
|
t.Load(Python),
|
||||||
|
t.Load(Perl),
|
||||||
|
t.Load(Diffutils),
|
||||||
|
t.Load(Bash),
|
||||||
|
t.Load(Gawk),
|
||||||
|
t.Load(Coreutils),
|
||||||
|
t.Load(Findutils),
|
||||||
|
|
||||||
|
t.Load(KernelHeaders),
|
||||||
|
}, nil, nil, `
|
||||||
umask 377
|
umask 377
|
||||||
tar \
|
tar \
|
||||||
-vjc \
|
-vjc \
|
||||||
-C /stage0 \
|
-C / \
|
||||||
-f /work/stage0-`+triplet()+`.tar.bz2 \
|
-f /work/stage0-`+triplet()+`.tar.bz2 \
|
||||||
.
|
system bin usr/bin/env
|
||||||
`, pkg.Path(fhs.AbsRoot.Append("stage0"), false, t.AppendPresets(nil,
|
`), Unversioned
|
||||||
LLVM,
|
|
||||||
Mksh,
|
|
||||||
toyboxEarly,
|
|
||||||
)...)), Unversioned
|
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
artifactsM[Stage0] = Metadata{
|
artifactsM[Stage0] = Metadata{
|
||||||
@@ -43,11 +60,12 @@ var (
|
|||||||
func NewStage0() pkg.Artifact {
|
func NewStage0() pkg.Artifact {
|
||||||
stage0Once.Do(func() {
|
stage0Once.Do(func() {
|
||||||
stage0 = newTar(
|
stage0 = newTar(
|
||||||
"https://hakurei.app/seed/20260429/"+
|
"https://hakurei.app/seed/20260210/"+
|
||||||
"stage0-"+triplet()+".tar.bz2",
|
"stage0-"+triplet()+".tar.bz2",
|
||||||
perArch[string]{
|
perArch[string]{
|
||||||
"amd64": "ldz-WkSx2wxUK4ndi-tlaaU8ykOowbpGRcBsciAcIDdnX6-QfzQg_se3lsZYuzuK",
|
"amd64": "tqM1Li15BJ-uFG8zU-XjgFxoN_kuzh1VxrSDVUVa0vGmo-NeWapSftH739sY8EAg",
|
||||||
"arm64": "_mo39S_sgzPYaIQ_Wi13O46KPQuWqCCiZdildpz6a8MTh2khIt68tNIulyUGBV2z",
|
"arm64": "CJj3ZSnRyLmFHlWIQtTPQD9oikOZY4cD_mI3v_-LIYc2hhg-cq_CZFBLzQBAkFIn",
|
||||||
|
"riscv64": "FcszJjcVWdKAnn-bt8qmUn5GUUTjv_xQjXOWkUpOplRkG3Ckob3StUoAi5KQ5-QF",
|
||||||
}.unwrap(),
|
}.unwrap(),
|
||||||
pkg.TarBzip2,
|
pkg.TarBzip2,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -27,14 +27,8 @@ chmod -R +w ..
|
|||||||
sed -i \
|
sed -i \
|
||||||
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
|
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
|
||||||
cmd/link/internal/`+runtime.GOARCH+`/obj.go
|
cmd/link/internal/`+runtime.GOARCH+`/obj.go
|
||||||
sed -i \
|
|
||||||
's/cpu.X86.HasAVX512VBMI/& \&\& cpu.X86.HasPOPCNT/' \
|
|
||||||
internal/runtime/gc/scan/scan_amd64.go
|
|
||||||
|
|
||||||
rm \
|
rm \
|
||||||
os/root_unix_test.go \
|
os/root_unix_test.go
|
||||||
cmd/cgo/internal/testsanitizers/tsan_test.go \
|
|
||||||
cmd/cgo/internal/testsanitizers/cshared_test.go
|
|
||||||
|
|
||||||
./all.bash
|
./all.bash
|
||||||
`, pkg.Path(AbsUsrSrc.Append("tamago"), false, newFromGitHub(
|
`, pkg.Path(AbsUsrSrc.Append("tamago"), false, newFromGitHub(
|
||||||
|
|||||||
@@ -26,50 +26,19 @@ func init() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t Toolchain) newLibxtrans() (pkg.Artifact, string) {
|
func (t Toolchain) newXproto() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "1.6.0"
|
version = "7.0.31"
|
||||||
checksum = "1cxDCF59fLf1HyGDMcjR1L50ZbjD0RTTEDUpOJYcHXu6HUK_Ds0x-KREY7rLNxu9"
|
checksum = "Cm69urWY5RctKpR78eGzuwrjDEfXGkvHRdodj6sjypOGy5FF4-lmnUttVHYV1ydg"
|
||||||
)
|
)
|
||||||
return t.NewPackage("libxtrans", version, newFromGitLab(
|
return t.NewPackage("xproto", version, newTar(
|
||||||
"gitlab.freedesktop.org",
|
"https://www.x.org/releases/individual/proto/"+
|
||||||
"xorg/lib/libxtrans",
|
"xproto-"+version+".tar.bz2",
|
||||||
"xtrans-"+version,
|
|
||||||
checksum,
|
checksum,
|
||||||
|
pkg.TarBzip2,
|
||||||
), nil, &MakeHelper{
|
), nil, &MakeHelper{
|
||||||
Generate: "NOCONFIGURE=1 ./autogen.sh",
|
// ancient configure script
|
||||||
},
|
Generate: "autoreconf -if",
|
||||||
Automake,
|
|
||||||
Libtool,
|
|
||||||
PkgConfig,
|
|
||||||
|
|
||||||
utilMacros,
|
|
||||||
), version
|
|
||||||
}
|
|
||||||
func init() {
|
|
||||||
artifactsM[Libxtrans] = Metadata{
|
|
||||||
f: Toolchain.newLibxtrans,
|
|
||||||
|
|
||||||
Name: "libxtrans",
|
|
||||||
Description: "X Window System Protocols Transport layer shared code",
|
|
||||||
Website: "https://gitlab.freedesktop.org/xorg/lib/libxtrans",
|
|
||||||
|
|
||||||
ID: 13441,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Toolchain) newXorgProto() (pkg.Artifact, string) {
|
|
||||||
const (
|
|
||||||
version = "2025.1"
|
|
||||||
checksum = "pTwJiBJHKA6Rgm3cVDXy1lyvXNIUzTRaukvvYdk1xWoJ_1G-Dfjm9MyewuyIjoHz"
|
|
||||||
)
|
|
||||||
return t.NewPackage("xorgproto", version, newFromGitLab(
|
|
||||||
"gitlab.freedesktop.org",
|
|
||||||
"xorg/proto/xorgproto",
|
|
||||||
"xorgproto-"+version,
|
|
||||||
checksum,
|
|
||||||
), nil, &MakeHelper{
|
|
||||||
Generate: "NOCONFIGURE=1 ./autogen.sh",
|
|
||||||
},
|
},
|
||||||
Automake,
|
Automake,
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
@@ -78,14 +47,14 @@ func (t Toolchain) newXorgProto() (pkg.Artifact, string) {
|
|||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
artifactsM[XorgProto] = Metadata{
|
artifactsM[Xproto] = Metadata{
|
||||||
f: Toolchain.newXorgProto,
|
f: Toolchain.newXproto,
|
||||||
|
|
||||||
Name: "xorgproto",
|
Name: "xproto",
|
||||||
Description: "X Window System unified protocol definitions",
|
Description: "X Window System unified protocol definitions",
|
||||||
Website: "https://gitlab.freedesktop.org/xorg/proto/xorgproto",
|
Website: "https://gitlab.freedesktop.org/xorg/proto/xorgproto",
|
||||||
|
|
||||||
ID: 17190,
|
ID: 13650,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -108,7 +77,7 @@ func (t Toolchain) newLibXau() (pkg.Artifact, string) {
|
|||||||
PkgConfig,
|
PkgConfig,
|
||||||
|
|
||||||
utilMacros,
|
utilMacros,
|
||||||
XorgProto,
|
Xproto,
|
||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
@@ -120,186 +89,13 @@ func init() {
|
|||||||
Website: "https://gitlab.freedesktop.org/xorg/lib/libxau",
|
Website: "https://gitlab.freedesktop.org/xorg/lib/libxau",
|
||||||
|
|
||||||
Dependencies: P{
|
Dependencies: P{
|
||||||
XorgProto,
|
Xproto,
|
||||||
},
|
},
|
||||||
|
|
||||||
ID: 1765,
|
ID: 1765,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t Toolchain) newXCBProto() (pkg.Artifact, string) {
|
|
||||||
const (
|
|
||||||
version = "1.17.0"
|
|
||||||
checksum = "_NtbKaJ_iyT7XiJz25mXQ7y-niTzE8sHPvLXZPcqtNoV_-vTzqkezJ8Hp2U1enCv"
|
|
||||||
)
|
|
||||||
return t.NewPackage("xcb-proto", version, newTar(
|
|
||||||
"https://xcb.freedesktop.org/dist/xcb-proto-"+version+".tar.gz",
|
|
||||||
checksum,
|
|
||||||
pkg.TarGzip,
|
|
||||||
), nil, (*MakeHelper)(nil),
|
|
||||||
Python,
|
|
||||||
), version
|
|
||||||
}
|
|
||||||
func init() {
|
|
||||||
artifactsM[XCBProto] = Metadata{
|
|
||||||
f: Toolchain.newXCBProto,
|
|
||||||
|
|
||||||
Name: "xcb-proto",
|
|
||||||
Description: "XML-XCB protocol descriptions used by libxcb for the X11 protocol & extensions",
|
|
||||||
Website: "https://gitlab.freedesktop.org/xorg/proto/xcbproto",
|
|
||||||
|
|
||||||
ID: 13646,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Toolchain) newXCB() (pkg.Artifact, string) {
|
|
||||||
const (
|
|
||||||
version = "1.17.0"
|
|
||||||
checksum = "hjjsc79LpWM_hZjNWbDDS6qRQUXREjjekS6UbUsDq-RR1_AjgNDxhRvZf-1_kzDd"
|
|
||||||
)
|
|
||||||
return t.NewPackage("xcb", version, newTar(
|
|
||||||
"https://xcb.freedesktop.org/dist/libxcb-"+version+".tar.gz",
|
|
||||||
checksum,
|
|
||||||
pkg.TarGzip,
|
|
||||||
), nil, (*MakeHelper)(nil),
|
|
||||||
Python,
|
|
||||||
PkgConfig,
|
|
||||||
|
|
||||||
XCBProto,
|
|
||||||
LibXau,
|
|
||||||
), version
|
|
||||||
}
|
|
||||||
func init() {
|
|
||||||
artifactsM[XCB] = Metadata{
|
|
||||||
f: Toolchain.newXCB,
|
|
||||||
|
|
||||||
Name: "xcb",
|
|
||||||
Description: "The X protocol C-language Binding",
|
|
||||||
Website: "https://xcb.freedesktop.org/",
|
|
||||||
|
|
||||||
Dependencies: P{
|
|
||||||
XCBProto,
|
|
||||||
LibXau,
|
|
||||||
},
|
|
||||||
|
|
||||||
ID: 1767,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Toolchain) newLibX11() (pkg.Artifact, string) {
|
|
||||||
const (
|
|
||||||
version = "1.8.13"
|
|
||||||
checksum = "ARh-cuZY_U2v3DbPS1byc7ybh9NInZc-yav7SJiusk_C7408s058qWV83ocMd2pT"
|
|
||||||
)
|
|
||||||
return t.NewPackage("libX11", version, newFromGitLab(
|
|
||||||
"gitlab.freedesktop.org",
|
|
||||||
"xorg/lib/libx11",
|
|
||||||
"libX11-"+version,
|
|
||||||
checksum,
|
|
||||||
), nil, &MakeHelper{
|
|
||||||
Generate: "NOCONFIGURE=1 ./autogen.sh",
|
|
||||||
|
|
||||||
Configure: []KV{
|
|
||||||
{"enable-static"},
|
|
||||||
{"without-xmlto"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Automake,
|
|
||||||
Libtool,
|
|
||||||
PkgConfig,
|
|
||||||
|
|
||||||
utilMacros,
|
|
||||||
Libxtrans,
|
|
||||||
XorgProto,
|
|
||||||
XCB,
|
|
||||||
), version
|
|
||||||
}
|
|
||||||
func init() {
|
|
||||||
artifactsM[LibX11] = Metadata{
|
|
||||||
f: Toolchain.newLibX11,
|
|
||||||
|
|
||||||
Name: "libX11",
|
|
||||||
Description: `Core X11 protocol client library (aka "Xlib")`,
|
|
||||||
Website: "https://gitlab.freedesktop.org/xorg/lib/libx11",
|
|
||||||
|
|
||||||
Dependencies: P{
|
|
||||||
XCB,
|
|
||||||
},
|
|
||||||
|
|
||||||
ID: 1764,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Toolchain) newLibXext() (pkg.Artifact, string) {
|
|
||||||
const (
|
|
||||||
version = "1.3.7"
|
|
||||||
checksum = "-0wvUDaucLPLNOrK1pcKhHNoO-5nUqQyyw6JAbhx65gRjuMiNKKaF2_tcrbC_KNq"
|
|
||||||
)
|
|
||||||
return t.NewPackage("libXext", version, newFromGitLab(
|
|
||||||
"gitlab.freedesktop.org",
|
|
||||||
"xorg/lib/libxext",
|
|
||||||
"libXext-"+version,
|
|
||||||
checksum,
|
|
||||||
), nil, &MakeHelper{
|
|
||||||
Generate: "NOCONFIGURE=1 ./autogen.sh",
|
|
||||||
},
|
|
||||||
Automake,
|
|
||||||
Libtool,
|
|
||||||
PkgConfig,
|
|
||||||
|
|
||||||
utilMacros,
|
|
||||||
LibX11,
|
|
||||||
), version
|
|
||||||
}
|
|
||||||
func init() {
|
|
||||||
artifactsM[LibXext] = Metadata{
|
|
||||||
f: Toolchain.newLibXext,
|
|
||||||
|
|
||||||
Name: "libXext",
|
|
||||||
Description: "Xlib-based library for common extensions to the X11 protocol",
|
|
||||||
Website: "https://gitlab.freedesktop.org/xorg/lib/libxext",
|
|
||||||
|
|
||||||
Dependencies: P{
|
|
||||||
LibX11,
|
|
||||||
},
|
|
||||||
|
|
||||||
ID: 1774,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Toolchain) newLibxshmfence() (pkg.Artifact, string) {
|
|
||||||
const (
|
|
||||||
version = "1.3.3"
|
|
||||||
checksum = "JamExTPg81By2fs3vWdeo_dzlpBQeAwXr2sDXeHJqm9XBoLW5pamiD6FgAWtAKyA"
|
|
||||||
)
|
|
||||||
return t.NewPackage("libxshmfence", version, newFromGitLab(
|
|
||||||
"gitlab.freedesktop.org",
|
|
||||||
"xorg/lib/libxshmfence",
|
|
||||||
"libxshmfence-"+version,
|
|
||||||
checksum,
|
|
||||||
), nil, &MakeHelper{
|
|
||||||
Generate: "NOCONFIGURE=1 ./autogen.sh",
|
|
||||||
},
|
|
||||||
Automake,
|
|
||||||
Libtool,
|
|
||||||
PkgConfig,
|
|
||||||
|
|
||||||
utilMacros,
|
|
||||||
XorgProto,
|
|
||||||
), version
|
|
||||||
}
|
|
||||||
func init() {
|
|
||||||
artifactsM[Libxshmfence] = Metadata{
|
|
||||||
f: Toolchain.newLibxshmfence,
|
|
||||||
|
|
||||||
Name: "libxshmfence",
|
|
||||||
Description: "shared memory 'SyncFence' synchronization primitive",
|
|
||||||
Website: "https://gitlab.freedesktop.org/xorg/lib/libxshmfence",
|
|
||||||
|
|
||||||
ID: 1792,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Toolchain) newLibpciaccess() (pkg.Artifact, string) {
|
func (t Toolchain) newLibpciaccess() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "0.19"
|
version = "0.19"
|
||||||
|
|||||||
62
internal/rosa/xcb.go
Normal file
62
internal/rosa/xcb.go
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
package rosa
|
||||||
|
|
||||||
|
import "hakurei.app/internal/pkg"
|
||||||
|
|
||||||
|
func (t Toolchain) newXCBProto() (pkg.Artifact, string) {
|
||||||
|
const (
|
||||||
|
version = "1.17.0"
|
||||||
|
checksum = "_NtbKaJ_iyT7XiJz25mXQ7y-niTzE8sHPvLXZPcqtNoV_-vTzqkezJ8Hp2U1enCv"
|
||||||
|
)
|
||||||
|
return t.NewPackage("xcb-proto", version, newTar(
|
||||||
|
"https://xcb.freedesktop.org/dist/xcb-proto-"+version+".tar.gz",
|
||||||
|
checksum,
|
||||||
|
pkg.TarGzip,
|
||||||
|
), nil, (*MakeHelper)(nil),
|
||||||
|
Python,
|
||||||
|
), version
|
||||||
|
}
|
||||||
|
func init() {
|
||||||
|
artifactsM[XCBProto] = Metadata{
|
||||||
|
f: Toolchain.newXCBProto,
|
||||||
|
|
||||||
|
Name: "xcb-proto",
|
||||||
|
Description: "XML-XCB protocol descriptions used by libxcb for the X11 protocol & extensions",
|
||||||
|
Website: "https://gitlab.freedesktop.org/xorg/proto/xcbproto",
|
||||||
|
|
||||||
|
ID: 13646,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t Toolchain) newXCB() (pkg.Artifact, string) {
|
||||||
|
const (
|
||||||
|
version = "1.17.0"
|
||||||
|
checksum = "hjjsc79LpWM_hZjNWbDDS6qRQUXREjjekS6UbUsDq-RR1_AjgNDxhRvZf-1_kzDd"
|
||||||
|
)
|
||||||
|
return t.NewPackage("xcb", version, newTar(
|
||||||
|
"https://xcb.freedesktop.org/dist/libxcb-"+version+".tar.gz",
|
||||||
|
checksum,
|
||||||
|
pkg.TarGzip,
|
||||||
|
), nil, (*MakeHelper)(nil),
|
||||||
|
Python,
|
||||||
|
PkgConfig,
|
||||||
|
|
||||||
|
XCBProto,
|
||||||
|
LibXau,
|
||||||
|
), version
|
||||||
|
}
|
||||||
|
func init() {
|
||||||
|
artifactsM[XCB] = Metadata{
|
||||||
|
f: Toolchain.newXCB,
|
||||||
|
|
||||||
|
Name: "xcb",
|
||||||
|
Description: "The X protocol C-language Binding",
|
||||||
|
Website: "https://xcb.freedesktop.org/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
XCBProto,
|
||||||
|
LibXau,
|
||||||
|
},
|
||||||
|
|
||||||
|
ID: 1767,
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -13,8 +13,9 @@ func (t Toolchain) newZlib() (pkg.Artifact, string) {
|
|||||||
pkg.TarGzip,
|
pkg.TarGzip,
|
||||||
), nil, &CMakeHelper{
|
), nil, &CMakeHelper{
|
||||||
Cache: []KV{
|
Cache: []KV{
|
||||||
{"CMAKE_C_FLAGS", "-fPIC"},
|
{"CMAKE_BUILD_TYPE", "Release"},
|
||||||
{"ZLIB_BUILD_TESTING", "ON"},
|
|
||||||
|
{"ZLIB_BUILD_TESTING", "OFF"},
|
||||||
{"ZLIB_BUILD_SHARED", "ON"},
|
{"ZLIB_BUILD_SHARED", "ON"},
|
||||||
{"ZLIB_BUILD_STATIC", "ON"},
|
{"ZLIB_BUILD_STATIC", "ON"},
|
||||||
{"ZLIB_BUILD_MINIZIP", "OFF"},
|
{"ZLIB_BUILD_MINIZIP", "OFF"},
|
||||||
|
|||||||
@@ -13,20 +13,12 @@ func (t Toolchain) newZstd() (pkg.Artifact, string) {
|
|||||||
"zstd-"+version+".tar.gz",
|
"zstd-"+version+".tar.gz",
|
||||||
checksum,
|
checksum,
|
||||||
pkg.TarGzip,
|
pkg.TarGzip,
|
||||||
), &PackageAttr{
|
), nil, &CMakeHelper{
|
||||||
// tests Makefile assumes writable source
|
|
||||||
Writable: true,
|
|
||||||
Chmod: true,
|
|
||||||
}, &CMakeHelper{
|
|
||||||
Append: []string{"build", "cmake"},
|
Append: []string{"build", "cmake"},
|
||||||
Test: `
|
Cache: []KV{
|
||||||
make -C /usr/src/zstd/tests datagen
|
{"CMAKE_BUILD_TYPE", "Release"},
|
||||||
ZSTD_BIN=/cure/programs/zstd /usr/src/zstd/tests/playTests.sh
|
|
||||||
`,
|
|
||||||
},
|
},
|
||||||
Make,
|
}), version
|
||||||
Diffutils,
|
|
||||||
), version
|
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
artifactsM[Zstd] = Metadata{
|
artifactsM[Zstd] = Metadata{
|
||||||
|
|||||||
@@ -35,7 +35,7 @@ package
|
|||||||
|
|
||||||
|
|
||||||
*Default:*
|
*Default:*
|
||||||
` <derivation hakurei-static-x86_64-unknown-linux-musl-0.4.1> `
|
` <derivation hakurei-static-x86_64-unknown-linux-musl-0.4.0> `
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -842,7 +842,7 @@ package
|
|||||||
|
|
||||||
|
|
||||||
*Default:*
|
*Default:*
|
||||||
` <derivation hakurei-hsu-0.4.1> `
|
` <derivation hakurei-hsu-0.4.0> `
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -30,7 +30,7 @@
|
|||||||
|
|
||||||
buildGo126Module rec {
|
buildGo126Module rec {
|
||||||
pname = "hakurei";
|
pname = "hakurei";
|
||||||
version = "0.4.1";
|
version = "0.4.0";
|
||||||
|
|
||||||
srcFiltered = builtins.path {
|
srcFiltered = builtins.path {
|
||||||
name = "${pname}-src";
|
name = "${pname}-src";
|
||||||
|
|||||||
@@ -237,8 +237,8 @@ in
|
|||||||
(ent "/dri" "/dev/dri" "rw,nosuid" "devtmpfs" "devtmpfs" ignore)
|
(ent "/dri" "/dev/dri" "rw,nosuid" "devtmpfs" "devtmpfs" ignore)
|
||||||
(ent "/var/tmp" "/var/tmp" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/var/tmp" "/var/tmp" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent "/var/cache" "/var/cache" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/var/cache" "/var/cache" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent "/" "/.hakurei/.ro-store" "rw,relatime" "overlay" "overlay" "ro,lowerdir+=/host/nix/.ro-store,lowerdir+=/host/nix/.rw-store/upper,redirect_dir=nofollow,userxattr")
|
(ent "/" "/.hakurei/.ro-store" "rw,relatime" "overlay" "overlay" "ro,lowerdir=/host/nix/.ro-store:/host/nix/.rw-store/upper,redirect_dir=nofollow,userxattr")
|
||||||
(ent "/" "/.hakurei/store" "rw,relatime" "overlay" "overlay" "rw,lowerdir+=/host/nix/.ro-store,lowerdir+=/host/nix/.rw-store/upper,upperdir=/host/tmp/.hakurei-store-rw/upper,workdir=/host/tmp/.hakurei-store-rw/work,redirect_dir=nofollow,userxattr")
|
(ent "/" "/.hakurei/store" "rw,relatime" "overlay" "overlay" "rw,lowerdir=/host/nix/.ro-store:/host/nix/.rw-store/upper,upperdir=/host/tmp/.hakurei-store-rw/upper,workdir=/host/tmp/.hakurei-store-rw/work,redirect_dir=nofollow,userxattr")
|
||||||
(ent "/etc" ignore "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/etc" ignore "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent "/var/lib/hakurei/u0/a4" "/var/lib/hakurei/u0/a4" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/var/lib/hakurei/u0/a4" "/var/lib/hakurei/u0/a4" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent ignore "/run/user/65534/pulse/native" "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent ignore "/run/user/65534/pulse/native" "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
|
|||||||
@@ -264,8 +264,8 @@ in
|
|||||||
(ent "/dri" "/dev/dri" "rw,nosuid" "devtmpfs" "devtmpfs" ignore)
|
(ent "/dri" "/dev/dri" "rw,nosuid" "devtmpfs" "devtmpfs" ignore)
|
||||||
(ent "/var/tmp" "/var/tmp" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/var/tmp" "/var/tmp" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent "/var/cache" "/var/cache" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/var/cache" "/var/cache" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent "/" "/.hakurei/.ro-store" "rw,relatime" "overlay" "overlay" "ro,lowerdir+=/host/nix/.ro-store,lowerdir+=/host/nix/.rw-store/upper,redirect_dir=nofollow,userxattr")
|
(ent "/" "/.hakurei/.ro-store" "rw,relatime" "overlay" "overlay" "ro,lowerdir=/host/nix/.ro-store:/host/nix/.rw-store/upper,redirect_dir=nofollow,userxattr")
|
||||||
(ent "/" "/.hakurei/store" "rw,relatime" "overlay" "overlay" "rw,lowerdir+=/host/nix/.ro-store,lowerdir+=/host/nix/.rw-store/upper,upperdir=/host/tmp/.hakurei-store-rw/upper,workdir=/host/tmp/.hakurei-store-rw/work,redirect_dir=nofollow,userxattr")
|
(ent "/" "/.hakurei/store" "rw,relatime" "overlay" "overlay" "rw,lowerdir=/host/nix/.ro-store:/host/nix/.rw-store/upper,upperdir=/host/tmp/.hakurei-store-rw/upper,workdir=/host/tmp/.hakurei-store-rw/work,redirect_dir=nofollow,userxattr")
|
||||||
(ent "/etc" ignore "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/etc" ignore "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent "/var/lib/hakurei/u0/a3" "/var/lib/hakurei/u0/a3" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/var/lib/hakurei/u0/a3" "/var/lib/hakurei/u0/a3" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent ignore "/run/user/1000/pulse/native" "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent ignore "/run/user/1000/pulse/native" "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
|
|||||||
@@ -270,8 +270,8 @@ in
|
|||||||
(ent "/dri" "/dev/dri" "rw,nosuid" "devtmpfs" "devtmpfs" ignore)
|
(ent "/dri" "/dev/dri" "rw,nosuid" "devtmpfs" "devtmpfs" ignore)
|
||||||
(ent "/var/tmp" "/var/tmp" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/var/tmp" "/var/tmp" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent "/var/cache" "/var/cache" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/var/cache" "/var/cache" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent "/" "/.hakurei/.ro-store" "rw,relatime" "overlay" "overlay" "ro,lowerdir+=/host/nix/.ro-store,lowerdir+=/host/nix/.rw-store/upper,redirect_dir=nofollow,userxattr")
|
(ent "/" "/.hakurei/.ro-store" "rw,relatime" "overlay" "overlay" "ro,lowerdir=/host/nix/.ro-store:/host/nix/.rw-store/upper,redirect_dir=nofollow,userxattr")
|
||||||
(ent "/" "/.hakurei/store" "rw,relatime" "overlay" "overlay" "rw,lowerdir+=/host/nix/.ro-store,lowerdir+=/host/nix/.rw-store/upper,upperdir=/host/tmp/.hakurei-store-rw/upper,workdir=/host/tmp/.hakurei-store-rw/work,redirect_dir=nofollow,uuid=on,userxattr")
|
(ent "/" "/.hakurei/store" "rw,relatime" "overlay" "overlay" "rw,lowerdir=/host/nix/.ro-store:/host/nix/.rw-store/upper,upperdir=/host/tmp/.hakurei-store-rw/upper,workdir=/host/tmp/.hakurei-store-rw/work,redirect_dir=nofollow,uuid=on,userxattr")
|
||||||
(ent "/etc" ignore "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/etc" ignore "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent "/var/lib/hakurei/u0/a2" "/var/lib/hakurei/u0/a2" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent "/var/lib/hakurei/u0/a2" "/var/lib/hakurei/u0/a2" "rw,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
(ent ignore "/run/user/65534/pulse/native" "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
(ent ignore "/run/user/65534/pulse/native" "ro,nosuid,nodev,relatime" "ext4" "/dev/vda" "rw")
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ def swaymsg(command: str = "", succeed=True, type="command"):
|
|||||||
|
|
||||||
|
|
||||||
def check_filter(check_offset, name, pname):
|
def check_filter(check_offset, name, pname):
|
||||||
pid = int(machine.wait_until_succeeds(f"pgrep -U {10000+check_offset} -x {pname}"))
|
pid = int(machine.wait_until_succeeds(f"pgrep -U {10000+check_offset} -x {pname}", timeout=60))
|
||||||
hash = machine.succeed(f"sudo -u alice -i XDG_RUNTIME_DIR=/run/user/1000 WAYLAND_DISPLAY=wayland-1 check-sandbox-{name} hash")
|
hash = machine.succeed(f"sudo -u alice -i XDG_RUNTIME_DIR=/run/user/1000 WAYLAND_DISPLAY=wayland-1 check-sandbox-{name} hash")
|
||||||
print(machine.succeed(f"hakurei-test -s {hash} filter {pid}"))
|
print(machine.succeed(f"hakurei-test -s {hash} filter {pid}"))
|
||||||
|
|
||||||
@@ -47,7 +47,7 @@ check_filter(0, "pdlike", "cat")
|
|||||||
|
|
||||||
# Check fd leak:
|
# Check fd leak:
|
||||||
swaymsg("exec exec 127</proc/cmdline && hakurei -v exec sleep infinity")
|
swaymsg("exec exec 127</proc/cmdline && hakurei -v exec sleep infinity")
|
||||||
pd_identity0_sleep_pid = int(machine.wait_until_succeeds("pgrep -U 10000 -x sleep"))
|
pd_identity0_sleep_pid = int(machine.wait_until_succeeds("pgrep -U 10000 -x sleep", timeout=60))
|
||||||
print(machine.succeed(f"hakurei-test fd {pd_identity0_sleep_pid}"))
|
print(machine.succeed(f"hakurei-test fd {pd_identity0_sleep_pid}"))
|
||||||
machine.succeed(f"kill -INT {pd_identity0_sleep_pid}")
|
machine.succeed(f"kill -INT {pd_identity0_sleep_pid}")
|
||||||
|
|
||||||
@@ -66,7 +66,7 @@ check_offset = 0
|
|||||||
def check_sandbox(name):
|
def check_sandbox(name):
|
||||||
global check_offset
|
global check_offset
|
||||||
swaymsg(f"exec script /dev/null -E always -qec check-sandbox-{name}")
|
swaymsg(f"exec script /dev/null -E always -qec check-sandbox-{name}")
|
||||||
machine.wait_for_file(f"/var/tmp/.hakurei-check-ok.{check_offset}")
|
machine.wait_for_file(f"/var/tmp/.hakurei-check-ok.{check_offset}", timeout=60)
|
||||||
check_filter(check_offset, name, "hakurei-test")
|
check_filter(check_offset, name, "hakurei-test")
|
||||||
check_offset += 1
|
check_offset += 1
|
||||||
|
|
||||||
@@ -79,7 +79,7 @@ check_sandbox("device")
|
|||||||
check_sandbox("pdlike")
|
check_sandbox("pdlike")
|
||||||
|
|
||||||
# Exit Sway and verify process exit status 0:
|
# Exit Sway and verify process exit status 0:
|
||||||
machine.wait_until_fails("pgrep -x hakurei")
|
machine.wait_until_fails("pgrep -x hakurei", timeout=5)
|
||||||
swaymsg("exit", succeed=False)
|
swaymsg("exit", succeed=False)
|
||||||
machine.wait_for_file("/tmp/sway-exit-ok")
|
machine.wait_for_file("/tmp/sway-exit-ok")
|
||||||
|
|
||||||
|
|||||||
52
test/test.py
52
test/test.py
@@ -119,7 +119,7 @@ def hakurei_identity(offset):
|
|||||||
|
|
||||||
# Start hakurei permissive defaults outside Wayland session:
|
# Start hakurei permissive defaults outside Wayland session:
|
||||||
print(machine.succeed("sudo -u alice -i hakurei -v exec -a 0 touch /tmp/pd-bare-ok"))
|
print(machine.succeed("sudo -u alice -i hakurei -v exec -a 0 touch /tmp/pd-bare-ok"))
|
||||||
machine.wait_for_file("/tmp/hakurei.0/tmpdir/0/pd-bare-ok")
|
machine.wait_for_file("/tmp/hakurei.0/tmpdir/0/pd-bare-ok", timeout=5)
|
||||||
|
|
||||||
# Verify silent output permissive defaults:
|
# Verify silent output permissive defaults:
|
||||||
output = machine.succeed("sudo -u alice -i hakurei exec -a 0 true &>/dev/stdout")
|
output = machine.succeed("sudo -u alice -i hakurei exec -a 0 true &>/dev/stdout")
|
||||||
@@ -132,11 +132,11 @@ def silent_output_interrupt(flags):
|
|||||||
wait_for_window("alice@machine")
|
wait_for_window("alice@machine")
|
||||||
# identity 0 does not have home-manager
|
# identity 0 does not have home-manager
|
||||||
machine.send_chars(f"exec hakurei exec {flags}-a 0 sh -c 'export PATH=/run/current-system/sw/bin:$PATH && touch /tmp/pd-silent-ready && sleep infinity' &>/tmp/pd-silent\n")
|
machine.send_chars(f"exec hakurei exec {flags}-a 0 sh -c 'export PATH=/run/current-system/sw/bin:$PATH && touch /tmp/pd-silent-ready && sleep infinity' &>/tmp/pd-silent\n")
|
||||||
machine.wait_for_file("/tmp/hakurei.0/tmpdir/0/pd-silent-ready")
|
machine.wait_for_file("/tmp/hakurei.0/tmpdir/0/pd-silent-ready", timeout=15)
|
||||||
machine.succeed("rm /tmp/hakurei.0/tmpdir/0/pd-silent-ready")
|
machine.succeed("rm /tmp/hakurei.0/tmpdir/0/pd-silent-ready")
|
||||||
machine.send_key("ctrl-c")
|
machine.send_key("ctrl-c")
|
||||||
machine.wait_until_fails("pgrep foot")
|
machine.wait_until_fails("pgrep foot", timeout=5)
|
||||||
machine.wait_until_fails(f"pgrep -u alice -f 'hakurei exec {flags}-a 0 '")
|
machine.wait_until_fails(f"pgrep -u alice -f 'hakurei exec {flags}-a 0 '", timeout=5)
|
||||||
output = machine.succeed("cat /tmp/pd-silent && rm /tmp/pd-silent")
|
output = machine.succeed("cat /tmp/pd-silent && rm /tmp/pd-silent")
|
||||||
if output != "":
|
if output != "":
|
||||||
raise Exception(f"unexpected output\n{output}")
|
raise Exception(f"unexpected output\n{output}")
|
||||||
@@ -151,7 +151,7 @@ print(machine.fail("sudo -u alice -i hakurei -v exec --wayland true"))
|
|||||||
|
|
||||||
# Start hakurei permissive defaults within Wayland session:
|
# Start hakurei permissive defaults within Wayland session:
|
||||||
hakurei('-v exec --wayland --dbus --dbus-log notify-send -a "NixOS Tests" "Test notification" "Notification from within sandbox." && touch /tmp/dbus-ok')
|
hakurei('-v exec --wayland --dbus --dbus-log notify-send -a "NixOS Tests" "Test notification" "Notification from within sandbox." && touch /tmp/dbus-ok')
|
||||||
machine.wait_for_file("/tmp/dbus-ok")
|
machine.wait_for_file("/tmp/dbus-ok", timeout=15)
|
||||||
collect_state_ui("dbus_notify_exited")
|
collect_state_ui("dbus_notify_exited")
|
||||||
# not in pid namespace, verify termination
|
# not in pid namespace, verify termination
|
||||||
machine.wait_until_fails("pgrep xdg-dbus-proxy")
|
machine.wait_until_fails("pgrep xdg-dbus-proxy")
|
||||||
@@ -165,11 +165,11 @@ hakurei("-v exec --wayland -X --dbus --pulse -u p1 foot && touch /tmp/p1-exit-ok
|
|||||||
wait_for_window("p1@machine")
|
wait_for_window("p1@machine")
|
||||||
print(machine.succeed("getfacl --absolute-names --omit-header --numeric /tmp/hakurei.0/runtime | grep 10000"))
|
print(machine.succeed("getfacl --absolute-names --omit-header --numeric /tmp/hakurei.0/runtime | grep 10000"))
|
||||||
machine.send_chars("exit\n")
|
machine.send_chars("exit\n")
|
||||||
machine.wait_for_file("/tmp/p1-exit-ok")
|
machine.wait_for_file("/tmp/p1-exit-ok", timeout=15)
|
||||||
# Verify acl is kept alive:
|
# Verify acl is kept alive:
|
||||||
print(machine.succeed("getfacl --absolute-names --omit-header --numeric /tmp/hakurei.0/runtime | grep 10000"))
|
print(machine.succeed("getfacl --absolute-names --omit-header --numeric /tmp/hakurei.0/runtime | grep 10000"))
|
||||||
machine.send_chars("exit\n")
|
machine.send_chars("exit\n")
|
||||||
machine.wait_for_file("/tmp/p0-exit-ok")
|
machine.wait_for_file("/tmp/p0-exit-ok", timeout=15)
|
||||||
machine.fail("getfacl --absolute-names --omit-header --numeric /tmp/hakurei.0/runtime | grep 10000")
|
machine.fail("getfacl --absolute-names --omit-header --numeric /tmp/hakurei.0/runtime | grep 10000")
|
||||||
|
|
||||||
# Check invalid identifier fd behaviour:
|
# Check invalid identifier fd behaviour:
|
||||||
@@ -181,7 +181,7 @@ print(machine.succeed('grep "^hakurei: cannot write identifier: bad file descrip
|
|||||||
swaymsg("exec sh -c 'ne-foot; echo -n $? > /tmp/monitor-exit-code'")
|
swaymsg("exec sh -c 'ne-foot; echo -n $? > /tmp/monitor-exit-code'")
|
||||||
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
||||||
machine.succeed("pkill -INT -f 'hakurei -v run '")
|
machine.succeed("pkill -INT -f 'hakurei -v run '")
|
||||||
machine.wait_until_fails("pgrep foot")
|
machine.wait_until_fails("pgrep foot", timeout=5)
|
||||||
machine.wait_for_file("/tmp/monitor-exit-code")
|
machine.wait_for_file("/tmp/monitor-exit-code")
|
||||||
interrupt_exit_code = int(machine.succeed("cat /tmp/monitor-exit-code"))
|
interrupt_exit_code = int(machine.succeed("cat /tmp/monitor-exit-code"))
|
||||||
if interrupt_exit_code != 230:
|
if interrupt_exit_code != 230:
|
||||||
@@ -191,7 +191,7 @@ if interrupt_exit_code != 230:
|
|||||||
swaymsg("exec sh -c 'ne-foot-immediate; echo -n $? > /tmp/monitor-exit-code'")
|
swaymsg("exec sh -c 'ne-foot-immediate; echo -n $? > /tmp/monitor-exit-code'")
|
||||||
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
||||||
machine.succeed("pkill -INT -f 'hakurei -v run '")
|
machine.succeed("pkill -INT -f 'hakurei -v run '")
|
||||||
machine.wait_until_fails("pgrep foot")
|
machine.wait_until_fails("pgrep foot", timeout=5)
|
||||||
machine.wait_for_file("/tmp/monitor-exit-code")
|
machine.wait_for_file("/tmp/monitor-exit-code")
|
||||||
interrupt_exit_code = int(machine.succeed("cat /tmp/monitor-exit-code"))
|
interrupt_exit_code = int(machine.succeed("cat /tmp/monitor-exit-code"))
|
||||||
if interrupt_exit_code != 254:
|
if interrupt_exit_code != 254:
|
||||||
@@ -202,7 +202,7 @@ swaymsg("exec sh -c 'ne-foot &> /tmp/shim-cont-unexpected-pid'")
|
|||||||
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
||||||
machine.succeed("pkill -CONT -f 'hakurei shim'")
|
machine.succeed("pkill -CONT -f 'hakurei shim'")
|
||||||
machine.succeed("pkill -INT -f 'hakurei -v run '")
|
machine.succeed("pkill -INT -f 'hakurei -v run '")
|
||||||
machine.wait_until_fails("pgrep foot")
|
machine.wait_until_fails("pgrep foot", timeout=5)
|
||||||
machine.wait_for_file("/tmp/shim-cont-unexpected-pid")
|
machine.wait_for_file("/tmp/shim-cont-unexpected-pid")
|
||||||
print(machine.succeed('grep "shim: got SIGCONT from unexpected process$" /tmp/shim-cont-unexpected-pid'))
|
print(machine.succeed('grep "shim: got SIGCONT from unexpected process$" /tmp/shim-cont-unexpected-pid'))
|
||||||
|
|
||||||
@@ -221,26 +221,26 @@ if sched_rr != 2:
|
|||||||
swaymsg("exec ne-foot")
|
swaymsg("exec ne-foot")
|
||||||
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
||||||
machine.send_chars("clear; wayland-info && touch /var/tmp/client-ok\n")
|
machine.send_chars("clear; wayland-info && touch /var/tmp/client-ok\n")
|
||||||
machine.wait_for_file("/var/tmp/client-ok")
|
machine.wait_for_file("/var/tmp/client-ok", timeout=15)
|
||||||
collect_state_ui("foot_wayland")
|
collect_state_ui("foot_wayland")
|
||||||
check_state("ne-foot", {"wayland": True})
|
check_state("ne-foot", {"wayland": True})
|
||||||
# Verify lack of acl on XDG_RUNTIME_DIR:
|
# Verify lack of acl on XDG_RUNTIME_DIR:
|
||||||
machine.fail(f"getfacl --absolute-names --omit-header --numeric /run/user/1000 | grep {hakurei_identity(0) + 10000}")
|
machine.fail(f"getfacl --absolute-names --omit-header --numeric /run/user/1000 | grep {hakurei_identity(0) + 10000}")
|
||||||
machine.send_chars("exit\n")
|
machine.send_chars("exit\n")
|
||||||
machine.wait_until_fails("pgrep foot")
|
machine.wait_until_fails("pgrep foot", timeout=5)
|
||||||
machine.fail(f"getfacl --absolute-names --omit-header --numeric /run/user/1000 | grep {hakurei_identity(0) + 10000}")
|
machine.fail(f"getfacl --absolute-names --omit-header --numeric /run/user/1000 | grep {hakurei_identity(0) + 10000}", timeout=5)
|
||||||
|
|
||||||
# Test pipewire-pulse:
|
# Test pipewire-pulse:
|
||||||
swaymsg("exec pa-foot")
|
swaymsg("exec pa-foot")
|
||||||
wait_for_window(f"u0_a{hakurei_identity(1)}@machine")
|
wait_for_window(f"u0_a{hakurei_identity(1)}@machine")
|
||||||
machine.send_chars("clear; pactl info && touch /var/tmp/pulse-ok\n")
|
machine.send_chars("clear; pactl info && touch /var/tmp/pulse-ok\n")
|
||||||
machine.wait_for_file("/var/tmp/pulse-ok")
|
machine.wait_for_file("/var/tmp/pulse-ok", timeout=15)
|
||||||
collect_state_ui("pulse_wayland")
|
collect_state_ui("pulse_wayland")
|
||||||
check_state("pa-foot", {"wayland": True, "pipewire": True})
|
check_state("pa-foot", {"wayland": True, "pipewire": True})
|
||||||
machine.fail("find /tmp -maxdepth 1 -type d -name '.hakurei-shim-*' -print -exec false '{}' +")
|
machine.fail("find /tmp -maxdepth 1 -type d -name '.hakurei-shim-*' -print -exec false '{}' +")
|
||||||
machine.send_chars("exit\n")
|
machine.send_chars("exit\n")
|
||||||
machine.wait_until_fails("pgrep foot")
|
machine.wait_until_fails("pgrep foot", timeout=5)
|
||||||
machine.wait_until_fails("pgrep -x hakurei")
|
machine.wait_until_fails("pgrep -x hakurei", timeout=5)
|
||||||
machine.succeed("find /tmp -maxdepth 1 -type d -name '.hakurei-shim-*' -print -exec false '{}' +")
|
machine.succeed("find /tmp -maxdepth 1 -type d -name '.hakurei-shim-*' -print -exec false '{}' +")
|
||||||
# Test PipeWire SecurityContext:
|
# Test PipeWire SecurityContext:
|
||||||
machine.succeed("sudo -u alice -i XDG_RUNTIME_DIR=/run/user/1000 hakurei -v exec --pulse pactl info")
|
machine.succeed("sudo -u alice -i XDG_RUNTIME_DIR=/run/user/1000 hakurei -v exec --pulse pactl info")
|
||||||
@@ -253,25 +253,25 @@ machine.fail("sudo -u alice -i XDG_RUNTIME_DIR=/run/user/1000 hakurei -v exec --
|
|||||||
swaymsg("exec x11-alacritty")
|
swaymsg("exec x11-alacritty")
|
||||||
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
||||||
machine.send_chars("clear; glinfo && touch /var/tmp/x11-ok\n")
|
machine.send_chars("clear; glinfo && touch /var/tmp/x11-ok\n")
|
||||||
machine.wait_for_file("/var/tmp/x11-ok")
|
machine.wait_for_file("/var/tmp/x11-ok", timeout=15)
|
||||||
collect_state_ui("alacritty_x11")
|
collect_state_ui("alacritty_x11")
|
||||||
check_state("x11-alacritty", {"x11": True})
|
check_state("x11-alacritty", {"x11": True})
|
||||||
machine.send_chars("exit\n")
|
machine.send_chars("exit\n")
|
||||||
machine.wait_until_fails("pgrep alacritty")
|
machine.wait_until_fails("pgrep alacritty", timeout=5)
|
||||||
|
|
||||||
# Start app (foot) with direct Wayland access:
|
# Start app (foot) with direct Wayland access:
|
||||||
swaymsg("exec da-foot")
|
swaymsg("exec da-foot")
|
||||||
wait_for_window(f"u0_a{hakurei_identity(3)}@machine")
|
wait_for_window(f"u0_a{hakurei_identity(3)}@machine")
|
||||||
machine.send_chars("clear; wayland-info && touch /var/tmp/direct-ok\n")
|
machine.send_chars("clear; wayland-info && touch /var/tmp/direct-ok\n")
|
||||||
collect_state_ui("foot_direct")
|
collect_state_ui("foot_direct")
|
||||||
machine.wait_for_file("/var/tmp/direct-ok")
|
machine.wait_for_file("/var/tmp/direct-ok", timeout=15)
|
||||||
check_state("da-foot", {"wayland": True})
|
check_state("da-foot", {"wayland": True})
|
||||||
# Verify acl on XDG_RUNTIME_DIR:
|
# Verify acl on XDG_RUNTIME_DIR:
|
||||||
print(machine.succeed(f"getfacl --absolute-names --omit-header --numeric /run/user/1000 | grep {hakurei_identity(3) + 10000}"))
|
print(machine.succeed(f"getfacl --absolute-names --omit-header --numeric /run/user/1000 | grep {hakurei_identity(3) + 10000}"))
|
||||||
machine.send_chars("exit\n")
|
machine.send_chars("exit\n")
|
||||||
machine.wait_until_fails("pgrep foot")
|
machine.wait_until_fails("pgrep foot", timeout=5)
|
||||||
# Verify acl cleanup on XDG_RUNTIME_DIR:
|
# Verify acl cleanup on XDG_RUNTIME_DIR:
|
||||||
machine.wait_until_fails(f"getfacl --absolute-names --omit-header --numeric /run/user/1000 | grep {hakurei_identity(3) + 10000}")
|
machine.wait_until_fails(f"getfacl --absolute-names --omit-header --numeric /run/user/1000 | grep {hakurei_identity(3) + 10000}", timeout=5)
|
||||||
|
|
||||||
# Test syscall filter:
|
# Test syscall filter:
|
||||||
print(machine.fail("sudo -u alice -i XDG_RUNTIME_DIR=/run/user/1000 strace-failure"))
|
print(machine.fail("sudo -u alice -i XDG_RUNTIME_DIR=/run/user/1000 strace-failure"))
|
||||||
@@ -280,20 +280,20 @@ print(machine.fail("sudo -u alice -i XDG_RUNTIME_DIR=/run/user/1000 strace-failu
|
|||||||
swaymsg("exec foot $SHELL -c '(ne-foot) & disown && exec $SHELL'")
|
swaymsg("exec foot $SHELL -c '(ne-foot) & disown && exec $SHELL'")
|
||||||
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
wait_for_window(f"u0_a{hakurei_identity(0)}@machine")
|
||||||
machine.send_chars("clear; wayland-info && touch /var/tmp/term-ok\n")
|
machine.send_chars("clear; wayland-info && touch /var/tmp/term-ok\n")
|
||||||
machine.wait_for_file("/var/tmp/term-ok")
|
machine.wait_for_file("/var/tmp/term-ok", timeout=15)
|
||||||
machine.send_key("alt-h")
|
machine.send_key("alt-h")
|
||||||
machine.send_chars("clear; hakurei show $(hakurei ps --short) && touch /tmp/ps-show-ok && exec cat\n")
|
machine.send_chars("clear; hakurei show $(hakurei ps --short) && touch /tmp/ps-show-ok && exec cat\n")
|
||||||
machine.wait_for_file("/tmp/ps-show-ok")
|
machine.wait_for_file("/tmp/ps-show-ok", timeout=5)
|
||||||
collect_state_ui("foot_wayland_term")
|
collect_state_ui("foot_wayland_term")
|
||||||
check_state("ne-foot", {"wayland": True})
|
check_state("ne-foot", {"wayland": True})
|
||||||
machine.send_key("alt-l")
|
machine.send_key("alt-l")
|
||||||
machine.send_chars("exit\n")
|
machine.send_chars("exit\n")
|
||||||
wait_for_window("alice@machine")
|
wait_for_window("alice@machine")
|
||||||
machine.send_key("ctrl-c")
|
machine.send_key("ctrl-c")
|
||||||
machine.wait_until_fails("pgrep foot")
|
machine.wait_until_fails("pgrep foot", timeout=5)
|
||||||
|
|
||||||
# Exit Sway and verify process exit status 0:
|
# Exit Sway and verify process exit status 0:
|
||||||
machine.wait_until_fails("pgrep -x hakurei")
|
machine.wait_until_fails("pgrep -x hakurei", timeout=5)
|
||||||
swaymsg("exit", succeed=False)
|
swaymsg("exit", succeed=False)
|
||||||
machine.wait_for_file("/tmp/sway-exit-ok")
|
machine.wait_for_file("/tmp/sway-exit-ok")
|
||||||
|
|
||||||
@@ -308,4 +308,4 @@ machine.succeed("find /tmp -maxdepth 1 -type d -name '.hakurei-shim-*' -print -e
|
|||||||
# Verify go test status:
|
# Verify go test status:
|
||||||
machine.wait_for_file("/tmp/hakurei-test-done")
|
machine.wait_for_file("/tmp/hakurei-test-done")
|
||||||
print(machine.succeed("cat /tmp/hakurei-test.log"))
|
print(machine.succeed("cat /tmp/hakurei-test.log"))
|
||||||
machine.wait_for_file("/tmp/hakurei-test-ok")
|
machine.wait_for_file("/tmp/hakurei-test-ok", timeout=2)
|
||||||
|
|||||||
Reference in New Issue
Block a user