Compare commits
131 Commits
8a26521f5b
...
v0.3.5
| Author | SHA1 | Date | |
|---|---|---|---|
|
9fb0b2452e
|
|||
|
a3e87dd0ef
|
|||
|
90a38c0708
|
|||
|
39cc8caa93
|
|||
|
c4f64f7606
|
|||
|
a9e2a5e59f
|
|||
|
9fb0722cdf
|
|||
|
2f3e323c46
|
|||
|
1fc9c3200f
|
|||
|
096a25ad3a
|
|||
|
ffd2f979fb
|
|||
|
31a8cc9b5c
|
|||
|
bb3f60fc74
|
|||
|
697c91e04d
|
|||
|
3f7b8b4332
|
|||
|
fa94155f42
|
|||
|
233bd163fb
|
|||
|
f9b69c94bc
|
|||
|
68aefa6d59
|
|||
|
159fd55dbb
|
|||
|
ce6b3ff53b
|
|||
|
30afa0e2ab
|
|||
|
9b751de078
|
|||
|
d77ad3bb6e
|
|||
|
0142fc90b0
|
|||
|
3c9f7cfcd0
|
|||
|
a3526b3ceb
|
|||
|
6ad21e2288
|
|||
|
27e2e3f996
|
|||
|
e0c720681b
|
|||
|
f982b13a59
|
|||
|
443911ada1
|
|||
|
d7a3706db3
|
|||
|
3226dc44dc
|
|||
|
9f98d12ad8
|
|||
|
550e83dda9
|
|||
|
7877b4e627
|
|||
|
47ce6f5bd0
|
|||
|
48f4ccba33
|
|||
|
c31884bee4
|
|||
|
f8661ad479
|
|||
|
536f0cbae6
|
|||
|
8d872ff1cd
|
|||
|
bf14a412e4
|
|||
|
8b4576bc5f
|
|||
|
29ebc52e26
|
|||
|
5f81aac0e2
|
|||
|
47490823be
|
|||
|
1ac8ca7a80
|
|||
|
fd8b2fd522
|
|||
|
20a8519044
|
|||
|
8c4fd00c50
|
|||
|
bc3dd6fbb0
|
|||
|
616ed29edf
|
|||
|
9d9b7294a4
|
|||
|
6c1e2f10a7
|
|||
|
abf96d2283
|
|||
|
6c90e879da
|
|||
|
d1b404dc3a
|
|||
|
744e4e0632
|
|||
|
85eda49b2b
|
|||
|
b26bc05bb0
|
|||
|
2d63ea8fee
|
|||
|
dd4326418c
|
|||
|
79c0106ea0
|
|||
|
536db533de
|
|||
|
07927006a8
|
|||
|
77ea27b038
|
|||
|
e76bc6a13a
|
|||
|
cc403c96d8
|
|||
|
66118ba941
|
|||
|
823ba08dbc
|
|||
|
660835151e
|
|||
|
53e6df7e81
|
|||
|
bd80327a8f
|
|||
|
41f9aebbb7
|
|||
|
a2a0e36802
|
|||
|
fbe93fc771
|
|||
|
968d8dbaf1
|
|||
|
f1758a6fa8
|
|||
|
88aaa4497c
|
|||
|
b7ea68de35
|
|||
|
67e453f5c4
|
|||
|
67092c835a
|
|||
|
18918d9a0d
|
|||
|
380ca4e022
|
|||
|
887aef8514
|
|||
|
d61faa09eb
|
|||
|
50153788ef
|
|||
|
c84fe63217
|
|||
|
eb67e5e0a8
|
|||
|
948afe33e5
|
|||
|
76c657177d
|
|||
|
4356f978aa
|
|||
|
4f17dad645
|
|||
|
68b7d41c65
|
|||
|
e48f303e38
|
|||
|
f1fd406b82
|
|||
|
53b1de3395
|
|||
|
92dcadbf27
|
|||
|
0bd6a18326
|
|||
|
67d592c337
|
|||
|
fdc8a8419b
|
|||
|
122cfbf63a
|
|||
|
504f5d28fe
|
|||
|
3eadd5c580
|
|||
|
4d29333807
|
|||
|
e1533fa4c6
|
|||
|
9a74d5273d
|
|||
|
2abc8c454e
|
|||
|
fecb963e85
|
|||
|
cd9da57f20
|
|||
|
c6a95f5a6a
|
|||
|
228489371d
|
|||
|
490471d22b
|
|||
|
763d2572fe
|
|||
|
bb1b6beb87
|
|||
|
3224a7da63
|
|||
|
8a86cf74ee
|
|||
|
e34a59e332
|
|||
|
861801597d
|
|||
|
334578fdde
|
|||
|
20790af71e
|
|||
|
43b8a40fc0
|
|||
|
87c3059214
|
|||
|
6956dfc31a
|
|||
|
d9ebaf20f8
|
|||
|
acee0b3632
|
|||
|
5e55a796df
|
|||
|
f6eaf76ec9
|
|||
|
5c127a7035
|
@@ -47,13 +47,13 @@ func main() {
|
||||
}()
|
||||
|
||||
var (
|
||||
flagVerbose bool
|
||||
flagQuiet bool
|
||||
flagCures int
|
||||
flagBase string
|
||||
flagTShift int
|
||||
)
|
||||
c := command.New(os.Stderr, log.Printf, "mbf", func([]string) (err error) {
|
||||
msg.SwapVerbose(flagVerbose)
|
||||
msg.SwapVerbose(!flagQuiet)
|
||||
|
||||
var base *check.Absolute
|
||||
if flagBase, err = filepath.Abs(flagBase); err != nil {
|
||||
@@ -62,16 +62,19 @@ func main() {
|
||||
return
|
||||
}
|
||||
if cache, err = pkg.Open(ctx, msg, flagCures, base); err == nil {
|
||||
if flagTShift < 0 || flagTShift > 31 {
|
||||
flagTShift = 31
|
||||
}
|
||||
if flagTShift < 0 {
|
||||
cache.SetThreshold(0)
|
||||
} else if flagTShift > 31 {
|
||||
cache.SetThreshold(1 << 31)
|
||||
} else {
|
||||
cache.SetThreshold(1 << flagTShift)
|
||||
}
|
||||
}
|
||||
return
|
||||
}).Flag(
|
||||
&flagVerbose,
|
||||
"v", command.BoolFlag(false),
|
||||
"Print cure messages to the console",
|
||||
&flagQuiet,
|
||||
"q", command.BoolFlag(false),
|
||||
"Do not print cure messages",
|
||||
).Flag(
|
||||
&flagCures,
|
||||
"cures", command.IntFlag(0),
|
||||
@@ -82,7 +85,7 @@ func main() {
|
||||
"Directory to store cured artifacts",
|
||||
).Flag(
|
||||
&flagTShift,
|
||||
"tshift", command.IntFlag(31),
|
||||
"tshift", command.IntFlag(-1),
|
||||
"Dependency graph size exponent, to the power of 2",
|
||||
)
|
||||
|
||||
@@ -110,6 +113,7 @@ func main() {
|
||||
"stage3",
|
||||
"Check for toolchain 3-stage non-determinism",
|
||||
func(args []string) (err error) {
|
||||
_, _, _, stage1 := (rosa.Std - 2).NewLLVM()
|
||||
_, _, _, stage2 := (rosa.Std - 1).NewLLVM()
|
||||
_, _, _, stage3 := rosa.Std.NewLLVM()
|
||||
var (
|
||||
@@ -117,6 +121,11 @@ func main() {
|
||||
checksum [2]unique.Handle[pkg.Checksum]
|
||||
)
|
||||
|
||||
if pathname, _, err = cache.Cure(stage1); err != nil {
|
||||
return err
|
||||
}
|
||||
log.Println("stage1:", pathname)
|
||||
|
||||
if pathname, checksum[0], err = cache.Cure(stage2); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -131,11 +140,20 @@ func main() {
|
||||
Got: checksum[0].Value(),
|
||||
Want: checksum[1].Value(),
|
||||
}
|
||||
} else {
|
||||
log.Println(
|
||||
"stage2 is identical to stage3",
|
||||
"("+pkg.Encode(checksum[0].Value())+")",
|
||||
)
|
||||
}
|
||||
return
|
||||
},
|
||||
)
|
||||
|
||||
{
|
||||
var (
|
||||
flagDump string
|
||||
)
|
||||
c.NewCommand(
|
||||
"cure",
|
||||
"Cure the named artifact and show its path",
|
||||
@@ -143,31 +161,39 @@ func main() {
|
||||
if len(args) != 1 {
|
||||
return errors.New("cure requires 1 argument")
|
||||
}
|
||||
var a pkg.Artifact
|
||||
switch args[0] {
|
||||
case "busybox":
|
||||
a = rosa.Std.NewBusybox()
|
||||
case "musl":
|
||||
a = rosa.Std.NewMusl(nil)
|
||||
case "git":
|
||||
a = rosa.Std.NewGit()
|
||||
case "go":
|
||||
a = rosa.Std.NewGo()
|
||||
case "rsync":
|
||||
a = rosa.Std.NewRsync()
|
||||
|
||||
default:
|
||||
if p, ok := rosa.ResolveName(args[0]); !ok {
|
||||
return fmt.Errorf("unsupported artifact %q", args[0])
|
||||
}
|
||||
|
||||
pathname, _, err := cache.Cure(a)
|
||||
} else if flagDump == "" {
|
||||
pathname, _, err := cache.Cure(rosa.Std.Load(p))
|
||||
if err == nil {
|
||||
log.Println(pathname)
|
||||
}
|
||||
return err
|
||||
|
||||
},
|
||||
} else {
|
||||
f, err := os.OpenFile(
|
||||
flagDump,
|
||||
os.O_WRONLY|os.O_CREATE|os.O_EXCL,
|
||||
0644,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = cache.EncodeAll(f, rosa.Std.Load(p)); err != nil {
|
||||
_ = f.Close()
|
||||
return err
|
||||
}
|
||||
|
||||
return f.Close()
|
||||
}
|
||||
},
|
||||
).
|
||||
Flag(
|
||||
&flagDump,
|
||||
"dump", command.StringFlag(""),
|
||||
"Write IR to specified pathname and terminate",
|
||||
)
|
||||
}
|
||||
|
||||
c.MustParse(os.Args[1:], func(err error) {
|
||||
if cache != nil {
|
||||
|
||||
@@ -274,13 +274,13 @@ var containerTestCases = []struct {
|
||||
Dev(check.MustAbs("/dev"), true),
|
||||
),
|
||||
earlyMnt(
|
||||
ent("/", "/dev", "ro,nosuid,nodev,relatime", "tmpfs", "devtmpfs", ignore),
|
||||
ent("/null", "/dev/null", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/zero", "/dev/zero", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/full", "/dev/full", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/random", "/dev/random", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/urandom", "/dev/urandom", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/tty", "/dev/tty", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/", "/dev", "ro,nosuid,nodev,relatime", "tmpfs", ignore, ignore),
|
||||
ent("/null", "/dev/null", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/zero", "/dev/zero", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/full", "/dev/full", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/random", "/dev/random", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/urandom", "/dev/urandom", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/tty", "/dev/tty", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/", "/dev/pts", "rw,nosuid,noexec,relatime", "devpts", "devpts", "rw,mode=620,ptmxmode=666"),
|
||||
ent("/", "/dev/mqueue", "rw,nosuid,nodev,noexec,relatime", "mqueue", "mqueue", "rw"),
|
||||
ent("/", "/dev/shm", "rw,nosuid,nodev,relatime", "tmpfs", "tmpfs", ignore),
|
||||
@@ -292,13 +292,13 @@ var containerTestCases = []struct {
|
||||
Dev(check.MustAbs("/dev"), false),
|
||||
),
|
||||
earlyMnt(
|
||||
ent("/", "/dev", "ro,nosuid,nodev,relatime", "tmpfs", "devtmpfs", ignore),
|
||||
ent("/null", "/dev/null", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/zero", "/dev/zero", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/full", "/dev/full", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/random", "/dev/random", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/urandom", "/dev/urandom", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/tty", "/dev/tty", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/", "/dev", "ro,nosuid,nodev,relatime", "tmpfs", ignore, ignore),
|
||||
ent("/null", "/dev/null", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/zero", "/dev/zero", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/full", "/dev/full", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/random", "/dev/random", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/urandom", "/dev/urandom", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/tty", "/dev/tty", ignore, "devtmpfs", ignore, ignore),
|
||||
ent("/", "/dev/pts", "rw,nosuid,noexec,relatime", "devpts", "devpts", "rw,mode=620,ptmxmode=666"),
|
||||
ent("/", "/dev/shm", "rw,nosuid,nodev,relatime", "tmpfs", "tmpfs", ignore),
|
||||
),
|
||||
@@ -690,11 +690,22 @@ func init() {
|
||||
return fmt.Errorf("got more than %d entries", len(mnt))
|
||||
}
|
||||
|
||||
// ugly hack but should be reliable and is less likely to false negative than comparing by parsed flags
|
||||
cur.VfsOptstr = strings.TrimSuffix(cur.VfsOptstr, ",relatime")
|
||||
cur.VfsOptstr = strings.TrimSuffix(cur.VfsOptstr, ",noatime")
|
||||
mnt[i].VfsOptstr = strings.TrimSuffix(mnt[i].VfsOptstr, ",relatime")
|
||||
mnt[i].VfsOptstr = strings.TrimSuffix(mnt[i].VfsOptstr, ",noatime")
|
||||
// ugly hack but should be reliable and is less likely to
|
||||
//false negative than comparing by parsed flags
|
||||
for _, s := range []string{
|
||||
"relatime",
|
||||
"noatime",
|
||||
} {
|
||||
cur.VfsOptstr = strings.TrimSuffix(cur.VfsOptstr, ","+s)
|
||||
mnt[i].VfsOptstr = strings.TrimSuffix(mnt[i].VfsOptstr, ","+s)
|
||||
}
|
||||
for _, s := range []string{
|
||||
"seclabel",
|
||||
"inode64",
|
||||
} {
|
||||
cur.FsOptstr = strings.Replace(cur.FsOptstr, ","+s, "", 1)
|
||||
mnt[i].FsOptstr = strings.Replace(mnt[i].FsOptstr, ","+s, "", 1)
|
||||
}
|
||||
|
||||
if !cur.EqualWithIgnore(mnt[i], "\x00") {
|
||||
fail = true
|
||||
|
||||
27
container/seccomp/presets_riscv64_test.go
Normal file
27
container/seccomp/presets_riscv64_test.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package seccomp_test
|
||||
|
||||
import (
|
||||
. "hakurei.app/container/seccomp"
|
||||
. "hakurei.app/container/std"
|
||||
)
|
||||
|
||||
var bpfExpected = bpfLookup{
|
||||
{AllowMultiarch | AllowCAN |
|
||||
AllowBluetooth, PresetExt |
|
||||
PresetDenyNS | PresetDenyTTY | PresetDenyDevel |
|
||||
PresetLinux32}: toHash(
|
||||
"a1c4ffa35f4bfbf38061184760b9a09edfcb4964c3b534395e47327b83f3fb61f2f9573ddfcc4772424cc2f5dd12fd32471e6531dbe10e85eda3797dd4fa179f"),
|
||||
|
||||
{0, 0}: toHash(
|
||||
"f3910fd727d087def593e3876c2c6ab9ace71d82ec8cbc992a26223e7bba85e1d7a0b56c5fc6303703f24595825dad8561637edaedd5384b34a6cd080946633c"),
|
||||
{0, PresetExt}: toHash(
|
||||
"741438c5e3f11c36c92ae8c5934f13440675c6e719541c2dbffeda79a10081bcfd9ad8314a60c1d1f53db86c8080c13fffa3bbcf7fe753935679b4b902737286"),
|
||||
{0, PresetStrict}: toHash(
|
||||
"79e9e464d02405c6d74fd2c771bd72a1311e488221c73a9c32db9270219837c54fccec2f36fe2474895547e60c311514567e2e6cf4e7a7fcf909c1ecd1e254a7"),
|
||||
{0, PresetDenyNS | PresetDenyTTY | PresetDenyDevel}: toHash(
|
||||
"3c443715a6c1e557a284862ea8efb70a5d4ecbe67d1226627323e861cd3646fb3e7768ec5b94b93760b7f652cf6916f66e317a4fbf8716d10c3673aa4fc3ae58"),
|
||||
{0, PresetExt | PresetDenyDevel}: toHash(
|
||||
"4448a74e8cc75a4ab63799c4f2cc2a5af63e5f4e8e9b8ac15a1873d647dfa67a4c67b39ed466d8dd32abc64136d401879fc6185c9ab00feeaf59ccf4305f8201"),
|
||||
{0, PresetExt | PresetDenyNS | PresetDenyDevel}: toHash(
|
||||
"c7c86e793cb7192f5f6c735f372cda27eb43ae1045e587f8eadb64c849520a3280b6570a3d7b601d32cddb38021585a2234db38e506cebfd10aa3d6c75440f17"),
|
||||
}
|
||||
@@ -12,6 +12,7 @@ my %syscall_cutoff_arch = (
|
||||
"x86" => 340,
|
||||
"x86_64" => 302,
|
||||
"aarch64" => 281,
|
||||
"riscv64" => 281,
|
||||
);
|
||||
|
||||
print <<EOF;
|
||||
|
||||
55
container/std/syscall_extra_linux_riscv64.go
Normal file
55
container/std/syscall_extra_linux_riscv64.go
Normal file
@@ -0,0 +1,55 @@
|
||||
package std
|
||||
|
||||
import "syscall"
|
||||
|
||||
const (
|
||||
SYS_NEWFSTATAT = syscall.SYS_FSTATAT
|
||||
)
|
||||
|
||||
var syscallNumExtra = map[string]ScmpSyscall{
|
||||
"uselib": SNR_USELIB,
|
||||
"clock_adjtime64": SNR_CLOCK_ADJTIME64,
|
||||
"clock_settime64": SNR_CLOCK_SETTIME64,
|
||||
"umount": SNR_UMOUNT,
|
||||
"chown": SNR_CHOWN,
|
||||
"chown32": SNR_CHOWN32,
|
||||
"fchown32": SNR_FCHOWN32,
|
||||
"lchown": SNR_LCHOWN,
|
||||
"lchown32": SNR_LCHOWN32,
|
||||
"setgid32": SNR_SETGID32,
|
||||
"setgroups32": SNR_SETGROUPS32,
|
||||
"setregid32": SNR_SETREGID32,
|
||||
"setresgid32": SNR_SETRESGID32,
|
||||
"setresuid32": SNR_SETRESUID32,
|
||||
"setreuid32": SNR_SETREUID32,
|
||||
"setuid32": SNR_SETUID32,
|
||||
"modify_ldt": SNR_MODIFY_LDT,
|
||||
"subpage_prot": SNR_SUBPAGE_PROT,
|
||||
"switch_endian": SNR_SWITCH_ENDIAN,
|
||||
"vm86": SNR_VM86,
|
||||
"vm86old": SNR_VM86OLD,
|
||||
}
|
||||
|
||||
const (
|
||||
SNR_USELIB ScmpSyscall = __PNR_uselib
|
||||
SNR_CLOCK_ADJTIME64 ScmpSyscall = __PNR_clock_adjtime64
|
||||
SNR_CLOCK_SETTIME64 ScmpSyscall = __PNR_clock_settime64
|
||||
SNR_UMOUNT ScmpSyscall = __PNR_umount
|
||||
SNR_CHOWN ScmpSyscall = __PNR_chown
|
||||
SNR_CHOWN32 ScmpSyscall = __PNR_chown32
|
||||
SNR_FCHOWN32 ScmpSyscall = __PNR_fchown32
|
||||
SNR_LCHOWN ScmpSyscall = __PNR_lchown
|
||||
SNR_LCHOWN32 ScmpSyscall = __PNR_lchown32
|
||||
SNR_SETGID32 ScmpSyscall = __PNR_setgid32
|
||||
SNR_SETGROUPS32 ScmpSyscall = __PNR_setgroups32
|
||||
SNR_SETREGID32 ScmpSyscall = __PNR_setregid32
|
||||
SNR_SETRESGID32 ScmpSyscall = __PNR_setresgid32
|
||||
SNR_SETRESUID32 ScmpSyscall = __PNR_setresuid32
|
||||
SNR_SETREUID32 ScmpSyscall = __PNR_setreuid32
|
||||
SNR_SETUID32 ScmpSyscall = __PNR_setuid32
|
||||
SNR_MODIFY_LDT ScmpSyscall = __PNR_modify_ldt
|
||||
SNR_SUBPAGE_PROT ScmpSyscall = __PNR_subpage_prot
|
||||
SNR_SWITCH_ENDIAN ScmpSyscall = __PNR_switch_endian
|
||||
SNR_VM86 ScmpSyscall = __PNR_vm86
|
||||
SNR_VM86OLD ScmpSyscall = __PNR_vm86old
|
||||
)
|
||||
719
container/std/syscall_linux_riscv64.go
Normal file
719
container/std/syscall_linux_riscv64.go
Normal file
@@ -0,0 +1,719 @@
|
||||
// mksysnum_linux.pl /usr/include/riscv64-linux-gnu/asm/unistd.h
|
||||
// Code generated by the command above; DO NOT EDIT.
|
||||
|
||||
package std
|
||||
|
||||
import . "syscall"
|
||||
|
||||
var syscallNum = map[string]ScmpSyscall{
|
||||
"io_setup": SNR_IO_SETUP,
|
||||
"io_destroy": SNR_IO_DESTROY,
|
||||
"io_submit": SNR_IO_SUBMIT,
|
||||
"io_cancel": SNR_IO_CANCEL,
|
||||
"io_getevents": SNR_IO_GETEVENTS,
|
||||
"setxattr": SNR_SETXATTR,
|
||||
"lsetxattr": SNR_LSETXATTR,
|
||||
"fsetxattr": SNR_FSETXATTR,
|
||||
"getxattr": SNR_GETXATTR,
|
||||
"lgetxattr": SNR_LGETXATTR,
|
||||
"fgetxattr": SNR_FGETXATTR,
|
||||
"listxattr": SNR_LISTXATTR,
|
||||
"llistxattr": SNR_LLISTXATTR,
|
||||
"flistxattr": SNR_FLISTXATTR,
|
||||
"removexattr": SNR_REMOVEXATTR,
|
||||
"lremovexattr": SNR_LREMOVEXATTR,
|
||||
"fremovexattr": SNR_FREMOVEXATTR,
|
||||
"getcwd": SNR_GETCWD,
|
||||
"lookup_dcookie": SNR_LOOKUP_DCOOKIE,
|
||||
"eventfd2": SNR_EVENTFD2,
|
||||
"epoll_create1": SNR_EPOLL_CREATE1,
|
||||
"epoll_ctl": SNR_EPOLL_CTL,
|
||||
"epoll_pwait": SNR_EPOLL_PWAIT,
|
||||
"dup": SNR_DUP,
|
||||
"dup3": SNR_DUP3,
|
||||
"fcntl": SNR_FCNTL,
|
||||
"inotify_init1": SNR_INOTIFY_INIT1,
|
||||
"inotify_add_watch": SNR_INOTIFY_ADD_WATCH,
|
||||
"inotify_rm_watch": SNR_INOTIFY_RM_WATCH,
|
||||
"ioctl": SNR_IOCTL,
|
||||
"ioprio_set": SNR_IOPRIO_SET,
|
||||
"ioprio_get": SNR_IOPRIO_GET,
|
||||
"flock": SNR_FLOCK,
|
||||
"mknodat": SNR_MKNODAT,
|
||||
"mkdirat": SNR_MKDIRAT,
|
||||
"unlinkat": SNR_UNLINKAT,
|
||||
"symlinkat": SNR_SYMLINKAT,
|
||||
"linkat": SNR_LINKAT,
|
||||
"umount2": SNR_UMOUNT2,
|
||||
"mount": SNR_MOUNT,
|
||||
"pivot_root": SNR_PIVOT_ROOT,
|
||||
"nfsservctl": SNR_NFSSERVCTL,
|
||||
"statfs": SNR_STATFS,
|
||||
"fstatfs": SNR_FSTATFS,
|
||||
"truncate": SNR_TRUNCATE,
|
||||
"ftruncate": SNR_FTRUNCATE,
|
||||
"fallocate": SNR_FALLOCATE,
|
||||
"faccessat": SNR_FACCESSAT,
|
||||
"chdir": SNR_CHDIR,
|
||||
"fchdir": SNR_FCHDIR,
|
||||
"chroot": SNR_CHROOT,
|
||||
"fchmod": SNR_FCHMOD,
|
||||
"fchmodat": SNR_FCHMODAT,
|
||||
"fchownat": SNR_FCHOWNAT,
|
||||
"fchown": SNR_FCHOWN,
|
||||
"openat": SNR_OPENAT,
|
||||
"close": SNR_CLOSE,
|
||||
"vhangup": SNR_VHANGUP,
|
||||
"pipe2": SNR_PIPE2,
|
||||
"quotactl": SNR_QUOTACTL,
|
||||
"getdents64": SNR_GETDENTS64,
|
||||
"lseek": SNR_LSEEK,
|
||||
"read": SNR_READ,
|
||||
"write": SNR_WRITE,
|
||||
"readv": SNR_READV,
|
||||
"writev": SNR_WRITEV,
|
||||
"pread64": SNR_PREAD64,
|
||||
"pwrite64": SNR_PWRITE64,
|
||||
"preadv": SNR_PREADV,
|
||||
"pwritev": SNR_PWRITEV,
|
||||
"sendfile": SNR_SENDFILE,
|
||||
"pselect6": SNR_PSELECT6,
|
||||
"ppoll": SNR_PPOLL,
|
||||
"signalfd4": SNR_SIGNALFD4,
|
||||
"vmsplice": SNR_VMSPLICE,
|
||||
"splice": SNR_SPLICE,
|
||||
"tee": SNR_TEE,
|
||||
"readlinkat": SNR_READLINKAT,
|
||||
"newfstatat": SNR_NEWFSTATAT,
|
||||
"fstat": SNR_FSTAT,
|
||||
"sync": SNR_SYNC,
|
||||
"fsync": SNR_FSYNC,
|
||||
"fdatasync": SNR_FDATASYNC,
|
||||
"sync_file_range": SNR_SYNC_FILE_RANGE,
|
||||
"timerfd_create": SNR_TIMERFD_CREATE,
|
||||
"timerfd_settime": SNR_TIMERFD_SETTIME,
|
||||
"timerfd_gettime": SNR_TIMERFD_GETTIME,
|
||||
"utimensat": SNR_UTIMENSAT,
|
||||
"acct": SNR_ACCT,
|
||||
"capget": SNR_CAPGET,
|
||||
"capset": SNR_CAPSET,
|
||||
"personality": SNR_PERSONALITY,
|
||||
"exit": SNR_EXIT,
|
||||
"exit_group": SNR_EXIT_GROUP,
|
||||
"waitid": SNR_WAITID,
|
||||
"set_tid_address": SNR_SET_TID_ADDRESS,
|
||||
"unshare": SNR_UNSHARE,
|
||||
"futex": SNR_FUTEX,
|
||||
"set_robust_list": SNR_SET_ROBUST_LIST,
|
||||
"get_robust_list": SNR_GET_ROBUST_LIST,
|
||||
"nanosleep": SNR_NANOSLEEP,
|
||||
"getitimer": SNR_GETITIMER,
|
||||
"setitimer": SNR_SETITIMER,
|
||||
"kexec_load": SNR_KEXEC_LOAD,
|
||||
"init_module": SNR_INIT_MODULE,
|
||||
"delete_module": SNR_DELETE_MODULE,
|
||||
"timer_create": SNR_TIMER_CREATE,
|
||||
"timer_gettime": SNR_TIMER_GETTIME,
|
||||
"timer_getoverrun": SNR_TIMER_GETOVERRUN,
|
||||
"timer_settime": SNR_TIMER_SETTIME,
|
||||
"timer_delete": SNR_TIMER_DELETE,
|
||||
"clock_settime": SNR_CLOCK_SETTIME,
|
||||
"clock_gettime": SNR_CLOCK_GETTIME,
|
||||
"clock_getres": SNR_CLOCK_GETRES,
|
||||
"clock_nanosleep": SNR_CLOCK_NANOSLEEP,
|
||||
"syslog": SNR_SYSLOG,
|
||||
"ptrace": SNR_PTRACE,
|
||||
"sched_setparam": SNR_SCHED_SETPARAM,
|
||||
"sched_setscheduler": SNR_SCHED_SETSCHEDULER,
|
||||
"sched_getscheduler": SNR_SCHED_GETSCHEDULER,
|
||||
"sched_getparam": SNR_SCHED_GETPARAM,
|
||||
"sched_setaffinity": SNR_SCHED_SETAFFINITY,
|
||||
"sched_getaffinity": SNR_SCHED_GETAFFINITY,
|
||||
"sched_yield": SNR_SCHED_YIELD,
|
||||
"sched_get_priority_max": SNR_SCHED_GET_PRIORITY_MAX,
|
||||
"sched_get_priority_min": SNR_SCHED_GET_PRIORITY_MIN,
|
||||
"sched_rr_get_interval": SNR_SCHED_RR_GET_INTERVAL,
|
||||
"restart_syscall": SNR_RESTART_SYSCALL,
|
||||
"kill": SNR_KILL,
|
||||
"tkill": SNR_TKILL,
|
||||
"tgkill": SNR_TGKILL,
|
||||
"sigaltstack": SNR_SIGALTSTACK,
|
||||
"rt_sigsuspend": SNR_RT_SIGSUSPEND,
|
||||
"rt_sigaction": SNR_RT_SIGACTION,
|
||||
"rt_sigprocmask": SNR_RT_SIGPROCMASK,
|
||||
"rt_sigpending": SNR_RT_SIGPENDING,
|
||||
"rt_sigtimedwait": SNR_RT_SIGTIMEDWAIT,
|
||||
"rt_sigqueueinfo": SNR_RT_SIGQUEUEINFO,
|
||||
"rt_sigreturn": SNR_RT_SIGRETURN,
|
||||
"setpriority": SNR_SETPRIORITY,
|
||||
"getpriority": SNR_GETPRIORITY,
|
||||
"reboot": SNR_REBOOT,
|
||||
"setregid": SNR_SETREGID,
|
||||
"setgid": SNR_SETGID,
|
||||
"setreuid": SNR_SETREUID,
|
||||
"setuid": SNR_SETUID,
|
||||
"setresuid": SNR_SETRESUID,
|
||||
"getresuid": SNR_GETRESUID,
|
||||
"setresgid": SNR_SETRESGID,
|
||||
"getresgid": SNR_GETRESGID,
|
||||
"setfsuid": SNR_SETFSUID,
|
||||
"setfsgid": SNR_SETFSGID,
|
||||
"times": SNR_TIMES,
|
||||
"setpgid": SNR_SETPGID,
|
||||
"getpgid": SNR_GETPGID,
|
||||
"getsid": SNR_GETSID,
|
||||
"setsid": SNR_SETSID,
|
||||
"getgroups": SNR_GETGROUPS,
|
||||
"setgroups": SNR_SETGROUPS,
|
||||
"uname": SNR_UNAME,
|
||||
"sethostname": SNR_SETHOSTNAME,
|
||||
"setdomainname": SNR_SETDOMAINNAME,
|
||||
"getrlimit": SNR_GETRLIMIT,
|
||||
"setrlimit": SNR_SETRLIMIT,
|
||||
"getrusage": SNR_GETRUSAGE,
|
||||
"umask": SNR_UMASK,
|
||||
"prctl": SNR_PRCTL,
|
||||
"getcpu": SNR_GETCPU,
|
||||
"gettimeofday": SNR_GETTIMEOFDAY,
|
||||
"settimeofday": SNR_SETTIMEOFDAY,
|
||||
"adjtimex": SNR_ADJTIMEX,
|
||||
"getpid": SNR_GETPID,
|
||||
"getppid": SNR_GETPPID,
|
||||
"getuid": SNR_GETUID,
|
||||
"geteuid": SNR_GETEUID,
|
||||
"getgid": SNR_GETGID,
|
||||
"getegid": SNR_GETEGID,
|
||||
"gettid": SNR_GETTID,
|
||||
"sysinfo": SNR_SYSINFO,
|
||||
"mq_open": SNR_MQ_OPEN,
|
||||
"mq_unlink": SNR_MQ_UNLINK,
|
||||
"mq_timedsend": SNR_MQ_TIMEDSEND,
|
||||
"mq_timedreceive": SNR_MQ_TIMEDRECEIVE,
|
||||
"mq_notify": SNR_MQ_NOTIFY,
|
||||
"mq_getsetattr": SNR_MQ_GETSETATTR,
|
||||
"msgget": SNR_MSGGET,
|
||||
"msgctl": SNR_MSGCTL,
|
||||
"msgrcv": SNR_MSGRCV,
|
||||
"msgsnd": SNR_MSGSND,
|
||||
"semget": SNR_SEMGET,
|
||||
"semctl": SNR_SEMCTL,
|
||||
"semtimedop": SNR_SEMTIMEDOP,
|
||||
"semop": SNR_SEMOP,
|
||||
"shmget": SNR_SHMGET,
|
||||
"shmctl": SNR_SHMCTL,
|
||||
"shmat": SNR_SHMAT,
|
||||
"shmdt": SNR_SHMDT,
|
||||
"socket": SNR_SOCKET,
|
||||
"socketpair": SNR_SOCKETPAIR,
|
||||
"bind": SNR_BIND,
|
||||
"listen": SNR_LISTEN,
|
||||
"accept": SNR_ACCEPT,
|
||||
"connect": SNR_CONNECT,
|
||||
"getsockname": SNR_GETSOCKNAME,
|
||||
"getpeername": SNR_GETPEERNAME,
|
||||
"sendto": SNR_SENDTO,
|
||||
"recvfrom": SNR_RECVFROM,
|
||||
"setsockopt": SNR_SETSOCKOPT,
|
||||
"getsockopt": SNR_GETSOCKOPT,
|
||||
"shutdown": SNR_SHUTDOWN,
|
||||
"sendmsg": SNR_SENDMSG,
|
||||
"recvmsg": SNR_RECVMSG,
|
||||
"readahead": SNR_READAHEAD,
|
||||
"brk": SNR_BRK,
|
||||
"munmap": SNR_MUNMAP,
|
||||
"mremap": SNR_MREMAP,
|
||||
"add_key": SNR_ADD_KEY,
|
||||
"request_key": SNR_REQUEST_KEY,
|
||||
"keyctl": SNR_KEYCTL,
|
||||
"clone": SNR_CLONE,
|
||||
"execve": SNR_EXECVE,
|
||||
"mmap": SNR_MMAP,
|
||||
"fadvise64": SNR_FADVISE64,
|
||||
"swapon": SNR_SWAPON,
|
||||
"swapoff": SNR_SWAPOFF,
|
||||
"mprotect": SNR_MPROTECT,
|
||||
"msync": SNR_MSYNC,
|
||||
"mlock": SNR_MLOCK,
|
||||
"munlock": SNR_MUNLOCK,
|
||||
"mlockall": SNR_MLOCKALL,
|
||||
"munlockall": SNR_MUNLOCKALL,
|
||||
"mincore": SNR_MINCORE,
|
||||
"madvise": SNR_MADVISE,
|
||||
"remap_file_pages": SNR_REMAP_FILE_PAGES,
|
||||
"mbind": SNR_MBIND,
|
||||
"get_mempolicy": SNR_GET_MEMPOLICY,
|
||||
"set_mempolicy": SNR_SET_MEMPOLICY,
|
||||
"migrate_pages": SNR_MIGRATE_PAGES,
|
||||
"move_pages": SNR_MOVE_PAGES,
|
||||
"rt_tgsigqueueinfo": SNR_RT_TGSIGQUEUEINFO,
|
||||
"perf_event_open": SNR_PERF_EVENT_OPEN,
|
||||
"accept4": SNR_ACCEPT4,
|
||||
"recvmmsg": SNR_RECVMMSG,
|
||||
"wait4": SNR_WAIT4,
|
||||
"prlimit64": SNR_PRLIMIT64,
|
||||
"fanotify_init": SNR_FANOTIFY_INIT,
|
||||
"fanotify_mark": SNR_FANOTIFY_MARK,
|
||||
"name_to_handle_at": SNR_NAME_TO_HANDLE_AT,
|
||||
"open_by_handle_at": SNR_OPEN_BY_HANDLE_AT,
|
||||
"clock_adjtime": SNR_CLOCK_ADJTIME,
|
||||
"syncfs": SNR_SYNCFS,
|
||||
"setns": SNR_SETNS,
|
||||
"sendmmsg": SNR_SENDMMSG,
|
||||
"process_vm_readv": SNR_PROCESS_VM_READV,
|
||||
"process_vm_writev": SNR_PROCESS_VM_WRITEV,
|
||||
"kcmp": SNR_KCMP,
|
||||
"finit_module": SNR_FINIT_MODULE,
|
||||
"sched_setattr": SNR_SCHED_SETATTR,
|
||||
"sched_getattr": SNR_SCHED_GETATTR,
|
||||
"renameat2": SNR_RENAMEAT2,
|
||||
"seccomp": SNR_SECCOMP,
|
||||
"getrandom": SNR_GETRANDOM,
|
||||
"memfd_create": SNR_MEMFD_CREATE,
|
||||
"bpf": SNR_BPF,
|
||||
"execveat": SNR_EXECVEAT,
|
||||
"userfaultfd": SNR_USERFAULTFD,
|
||||
"membarrier": SNR_MEMBARRIER,
|
||||
"mlock2": SNR_MLOCK2,
|
||||
"copy_file_range": SNR_COPY_FILE_RANGE,
|
||||
"preadv2": SNR_PREADV2,
|
||||
"pwritev2": SNR_PWRITEV2,
|
||||
"pkey_mprotect": SNR_PKEY_MPROTECT,
|
||||
"pkey_alloc": SNR_PKEY_ALLOC,
|
||||
"pkey_free": SNR_PKEY_FREE,
|
||||
"statx": SNR_STATX,
|
||||
"io_pgetevents": SNR_IO_PGETEVENTS,
|
||||
"rseq": SNR_RSEQ,
|
||||
"kexec_file_load": SNR_KEXEC_FILE_LOAD,
|
||||
"pidfd_send_signal": SNR_PIDFD_SEND_SIGNAL,
|
||||
"io_uring_setup": SNR_IO_URING_SETUP,
|
||||
"io_uring_enter": SNR_IO_URING_ENTER,
|
||||
"io_uring_register": SNR_IO_URING_REGISTER,
|
||||
"open_tree": SNR_OPEN_TREE,
|
||||
"move_mount": SNR_MOVE_MOUNT,
|
||||
"fsopen": SNR_FSOPEN,
|
||||
"fsconfig": SNR_FSCONFIG,
|
||||
"fsmount": SNR_FSMOUNT,
|
||||
"fspick": SNR_FSPICK,
|
||||
"pidfd_open": SNR_PIDFD_OPEN,
|
||||
"clone3": SNR_CLONE3,
|
||||
"close_range": SNR_CLOSE_RANGE,
|
||||
"openat2": SNR_OPENAT2,
|
||||
"pidfd_getfd": SNR_PIDFD_GETFD,
|
||||
"faccessat2": SNR_FACCESSAT2,
|
||||
"process_madvise": SNR_PROCESS_MADVISE,
|
||||
"epoll_pwait2": SNR_EPOLL_PWAIT2,
|
||||
"mount_setattr": SNR_MOUNT_SETATTR,
|
||||
"quotactl_fd": SNR_QUOTACTL_FD,
|
||||
"landlock_create_ruleset": SNR_LANDLOCK_CREATE_RULESET,
|
||||
"landlock_add_rule": SNR_LANDLOCK_ADD_RULE,
|
||||
"landlock_restrict_self": SNR_LANDLOCK_RESTRICT_SELF,
|
||||
"memfd_secret": SNR_MEMFD_SECRET,
|
||||
"process_mrelease": SNR_PROCESS_MRELEASE,
|
||||
"futex_waitv": SNR_FUTEX_WAITV,
|
||||
"set_mempolicy_home_node": SNR_SET_MEMPOLICY_HOME_NODE,
|
||||
"cachestat": SNR_CACHESTAT,
|
||||
"fchmodat2": SNR_FCHMODAT2,
|
||||
"map_shadow_stack": SNR_MAP_SHADOW_STACK,
|
||||
"futex_wake": SNR_FUTEX_WAKE,
|
||||
"futex_wait": SNR_FUTEX_WAIT,
|
||||
"futex_requeue": SNR_FUTEX_REQUEUE,
|
||||
"statmount": SNR_STATMOUNT,
|
||||
"listmount": SNR_LISTMOUNT,
|
||||
"lsm_get_self_attr": SNR_LSM_GET_SELF_ATTR,
|
||||
"lsm_set_self_attr": SNR_LSM_SET_SELF_ATTR,
|
||||
"lsm_list_modules": SNR_LSM_LIST_MODULES,
|
||||
"mseal": SNR_MSEAL,
|
||||
"setxattrat": SNR_SETXATTRAT,
|
||||
"getxattrat": SNR_GETXATTRAT,
|
||||
"listxattrat": SNR_LISTXATTRAT,
|
||||
"removexattrat": SNR_REMOVEXATTRAT,
|
||||
}
|
||||
|
||||
const (
|
||||
SYS_USERFAULTFD = 282
|
||||
SYS_MEMBARRIER = 283
|
||||
SYS_MLOCK2 = 284
|
||||
SYS_COPY_FILE_RANGE = 285
|
||||
SYS_PREADV2 = 286
|
||||
SYS_PWRITEV2 = 287
|
||||
SYS_PKEY_MPROTECT = 288
|
||||
SYS_PKEY_ALLOC = 289
|
||||
SYS_PKEY_FREE = 290
|
||||
SYS_STATX = 291
|
||||
SYS_IO_PGETEVENTS = 292
|
||||
SYS_RSEQ = 293
|
||||
SYS_KEXEC_FILE_LOAD = 294
|
||||
SYS_PIDFD_SEND_SIGNAL = 424
|
||||
SYS_IO_URING_SETUP = 425
|
||||
SYS_IO_URING_ENTER = 426
|
||||
SYS_IO_URING_REGISTER = 427
|
||||
SYS_OPEN_TREE = 428
|
||||
SYS_MOVE_MOUNT = 429
|
||||
SYS_FSOPEN = 430
|
||||
SYS_FSCONFIG = 431
|
||||
SYS_FSMOUNT = 432
|
||||
SYS_FSPICK = 433
|
||||
SYS_PIDFD_OPEN = 434
|
||||
SYS_CLONE3 = 435
|
||||
SYS_CLOSE_RANGE = 436
|
||||
SYS_OPENAT2 = 437
|
||||
SYS_PIDFD_GETFD = 438
|
||||
SYS_FACCESSAT2 = 439
|
||||
SYS_PROCESS_MADVISE = 440
|
||||
SYS_EPOLL_PWAIT2 = 441
|
||||
SYS_MOUNT_SETATTR = 442
|
||||
SYS_QUOTACTL_FD = 443
|
||||
SYS_LANDLOCK_CREATE_RULESET = 444
|
||||
SYS_LANDLOCK_ADD_RULE = 445
|
||||
SYS_LANDLOCK_RESTRICT_SELF = 446
|
||||
SYS_MEMFD_SECRET = 447
|
||||
SYS_PROCESS_MRELEASE = 448
|
||||
SYS_FUTEX_WAITV = 449
|
||||
SYS_SET_MEMPOLICY_HOME_NODE = 450
|
||||
SYS_CACHESTAT = 451
|
||||
SYS_FCHMODAT2 = 452
|
||||
SYS_MAP_SHADOW_STACK = 453
|
||||
SYS_FUTEX_WAKE = 454
|
||||
SYS_FUTEX_WAIT = 455
|
||||
SYS_FUTEX_REQUEUE = 456
|
||||
SYS_STATMOUNT = 457
|
||||
SYS_LISTMOUNT = 458
|
||||
SYS_LSM_GET_SELF_ATTR = 459
|
||||
SYS_LSM_SET_SELF_ATTR = 460
|
||||
SYS_LSM_LIST_MODULES = 461
|
||||
SYS_MSEAL = 462
|
||||
SYS_SETXATTRAT = 463
|
||||
SYS_GETXATTRAT = 464
|
||||
SYS_LISTXATTRAT = 465
|
||||
SYS_REMOVEXATTRAT = 466
|
||||
SYS_OPEN_TREE_ATTR = 467
|
||||
SYS_FILE_GETATTR = 468
|
||||
SYS_FILE_SETATTR = 469
|
||||
)
|
||||
|
||||
const (
|
||||
SNR_IO_SETUP ScmpSyscall = SYS_IO_SETUP
|
||||
SNR_IO_DESTROY ScmpSyscall = SYS_IO_DESTROY
|
||||
SNR_IO_SUBMIT ScmpSyscall = SYS_IO_SUBMIT
|
||||
SNR_IO_CANCEL ScmpSyscall = SYS_IO_CANCEL
|
||||
SNR_IO_GETEVENTS ScmpSyscall = SYS_IO_GETEVENTS
|
||||
SNR_SETXATTR ScmpSyscall = SYS_SETXATTR
|
||||
SNR_LSETXATTR ScmpSyscall = SYS_LSETXATTR
|
||||
SNR_FSETXATTR ScmpSyscall = SYS_FSETXATTR
|
||||
SNR_GETXATTR ScmpSyscall = SYS_GETXATTR
|
||||
SNR_LGETXATTR ScmpSyscall = SYS_LGETXATTR
|
||||
SNR_FGETXATTR ScmpSyscall = SYS_FGETXATTR
|
||||
SNR_LISTXATTR ScmpSyscall = SYS_LISTXATTR
|
||||
SNR_LLISTXATTR ScmpSyscall = SYS_LLISTXATTR
|
||||
SNR_FLISTXATTR ScmpSyscall = SYS_FLISTXATTR
|
||||
SNR_REMOVEXATTR ScmpSyscall = SYS_REMOVEXATTR
|
||||
SNR_LREMOVEXATTR ScmpSyscall = SYS_LREMOVEXATTR
|
||||
SNR_FREMOVEXATTR ScmpSyscall = SYS_FREMOVEXATTR
|
||||
SNR_GETCWD ScmpSyscall = SYS_GETCWD
|
||||
SNR_LOOKUP_DCOOKIE ScmpSyscall = SYS_LOOKUP_DCOOKIE
|
||||
SNR_EVENTFD2 ScmpSyscall = SYS_EVENTFD2
|
||||
SNR_EPOLL_CREATE1 ScmpSyscall = SYS_EPOLL_CREATE1
|
||||
SNR_EPOLL_CTL ScmpSyscall = SYS_EPOLL_CTL
|
||||
SNR_EPOLL_PWAIT ScmpSyscall = SYS_EPOLL_PWAIT
|
||||
SNR_DUP ScmpSyscall = SYS_DUP
|
||||
SNR_DUP3 ScmpSyscall = SYS_DUP3
|
||||
SNR_FCNTL ScmpSyscall = SYS_FCNTL
|
||||
SNR_INOTIFY_INIT1 ScmpSyscall = SYS_INOTIFY_INIT1
|
||||
SNR_INOTIFY_ADD_WATCH ScmpSyscall = SYS_INOTIFY_ADD_WATCH
|
||||
SNR_INOTIFY_RM_WATCH ScmpSyscall = SYS_INOTIFY_RM_WATCH
|
||||
SNR_IOCTL ScmpSyscall = SYS_IOCTL
|
||||
SNR_IOPRIO_SET ScmpSyscall = SYS_IOPRIO_SET
|
||||
SNR_IOPRIO_GET ScmpSyscall = SYS_IOPRIO_GET
|
||||
SNR_FLOCK ScmpSyscall = SYS_FLOCK
|
||||
SNR_MKNODAT ScmpSyscall = SYS_MKNODAT
|
||||
SNR_MKDIRAT ScmpSyscall = SYS_MKDIRAT
|
||||
SNR_UNLINKAT ScmpSyscall = SYS_UNLINKAT
|
||||
SNR_SYMLINKAT ScmpSyscall = SYS_SYMLINKAT
|
||||
SNR_LINKAT ScmpSyscall = SYS_LINKAT
|
||||
SNR_UMOUNT2 ScmpSyscall = SYS_UMOUNT2
|
||||
SNR_MOUNT ScmpSyscall = SYS_MOUNT
|
||||
SNR_PIVOT_ROOT ScmpSyscall = SYS_PIVOT_ROOT
|
||||
SNR_NFSSERVCTL ScmpSyscall = SYS_NFSSERVCTL
|
||||
SNR_STATFS ScmpSyscall = SYS_STATFS
|
||||
SNR_FSTATFS ScmpSyscall = SYS_FSTATFS
|
||||
SNR_TRUNCATE ScmpSyscall = SYS_TRUNCATE
|
||||
SNR_FTRUNCATE ScmpSyscall = SYS_FTRUNCATE
|
||||
SNR_FALLOCATE ScmpSyscall = SYS_FALLOCATE
|
||||
SNR_FACCESSAT ScmpSyscall = SYS_FACCESSAT
|
||||
SNR_CHDIR ScmpSyscall = SYS_CHDIR
|
||||
SNR_FCHDIR ScmpSyscall = SYS_FCHDIR
|
||||
SNR_CHROOT ScmpSyscall = SYS_CHROOT
|
||||
SNR_FCHMOD ScmpSyscall = SYS_FCHMOD
|
||||
SNR_FCHMODAT ScmpSyscall = SYS_FCHMODAT
|
||||
SNR_FCHOWNAT ScmpSyscall = SYS_FCHOWNAT
|
||||
SNR_FCHOWN ScmpSyscall = SYS_FCHOWN
|
||||
SNR_OPENAT ScmpSyscall = SYS_OPENAT
|
||||
SNR_CLOSE ScmpSyscall = SYS_CLOSE
|
||||
SNR_VHANGUP ScmpSyscall = SYS_VHANGUP
|
||||
SNR_PIPE2 ScmpSyscall = SYS_PIPE2
|
||||
SNR_QUOTACTL ScmpSyscall = SYS_QUOTACTL
|
||||
SNR_GETDENTS64 ScmpSyscall = SYS_GETDENTS64
|
||||
SNR_LSEEK ScmpSyscall = SYS_LSEEK
|
||||
SNR_READ ScmpSyscall = SYS_READ
|
||||
SNR_WRITE ScmpSyscall = SYS_WRITE
|
||||
SNR_READV ScmpSyscall = SYS_READV
|
||||
SNR_WRITEV ScmpSyscall = SYS_WRITEV
|
||||
SNR_PREAD64 ScmpSyscall = SYS_PREAD64
|
||||
SNR_PWRITE64 ScmpSyscall = SYS_PWRITE64
|
||||
SNR_PREADV ScmpSyscall = SYS_PREADV
|
||||
SNR_PWRITEV ScmpSyscall = SYS_PWRITEV
|
||||
SNR_SENDFILE ScmpSyscall = SYS_SENDFILE
|
||||
SNR_PSELECT6 ScmpSyscall = SYS_PSELECT6
|
||||
SNR_PPOLL ScmpSyscall = SYS_PPOLL
|
||||
SNR_SIGNALFD4 ScmpSyscall = SYS_SIGNALFD4
|
||||
SNR_VMSPLICE ScmpSyscall = SYS_VMSPLICE
|
||||
SNR_SPLICE ScmpSyscall = SYS_SPLICE
|
||||
SNR_TEE ScmpSyscall = SYS_TEE
|
||||
SNR_READLINKAT ScmpSyscall = SYS_READLINKAT
|
||||
SNR_NEWFSTATAT ScmpSyscall = SYS_NEWFSTATAT
|
||||
SNR_FSTAT ScmpSyscall = SYS_FSTAT
|
||||
SNR_SYNC ScmpSyscall = SYS_SYNC
|
||||
SNR_FSYNC ScmpSyscall = SYS_FSYNC
|
||||
SNR_FDATASYNC ScmpSyscall = SYS_FDATASYNC
|
||||
SNR_SYNC_FILE_RANGE ScmpSyscall = SYS_SYNC_FILE_RANGE
|
||||
SNR_TIMERFD_CREATE ScmpSyscall = SYS_TIMERFD_CREATE
|
||||
SNR_TIMERFD_SETTIME ScmpSyscall = SYS_TIMERFD_SETTIME
|
||||
SNR_TIMERFD_GETTIME ScmpSyscall = SYS_TIMERFD_GETTIME
|
||||
SNR_UTIMENSAT ScmpSyscall = SYS_UTIMENSAT
|
||||
SNR_ACCT ScmpSyscall = SYS_ACCT
|
||||
SNR_CAPGET ScmpSyscall = SYS_CAPGET
|
||||
SNR_CAPSET ScmpSyscall = SYS_CAPSET
|
||||
SNR_PERSONALITY ScmpSyscall = SYS_PERSONALITY
|
||||
SNR_EXIT ScmpSyscall = SYS_EXIT
|
||||
SNR_EXIT_GROUP ScmpSyscall = SYS_EXIT_GROUP
|
||||
SNR_WAITID ScmpSyscall = SYS_WAITID
|
||||
SNR_SET_TID_ADDRESS ScmpSyscall = SYS_SET_TID_ADDRESS
|
||||
SNR_UNSHARE ScmpSyscall = SYS_UNSHARE
|
||||
SNR_FUTEX ScmpSyscall = SYS_FUTEX
|
||||
SNR_SET_ROBUST_LIST ScmpSyscall = SYS_SET_ROBUST_LIST
|
||||
SNR_GET_ROBUST_LIST ScmpSyscall = SYS_GET_ROBUST_LIST
|
||||
SNR_NANOSLEEP ScmpSyscall = SYS_NANOSLEEP
|
||||
SNR_GETITIMER ScmpSyscall = SYS_GETITIMER
|
||||
SNR_SETITIMER ScmpSyscall = SYS_SETITIMER
|
||||
SNR_KEXEC_LOAD ScmpSyscall = SYS_KEXEC_LOAD
|
||||
SNR_INIT_MODULE ScmpSyscall = SYS_INIT_MODULE
|
||||
SNR_DELETE_MODULE ScmpSyscall = SYS_DELETE_MODULE
|
||||
SNR_TIMER_CREATE ScmpSyscall = SYS_TIMER_CREATE
|
||||
SNR_TIMER_GETTIME ScmpSyscall = SYS_TIMER_GETTIME
|
||||
SNR_TIMER_GETOVERRUN ScmpSyscall = SYS_TIMER_GETOVERRUN
|
||||
SNR_TIMER_SETTIME ScmpSyscall = SYS_TIMER_SETTIME
|
||||
SNR_TIMER_DELETE ScmpSyscall = SYS_TIMER_DELETE
|
||||
SNR_CLOCK_SETTIME ScmpSyscall = SYS_CLOCK_SETTIME
|
||||
SNR_CLOCK_GETTIME ScmpSyscall = SYS_CLOCK_GETTIME
|
||||
SNR_CLOCK_GETRES ScmpSyscall = SYS_CLOCK_GETRES
|
||||
SNR_CLOCK_NANOSLEEP ScmpSyscall = SYS_CLOCK_NANOSLEEP
|
||||
SNR_SYSLOG ScmpSyscall = SYS_SYSLOG
|
||||
SNR_PTRACE ScmpSyscall = SYS_PTRACE
|
||||
SNR_SCHED_SETPARAM ScmpSyscall = SYS_SCHED_SETPARAM
|
||||
SNR_SCHED_SETSCHEDULER ScmpSyscall = SYS_SCHED_SETSCHEDULER
|
||||
SNR_SCHED_GETSCHEDULER ScmpSyscall = SYS_SCHED_GETSCHEDULER
|
||||
SNR_SCHED_GETPARAM ScmpSyscall = SYS_SCHED_GETPARAM
|
||||
SNR_SCHED_SETAFFINITY ScmpSyscall = SYS_SCHED_SETAFFINITY
|
||||
SNR_SCHED_GETAFFINITY ScmpSyscall = SYS_SCHED_GETAFFINITY
|
||||
SNR_SCHED_YIELD ScmpSyscall = SYS_SCHED_YIELD
|
||||
SNR_SCHED_GET_PRIORITY_MAX ScmpSyscall = SYS_SCHED_GET_PRIORITY_MAX
|
||||
SNR_SCHED_GET_PRIORITY_MIN ScmpSyscall = SYS_SCHED_GET_PRIORITY_MIN
|
||||
SNR_SCHED_RR_GET_INTERVAL ScmpSyscall = SYS_SCHED_RR_GET_INTERVAL
|
||||
SNR_RESTART_SYSCALL ScmpSyscall = SYS_RESTART_SYSCALL
|
||||
SNR_KILL ScmpSyscall = SYS_KILL
|
||||
SNR_TKILL ScmpSyscall = SYS_TKILL
|
||||
SNR_TGKILL ScmpSyscall = SYS_TGKILL
|
||||
SNR_SIGALTSTACK ScmpSyscall = SYS_SIGALTSTACK
|
||||
SNR_RT_SIGSUSPEND ScmpSyscall = SYS_RT_SIGSUSPEND
|
||||
SNR_RT_SIGACTION ScmpSyscall = SYS_RT_SIGACTION
|
||||
SNR_RT_SIGPROCMASK ScmpSyscall = SYS_RT_SIGPROCMASK
|
||||
SNR_RT_SIGPENDING ScmpSyscall = SYS_RT_SIGPENDING
|
||||
SNR_RT_SIGTIMEDWAIT ScmpSyscall = SYS_RT_SIGTIMEDWAIT
|
||||
SNR_RT_SIGQUEUEINFO ScmpSyscall = SYS_RT_SIGQUEUEINFO
|
||||
SNR_RT_SIGRETURN ScmpSyscall = SYS_RT_SIGRETURN
|
||||
SNR_SETPRIORITY ScmpSyscall = SYS_SETPRIORITY
|
||||
SNR_GETPRIORITY ScmpSyscall = SYS_GETPRIORITY
|
||||
SNR_REBOOT ScmpSyscall = SYS_REBOOT
|
||||
SNR_SETREGID ScmpSyscall = SYS_SETREGID
|
||||
SNR_SETGID ScmpSyscall = SYS_SETGID
|
||||
SNR_SETREUID ScmpSyscall = SYS_SETREUID
|
||||
SNR_SETUID ScmpSyscall = SYS_SETUID
|
||||
SNR_SETRESUID ScmpSyscall = SYS_SETRESUID
|
||||
SNR_GETRESUID ScmpSyscall = SYS_GETRESUID
|
||||
SNR_SETRESGID ScmpSyscall = SYS_SETRESGID
|
||||
SNR_GETRESGID ScmpSyscall = SYS_GETRESGID
|
||||
SNR_SETFSUID ScmpSyscall = SYS_SETFSUID
|
||||
SNR_SETFSGID ScmpSyscall = SYS_SETFSGID
|
||||
SNR_TIMES ScmpSyscall = SYS_TIMES
|
||||
SNR_SETPGID ScmpSyscall = SYS_SETPGID
|
||||
SNR_GETPGID ScmpSyscall = SYS_GETPGID
|
||||
SNR_GETSID ScmpSyscall = SYS_GETSID
|
||||
SNR_SETSID ScmpSyscall = SYS_SETSID
|
||||
SNR_GETGROUPS ScmpSyscall = SYS_GETGROUPS
|
||||
SNR_SETGROUPS ScmpSyscall = SYS_SETGROUPS
|
||||
SNR_UNAME ScmpSyscall = SYS_UNAME
|
||||
SNR_SETHOSTNAME ScmpSyscall = SYS_SETHOSTNAME
|
||||
SNR_SETDOMAINNAME ScmpSyscall = SYS_SETDOMAINNAME
|
||||
SNR_GETRLIMIT ScmpSyscall = SYS_GETRLIMIT
|
||||
SNR_SETRLIMIT ScmpSyscall = SYS_SETRLIMIT
|
||||
SNR_GETRUSAGE ScmpSyscall = SYS_GETRUSAGE
|
||||
SNR_UMASK ScmpSyscall = SYS_UMASK
|
||||
SNR_PRCTL ScmpSyscall = SYS_PRCTL
|
||||
SNR_GETCPU ScmpSyscall = SYS_GETCPU
|
||||
SNR_GETTIMEOFDAY ScmpSyscall = SYS_GETTIMEOFDAY
|
||||
SNR_SETTIMEOFDAY ScmpSyscall = SYS_SETTIMEOFDAY
|
||||
SNR_ADJTIMEX ScmpSyscall = SYS_ADJTIMEX
|
||||
SNR_GETPID ScmpSyscall = SYS_GETPID
|
||||
SNR_GETPPID ScmpSyscall = SYS_GETPPID
|
||||
SNR_GETUID ScmpSyscall = SYS_GETUID
|
||||
SNR_GETEUID ScmpSyscall = SYS_GETEUID
|
||||
SNR_GETGID ScmpSyscall = SYS_GETGID
|
||||
SNR_GETEGID ScmpSyscall = SYS_GETEGID
|
||||
SNR_GETTID ScmpSyscall = SYS_GETTID
|
||||
SNR_SYSINFO ScmpSyscall = SYS_SYSINFO
|
||||
SNR_MQ_OPEN ScmpSyscall = SYS_MQ_OPEN
|
||||
SNR_MQ_UNLINK ScmpSyscall = SYS_MQ_UNLINK
|
||||
SNR_MQ_TIMEDSEND ScmpSyscall = SYS_MQ_TIMEDSEND
|
||||
SNR_MQ_TIMEDRECEIVE ScmpSyscall = SYS_MQ_TIMEDRECEIVE
|
||||
SNR_MQ_NOTIFY ScmpSyscall = SYS_MQ_NOTIFY
|
||||
SNR_MQ_GETSETATTR ScmpSyscall = SYS_MQ_GETSETATTR
|
||||
SNR_MSGGET ScmpSyscall = SYS_MSGGET
|
||||
SNR_MSGCTL ScmpSyscall = SYS_MSGCTL
|
||||
SNR_MSGRCV ScmpSyscall = SYS_MSGRCV
|
||||
SNR_MSGSND ScmpSyscall = SYS_MSGSND
|
||||
SNR_SEMGET ScmpSyscall = SYS_SEMGET
|
||||
SNR_SEMCTL ScmpSyscall = SYS_SEMCTL
|
||||
SNR_SEMTIMEDOP ScmpSyscall = SYS_SEMTIMEDOP
|
||||
SNR_SEMOP ScmpSyscall = SYS_SEMOP
|
||||
SNR_SHMGET ScmpSyscall = SYS_SHMGET
|
||||
SNR_SHMCTL ScmpSyscall = SYS_SHMCTL
|
||||
SNR_SHMAT ScmpSyscall = SYS_SHMAT
|
||||
SNR_SHMDT ScmpSyscall = SYS_SHMDT
|
||||
SNR_SOCKET ScmpSyscall = SYS_SOCKET
|
||||
SNR_SOCKETPAIR ScmpSyscall = SYS_SOCKETPAIR
|
||||
SNR_BIND ScmpSyscall = SYS_BIND
|
||||
SNR_LISTEN ScmpSyscall = SYS_LISTEN
|
||||
SNR_ACCEPT ScmpSyscall = SYS_ACCEPT
|
||||
SNR_CONNECT ScmpSyscall = SYS_CONNECT
|
||||
SNR_GETSOCKNAME ScmpSyscall = SYS_GETSOCKNAME
|
||||
SNR_GETPEERNAME ScmpSyscall = SYS_GETPEERNAME
|
||||
SNR_SENDTO ScmpSyscall = SYS_SENDTO
|
||||
SNR_RECVFROM ScmpSyscall = SYS_RECVFROM
|
||||
SNR_SETSOCKOPT ScmpSyscall = SYS_SETSOCKOPT
|
||||
SNR_GETSOCKOPT ScmpSyscall = SYS_GETSOCKOPT
|
||||
SNR_SHUTDOWN ScmpSyscall = SYS_SHUTDOWN
|
||||
SNR_SENDMSG ScmpSyscall = SYS_SENDMSG
|
||||
SNR_RECVMSG ScmpSyscall = SYS_RECVMSG
|
||||
SNR_READAHEAD ScmpSyscall = SYS_READAHEAD
|
||||
SNR_BRK ScmpSyscall = SYS_BRK
|
||||
SNR_MUNMAP ScmpSyscall = SYS_MUNMAP
|
||||
SNR_MREMAP ScmpSyscall = SYS_MREMAP
|
||||
SNR_ADD_KEY ScmpSyscall = SYS_ADD_KEY
|
||||
SNR_REQUEST_KEY ScmpSyscall = SYS_REQUEST_KEY
|
||||
SNR_KEYCTL ScmpSyscall = SYS_KEYCTL
|
||||
SNR_CLONE ScmpSyscall = SYS_CLONE
|
||||
SNR_EXECVE ScmpSyscall = SYS_EXECVE
|
||||
SNR_MMAP ScmpSyscall = SYS_MMAP
|
||||
SNR_FADVISE64 ScmpSyscall = SYS_FADVISE64
|
||||
SNR_SWAPON ScmpSyscall = SYS_SWAPON
|
||||
SNR_SWAPOFF ScmpSyscall = SYS_SWAPOFF
|
||||
SNR_MPROTECT ScmpSyscall = SYS_MPROTECT
|
||||
SNR_MSYNC ScmpSyscall = SYS_MSYNC
|
||||
SNR_MLOCK ScmpSyscall = SYS_MLOCK
|
||||
SNR_MUNLOCK ScmpSyscall = SYS_MUNLOCK
|
||||
SNR_MLOCKALL ScmpSyscall = SYS_MLOCKALL
|
||||
SNR_MUNLOCKALL ScmpSyscall = SYS_MUNLOCKALL
|
||||
SNR_MINCORE ScmpSyscall = SYS_MINCORE
|
||||
SNR_MADVISE ScmpSyscall = SYS_MADVISE
|
||||
SNR_REMAP_FILE_PAGES ScmpSyscall = SYS_REMAP_FILE_PAGES
|
||||
SNR_MBIND ScmpSyscall = SYS_MBIND
|
||||
SNR_GET_MEMPOLICY ScmpSyscall = SYS_GET_MEMPOLICY
|
||||
SNR_SET_MEMPOLICY ScmpSyscall = SYS_SET_MEMPOLICY
|
||||
SNR_MIGRATE_PAGES ScmpSyscall = SYS_MIGRATE_PAGES
|
||||
SNR_MOVE_PAGES ScmpSyscall = SYS_MOVE_PAGES
|
||||
SNR_RT_TGSIGQUEUEINFO ScmpSyscall = SYS_RT_TGSIGQUEUEINFO
|
||||
SNR_PERF_EVENT_OPEN ScmpSyscall = SYS_PERF_EVENT_OPEN
|
||||
SNR_ACCEPT4 ScmpSyscall = SYS_ACCEPT4
|
||||
SNR_RECVMMSG ScmpSyscall = SYS_RECVMMSG
|
||||
SNR_WAIT4 ScmpSyscall = SYS_WAIT4
|
||||
SNR_PRLIMIT64 ScmpSyscall = SYS_PRLIMIT64
|
||||
SNR_FANOTIFY_INIT ScmpSyscall = SYS_FANOTIFY_INIT
|
||||
SNR_FANOTIFY_MARK ScmpSyscall = SYS_FANOTIFY_MARK
|
||||
SNR_NAME_TO_HANDLE_AT ScmpSyscall = SYS_NAME_TO_HANDLE_AT
|
||||
SNR_OPEN_BY_HANDLE_AT ScmpSyscall = SYS_OPEN_BY_HANDLE_AT
|
||||
SNR_CLOCK_ADJTIME ScmpSyscall = SYS_CLOCK_ADJTIME
|
||||
SNR_SYNCFS ScmpSyscall = SYS_SYNCFS
|
||||
SNR_SETNS ScmpSyscall = SYS_SETNS
|
||||
SNR_SENDMMSG ScmpSyscall = SYS_SENDMMSG
|
||||
SNR_PROCESS_VM_READV ScmpSyscall = SYS_PROCESS_VM_READV
|
||||
SNR_PROCESS_VM_WRITEV ScmpSyscall = SYS_PROCESS_VM_WRITEV
|
||||
SNR_KCMP ScmpSyscall = SYS_KCMP
|
||||
SNR_FINIT_MODULE ScmpSyscall = SYS_FINIT_MODULE
|
||||
SNR_SCHED_SETATTR ScmpSyscall = SYS_SCHED_SETATTR
|
||||
SNR_SCHED_GETATTR ScmpSyscall = SYS_SCHED_GETATTR
|
||||
SNR_RENAMEAT2 ScmpSyscall = SYS_RENAMEAT2
|
||||
SNR_SECCOMP ScmpSyscall = SYS_SECCOMP
|
||||
SNR_GETRANDOM ScmpSyscall = SYS_GETRANDOM
|
||||
SNR_MEMFD_CREATE ScmpSyscall = SYS_MEMFD_CREATE
|
||||
SNR_BPF ScmpSyscall = SYS_BPF
|
||||
SNR_EXECVEAT ScmpSyscall = SYS_EXECVEAT
|
||||
SNR_USERFAULTFD ScmpSyscall = SYS_USERFAULTFD
|
||||
SNR_MEMBARRIER ScmpSyscall = SYS_MEMBARRIER
|
||||
SNR_MLOCK2 ScmpSyscall = SYS_MLOCK2
|
||||
SNR_COPY_FILE_RANGE ScmpSyscall = SYS_COPY_FILE_RANGE
|
||||
SNR_PREADV2 ScmpSyscall = SYS_PREADV2
|
||||
SNR_PWRITEV2 ScmpSyscall = SYS_PWRITEV2
|
||||
SNR_PKEY_MPROTECT ScmpSyscall = SYS_PKEY_MPROTECT
|
||||
SNR_PKEY_ALLOC ScmpSyscall = SYS_PKEY_ALLOC
|
||||
SNR_PKEY_FREE ScmpSyscall = SYS_PKEY_FREE
|
||||
SNR_STATX ScmpSyscall = SYS_STATX
|
||||
SNR_IO_PGETEVENTS ScmpSyscall = SYS_IO_PGETEVENTS
|
||||
SNR_RSEQ ScmpSyscall = SYS_RSEQ
|
||||
SNR_KEXEC_FILE_LOAD ScmpSyscall = SYS_KEXEC_FILE_LOAD
|
||||
SNR_PIDFD_SEND_SIGNAL ScmpSyscall = SYS_PIDFD_SEND_SIGNAL
|
||||
SNR_IO_URING_SETUP ScmpSyscall = SYS_IO_URING_SETUP
|
||||
SNR_IO_URING_ENTER ScmpSyscall = SYS_IO_URING_ENTER
|
||||
SNR_IO_URING_REGISTER ScmpSyscall = SYS_IO_URING_REGISTER
|
||||
SNR_OPEN_TREE ScmpSyscall = SYS_OPEN_TREE
|
||||
SNR_MOVE_MOUNT ScmpSyscall = SYS_MOVE_MOUNT
|
||||
SNR_FSOPEN ScmpSyscall = SYS_FSOPEN
|
||||
SNR_FSCONFIG ScmpSyscall = SYS_FSCONFIG
|
||||
SNR_FSMOUNT ScmpSyscall = SYS_FSMOUNT
|
||||
SNR_FSPICK ScmpSyscall = SYS_FSPICK
|
||||
SNR_PIDFD_OPEN ScmpSyscall = SYS_PIDFD_OPEN
|
||||
SNR_CLONE3 ScmpSyscall = SYS_CLONE3
|
||||
SNR_CLOSE_RANGE ScmpSyscall = SYS_CLOSE_RANGE
|
||||
SNR_OPENAT2 ScmpSyscall = SYS_OPENAT2
|
||||
SNR_PIDFD_GETFD ScmpSyscall = SYS_PIDFD_GETFD
|
||||
SNR_FACCESSAT2 ScmpSyscall = SYS_FACCESSAT2
|
||||
SNR_PROCESS_MADVISE ScmpSyscall = SYS_PROCESS_MADVISE
|
||||
SNR_EPOLL_PWAIT2 ScmpSyscall = SYS_EPOLL_PWAIT2
|
||||
SNR_MOUNT_SETATTR ScmpSyscall = SYS_MOUNT_SETATTR
|
||||
SNR_QUOTACTL_FD ScmpSyscall = SYS_QUOTACTL_FD
|
||||
SNR_LANDLOCK_CREATE_RULESET ScmpSyscall = SYS_LANDLOCK_CREATE_RULESET
|
||||
SNR_LANDLOCK_ADD_RULE ScmpSyscall = SYS_LANDLOCK_ADD_RULE
|
||||
SNR_LANDLOCK_RESTRICT_SELF ScmpSyscall = SYS_LANDLOCK_RESTRICT_SELF
|
||||
SNR_MEMFD_SECRET ScmpSyscall = SYS_MEMFD_SECRET
|
||||
SNR_PROCESS_MRELEASE ScmpSyscall = SYS_PROCESS_MRELEASE
|
||||
SNR_FUTEX_WAITV ScmpSyscall = SYS_FUTEX_WAITV
|
||||
SNR_SET_MEMPOLICY_HOME_NODE ScmpSyscall = SYS_SET_MEMPOLICY_HOME_NODE
|
||||
SNR_CACHESTAT ScmpSyscall = SYS_CACHESTAT
|
||||
SNR_FCHMODAT2 ScmpSyscall = SYS_FCHMODAT2
|
||||
SNR_MAP_SHADOW_STACK ScmpSyscall = SYS_MAP_SHADOW_STACK
|
||||
SNR_FUTEX_WAKE ScmpSyscall = SYS_FUTEX_WAKE
|
||||
SNR_FUTEX_WAIT ScmpSyscall = SYS_FUTEX_WAIT
|
||||
SNR_FUTEX_REQUEUE ScmpSyscall = SYS_FUTEX_REQUEUE
|
||||
SNR_STATMOUNT ScmpSyscall = SYS_STATMOUNT
|
||||
SNR_LISTMOUNT ScmpSyscall = SYS_LISTMOUNT
|
||||
SNR_LSM_GET_SELF_ATTR ScmpSyscall = SYS_LSM_GET_SELF_ATTR
|
||||
SNR_LSM_SET_SELF_ATTR ScmpSyscall = SYS_LSM_SET_SELF_ATTR
|
||||
SNR_LSM_LIST_MODULES ScmpSyscall = SYS_LSM_LIST_MODULES
|
||||
SNR_MSEAL ScmpSyscall = SYS_MSEAL
|
||||
SNR_SETXATTRAT ScmpSyscall = SYS_SETXATTRAT
|
||||
SNR_GETXATTRAT ScmpSyscall = SYS_GETXATTRAT
|
||||
SNR_LISTXATTRAT ScmpSyscall = SYS_LISTXATTRAT
|
||||
SNR_REMOVEXATTRAT ScmpSyscall = SYS_REMOVEXATTRAT
|
||||
SNR_OPEN_TREE_ATTR ScmpSyscall = SYS_OPEN_TREE_ATTR
|
||||
SNR_FILE_GETATTR ScmpSyscall = SYS_FILE_GETATTR
|
||||
SNR_FILE_SETATTR ScmpSyscall = SYS_FILE_SETATTR
|
||||
)
|
||||
12
dist/install.sh
vendored
12
dist/install.sh
vendored
@@ -1,12 +1,12 @@
|
||||
#!/bin/sh
|
||||
cd "$(dirname -- "$0")" || exit 1
|
||||
|
||||
install -vDm0755 "bin/hakurei" "${HAKUREI_INSTALL_PREFIX}/usr/bin/hakurei"
|
||||
install -vDm0755 "bin/sharefs" "${HAKUREI_INSTALL_PREFIX}/usr/bin/sharefs"
|
||||
install -vDm0755 "bin/hakurei" "${DESTDIR}/usr/bin/hakurei"
|
||||
install -vDm0755 "bin/sharefs" "${DESTDIR}/usr/bin/sharefs"
|
||||
|
||||
install -vDm4511 "bin/hsu" "${HAKUREI_INSTALL_PREFIX}/usr/bin/hsu"
|
||||
if [ ! -f "${HAKUREI_INSTALL_PREFIX}/etc/hsurc" ]; then
|
||||
install -vDm0400 "hsurc.default" "${HAKUREI_INSTALL_PREFIX}/etc/hsurc"
|
||||
install -vDm4511 "bin/hsu" "${DESTDIR}/usr/bin/hsu"
|
||||
if [ ! -f "${DESTDIR}/etc/hsurc" ]; then
|
||||
install -vDm0400 "hsurc.default" "${DESTDIR}/etc/hsurc"
|
||||
fi
|
||||
|
||||
install -vDm0644 "comp/_hakurei" "${HAKUREI_INSTALL_PREFIX}/usr/share/zsh/site-functions/_hakurei"
|
||||
install -vDm0644 "comp/_hakurei" "${DESTDIR}/usr/share/zsh/site-functions/_hakurei"
|
||||
23
dist/release.sh
vendored
23
dist/release.sh
vendored
@@ -1,20 +1,31 @@
|
||||
#!/bin/sh -e
|
||||
cd "$(dirname -- "$0")/.."
|
||||
VERSION="${HAKUREI_VERSION:-untagged}"
|
||||
pname="hakurei-${VERSION}"
|
||||
out="dist/${pname}"
|
||||
pname="hakurei-${VERSION}-$(go env GOARCH)"
|
||||
out="${DESTDIR:-dist}/${pname}"
|
||||
|
||||
echo '# Preparing distribution files.'
|
||||
mkdir -p "${out}"
|
||||
cp -v "README.md" "dist/hsurc.default" "dist/install.sh" "${out}"
|
||||
cp -rv "dist/comp" "${out}"
|
||||
echo
|
||||
|
||||
echo '# Building hakurei.'
|
||||
go generate ./...
|
||||
go build -trimpath -v -o "${out}/bin/" -ldflags "-s -w -buildid= -extldflags '-static'
|
||||
go build -trimpath -v -o "${out}/bin/" -ldflags "-s -w
|
||||
-buildid= -extldflags '-static'
|
||||
-X hakurei.app/internal/info.buildVersion=${VERSION}
|
||||
-X hakurei.app/internal/info.hakureiPath=/usr/bin/hakurei
|
||||
-X hakurei.app/internal/info.hsuPath=/usr/bin/hsu
|
||||
-X main.hakureiPath=/usr/bin/hakurei" ./...
|
||||
echo
|
||||
|
||||
rm -f "./${out}.tar.gz" && tar -C dist -czf "${out}.tar.gz" "${pname}"
|
||||
rm -rf "./${out}"
|
||||
(cd dist && sha512sum "${pname}.tar.gz" > "${pname}.tar.gz.sha512")
|
||||
echo '# Testing hakurei.'
|
||||
go test -ldflags='-buildid= -extldflags=-static' ./...
|
||||
echo
|
||||
|
||||
echo '# Creating distribution.'
|
||||
rm -f "${out}.tar.gz" && tar -C "${out}/.." -vczf "${out}.tar.gz" "${pname}"
|
||||
rm -rf "${out}"
|
||||
(cd "${out}/.." && sha512sum "${pname}.tar.gz" > "${pname}.tar.gz.sha512")
|
||||
echo
|
||||
|
||||
22
flake.nix
22
flake.nix
@@ -143,18 +143,26 @@
|
||||
"bin/mount.fuse.sharefs" = "${hakurei}/libexec/sharefs";
|
||||
};
|
||||
|
||||
dist = pkgs.runCommand "${hakurei.name}-dist" { buildInputs = hakurei.targetPkgs ++ [ pkgs.pkgsStatic.musl ]; } ''
|
||||
# go requires XDG_CACHE_HOME for the build cache
|
||||
export XDG_CACHE_HOME="$(mktemp -d)"
|
||||
|
||||
# get a different workdir as go does not like /build
|
||||
dist =
|
||||
pkgs.runCommand "${hakurei.name}-dist"
|
||||
{
|
||||
buildInputs = hakurei.targetPkgs ++ [
|
||||
pkgs.pkgsStatic.musl
|
||||
];
|
||||
}
|
||||
''
|
||||
cd $(mktemp -d) \
|
||||
&& cp -r ${hakurei.src}/. . \
|
||||
&& chmod +w cmd && cp -r ${hsu.src}/. cmd/hsu/ \
|
||||
&& chmod -R +w .
|
||||
|
||||
export HAKUREI_VERSION="v${hakurei.version}"
|
||||
CC="clang -O3 -Werror" ./dist/release.sh && mkdir $out && cp -v "dist/hakurei-$HAKUREI_VERSION.tar.gz"* $out
|
||||
CC="musl-clang -O3 -Werror -Qunused-arguments" \
|
||||
GOCACHE="$(mktemp -d)" \
|
||||
HAKUREI_TEST_SKIP_ACL=1 \
|
||||
PATH="${pkgs.pkgsStatic.musl.bin}/bin:$PATH" \
|
||||
DESTDIR="$out" \
|
||||
HAKUREI_VERSION="v${hakurei.version}" \
|
||||
./dist/release.sh
|
||||
'';
|
||||
}
|
||||
);
|
||||
|
||||
@@ -24,9 +24,8 @@ var (
|
||||
)
|
||||
|
||||
func TestUpdate(t *testing.T) {
|
||||
if os.Getenv("GO_TEST_SKIP_ACL") == "1" {
|
||||
t.Log("acl test skipped")
|
||||
t.SkipNow()
|
||||
if os.Getenv("HAKUREI_TEST_SKIP_ACL") == "1" {
|
||||
t.Skip("acl test skipped")
|
||||
}
|
||||
|
||||
testFilePath := path.Join(t.TempDir(), testFileName)
|
||||
@@ -143,6 +142,7 @@ func (c *getFAclInvocation) run(name string) error {
|
||||
}
|
||||
|
||||
c.cmd = exec.Command("getfacl", "--omit-header", "--absolute-names", "--numeric", name)
|
||||
c.cmd.Stderr = os.Stderr
|
||||
|
||||
scanErr := make(chan error, 1)
|
||||
if p, err := c.cmd.StdoutPipe(); err != nil {
|
||||
@@ -254,7 +254,7 @@ func getfacl(t *testing.T, name string) []*getFAclResp {
|
||||
t.Fatalf("getfacl: error = %v", err)
|
||||
}
|
||||
if len(c.pe) != 0 {
|
||||
t.Errorf("errors encountered parsing getfacl output\n%s", errors.Join(c.pe...).Error())
|
||||
t.Errorf("errors encountered parsing getfacl output\n%s", errors.Join(c.pe...))
|
||||
}
|
||||
return c.val
|
||||
}
|
||||
|
||||
@@ -28,14 +28,6 @@ type FlatEntry struct {
|
||||
| data []byte |
|
||||
*/
|
||||
|
||||
// wordSize is the boundary which binary segments are always aligned to.
|
||||
const wordSize = 8
|
||||
|
||||
// alignSize returns the padded size for aligning sz.
|
||||
func alignSize(sz int) int {
|
||||
return sz + (wordSize-(sz)%wordSize)%wordSize
|
||||
}
|
||||
|
||||
// Encode encodes the entry for transmission or hashing.
|
||||
func (ent *FlatEntry) Encode(w io.Writer) (n int, err error) {
|
||||
pPathSize := alignSize(len(ent.Path))
|
||||
|
||||
@@ -76,7 +76,7 @@ func TestFlatten(t *testing.T) {
|
||||
"checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU": {Mode: 0400, Data: []byte("\x7f\xe1\x69\xa2\xdd\x63\x96\x26\x83\x79\x61\x8b\xf0\x3f\xd5\x16\x9a\x39\x3a\xdb\xcf\xb1\xbc\x8d\x33\xff\x75\xee\x62\x56\xa9\xf0\x27\xac\x13\x94\x69")},
|
||||
|
||||
"identifier": {Mode: fs.ModeDir | 0700},
|
||||
"identifier/NqVORkT6L9HX6Za7kT2zcibY10qFqBaxEjPiYFrBQX-ZFr3yxCzJxbKOP0zVjeWb": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
|
||||
"identifier/oM-2pUlk-mOxK1t3aMWZer69UdOQlAXiAgMrpZ1476VoOqpYVP1aGFS9_HYy-D8_": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
|
||||
|
||||
"work": {Mode: fs.ModeDir | 0700},
|
||||
}, []pkg.FlatEntry{
|
||||
@@ -86,10 +86,10 @@ func TestFlatten(t *testing.T) {
|
||||
{Mode: 0400, Path: "checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU", Data: []byte("\x7f\xe1\x69\xa2\xdd\x63\x96\x26\x83\x79\x61\x8b\xf0\x3f\xd5\x16\x9a\x39\x3a\xdb\xcf\xb1\xbc\x8d\x33\xff\x75\xee\x62\x56\xa9\xf0\x27\xac\x13\x94\x69")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "identifier"},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/NqVORkT6L9HX6Za7kT2zcibY10qFqBaxEjPiYFrBQX-ZFr3yxCzJxbKOP0zVjeWb", Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/oM-2pUlk-mOxK1t3aMWZer69UdOQlAXiAgMrpZ1476VoOqpYVP1aGFS9_HYy-D8_", Data: []byte("../checksum/fLYGIMHgN1louE-JzITJZJo2SDniPu-IHBXubtvQWFO-hXnDVKNuscV7-zlyr5fU")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "work"},
|
||||
}, pkg.MustDecode("bqtn69RkV5E7V7GhhgCFjcvbxmaqrO8DywamM4Tyjf10F6EJBHjXiIa_tFRtF4iN"), nil},
|
||||
}, pkg.MustDecode("L_0RFHpr9JUS4Zp14rz2dESSRvfLzpvqsLhR1-YjQt8hYlmEdVl7vI3_-v8UNPKs"), nil},
|
||||
|
||||
{"sample directory step simple", fstest.MapFS{
|
||||
".": {Mode: fs.ModeDir | 0500},
|
||||
@@ -208,8 +208,8 @@ func TestFlatten(t *testing.T) {
|
||||
"checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/work": {Mode: fs.ModeDir | 0500},
|
||||
|
||||
"identifier": {Mode: fs.ModeDir | 0700},
|
||||
"identifier/-P_1iw6yVq_letMHncqcExSE0bYcDhYI5OdY6b1wKASf-Corufvj__XTBUq2Qd2a": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
|
||||
"identifier/0_rRxIqbX9LK9L_KDbuafotFz6HFkonNgO9gXhK1asM_Y1Pxn0amg756vRTo6m74": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
|
||||
"identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
|
||||
"identifier/rg7F1D5hwv6o4xctjD5zDq4i5MD0mArTsUIWfhUbik8xC6Bsyt3mjXXOm3goojTz": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
|
||||
|
||||
"temp": {Mode: fs.ModeDir | 0700},
|
||||
"work": {Mode: fs.ModeDir | 0700},
|
||||
@@ -230,12 +230,12 @@ func TestFlatten(t *testing.T) {
|
||||
{Mode: fs.ModeDir | 0500, Path: "checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM/work"},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "identifier"},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/-P_1iw6yVq_letMHncqcExSE0bYcDhYI5OdY6b1wKASf-Corufvj__XTBUq2Qd2a", Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/0_rRxIqbX9LK9L_KDbuafotFz6HFkonNgO9gXhK1asM_Y1Pxn0amg756vRTo6m74", Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw", Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/rg7F1D5hwv6o4xctjD5zDq4i5MD0mArTsUIWfhUbik8xC6Bsyt3mjXXOm3goojTz", Data: []byte("../checksum/cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "temp"},
|
||||
{Mode: fs.ModeDir | 0700, Path: "work"},
|
||||
}, pkg.MustDecode("sxbgyX-bPoezbha214n2lbQhiVfTUBkhZ0EX6zI7mmkMdrCdwuMwhMBJphLQsy94"), nil},
|
||||
}, pkg.MustDecode("NQTlc466JmSVLIyWklm_u8_g95jEEb98PxJU-kjwxLpfdjwMWJq0G8ze9R4Vo1Vu"), nil},
|
||||
|
||||
{"sample tar expand step unpack", fstest.MapFS{
|
||||
".": {Mode: fs.ModeDir | 0500},
|
||||
@@ -255,8 +255,8 @@ func TestFlatten(t *testing.T) {
|
||||
"checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN/libedac.so": {Mode: fs.ModeSymlink | 0777, Data: []byte("/proc/nonexistent/libedac.so")},
|
||||
|
||||
"identifier": {Mode: fs.ModeDir | 0700},
|
||||
"identifier/-P_1iw6yVq_letMHncqcExSE0bYcDhYI5OdY6b1wKASf-Corufvj__XTBUq2Qd2a": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
|
||||
"identifier/0_rRxIqbX9LK9L_KDbuafotFz6HFkonNgO9gXhK1asM_Y1Pxn0amg756vRTo6m74": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
|
||||
"identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
|
||||
"identifier/_v1blm2h-_KA-dVaawdpLas6MjHc6rbhhFS8JWwx8iJxZGUu8EBbRrhr5AaZ9PJL": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
|
||||
|
||||
"temp": {Mode: fs.ModeDir | 0700},
|
||||
"work": {Mode: fs.ModeDir | 0700},
|
||||
@@ -268,12 +268,12 @@ func TestFlatten(t *testing.T) {
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN/libedac.so", Data: []byte("/proc/nonexistent/libedac.so")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "identifier"},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/-P_1iw6yVq_letMHncqcExSE0bYcDhYI5OdY6b1wKASf-Corufvj__XTBUq2Qd2a", Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/0_rRxIqbX9LK9L_KDbuafotFz6HFkonNgO9gXhK1asM_Y1Pxn0amg756vRTo6m74", Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/W5S65DEhawz_WKaok5NjUKLmnD9dNl5RPauNJjcOVcB3VM4eGhSaLGmXbL8vZpiw", Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_v1blm2h-_KA-dVaawdpLas6MjHc6rbhhFS8JWwx8iJxZGUu8EBbRrhr5AaZ9PJL", Data: []byte("../checksum/CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "temp"},
|
||||
{Mode: fs.ModeDir | 0700, Path: "work"},
|
||||
}, pkg.MustDecode("4I8wx_h7NSJTlG5lbuz-GGEXrOg0GYC3M_503LYEBhv5XGWXfNIdIY9Q3eVSYldX"), nil},
|
||||
}, pkg.MustDecode("hSoSSgCYTNonX3Q8FjvjD1fBl-E-BQyA6OTXro2OadXqbST4tZ-akGXszdeqphRe"), nil},
|
||||
|
||||
{"testtool", fstest.MapFS{
|
||||
".": {Mode: fs.ModeDir | 0500},
|
||||
@@ -295,9 +295,9 @@ func TestFlatten(t *testing.T) {
|
||||
"checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb": {Mode: 0400, Data: []byte{}},
|
||||
|
||||
"identifier": {Mode: fs.ModeDir | 0700},
|
||||
"identifier/U2cbgVgEtjfRuvHfE1cQnZ3t8yoexULQyo_VLgvxAVJSsobMcNaFIsuDWtmt7kzK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
|
||||
"identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
"identifier/dztPS6jRjiZtCF4_p8AzfnxGp6obkhrgFVsxdodbKWUoAEVtDz3MykepJB4kI_ks": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
|
||||
"temp": {Mode: fs.ModeDir | 0700},
|
||||
"work": {Mode: fs.ModeDir | 0700},
|
||||
@@ -311,13 +311,13 @@ func TestFlatten(t *testing.T) {
|
||||
{Mode: 0400, Path: "checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb", Data: []byte{}},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "identifier"},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/U2cbgVgEtjfRuvHfE1cQnZ3t8yoexULQyo_VLgvxAVJSsobMcNaFIsuDWtmt7kzK", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/dztPS6jRjiZtCF4_p8AzfnxGp6obkhrgFVsxdodbKWUoAEVtDz3MykepJB4kI_ks", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "temp"},
|
||||
{Mode: fs.ModeDir | 0700, Path: "work"},
|
||||
}, pkg.MustDecode("UiV6kMz7KrTsc_yphiyQzFLqjRanHxUOwrBMtkKuWo4mOO6WgPFAcoUEeSp7eVIW"), nil},
|
||||
}, pkg.MustDecode("Q5DluWQCAeohLoiGRImurwFp3vdz9IfQCoj7Fuhh73s4KQPRHpEQEnHTdNHmB8Fx"), nil},
|
||||
|
||||
{"testtool net", fstest.MapFS{
|
||||
".": {Mode: fs.ModeDir | 0500},
|
||||
@@ -339,9 +339,9 @@ func TestFlatten(t *testing.T) {
|
||||
"checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W/check": {Mode: 0400, Data: []byte("net")},
|
||||
|
||||
"identifier": {Mode: fs.ModeDir | 0700},
|
||||
"identifier/G8qPxD9puvvoOVV7lrT80eyDeIl3G_CCFoKw12c8mCjMdG1zF7NEPkwYpNubClK3": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
|
||||
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
|
||||
"identifier/QdsJhGgnk5N2xdUNGcndXQxFKifxf1V_2t9X8CQ-pDcg24x6mGJC_BiLfGbs6Qml": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
|
||||
"identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
|
||||
"temp": {Mode: fs.ModeDir | 0700},
|
||||
"work": {Mode: fs.ModeDir | 0700},
|
||||
@@ -355,13 +355,13 @@ func TestFlatten(t *testing.T) {
|
||||
{Mode: 0400, Path: "checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W/check", Data: []byte("net")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "identifier"},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/QdsJhGgnk5N2xdUNGcndXQxFKifxf1V_2t9X8CQ-pDcg24x6mGJC_BiLfGbs6Qml", Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/G8qPxD9puvvoOVV7lrT80eyDeIl3G_CCFoKw12c8mCjMdG1zF7NEPkwYpNubClK3", Data: []byte("../checksum/a1F_i9PVQI4qMcoHgTQkORuyWLkC1GLIxOhDt2JpU1NGAxWc5VJzdlfRK-PYBh3W")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "temp"},
|
||||
{Mode: fs.ModeDir | 0700, Path: "work"},
|
||||
}, pkg.MustDecode("ek4K-0d4iRSArkY2TCs3WK34DbiYeOmhE_4vsJTSu_6roY4ZF3YG6eKRooal-i1o"), nil},
|
||||
}, pkg.MustDecode("bPYvvqxpfV7xcC1EptqyKNK1klLJgYHMDkzBcoOyK6j_Aj5hb0mXNPwTwPSK5F6Z"), nil},
|
||||
|
||||
{"sample exec container overlay root", fstest.MapFS{
|
||||
".": {Mode: fs.ModeDir | 0700},
|
||||
@@ -372,8 +372,8 @@ func TestFlatten(t *testing.T) {
|
||||
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
|
||||
|
||||
"identifier": {Mode: fs.ModeDir | 0700},
|
||||
"identifier/5ey2wpmMpj483YYa7ZZQciYLA2cx3_l167JCqWW4Pd-5DVp81dj9EsBtVTwYptF6": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
"identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
"identifier/RdMA-mubnrHuu3Ky1wWyxauSYCO0ZH_zCPUj3uDHqkfwv5sGcByoF_g5PjlGiClb": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
|
||||
"temp": {Mode: fs.ModeDir | 0700},
|
||||
"work": {Mode: fs.ModeDir | 0700},
|
||||
@@ -386,12 +386,12 @@ func TestFlatten(t *testing.T) {
|
||||
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "identifier"},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/5ey2wpmMpj483YYa7ZZQciYLA2cx3_l167JCqWW4Pd-5DVp81dj9EsBtVTwYptF6", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/RdMA-mubnrHuu3Ky1wWyxauSYCO0ZH_zCPUj3uDHqkfwv5sGcByoF_g5PjlGiClb", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "temp"},
|
||||
{Mode: fs.ModeDir | 0700, Path: "work"},
|
||||
}, pkg.MustDecode("VIqqpf0ip9jcyw63i6E8lCMGUcLivQBe4Bevt3WusNac-1MSy5bzB647qGUBzl-W"), nil},
|
||||
}, pkg.MustDecode("PO2DSSCa4yoSgEYRcCSZfQfwow1yRigL3Ry-hI0RDI4aGuFBha-EfXeSJnG_5_Rl"), nil},
|
||||
|
||||
{"sample exec container overlay work", fstest.MapFS{
|
||||
".": {Mode: fs.ModeDir | 0700},
|
||||
@@ -402,8 +402,8 @@ func TestFlatten(t *testing.T) {
|
||||
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
|
||||
|
||||
"identifier": {Mode: fs.ModeDir | 0700},
|
||||
"identifier/acaDzHZv40dZaz4cGAXayqbRMgbEOuiuiUijZL8IgDQvyeCNMFE3onBMYfny-kXA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
"identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
"identifier/5hlaukCirnXE4W_RSLJFOZN47Z5RiHnacXzdFp_70cLgiJUGR6cSb_HaFftkzi0-": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
|
||||
"temp": {Mode: fs.ModeDir | 0700},
|
||||
"work": {Mode: fs.ModeDir | 0700},
|
||||
@@ -416,12 +416,12 @@ func TestFlatten(t *testing.T) {
|
||||
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "identifier"},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/acaDzHZv40dZaz4cGAXayqbRMgbEOuiuiUijZL8IgDQvyeCNMFE3onBMYfny-kXA", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/5hlaukCirnXE4W_RSLJFOZN47Z5RiHnacXzdFp_70cLgiJUGR6cSb_HaFftkzi0-", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "temp"},
|
||||
{Mode: fs.ModeDir | 0700, Path: "work"},
|
||||
}, pkg.MustDecode("q8x2zQg4YZbKpPqKlEBj_uxXD9vOBaZ852qOuIsl9QdO73I_UMNpuUoPLtunxUYl"), nil},
|
||||
}, pkg.MustDecode("iaRt6l_Wm2n-h5UsDewZxQkCmjZjyL8r7wv32QT2kyV55-Lx09Dq4gfg9BiwPnKs"), nil},
|
||||
|
||||
{"sample exec container multiple layers", fstest.MapFS{
|
||||
".": {Mode: fs.ModeDir | 0700},
|
||||
@@ -436,9 +436,9 @@ func TestFlatten(t *testing.T) {
|
||||
|
||||
"identifier": {Mode: fs.ModeDir | 0700},
|
||||
"identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
|
||||
"identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
"identifier/rXLKjjYfGSyoWmuvEJooHkvGJIZaC0IAWnKGvtPZkM15gBxAgW7mIXcxRVNOXAr4": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
|
||||
"identifier/tfjrsVuBuFgzWgwz-yPppFtylYuC1VFWnKhyBiHbWTGkyz8lt7Ee9QXWaIHPXs4x": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
"identifier/B-kc5iJMx8GtlCua4dz6BiJHnDAOUfPjgpbKq4e-QEn0_CZkSYs3fOA1ve06qMs2": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
|
||||
"identifier/p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
|
||||
"temp": {Mode: fs.ModeDir | 0700},
|
||||
"work": {Mode: fs.ModeDir | 0700},
|
||||
@@ -454,14 +454,14 @@ func TestFlatten(t *testing.T) {
|
||||
{Mode: 0400, Path: "checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK/check", Data: []byte("layers")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "identifier"},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/B-kc5iJMx8GtlCua4dz6BiJHnDAOUfPjgpbKq4e-QEn0_CZkSYs3fOA1ve06qMs2", Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/_gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", Data: []byte("../checksum/OLBgp1GsljhM2TJ-sbHjaiH9txEUvgdDTAzHv2P24donTt6_529l-9Ua0vFImLlb")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/rXLKjjYfGSyoWmuvEJooHkvGJIZaC0IAWnKGvtPZkM15gBxAgW7mIXcxRVNOXAr4", Data: []byte("../checksum/nY_CUdiaUM1OL4cPr5TS92FCJ3rCRV7Hm5oVTzAvMXwC03_QnTRfQ5PPs7mOU9fK")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/tfjrsVuBuFgzWgwz-yPppFtylYuC1VFWnKhyBiHbWTGkyz8lt7Ee9QXWaIHPXs4x", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "temp"},
|
||||
{Mode: fs.ModeDir | 0700, Path: "work"},
|
||||
}, pkg.MustDecode("SITnQ6PTV12PAQQjIuLUxkvsXQiC9Gq_HJQlcb4BPL5YnRHnx8lsW7PRM9YMLBsx"), nil},
|
||||
}, pkg.MustDecode("O2YzyR7IUGU5J2CADy0hUZ3A5NkP_Vwzs4UadEdn2oMZZVWRtH0xZGJ3HXiimTnZ"), nil},
|
||||
|
||||
{"sample exec container layer promotion", fstest.MapFS{
|
||||
".": {Mode: fs.ModeDir | 0700},
|
||||
@@ -472,9 +472,9 @@ func TestFlatten(t *testing.T) {
|
||||
"checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU": {Mode: fs.ModeDir | 0500},
|
||||
|
||||
"identifier": {Mode: fs.ModeDir | 0700},
|
||||
"identifier/1tQZOGmVk_JkpyiG84AKW_BXmlK_MvHUbh5WtMuthGbHUq7i7nL1bvdF-LoJbqNh": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
"identifier/O-6VjlIUxc4PYLf5v35uhIeL8kkYCbHYklqlmDjFPXe0m4j6GkUDg5qwTzBRESnf": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
"identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
"identifier/kvJIqZo5DKFOxC2ZQ-8_nPaQzEAz9cIm3p6guO-uLqm-xaiPu7oRkSnsu411jd_U": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
"identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
"identifier/xXTIYcXmgJWNLC91c417RRrNM9cjELwEZHpGvf8Fk_GNP5agRJp_SicD0w9aMeLJ": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
|
||||
"temp": {Mode: fs.ModeDir | 0700},
|
||||
"work": {Mode: fs.ModeDir | 0700},
|
||||
@@ -487,13 +487,13 @@ func TestFlatten(t *testing.T) {
|
||||
{Mode: fs.ModeDir | 0500, Path: "checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "identifier"},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/1tQZOGmVk_JkpyiG84AKW_BXmlK_MvHUbh5WtMuthGbHUq7i7nL1bvdF-LoJbqNh", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/O-6VjlIUxc4PYLf5v35uhIeL8kkYCbHYklqlmDjFPXe0m4j6GkUDg5qwTzBRESnf", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/nfeISfLeFDr1k-g3hpE1oZ440kTqDdfF8TDpoLdbTPqaMMIl95oiqcvqjRkMjubA", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/kvJIqZo5DKFOxC2ZQ-8_nPaQzEAz9cIm3p6guO-uLqm-xaiPu7oRkSnsu411jd_U", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/vjz1MHPcGBKV7sjcs8jQP3cqxJ1hgPTiQBMCEHP9BGXjGxd-tJmEmXKaStObo5gK", Data: []byte("../checksum/MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/xXTIYcXmgJWNLC91c417RRrNM9cjELwEZHpGvf8Fk_GNP5agRJp_SicD0w9aMeLJ", Data: []byte("../checksum/GPa4aBakdSJd7Tz7LYj_VJFoojzyZinmVcG3k6M5xI6CZ821J5sXLhLDDuS47gi9")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "temp"},
|
||||
{Mode: fs.ModeDir | 0700, Path: "work"},
|
||||
}, pkg.MustDecode("fuC20BhMKr86TYzNPP2A-9P7mGLvdcOiG10exlhRvZm8ySI7csf0LhW3im_26l1N"), nil},
|
||||
}, pkg.MustDecode("3EaW6WibLi9gl03_UieiFPaFcPy5p4x3JPxrnLJxGaTI-bh3HU9DK9IMx7c3rrNm"), nil},
|
||||
|
||||
{"sample file short", fstest.MapFS{
|
||||
".": {Mode: fs.ModeDir | 0700},
|
||||
@@ -502,7 +502,7 @@ func TestFlatten(t *testing.T) {
|
||||
"checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX": {Mode: 0400, Data: []byte{0}},
|
||||
|
||||
"identifier": {Mode: fs.ModeDir | 0700},
|
||||
"identifier/lIx_W4M7tVOcQ8jh08EJOfXf4brRmkEEjvUa7c17vVUzlmtUxlhhrgqmc9aZhjbn": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
|
||||
"identifier/3376ALA7hIUm2LbzH2fDvRezgzod1eTK_G6XjyOgbM2u-6swvkFaF0BOwSl_juBi": {Mode: fs.ModeSymlink | 0777, Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
|
||||
|
||||
"work": {Mode: fs.ModeDir | 0700},
|
||||
}, []pkg.FlatEntry{
|
||||
@@ -511,10 +511,10 @@ func TestFlatten(t *testing.T) {
|
||||
{Mode: 0400, Path: "checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX", Data: []byte{0}},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "identifier"},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/lIx_W4M7tVOcQ8jh08EJOfXf4brRmkEEjvUa7c17vVUzlmtUxlhhrgqmc9aZhjbn", Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
|
||||
{Mode: fs.ModeSymlink | 0777, Path: "identifier/3376ALA7hIUm2LbzH2fDvRezgzod1eTK_G6XjyOgbM2u-6swvkFaF0BOwSl_juBi", Data: []byte("../checksum/vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX")},
|
||||
|
||||
{Mode: fs.ModeDir | 0700, Path: "work"},
|
||||
}, pkg.MustDecode("hnrfmJtivNKcgtETsKnU9gP_OwPgpNY3DSUJnmxnmeOODSO-YBvEBiTgieY4AAd7"), nil},
|
||||
}, pkg.MustDecode("iR6H5OIsyOW4EwEgtm9rGzGF6DVtyHLySEtwnFE8bnus9VJcoCbR4JIek7Lw-vwT"), nil},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
|
||||
@@ -18,6 +18,7 @@ import (
|
||||
"hakurei.app/container"
|
||||
"hakurei.app/container/check"
|
||||
"hakurei.app/container/fhs"
|
||||
"hakurei.app/container/seccomp"
|
||||
"hakurei.app/container/std"
|
||||
"hakurei.app/message"
|
||||
)
|
||||
@@ -32,7 +33,7 @@ type ExecPath struct {
|
||||
P *check.Absolute
|
||||
// Artifacts to mount on the pathname, must contain at least one [Artifact].
|
||||
// If there are multiple entries or W is true, P is set up as an overlay
|
||||
// mount, and entries of A must not implement [File].
|
||||
// mount, and entries of A must not implement [FileArtifact].
|
||||
A []Artifact
|
||||
// Whether to make the mount point writable via the temp directory.
|
||||
W bool
|
||||
@@ -102,9 +103,11 @@ type execArtifact struct {
|
||||
args []string
|
||||
|
||||
// Duration the initial process is allowed to run. The zero value is
|
||||
// equivalent to execTimeoutDefault. This value is never encoded in Params
|
||||
// because it cannot affect outcome.
|
||||
// equivalent to [ExecTimeoutDefault].
|
||||
timeout time.Duration
|
||||
|
||||
// Caller-supplied exclusivity value, returned as is by IsExclusive.
|
||||
exclusive bool
|
||||
}
|
||||
|
||||
var _ fmt.Stringer = new(execArtifact)
|
||||
@@ -123,13 +126,7 @@ var _ KnownChecksum = new(execNetArtifact)
|
||||
func (a *execNetArtifact) Checksum() Checksum { return a.checksum }
|
||||
|
||||
// Kind returns the hardcoded [Kind] constant.
|
||||
func (a *execNetArtifact) Kind() Kind { return KindExecNet }
|
||||
|
||||
// Params is [Checksum] concatenated with [KindExec] params.
|
||||
func (a *execNetArtifact) Params(ctx *IContext) {
|
||||
ctx.GetHash().Write(a.checksum[:])
|
||||
a.execArtifact.Params(ctx)
|
||||
}
|
||||
func (*execNetArtifact) Kind() Kind { return KindExecNet }
|
||||
|
||||
// Cure cures the [Artifact] in the container described by the caller. The
|
||||
// container retains host networking.
|
||||
@@ -157,13 +154,14 @@ func (a *execNetArtifact) Cure(f *FContext) error {
|
||||
// negative timeout value is equivalent tp [ExecTimeoutDefault], a timeout value
|
||||
// greater than [ExecTimeoutMax] is equivalent to [ExecTimeoutMax].
|
||||
//
|
||||
// The user-facing name is not accessible from the container and does not
|
||||
// affect curing outcome. Because of this, it is omitted from parameter data
|
||||
// for computing identifier.
|
||||
// The user-facing name and exclusivity value are not accessible from the
|
||||
// container and does not affect curing outcome. Because of this, it is omitted
|
||||
// from parameter data for computing identifier.
|
||||
func NewExec(
|
||||
name string,
|
||||
checksum *Checksum,
|
||||
timeout time.Duration,
|
||||
exclusive bool,
|
||||
|
||||
dir *check.Absolute,
|
||||
env []string,
|
||||
@@ -181,7 +179,7 @@ func NewExec(
|
||||
if timeout > ExecTimeoutMax {
|
||||
timeout = ExecTimeoutMax
|
||||
}
|
||||
a := execArtifact{name, paths, dir, env, pathname, args, timeout}
|
||||
a := execArtifact{name, paths, dir, env, pathname, args, timeout, exclusive}
|
||||
if checksum == nil {
|
||||
return &a
|
||||
}
|
||||
@@ -189,42 +187,135 @@ func NewExec(
|
||||
}
|
||||
|
||||
// Kind returns the hardcoded [Kind] constant.
|
||||
func (a *execArtifact) Kind() Kind { return KindExec }
|
||||
func (*execArtifact) Kind() Kind { return KindExec }
|
||||
|
||||
// Params writes paths, executable pathname and args.
|
||||
func (a *execArtifact) Params(ctx *IContext) {
|
||||
h := ctx.GetHash()
|
||||
ctx.WriteString(a.name)
|
||||
|
||||
_0, _1 := []byte{0}, []byte{1}
|
||||
ctx.WriteUint32(uint32(len(a.paths)))
|
||||
for _, p := range a.paths {
|
||||
if p.W {
|
||||
h.Write(_1)
|
||||
} else {
|
||||
h.Write(_0)
|
||||
}
|
||||
if p.P != nil {
|
||||
h.Write([]byte(p.P.String()))
|
||||
ctx.WriteString(p.P.String())
|
||||
} else {
|
||||
h.Write([]byte("invalid P\x00"))
|
||||
ctx.WriteString("invalid P\x00")
|
||||
}
|
||||
h.Write(_0)
|
||||
|
||||
ctx.WriteUint32(uint32(len(p.A)))
|
||||
for _, d := range p.A {
|
||||
ctx.WriteIdent(d)
|
||||
}
|
||||
h.Write(_0)
|
||||
|
||||
if p.W {
|
||||
ctx.WriteUint32(1)
|
||||
} else {
|
||||
ctx.WriteUint32(0)
|
||||
}
|
||||
h.Write(_0)
|
||||
h.Write([]byte(a.dir.String()))
|
||||
h.Write(_0)
|
||||
}
|
||||
|
||||
ctx.WriteString(a.dir.String())
|
||||
|
||||
ctx.WriteUint32(uint32(len(a.env)))
|
||||
for _, e := range a.env {
|
||||
h.Write([]byte(e))
|
||||
ctx.WriteString(e)
|
||||
}
|
||||
h.Write(_0)
|
||||
h.Write([]byte(a.path.String()))
|
||||
h.Write(_0)
|
||||
|
||||
ctx.WriteString(a.path.String())
|
||||
|
||||
ctx.WriteUint32(uint32(len(a.args)))
|
||||
for _, arg := range a.args {
|
||||
h.Write([]byte(arg))
|
||||
ctx.WriteString(arg)
|
||||
}
|
||||
|
||||
ctx.WriteUint32(uint32(a.timeout & 0xffffffff))
|
||||
ctx.WriteUint32(uint32(a.timeout >> 32))
|
||||
|
||||
if a.exclusive {
|
||||
ctx.WriteUint32(1)
|
||||
} else {
|
||||
ctx.WriteUint32(0)
|
||||
}
|
||||
}
|
||||
|
||||
// readExecArtifact interprets IR values and returns the address of execArtifact
|
||||
// or execNetArtifact.
|
||||
func readExecArtifact(r *IRReader, net bool) Artifact {
|
||||
r.DiscardAll()
|
||||
|
||||
name := r.ReadString()
|
||||
|
||||
sz := r.ReadUint32()
|
||||
if sz > irMaxDeps {
|
||||
panic(ErrIRDepend)
|
||||
}
|
||||
paths := make([]ExecPath, sz)
|
||||
for i := range paths {
|
||||
paths[i].P = check.MustAbs(r.ReadString())
|
||||
|
||||
sz = r.ReadUint32()
|
||||
if sz > irMaxDeps {
|
||||
panic(ErrIRDepend)
|
||||
}
|
||||
paths[i].A = make([]Artifact, sz)
|
||||
for j := range paths[i].A {
|
||||
paths[i].A[j] = r.ReadIdent()
|
||||
}
|
||||
|
||||
paths[i].W = r.ReadUint32() != 0
|
||||
}
|
||||
|
||||
dir := check.MustAbs(r.ReadString())
|
||||
|
||||
sz = r.ReadUint32()
|
||||
if sz > irMaxValues {
|
||||
panic(ErrIRValues)
|
||||
}
|
||||
env := make([]string, sz)
|
||||
for i := range env {
|
||||
env[i] = r.ReadString()
|
||||
}
|
||||
|
||||
pathname := check.MustAbs(r.ReadString())
|
||||
|
||||
sz = r.ReadUint32()
|
||||
if sz > irMaxValues {
|
||||
panic(ErrIRValues)
|
||||
}
|
||||
args := make([]string, sz)
|
||||
for i := range args {
|
||||
args[i] = r.ReadString()
|
||||
}
|
||||
|
||||
timeout := time.Duration(r.ReadUint32())
|
||||
timeout |= time.Duration(r.ReadUint32()) << 32
|
||||
|
||||
exclusive := r.ReadUint32() != 0
|
||||
|
||||
checksum, ok := r.Finalise()
|
||||
|
||||
var checksumP *Checksum
|
||||
if net {
|
||||
if !ok {
|
||||
panic(ErrExpectedChecksum)
|
||||
}
|
||||
checksumVal := checksum.Value()
|
||||
checksumP = &checksumVal
|
||||
} else {
|
||||
if ok {
|
||||
panic(ErrUnexpectedChecksum)
|
||||
}
|
||||
}
|
||||
|
||||
return NewExec(
|
||||
name, checksumP, timeout, exclusive, dir, env, pathname, args, paths...,
|
||||
)
|
||||
}
|
||||
|
||||
func init() {
|
||||
register(KindExec,
|
||||
func(r *IRReader) Artifact { return readExecArtifact(r, false) })
|
||||
register(KindExecNet,
|
||||
func(r *IRReader) Artifact { return readExecArtifact(r, true) })
|
||||
}
|
||||
|
||||
// Dependencies returns a slice of all artifacts collected from caller-supplied
|
||||
@@ -237,6 +328,9 @@ func (a *execArtifact) Dependencies() []Artifact {
|
||||
return slices.Concat(artifacts...)
|
||||
}
|
||||
|
||||
// IsExclusive returns the caller-supplied exclusivity value.
|
||||
func (a *execArtifact) IsExclusive() bool { return a.exclusive }
|
||||
|
||||
// String returns the caller-supplied reporting name.
|
||||
func (a *execArtifact) String() string { return a.name }
|
||||
|
||||
@@ -259,6 +353,10 @@ func scanVerbose(
|
||||
) {
|
||||
defer close(done)
|
||||
s := bufio.NewScanner(r)
|
||||
s.Buffer(
|
||||
make([]byte, bufio.MaxScanTokenSize),
|
||||
bufio.MaxScanTokenSize<<12,
|
||||
)
|
||||
for s.Scan() {
|
||||
msg.Verbose(prefix, s.Text())
|
||||
}
|
||||
@@ -291,6 +389,7 @@ func (a *execArtifact) cure(f *FContext, hostNet bool) (err error) {
|
||||
z := container.New(ctx, f.GetMessage())
|
||||
z.WaitDelay = execWaitDelay
|
||||
z.SeccompPresets |= std.PresetStrict & ^std.PresetDenyNS
|
||||
z.SeccompFlags |= seccomp.AllowMultiarch
|
||||
z.ParentPerm = 0700
|
||||
z.HostNet = hostNet
|
||||
z.Hostname = "cure"
|
||||
|
||||
@@ -39,7 +39,7 @@ func TestExec(t *testing.T) {
|
||||
|
||||
cureMany(t, c, []cureStep{
|
||||
{"container", pkg.NewExec(
|
||||
"exec-offline", nil, 0,
|
||||
"exec-offline", nil, 0, false,
|
||||
pkg.AbsWork,
|
||||
[]string{"HAKUREI_TEST=1"},
|
||||
check.MustAbs("/opt/bin/testtool"),
|
||||
@@ -62,7 +62,7 @@ func TestExec(t *testing.T) {
|
||||
), ignorePathname, wantChecksumOffline, nil},
|
||||
|
||||
{"error passthrough", pkg.NewExec(
|
||||
"", nil, 0,
|
||||
"", nil, 0, true,
|
||||
pkg.AbsWork,
|
||||
[]string{"HAKUREI_TEST=1"},
|
||||
check.MustAbs("/opt/bin/testtool"),
|
||||
@@ -78,14 +78,14 @@ func TestExec(t *testing.T) {
|
||||
), nil, pkg.Checksum{}, &pkg.DependencyCureError{
|
||||
{
|
||||
Ident: unique.Make(pkg.ID(pkg.MustDecode(
|
||||
"CWEoJqnSBpWf8uryC2qnIe3O1a_FZWUWZGbiVPsQFGW7pvDHiSwoK3QCU9-uxN87",
|
||||
"Sowo6oZRmG6xVtUaxB6bDWZhVsqAJsIJWUp0OPKlE103cY0lodx7dem8J-qQF0Z1",
|
||||
))),
|
||||
Err: stub.UniqueError(0xcafe),
|
||||
},
|
||||
}},
|
||||
|
||||
{"invalid paths", pkg.NewExec(
|
||||
"", nil, 0,
|
||||
"", nil, 0, false,
|
||||
pkg.AbsWork,
|
||||
[]string{"HAKUREI_TEST=1"},
|
||||
check.MustAbs("/opt/bin/testtool"),
|
||||
@@ -98,7 +98,7 @@ func TestExec(t *testing.T) {
|
||||
// check init failure passthrough
|
||||
var exitError *exec.ExitError
|
||||
if _, _, err := c.Cure(pkg.NewExec(
|
||||
"", nil, 0,
|
||||
"", nil, 0, false,
|
||||
pkg.AbsWork,
|
||||
nil,
|
||||
check.MustAbs("/opt/bin/testtool"),
|
||||
@@ -109,7 +109,7 @@ func TestExec(t *testing.T) {
|
||||
}
|
||||
|
||||
testtoolDestroy(t, base, c)
|
||||
}, pkg.MustDecode("UiV6kMz7KrTsc_yphiyQzFLqjRanHxUOwrBMtkKuWo4mOO6WgPFAcoUEeSp7eVIW")},
|
||||
}, pkg.MustDecode("Q5DluWQCAeohLoiGRImurwFp3vdz9IfQCoj7Fuhh73s4KQPRHpEQEnHTdNHmB8Fx")},
|
||||
|
||||
{"net", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
||||
c.SetStrict(true)
|
||||
@@ -120,7 +120,7 @@ func TestExec(t *testing.T) {
|
||||
)
|
||||
cureMany(t, c, []cureStep{
|
||||
{"container", pkg.NewExec(
|
||||
"exec-net", &wantChecksum, 0,
|
||||
"exec-net", &wantChecksum, 0, false,
|
||||
pkg.AbsWork,
|
||||
[]string{"HAKUREI_TEST=1"},
|
||||
check.MustAbs("/opt/bin/testtool"),
|
||||
@@ -144,7 +144,7 @@ func TestExec(t *testing.T) {
|
||||
})
|
||||
|
||||
testtoolDestroy(t, base, c)
|
||||
}, pkg.MustDecode("ek4K-0d4iRSArkY2TCs3WK34DbiYeOmhE_4vsJTSu_6roY4ZF3YG6eKRooal-i1o")},
|
||||
}, pkg.MustDecode("bPYvvqxpfV7xcC1EptqyKNK1klLJgYHMDkzBcoOyK6j_Aj5hb0mXNPwTwPSK5F6Z")},
|
||||
|
||||
{"overlay root", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
||||
c.SetStrict(true)
|
||||
@@ -152,7 +152,7 @@ func TestExec(t *testing.T) {
|
||||
|
||||
cureMany(t, c, []cureStep{
|
||||
{"container", pkg.NewExec(
|
||||
"exec-overlay-root", nil, 0,
|
||||
"exec-overlay-root", nil, 0, false,
|
||||
pkg.AbsWork,
|
||||
[]string{"HAKUREI_TEST=1", "HAKUREI_ROOT=1"},
|
||||
check.MustAbs("/opt/bin/testtool"),
|
||||
@@ -170,7 +170,7 @@ func TestExec(t *testing.T) {
|
||||
})
|
||||
|
||||
testtoolDestroy(t, base, c)
|
||||
}, pkg.MustDecode("VIqqpf0ip9jcyw63i6E8lCMGUcLivQBe4Bevt3WusNac-1MSy5bzB647qGUBzl-W")},
|
||||
}, pkg.MustDecode("PO2DSSCa4yoSgEYRcCSZfQfwow1yRigL3Ry-hI0RDI4aGuFBha-EfXeSJnG_5_Rl")},
|
||||
|
||||
{"overlay work", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
||||
c.SetStrict(true)
|
||||
@@ -178,7 +178,7 @@ func TestExec(t *testing.T) {
|
||||
|
||||
cureMany(t, c, []cureStep{
|
||||
{"container", pkg.NewExec(
|
||||
"exec-overlay-work", nil, 0,
|
||||
"exec-overlay-work", nil, 0, false,
|
||||
pkg.AbsWork,
|
||||
[]string{"HAKUREI_TEST=1", "HAKUREI_ROOT=1"},
|
||||
check.MustAbs("/work/bin/testtool"),
|
||||
@@ -201,7 +201,7 @@ func TestExec(t *testing.T) {
|
||||
})
|
||||
|
||||
testtoolDestroy(t, base, c)
|
||||
}, pkg.MustDecode("q8x2zQg4YZbKpPqKlEBj_uxXD9vOBaZ852qOuIsl9QdO73I_UMNpuUoPLtunxUYl")},
|
||||
}, pkg.MustDecode("iaRt6l_Wm2n-h5UsDewZxQkCmjZjyL8r7wv32QT2kyV55-Lx09Dq4gfg9BiwPnKs")},
|
||||
|
||||
{"multiple layers", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
||||
c.SetStrict(true)
|
||||
@@ -209,7 +209,7 @@ func TestExec(t *testing.T) {
|
||||
|
||||
cureMany(t, c, []cureStep{
|
||||
{"container", pkg.NewExec(
|
||||
"exec-multiple-layers", nil, 0,
|
||||
"exec-multiple-layers", nil, 0, false,
|
||||
pkg.AbsWork,
|
||||
[]string{"HAKUREI_TEST=1", "HAKUREI_ROOT=1"},
|
||||
check.MustAbs("/opt/bin/testtool"),
|
||||
@@ -239,7 +239,9 @@ func TestExec(t *testing.T) {
|
||||
cure: func(t *pkg.TContext) error {
|
||||
return os.MkdirAll(t.GetWorkDir().String(), 0700)
|
||||
},
|
||||
}}, 1<<5 /* concurrent cache hits */), cure: func(f *pkg.FContext) error {
|
||||
}}, 1<<5 /* concurrent cache hits */),
|
||||
|
||||
cure: func(f *pkg.FContext) error {
|
||||
work := f.GetWorkDir()
|
||||
if err := os.MkdirAll(work.String(), 0700); err != nil {
|
||||
return err
|
||||
@@ -252,7 +254,7 @@ func TestExec(t *testing.T) {
|
||||
})
|
||||
|
||||
testtoolDestroy(t, base, c)
|
||||
}, pkg.MustDecode("SITnQ6PTV12PAQQjIuLUxkvsXQiC9Gq_HJQlcb4BPL5YnRHnx8lsW7PRM9YMLBsx")},
|
||||
}, pkg.MustDecode("O2YzyR7IUGU5J2CADy0hUZ3A5NkP_Vwzs4UadEdn2oMZZVWRtH0xZGJ3HXiimTnZ")},
|
||||
|
||||
{"overlay layer promotion", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
||||
c.SetStrict(true)
|
||||
@@ -260,7 +262,7 @@ func TestExec(t *testing.T) {
|
||||
|
||||
cureMany(t, c, []cureStep{
|
||||
{"container", pkg.NewExec(
|
||||
"exec-layer-promotion", nil, 0,
|
||||
"exec-layer-promotion", nil, 0, true,
|
||||
pkg.AbsWork,
|
||||
[]string{"HAKUREI_TEST=1", "HAKUREI_ROOT=1"},
|
||||
check.MustAbs("/opt/bin/testtool"),
|
||||
@@ -284,7 +286,7 @@ func TestExec(t *testing.T) {
|
||||
})
|
||||
|
||||
testtoolDestroy(t, base, c)
|
||||
}, pkg.MustDecode("fuC20BhMKr86TYzNPP2A-9P7mGLvdcOiG10exlhRvZm8ySI7csf0LhW3im_26l1N")},
|
||||
}, pkg.MustDecode("3EaW6WibLi9gl03_UieiFPaFcPy5p4x3JPxrnLJxGaTI-bh3HU9DK9IMx7c3rrNm")},
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
package pkg
|
||||
|
||||
import (
|
||||
"context"
|
||||
"bytes"
|
||||
"crypto/sha512"
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
// A fileArtifact is an [Artifact] that cures into data known ahead of time.
|
||||
@@ -24,10 +25,16 @@ var _ KnownChecksum = new(fileArtifactNamed)
|
||||
// String returns the caller-supplied reporting name.
|
||||
func (a *fileArtifactNamed) String() string { return a.name }
|
||||
|
||||
// NewFile returns a [File] that cures into a caller-supplied byte slice.
|
||||
// Params writes the caller-supplied reporting name and the file body.
|
||||
func (a *fileArtifactNamed) Params(ctx *IContext) {
|
||||
ctx.WriteString(a.name)
|
||||
ctx.Write(a.fileArtifact)
|
||||
}
|
||||
|
||||
// NewFile returns a [FileArtifact] that cures into a caller-supplied byte slice.
|
||||
//
|
||||
// Caller must not modify data after NewFile returns.
|
||||
func NewFile(name string, data []byte) File {
|
||||
func NewFile(name string, data []byte) FileArtifact {
|
||||
f := fileArtifact(data)
|
||||
if name != "" {
|
||||
return &fileArtifactNamed{f, name}
|
||||
@@ -36,13 +43,30 @@ func NewFile(name string, data []byte) File {
|
||||
}
|
||||
|
||||
// Kind returns the hardcoded [Kind] constant.
|
||||
func (a *fileArtifact) Kind() Kind { return KindFile }
|
||||
func (*fileArtifact) Kind() Kind { return KindFile }
|
||||
|
||||
// Params writes the result of Cure.
|
||||
func (a *fileArtifact) Params(ctx *IContext) { ctx.GetHash().Write(*a) }
|
||||
// Params writes an empty string and the file body.
|
||||
func (a *fileArtifact) Params(ctx *IContext) {
|
||||
ctx.WriteString("")
|
||||
ctx.Write(*a)
|
||||
}
|
||||
|
||||
func init() {
|
||||
register(KindFile, func(r *IRReader) Artifact {
|
||||
name := r.ReadString()
|
||||
data := r.ReadStringBytes()
|
||||
if _, ok := r.Finalise(); !ok {
|
||||
panic(ErrExpectedChecksum)
|
||||
}
|
||||
return NewFile(name, data)
|
||||
})
|
||||
}
|
||||
|
||||
// Dependencies returns a nil slice.
|
||||
func (a *fileArtifact) Dependencies() []Artifact { return nil }
|
||||
func (*fileArtifact) Dependencies() []Artifact { return nil }
|
||||
|
||||
// IsExclusive returns false: Cure returns a prepopulated buffer.
|
||||
func (*fileArtifact) IsExclusive() bool { return false }
|
||||
|
||||
// Checksum computes and returns the checksum of caller-supplied data.
|
||||
func (a *fileArtifact) Checksum() Checksum {
|
||||
@@ -52,4 +76,6 @@ func (a *fileArtifact) Checksum() Checksum {
|
||||
}
|
||||
|
||||
// Cure returns the caller-supplied data.
|
||||
func (a *fileArtifact) Cure(context.Context) ([]byte, error) { return *a, nil }
|
||||
func (a *fileArtifact) Cure(*RContext) (io.ReadCloser, error) {
|
||||
return io.NopCloser(bytes.NewReader(*a)), nil
|
||||
}
|
||||
|
||||
@@ -17,13 +17,13 @@ func TestFile(t *testing.T) {
|
||||
cureMany(t, c, []cureStep{
|
||||
{"short", pkg.NewFile("null", []byte{0}), base.Append(
|
||||
"identifier",
|
||||
"lIx_W4M7tVOcQ8jh08EJOfXf4brRmkEEjvUa7c17vVUzlmtUxlhhrgqmc9aZhjbn",
|
||||
"3376ALA7hIUm2LbzH2fDvRezgzod1eTK_G6XjyOgbM2u-6swvkFaF0BOwSl_juBi",
|
||||
), pkg.MustDecode(
|
||||
"vsAhtPNo4waRNOASwrQwcIPTqb3SBuJOXw2G4T1mNmVZM-wrQTRllmgXqcIIoRcX",
|
||||
), nil},
|
||||
})
|
||||
}, pkg.MustDecode(
|
||||
"hnrfmJtivNKcgtETsKnU9gP_OwPgpNY3DSUJnmxnmeOODSO-YBvEBiTgieY4AAd7",
|
||||
"iR6H5OIsyOW4EwEgtm9rGzGF6DVtyHLySEtwnFE8bnus9VJcoCbR4JIek7Lw-vwT",
|
||||
)},
|
||||
})
|
||||
}
|
||||
|
||||
762
internal/pkg/ir.go
Normal file
762
internal/pkg/ir.go
Normal file
@@ -0,0 +1,762 @@
|
||||
package pkg
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/sha512"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"slices"
|
||||
"strconv"
|
||||
"syscall"
|
||||
"unique"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// wordSize is the boundary which binary segments are always aligned to.
|
||||
const wordSize = 8
|
||||
|
||||
// alignSize returns the padded size for aligning sz.
|
||||
func alignSize(sz int) int {
|
||||
return sz + (wordSize-(sz)%wordSize)%wordSize
|
||||
}
|
||||
|
||||
// panicToError recovers from a panic and replaces a nil error with the panicked
|
||||
// error value. If the value does not implement error, it is re-panicked.
|
||||
func panicToError(errP *error) {
|
||||
r := recover()
|
||||
if r == nil {
|
||||
return
|
||||
}
|
||||
|
||||
if err, ok := r.(error); !ok {
|
||||
panic(r)
|
||||
} else if *errP == nil {
|
||||
*errP = err
|
||||
}
|
||||
}
|
||||
|
||||
// IContext is passed to [Artifact.Params] and provides methods for writing
|
||||
// values to the IR writer. It does not expose the underlying [io.Writer].
|
||||
//
|
||||
// IContext is valid until [Artifact.Params] returns.
|
||||
type IContext struct {
|
||||
// Address of underlying [Cache], should be zeroed or made unusable after
|
||||
// [Artifact.Params] returns and must not be exposed directly.
|
||||
cache *Cache
|
||||
// Written to by various methods, should be zeroed after [Artifact.Params]
|
||||
// returns and must not be exposed directly.
|
||||
w io.Writer
|
||||
}
|
||||
|
||||
// Unwrap returns the underlying [context.Context].
|
||||
func (i *IContext) Unwrap() context.Context { return i.cache.ctx }
|
||||
|
||||
// irZero is a zero IR word.
|
||||
var irZero [wordSize]byte
|
||||
|
||||
// IRValueKind denotes the kind of encoded value.
|
||||
type IRValueKind uint32
|
||||
|
||||
const (
|
||||
// IRKindEnd denotes the end of the current parameters stream. The ancillary
|
||||
// value is interpreted as [IREndFlag].
|
||||
IRKindEnd IRValueKind = iota
|
||||
// IRKindIdent denotes the identifier of a dependency [Artifact]. The
|
||||
// ancillary value is reserved for future use.
|
||||
IRKindIdent
|
||||
// IRKindUint32 denotes an inlined uint32 value.
|
||||
IRKindUint32
|
||||
// IRKindString denotes a string with its true length encoded in header
|
||||
// ancillary data. Its wire length is always aligned to 8 byte boundary.
|
||||
IRKindString
|
||||
|
||||
irHeaderShift = 32
|
||||
irHeaderMask = 0xffffffff
|
||||
)
|
||||
|
||||
// String returns a user-facing name of k.
|
||||
func (k IRValueKind) String() string {
|
||||
switch k {
|
||||
case IRKindEnd:
|
||||
return "terminator"
|
||||
case IRKindIdent:
|
||||
return "ident"
|
||||
case IRKindUint32:
|
||||
return "uint32"
|
||||
case IRKindString:
|
||||
return "string"
|
||||
default:
|
||||
return "invalid kind " + strconv.Itoa(int(k))
|
||||
}
|
||||
}
|
||||
|
||||
// irValueHeader encodes [IRValueKind] and a 32-bit ancillary value.
|
||||
type irValueHeader uint64
|
||||
|
||||
// encodeHeader returns irValueHeader encoding [IRValueKind] and ancillary data.
|
||||
func (k IRValueKind) encodeHeader(v uint32) irValueHeader {
|
||||
return irValueHeader(v)<<irHeaderShift | irValueHeader(k)
|
||||
}
|
||||
|
||||
// put stores h in b[0:8].
|
||||
func (h irValueHeader) put(b []byte) {
|
||||
binary.LittleEndian.PutUint64(b[:], uint64(h))
|
||||
}
|
||||
|
||||
// append appends the bytes of h to b and returns the appended slice.
|
||||
func (h irValueHeader) append(b []byte) []byte {
|
||||
return binary.LittleEndian.AppendUint64(b, uint64(h))
|
||||
}
|
||||
|
||||
// IREndFlag is ancillary data encoded in the header of an [IRKindEnd] value and
|
||||
// specifies the presence of optional fields in the remaining [IRKindEnd] data.
|
||||
// Order of present fields is the order of their corresponding constants defined
|
||||
// below.
|
||||
type IREndFlag uint32
|
||||
|
||||
const (
|
||||
// IREndKnownChecksum denotes a [KnownChecksum] artifact. For an [IRKindEnd]
|
||||
// value with this flag set, the remaining data contains the [Checksum].
|
||||
IREndKnownChecksum IREndFlag = 1 << iota
|
||||
)
|
||||
|
||||
// mustWrite writes to IContext.w and panics on error. The panic is recovered
|
||||
// from by the caller and used as the return value.
|
||||
func (i *IContext) mustWrite(p []byte) {
|
||||
if _, err := i.w.Write(p); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// WriteIdent writes the identifier of [Artifact] to the IR. The behaviour of
|
||||
// WriteIdent is not defined for an [Artifact] not part of the slice returned by
|
||||
// [Artifact.Dependencies].
|
||||
func (i *IContext) WriteIdent(a Artifact) {
|
||||
buf := i.cache.getIdentBuf()
|
||||
defer i.cache.putIdentBuf(buf)
|
||||
|
||||
IRKindIdent.encodeHeader(0).put(buf[:])
|
||||
*(*ID)(buf[wordSize:]) = i.cache.Ident(a).Value()
|
||||
i.mustWrite(buf[:])
|
||||
}
|
||||
|
||||
// WriteUint32 writes a uint32 value to the IR.
|
||||
func (i *IContext) WriteUint32(v uint32) {
|
||||
i.mustWrite(IRKindUint32.encodeHeader(v).append(nil))
|
||||
}
|
||||
|
||||
// irMaxStringLength is the maximum acceptable wire size of [IRKindString].
|
||||
const irMaxStringLength = 1 << 20
|
||||
|
||||
// IRStringError is a string value too big to encode in IR.
|
||||
type IRStringError string
|
||||
|
||||
func (IRStringError) Error() string {
|
||||
return "params value too big to encode in IR"
|
||||
}
|
||||
|
||||
// Write writes p as a string value to the IR.
|
||||
func (i *IContext) Write(p []byte) {
|
||||
sz := alignSize(len(p))
|
||||
if len(p) > irMaxStringLength || sz > irMaxStringLength {
|
||||
panic(IRStringError(p))
|
||||
}
|
||||
|
||||
i.mustWrite(IRKindString.encodeHeader(uint32(len(p))).append(nil))
|
||||
i.mustWrite(p)
|
||||
|
||||
psz := sz - len(p)
|
||||
if psz > 0 {
|
||||
i.mustWrite(irZero[:psz])
|
||||
}
|
||||
}
|
||||
|
||||
// WriteString writes s as a string value to the IR.
|
||||
func (i *IContext) WriteString(s string) {
|
||||
p := unsafe.Slice(unsafe.StringData(s), len(s))
|
||||
i.Write(p)
|
||||
}
|
||||
|
||||
// Encode writes a deterministic, efficient representation of a to w and returns
|
||||
// the first non-nil error encountered while writing to w.
|
||||
func (c *Cache) Encode(w io.Writer, a Artifact) (err error) {
|
||||
deps := a.Dependencies()
|
||||
idents := make([]*extIdent, len(deps))
|
||||
for i, d := range deps {
|
||||
dbuf, did := c.unsafeIdent(d, true)
|
||||
if dbuf == nil {
|
||||
dbuf = c.getIdentBuf()
|
||||
binary.LittleEndian.PutUint64(dbuf[:], uint64(d.Kind()))
|
||||
*(*ID)(dbuf[wordSize:]) = did.Value()
|
||||
} else {
|
||||
c.storeIdent(d, dbuf)
|
||||
}
|
||||
defer c.putIdentBuf(dbuf)
|
||||
idents[i] = dbuf
|
||||
}
|
||||
slices.SortFunc(idents, func(a, b *extIdent) int {
|
||||
return bytes.Compare(a[:], b[:])
|
||||
})
|
||||
idents = slices.CompactFunc(idents, func(a, b *extIdent) bool {
|
||||
return *a == *b
|
||||
})
|
||||
|
||||
// kind uint64 | deps_sz uint64
|
||||
var buf [wordSize * 2]byte
|
||||
binary.LittleEndian.PutUint64(buf[:], uint64(a.Kind()))
|
||||
binary.LittleEndian.PutUint64(buf[wordSize:], uint64(len(idents)))
|
||||
if _, err = w.Write(buf[:]); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
for _, dn := range idents {
|
||||
// kind uint64 | ident ID
|
||||
if _, err = w.Write(dn[:]); err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func() {
|
||||
i := IContext{c, w}
|
||||
|
||||
defer panicToError(&err)
|
||||
defer func() { i.cache, i.w = nil, nil }()
|
||||
|
||||
a.Params(&i)
|
||||
}()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
var f IREndFlag
|
||||
kcBuf := c.getIdentBuf()
|
||||
sz := wordSize
|
||||
if kc, ok := a.(KnownChecksum); ok {
|
||||
f |= IREndKnownChecksum
|
||||
*(*Checksum)(kcBuf[wordSize:]) = kc.Checksum()
|
||||
sz += len(Checksum{})
|
||||
}
|
||||
IRKindEnd.encodeHeader(uint32(f)).put(kcBuf[:])
|
||||
|
||||
_, err = w.Write(kcBuf[:sz])
|
||||
c.putIdentBuf(kcBuf)
|
||||
return
|
||||
}
|
||||
|
||||
// encodeAll implements EncodeAll by recursively encoding dependencies and
|
||||
// performs deduplication by value via the encoded map.
|
||||
func (c *Cache) encodeAll(
|
||||
w io.Writer,
|
||||
a Artifact,
|
||||
encoded map[Artifact]struct{},
|
||||
) (err error) {
|
||||
if _, ok := encoded[a]; ok {
|
||||
return
|
||||
}
|
||||
|
||||
for _, d := range a.Dependencies() {
|
||||
if err = c.encodeAll(w, d, encoded); err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
encoded[a] = struct{}{}
|
||||
return c.Encode(w, a)
|
||||
}
|
||||
|
||||
// EncodeAll writes a self-describing IR stream of a to w and returns the first
|
||||
// non-nil error encountered while writing to w.
|
||||
//
|
||||
// EncodeAll tries to avoid encoding the same [Artifact] more than once, however
|
||||
// it will fail to do so if they do not compare equal by value, as that will
|
||||
// require buffering and greatly reduce performance. It is therefore up to the
|
||||
// caller to avoid causing dependencies to be represented in a way such that
|
||||
// two equivalent artifacts do not compare equal. While an IR stream with
|
||||
// repeated artifacts is valid, it is somewhat inefficient, and the reference
|
||||
// [IRDecoder] implementation produces a warning for it.
|
||||
//
|
||||
// Note that while EncodeAll makes use of the ident free list, it does not use
|
||||
// the ident cache, nor does it contribute identifiers it computes back to the
|
||||
// ident cache. Because of this, multiple invocations of EncodeAll will have
|
||||
// similar cost and does not amortise when combined with a call to Cure.
|
||||
func (c *Cache) EncodeAll(w io.Writer, a Artifact) error {
|
||||
return c.encodeAll(w, a, make(map[Artifact]struct{}))
|
||||
}
|
||||
|
||||
// ErrRemainingIR is returned for a [IRReadFunc] that failed to call
|
||||
// [IRReader.Finalise] before returning.
|
||||
var ErrRemainingIR = errors.New("implementation did not consume final value")
|
||||
|
||||
// DanglingIdentError is an identifier in a [IRKindIdent] value that was never
|
||||
// described in the IR stream before it was encountered.
|
||||
type DanglingIdentError unique.Handle[ID]
|
||||
|
||||
func (e DanglingIdentError) Error() string {
|
||||
return "artifact " + Encode(unique.Handle[ID](e).Value()) +
|
||||
" was never described"
|
||||
}
|
||||
|
||||
type (
|
||||
// IRDecoder decodes [Artifact] from an IR stream. The stream is read to
|
||||
// EOF and the final [Artifact] is returned. Previous artifacts may be
|
||||
// looked up by their identifier.
|
||||
//
|
||||
// An [Artifact] may appear more than once in the same IR stream. A
|
||||
// repeating [Artifact] generates a warning via [Cache] and will appear if
|
||||
// verbose logging is enabled. Artifacts may only depend on artifacts
|
||||
// previously described in the IR stream.
|
||||
//
|
||||
// Methods of IRDecoder are not safe for concurrent use.
|
||||
IRDecoder struct {
|
||||
// Address of underlying [Cache], must not be exposed directly.
|
||||
c *Cache
|
||||
|
||||
// Underlying IR reader. Methods of [IRReader] must not use this as it
|
||||
// bypasses ident measurement.
|
||||
r io.Reader
|
||||
// Artifacts already seen in the IR stream.
|
||||
ident map[unique.Handle[ID]]Artifact
|
||||
|
||||
// Whether Decode returned, and the entire IR stream was decoded.
|
||||
done, ok bool
|
||||
}
|
||||
|
||||
// IRReader provides methods to decode the IR wire format and read values
|
||||
// from the reader embedded in the underlying [IRDecoder]. It is
|
||||
// deliberately impossible to obtain the [IRValueKind] of the next value,
|
||||
// and callers must never recover from panics in any read method.
|
||||
//
|
||||
// It is the responsibility of the caller to call Finalise after all IR
|
||||
// values have been read. Failure to call Finalise causes the resulting
|
||||
// [Artifact] to be rejected with [ErrRemainingIR].
|
||||
//
|
||||
// For an [Artifact] expected to have dependencies, the caller must consume
|
||||
// all dependencies by calling Next until all dependencies are depleted, or
|
||||
// call DiscardAll to explicitly discard them and rely on values encoded as
|
||||
// [IRKindIdent] instead. Failure to consume all unstructured dependencies
|
||||
// causes the resulting [Artifact] to be rejected with [MissedDependencyError].
|
||||
//
|
||||
// Requesting the value of an unstructured dependency not yet described in
|
||||
// the IR stream via Next, or reading an [IRKindIdent] value not part of
|
||||
// unstructured dependencies via ReadIdent may cause the resulting
|
||||
// [Artifact] to be rejected with [DanglingIdentError], however either
|
||||
// method may return a non-nil [Artifact] implementation of unspecified
|
||||
// value.
|
||||
IRReader struct {
|
||||
// Address of underlying [IRDecoder], should be zeroed or made unusable
|
||||
// after finalisation and must not be exposed directly.
|
||||
d *IRDecoder
|
||||
// Common buffer for word-sized reads.
|
||||
buf [wordSize]byte
|
||||
|
||||
// Dependencies sent before params, sorted by identifier. Resliced on
|
||||
// each call to Next and checked to be depleted during Finalise.
|
||||
deps []*extIdent
|
||||
|
||||
// Number of values already read, -1 denotes a finalised IRReader.
|
||||
count int
|
||||
// Header of value currently being read.
|
||||
h irValueHeader
|
||||
|
||||
// Measured IR reader. All reads for the current [Artifact] must go
|
||||
// through this to produce a correct ident.
|
||||
r io.Reader
|
||||
// Buffers measure writes. Flushed and returned to d during Finalise.
|
||||
ibw *bufio.Writer
|
||||
}
|
||||
|
||||
// IRReadFunc reads IR values written by [Artifact.Params] to produce an
|
||||
// instance of [Artifact] identical to the one to produce these values.
|
||||
IRReadFunc func(r *IRReader) Artifact
|
||||
)
|
||||
|
||||
// kind returns the [IRValueKind] encoded in h.
|
||||
func (h irValueHeader) kind() IRValueKind {
|
||||
return IRValueKind(h & irHeaderMask)
|
||||
}
|
||||
|
||||
// value returns ancillary data encoded in h.
|
||||
func (h irValueHeader) value() uint32 {
|
||||
return uint32(h >> irHeaderShift)
|
||||
}
|
||||
|
||||
// irArtifact refers to artifact IR interpretation functions and must not be
|
||||
// written to directly.
|
||||
var irArtifact = make(map[Kind]IRReadFunc)
|
||||
|
||||
// InvalidKindError is an unregistered [Kind] value.
|
||||
type InvalidKindError Kind
|
||||
|
||||
func (e InvalidKindError) Error() string {
|
||||
return "invalid artifact kind " + strconv.Itoa(int(e))
|
||||
}
|
||||
|
||||
// register records the [IRReadFunc] of an implementation of [Artifact] under
|
||||
// the specified [Kind]. Expecting to be used only during initialization, it
|
||||
// panics if the mapping between [Kind] and [IRReadFunc] is not a bijection.
|
||||
//
|
||||
// register is not safe for concurrent use. register must not be called after
|
||||
// the first instance of [Cache] has been opened.
|
||||
func register(k Kind, f IRReadFunc) {
|
||||
if _, ok := irArtifact[k]; ok {
|
||||
panic("attempting to register " + strconv.Itoa(int(k)) + " twice")
|
||||
}
|
||||
irArtifact[k] = f
|
||||
}
|
||||
|
||||
// Register records the [IRReadFunc] of a custom implementation of [Artifact]
|
||||
// under the specified [Kind]. Expecting to be used only during initialization,
|
||||
// it panics if the mapping between [Kind] and [IRReadFunc] is not a bijection,
|
||||
// or the specified [Kind] is below [KindCustomOffset].
|
||||
//
|
||||
// Register is not safe for concurrent use. Register must not be called after
|
||||
// the first instance of [Cache] has been opened.
|
||||
func Register(k Kind, f IRReadFunc) {
|
||||
if k < KindCustomOffset {
|
||||
panic("attempting to register within internal kind range")
|
||||
}
|
||||
register(k, f)
|
||||
}
|
||||
|
||||
// NewDecoder returns a new [IRDecoder] that reads from the [io.Reader].
|
||||
func (c *Cache) NewDecoder(r io.Reader) *IRDecoder {
|
||||
return &IRDecoder{c, r, make(map[unique.Handle[ID]]Artifact), false, false}
|
||||
}
|
||||
|
||||
const (
|
||||
// irMaxValues is the arbitrary maximum number of values allowed to be
|
||||
// written by [Artifact.Params] and subsequently read via [IRReader].
|
||||
irMaxValues = 1 << 12
|
||||
|
||||
// irMaxDeps is the arbitrary maximum number of direct dependencies allowed
|
||||
// to be returned by [Artifact.Dependencies] and subsequently decoded by
|
||||
// [IRDecoder].
|
||||
irMaxDeps = 1 << 10
|
||||
)
|
||||
|
||||
var (
|
||||
// ErrIRValues is returned for an [Artifact] with too many parameter values.
|
||||
ErrIRValues = errors.New("artifact has too many IR parameter values")
|
||||
|
||||
// ErrIRDepend is returned for an [Artifact] with too many dependencies.
|
||||
ErrIRDepend = errors.New("artifact has too many dependencies")
|
||||
|
||||
// ErrAlreadyFinalised is returned when attempting to use an [IRReader] that
|
||||
// has already been finalised.
|
||||
ErrAlreadyFinalised = errors.New("reader has already finalised")
|
||||
)
|
||||
|
||||
// enterReader panics with an appropriate error for an out-of-bounds count and
|
||||
// must be called at some point in any exported method.
|
||||
func (ir *IRReader) enterReader(read bool) {
|
||||
if ir.count < 0 {
|
||||
panic(ErrAlreadyFinalised)
|
||||
}
|
||||
if ir.count >= irMaxValues {
|
||||
panic(ErrIRValues)
|
||||
}
|
||||
|
||||
if read {
|
||||
ir.count++
|
||||
}
|
||||
}
|
||||
|
||||
// IRKindError describes an attempt to read an IR value of unexpected kind.
|
||||
type IRKindError struct {
|
||||
Got, Want IRValueKind
|
||||
Ancillary uint32
|
||||
}
|
||||
|
||||
func (e *IRKindError) Error() string {
|
||||
return fmt.Sprintf(
|
||||
"got %s IR value (%#x) instead of %s",
|
||||
e.Got, e.Ancillary, e.Want,
|
||||
)
|
||||
}
|
||||
|
||||
// readFull reads until either p is filled or an error is encountered.
|
||||
func (ir *IRReader) readFull(p []byte) (n int, err error) {
|
||||
for n < len(p) && err == nil {
|
||||
var nn int
|
||||
nn, err = ir.r.Read(p[n:])
|
||||
n += nn
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// mustRead reads from the underlying measured reader and panics on error. If
|
||||
// an [io.EOF] is encountered and n != len(p), the error is promoted to a
|
||||
// [io.ErrUnexpectedEOF], if n == 0, [io.EOF] is kept as is, otherwise it is
|
||||
// zeroed.
|
||||
func (ir *IRReader) mustRead(p []byte) {
|
||||
n, err := ir.readFull(p)
|
||||
if err == nil {
|
||||
return
|
||||
}
|
||||
|
||||
if errors.Is(err, io.EOF) {
|
||||
if n == len(p) {
|
||||
return
|
||||
}
|
||||
err = io.ErrUnexpectedEOF
|
||||
}
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// mustReadHeader reads the next header via d and checks its kind.
|
||||
func (ir *IRReader) mustReadHeader(k IRValueKind) {
|
||||
ir.mustRead(ir.buf[:])
|
||||
ir.h = irValueHeader(binary.LittleEndian.Uint64(ir.buf[:]))
|
||||
if wk := ir.h.kind(); wk != k {
|
||||
panic(&IRKindError{wk, k, ir.h.value()})
|
||||
}
|
||||
}
|
||||
|
||||
// putAll returns all dependency buffers to the underlying [Cache].
|
||||
func (ir *IRReader) putAll() {
|
||||
for _, buf := range ir.deps {
|
||||
ir.d.c.putIdentBuf(buf)
|
||||
}
|
||||
ir.deps = nil
|
||||
}
|
||||
|
||||
// DiscardAll discards all unstructured dependencies. This is useful to
|
||||
// implementations that encode dependencies as [IRKindIdent] which are read back
|
||||
// via ReadIdent.
|
||||
func (ir *IRReader) DiscardAll() {
|
||||
if ir.deps == nil {
|
||||
panic("attempting to discard dependencies twice")
|
||||
}
|
||||
ir.putAll()
|
||||
}
|
||||
|
||||
// ErrDependencyDepleted is returned when attempting to advance to the next
|
||||
// unstructured dependency when there are none left.
|
||||
var ErrDependencyDepleted = errors.New("reading past end of dependencies")
|
||||
|
||||
// Next returns the next unstructured dependency.
|
||||
func (ir *IRReader) Next() Artifact {
|
||||
if len(ir.deps) == 0 {
|
||||
panic(ErrDependencyDepleted)
|
||||
}
|
||||
|
||||
id := unique.Make(ID(ir.deps[0][wordSize:]))
|
||||
ir.d.c.putIdentBuf(ir.deps[0])
|
||||
ir.deps = ir.deps[1:]
|
||||
|
||||
if a, ok := ir.d.ident[id]; !ok {
|
||||
ir.putAll()
|
||||
panic(DanglingIdentError(id))
|
||||
} else {
|
||||
return a
|
||||
}
|
||||
}
|
||||
|
||||
// MissedDependencyError is the number of unstructured dependencies remaining
|
||||
// in [IRReader] that was never requested or explicitly discarded before
|
||||
// finalisation.
|
||||
type MissedDependencyError int
|
||||
|
||||
func (e MissedDependencyError) Error() string {
|
||||
return "missed " + strconv.Itoa(int(e)) + " unstructured dependencies"
|
||||
}
|
||||
|
||||
var (
|
||||
// ErrUnexpectedChecksum is returned by a [IRReadFunc] that does not expect
|
||||
// a checksum but received one in [IRKindEnd] anyway.
|
||||
ErrUnexpectedChecksum = errors.New("checksum specified on unsupported artifact")
|
||||
// ErrExpectedChecksum is returned by a [IRReadFunc] that expects a checksum
|
||||
// but did not receive one in [IRKindEnd].
|
||||
ErrExpectedChecksum = errors.New("checksum required but not specified")
|
||||
)
|
||||
|
||||
// Finalise reads the final [IRKindEnd] value and marks r as finalised. Methods
|
||||
// of r are invalid upon entry into Finalise. If a [Checksum] is available via
|
||||
// [IREndKnownChecksum], its handle is returned and the caller must store its
|
||||
// value in the resulting [Artifact].
|
||||
func (ir *IRReader) Finalise() (checksum unique.Handle[Checksum], ok bool) {
|
||||
ir.enterReader(true)
|
||||
ir.count = -1
|
||||
|
||||
ir.mustReadHeader(IRKindEnd)
|
||||
f := IREndFlag(ir.h.value())
|
||||
|
||||
if f&IREndKnownChecksum != 0 {
|
||||
buf := ir.d.c.getIdentBuf()
|
||||
defer ir.d.c.putIdentBuf(buf)
|
||||
|
||||
ir.mustRead(buf[wordSize:])
|
||||
checksum = unique.Make(Checksum(buf[wordSize:]))
|
||||
ok = true
|
||||
}
|
||||
|
||||
if err := ir.ibw.Flush(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
ir.r, ir.ibw = nil, nil
|
||||
|
||||
if len(ir.deps) != 0 {
|
||||
panic(MissedDependencyError(len(ir.deps)))
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// ReadIdent reads the next value as [IRKindIdent].
|
||||
func (ir *IRReader) ReadIdent() Artifact {
|
||||
ir.enterReader(true)
|
||||
ir.mustReadHeader(IRKindIdent)
|
||||
|
||||
buf := ir.d.c.getIdentBuf()
|
||||
defer ir.d.c.putIdentBuf(buf)
|
||||
|
||||
ir.mustRead(buf[wordSize:])
|
||||
id := unique.Make(ID(buf[wordSize:]))
|
||||
|
||||
if a, ok := ir.d.ident[id]; !ok {
|
||||
panic(DanglingIdentError(id))
|
||||
} else {
|
||||
return a
|
||||
}
|
||||
}
|
||||
|
||||
// ReadUint32 reads the next value as [IRKindUint32].
|
||||
func (ir *IRReader) ReadUint32() uint32 {
|
||||
ir.enterReader(true)
|
||||
ir.mustReadHeader(IRKindUint32)
|
||||
return ir.h.value()
|
||||
}
|
||||
|
||||
// ReadStringBytes reads the next value as [IRKindString] but returns it as a
|
||||
// byte slice instead.
|
||||
func (ir *IRReader) ReadStringBytes() []byte {
|
||||
ir.enterReader(true)
|
||||
ir.mustReadHeader(IRKindString)
|
||||
|
||||
sz := int(ir.h.value())
|
||||
szWire := alignSize(sz)
|
||||
if szWire > irMaxStringLength {
|
||||
panic(IRStringError("\x00"))
|
||||
}
|
||||
|
||||
p := make([]byte, szWire)
|
||||
ir.mustRead(p)
|
||||
return p[:sz]
|
||||
}
|
||||
|
||||
// ReadString reads the next value as [IRKindString].
|
||||
func (ir *IRReader) ReadString() string {
|
||||
p := ir.ReadStringBytes()
|
||||
return unsafe.String(unsafe.SliceData(p), len(p))
|
||||
}
|
||||
|
||||
// decode decodes the next [Artifact] in the IR stream and returns any buffer
|
||||
// originating from [Cache] before returning. decode returns [io.EOF] if and
|
||||
// only if the underlying [io.Reader] is already read to EOF.
|
||||
func (d *IRDecoder) decode() (a Artifact, err error) {
|
||||
defer panicToError(&err)
|
||||
var ir IRReader
|
||||
|
||||
defer func() { ir.d = nil }()
|
||||
ir.d = d
|
||||
|
||||
h := sha512.New384()
|
||||
ir.ibw = d.c.getWriter(h)
|
||||
defer d.c.putWriter(ir.ibw)
|
||||
ir.r = io.TeeReader(d.r, ir.ibw)
|
||||
|
||||
if n, _err := ir.readFull(ir.buf[:]); _err != nil {
|
||||
if errors.Is(_err, io.EOF) {
|
||||
if n != 0 {
|
||||
_err = io.ErrUnexpectedEOF
|
||||
}
|
||||
}
|
||||
|
||||
err = _err
|
||||
return
|
||||
}
|
||||
ak := Kind(binary.LittleEndian.Uint64(ir.buf[:]))
|
||||
f, ok := irArtifact[ak]
|
||||
if !ok {
|
||||
err = InvalidKindError(ak)
|
||||
return
|
||||
}
|
||||
|
||||
defer ir.putAll()
|
||||
ir.mustRead(ir.buf[:])
|
||||
sz := binary.LittleEndian.Uint64(ir.buf[:])
|
||||
if sz > irMaxDeps {
|
||||
err = ErrIRDepend
|
||||
return
|
||||
}
|
||||
ir.deps = make([]*extIdent, sz)
|
||||
for i := range ir.deps {
|
||||
ir.deps[i] = d.c.getIdentBuf()
|
||||
}
|
||||
for _, buf := range ir.deps {
|
||||
ir.mustRead(buf[:])
|
||||
}
|
||||
|
||||
a = f(&ir)
|
||||
if a == nil {
|
||||
err = syscall.ENOTRECOVERABLE
|
||||
return
|
||||
}
|
||||
|
||||
if ir.count != -1 {
|
||||
err = ErrRemainingIR
|
||||
return
|
||||
}
|
||||
|
||||
buf := d.c.getIdentBuf()
|
||||
h.Sum(buf[wordSize:wordSize])
|
||||
id := unique.Make(ID(buf[wordSize:]))
|
||||
d.c.putIdentBuf(buf)
|
||||
if _, ok = d.ident[id]; !ok {
|
||||
d.ident[id] = a
|
||||
} else {
|
||||
d.c.msg.Verbosef(
|
||||
"artifact %s appeared more than once in IR stream",
|
||||
Encode(id.Value()),
|
||||
)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Decode consumes the IR stream to EOF and returns the final [Artifact]. After
|
||||
// Decode returns, Lookup is available and Decode must not be called again.
|
||||
func (d *IRDecoder) Decode() (a Artifact, err error) {
|
||||
if d.done {
|
||||
panic("attempting to decode an IR stream twice")
|
||||
}
|
||||
defer func() { d.done = true }()
|
||||
|
||||
var cur Artifact
|
||||
next:
|
||||
a, err = d.decode()
|
||||
|
||||
if err == nil {
|
||||
cur = a
|
||||
goto next
|
||||
}
|
||||
|
||||
if errors.Is(err, io.EOF) {
|
||||
a, err = cur, nil
|
||||
d.ok = true
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Lookup looks up an [Artifact] described by the IR stream by its identifier.
|
||||
func (d *IRDecoder) Lookup(id unique.Handle[ID]) (a Artifact, ok bool) {
|
||||
if !d.ok {
|
||||
panic("attempting to look up artifact without full IR stream")
|
||||
}
|
||||
a, ok = d.ident[id]
|
||||
return
|
||||
}
|
||||
114
internal/pkg/ir_test.go
Normal file
114
internal/pkg/ir_test.go
Normal file
@@ -0,0 +1,114 @@
|
||||
package pkg_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"hakurei.app/container/check"
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
func TestIRRoundtrip(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
a pkg.Artifact
|
||||
}{
|
||||
{"http get aligned", pkg.NewHTTPGet(
|
||||
nil, "file:///testdata",
|
||||
pkg.Checksum(bytes.Repeat([]byte{0xfd}, len(pkg.Checksum{}))),
|
||||
)},
|
||||
{"http get unaligned", pkg.NewHTTPGet(
|
||||
nil, "https://hakurei.app",
|
||||
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
|
||||
)},
|
||||
|
||||
{"http get tar", pkg.NewHTTPGetTar(
|
||||
nil, "file:///testdata",
|
||||
pkg.Checksum(bytes.Repeat([]byte{0xff}, len(pkg.Checksum{}))),
|
||||
pkg.TarBzip2,
|
||||
)},
|
||||
{"http get tar unaligned", pkg.NewHTTPGetTar(
|
||||
nil, "https://hakurei.app",
|
||||
pkg.Checksum(bytes.Repeat([]byte{0xfe}, len(pkg.Checksum{}))),
|
||||
pkg.TarUncompressed,
|
||||
)},
|
||||
|
||||
{"exec offline", pkg.NewExec(
|
||||
"exec-offline", nil, 0, false,
|
||||
pkg.AbsWork,
|
||||
[]string{"HAKUREI_TEST=1"},
|
||||
check.MustAbs("/opt/bin/testtool"),
|
||||
[]string{"testtool"},
|
||||
|
||||
pkg.MustPath("/file", false, pkg.NewFile("file", []byte(
|
||||
"stub file",
|
||||
))), pkg.MustPath("/.hakurei", false, pkg.NewHTTPGetTar(
|
||||
nil, "file:///hakurei.tar",
|
||||
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
|
||||
pkg.TarUncompressed,
|
||||
)), pkg.MustPath("/opt", false, pkg.NewHTTPGetTar(
|
||||
nil, "file:///testtool.tar.gz",
|
||||
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
|
||||
pkg.TarGzip,
|
||||
)),
|
||||
)},
|
||||
|
||||
{"exec net", pkg.NewExec(
|
||||
"exec-net",
|
||||
(*pkg.Checksum)(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
|
||||
0, false,
|
||||
pkg.AbsWork,
|
||||
[]string{"HAKUREI_TEST=1"},
|
||||
check.MustAbs("/opt/bin/testtool"),
|
||||
[]string{"testtool", "net"},
|
||||
|
||||
pkg.MustPath("/file", false, pkg.NewFile("file", []byte(
|
||||
"stub file",
|
||||
))), pkg.MustPath("/.hakurei", false, pkg.NewHTTPGetTar(
|
||||
nil, "file:///hakurei.tar",
|
||||
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
|
||||
pkg.TarUncompressed,
|
||||
)), pkg.MustPath("/opt", false, pkg.NewHTTPGetTar(
|
||||
nil, "file:///testtool.tar.gz",
|
||||
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
|
||||
pkg.TarGzip,
|
||||
)),
|
||||
)},
|
||||
|
||||
{"file anonymous", pkg.NewFile("", []byte{0})},
|
||||
{"file", pkg.NewFile("stub", []byte("stub"))},
|
||||
}
|
||||
testCasesCache := make([]cacheTestCase, len(testCases))
|
||||
for i, tc := range testCases {
|
||||
want := tc.a
|
||||
testCasesCache[i] = cacheTestCase{tc.name, nil,
|
||||
func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
||||
r, w := io.Pipe()
|
||||
|
||||
done := make(chan error, 1)
|
||||
go func() {
|
||||
t.Helper()
|
||||
done <- c.EncodeAll(w, want)
|
||||
_ = w.Close()
|
||||
}()
|
||||
|
||||
if got, err := c.NewDecoder(r).Decode(); err != nil {
|
||||
t.Fatalf("Decode: error = %v", err)
|
||||
} else if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("Decode: %#v, want %#v", got, want)
|
||||
}
|
||||
|
||||
if err := <-done; err != nil {
|
||||
t.Fatalf("EncodeAll: error = %v", err)
|
||||
}
|
||||
}, pkg.MustDecode(
|
||||
"E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C",
|
||||
),
|
||||
}
|
||||
}
|
||||
checkWithCache(t, testCasesCache)
|
||||
}
|
||||
@@ -1,13 +1,11 @@
|
||||
package pkg
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha512"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"path"
|
||||
"sync"
|
||||
"unique"
|
||||
)
|
||||
|
||||
// An httpArtifact is an [Artifact] backed by a [http] url string. The method is
|
||||
@@ -17,50 +15,53 @@ type httpArtifact struct {
|
||||
// Caller-supplied url string.
|
||||
url string
|
||||
|
||||
// Caller-supplied checksum of the response body. This is validated during
|
||||
// curing and the first call to Data.
|
||||
checksum Checksum
|
||||
// Caller-supplied checksum of the response body. This is validated when
|
||||
// closing the [io.ReadCloser] returned by Cure.
|
||||
checksum unique.Handle[Checksum]
|
||||
|
||||
// doFunc is the Do method of [http.Client] supplied by the caller.
|
||||
doFunc func(req *http.Request) (*http.Response, error)
|
||||
|
||||
// Response body read to EOF.
|
||||
data []byte
|
||||
|
||||
// Synchronises access to data.
|
||||
mu sync.Mutex
|
||||
// client is the address of the caller-supplied [http.Client].
|
||||
client *http.Client
|
||||
}
|
||||
|
||||
var _ KnownChecksum = new(httpArtifact)
|
||||
var _ fmt.Stringer = new(httpArtifact)
|
||||
|
||||
// NewHTTPGet returns a new [File] backed by the supplied client. A GET request
|
||||
// is set up for url. If c is nil, [http.DefaultClient] is used instead.
|
||||
// NewHTTPGet returns a new [FileArtifact] backed by the supplied client. A GET
|
||||
// request is set up for url. If c is nil, [http.DefaultClient] is used instead.
|
||||
func NewHTTPGet(
|
||||
c *http.Client,
|
||||
url string,
|
||||
checksum Checksum,
|
||||
) File {
|
||||
if c == nil {
|
||||
c = http.DefaultClient
|
||||
}
|
||||
return &httpArtifact{url: url, checksum: checksum, doFunc: c.Do}
|
||||
) FileArtifact {
|
||||
return &httpArtifact{url: url, checksum: unique.Make(checksum), client: c}
|
||||
}
|
||||
|
||||
// Kind returns the hardcoded [Kind] constant.
|
||||
func (a *httpArtifact) Kind() Kind { return KindHTTPGet }
|
||||
func (*httpArtifact) Kind() Kind { return KindHTTPGet }
|
||||
|
||||
// Params writes the backing url string. Client is not represented as it does
|
||||
// not affect [Cache.Cure] outcome.
|
||||
func (a *httpArtifact) Params(ctx *IContext) {
|
||||
ctx.GetHash().Write([]byte(a.url))
|
||||
func (a *httpArtifact) Params(ctx *IContext) { ctx.WriteString(a.url) }
|
||||
|
||||
func init() {
|
||||
register(KindHTTPGet, func(r *IRReader) Artifact {
|
||||
url := r.ReadString()
|
||||
checksum, ok := r.Finalise()
|
||||
if !ok {
|
||||
panic(ErrExpectedChecksum)
|
||||
}
|
||||
return NewHTTPGet(nil, url, checksum.Value())
|
||||
})
|
||||
}
|
||||
|
||||
// Dependencies returns a nil slice.
|
||||
func (a *httpArtifact) Dependencies() []Artifact { return nil }
|
||||
func (*httpArtifact) Dependencies() []Artifact { return nil }
|
||||
|
||||
// IsExclusive returns false: Cure returns as soon as a response is received.
|
||||
func (*httpArtifact) IsExclusive() bool { return false }
|
||||
|
||||
// Checksum returns the caller-supplied checksum.
|
||||
func (a *httpArtifact) Checksum() Checksum { return a.checksum }
|
||||
func (a *httpArtifact) Checksum() Checksum { return a.checksum.Value() }
|
||||
|
||||
// String returns [path.Base] over the backing url.
|
||||
func (a *httpArtifact) String() string { return path.Base(a.url) }
|
||||
@@ -73,17 +74,25 @@ func (e ResponseStatusError) Error() string {
|
||||
return "the requested URL returned non-OK status: " + http.StatusText(int(e))
|
||||
}
|
||||
|
||||
// do sends the caller-supplied request on the caller-supplied [http.Client]
|
||||
// and reads its response body to EOF and returns the resulting bytes.
|
||||
func (a *httpArtifact) do(ctx context.Context) (data []byte, err error) {
|
||||
// Cure sends the http request and returns the resulting response body reader
|
||||
// wrapped to perform checksum validation. It is valid but not encouraged to
|
||||
// close the resulting [io.ReadCloser] before it is read to EOF, as that causes
|
||||
// Close to block until all remaining data is consumed and validated.
|
||||
func (a *httpArtifact) Cure(r *RContext) (rc io.ReadCloser, err error) {
|
||||
var req *http.Request
|
||||
req, err = http.NewRequestWithContext(ctx, http.MethodGet, a.url, nil)
|
||||
req, err = http.NewRequestWithContext(r.Unwrap(), http.MethodGet, a.url, nil)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
req.Header.Set("User-Agent", "Hakurei/1.1")
|
||||
|
||||
c := a.client
|
||||
if c == nil {
|
||||
c = http.DefaultClient
|
||||
}
|
||||
|
||||
var resp *http.Response
|
||||
if resp, err = a.doFunc(req); err != nil {
|
||||
if resp, err = c.Do(req); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -92,35 +101,6 @@ func (a *httpArtifact) do(ctx context.Context) (data []byte, err error) {
|
||||
return nil, ResponseStatusError(resp.StatusCode)
|
||||
}
|
||||
|
||||
if data, err = io.ReadAll(resp.Body); err != nil {
|
||||
_ = resp.Body.Close()
|
||||
return
|
||||
}
|
||||
|
||||
err = resp.Body.Close()
|
||||
return
|
||||
}
|
||||
|
||||
// Cure completes the http request and returns the resulting response body read
|
||||
// to EOF. Data does not interact with the filesystem.
|
||||
func (a *httpArtifact) Cure(ctx context.Context) (data []byte, err error) {
|
||||
a.mu.Lock()
|
||||
defer a.mu.Unlock()
|
||||
|
||||
if a.data != nil {
|
||||
// validated by cache or a previous call to Data
|
||||
return a.data, nil
|
||||
}
|
||||
|
||||
if data, err = a.do(ctx); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
h := sha512.New384()
|
||||
h.Write(data)
|
||||
if got := (Checksum)(h.Sum(nil)); got != a.checksum {
|
||||
return nil, &ChecksumMismatchError{got, a.checksum}
|
||||
}
|
||||
a.data = data
|
||||
rc = r.NewMeasuredReader(resp.Body, a.checksum)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -2,11 +2,13 @@ package pkg_test
|
||||
|
||||
import (
|
||||
"crypto/sha512"
|
||||
"io"
|
||||
"net/http"
|
||||
"reflect"
|
||||
"testing"
|
||||
"testing/fstest"
|
||||
"unique"
|
||||
"unsafe"
|
||||
|
||||
"hakurei.app/container/check"
|
||||
"hakurei.app/internal/pkg"
|
||||
@@ -31,15 +33,27 @@ func TestHTTPGet(t *testing.T) {
|
||||
|
||||
checkWithCache(t, []cacheTestCase{
|
||||
{"direct", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
||||
var r pkg.RContext
|
||||
rCacheVal := reflect.ValueOf(&r).Elem().FieldByName("cache")
|
||||
reflect.NewAt(
|
||||
rCacheVal.Type(),
|
||||
unsafe.Pointer(rCacheVal.UnsafeAddr()),
|
||||
).Elem().Set(reflect.ValueOf(c))
|
||||
|
||||
f := pkg.NewHTTPGet(
|
||||
&client,
|
||||
"file:///testdata",
|
||||
testdataChecksum.Value(),
|
||||
)
|
||||
if got, err := f.Cure(t.Context()); err != nil {
|
||||
var got []byte
|
||||
if rc, err := f.Cure(&r); err != nil {
|
||||
t.Fatalf("Cure: error = %v", err)
|
||||
} else if got, err = io.ReadAll(rc); err != nil {
|
||||
t.Fatalf("ReadAll: error = %v", err)
|
||||
} else if string(got) != testdata {
|
||||
t.Fatalf("Cure: %x, want %x", got, testdata)
|
||||
} else if err = rc.Close(); err != nil {
|
||||
t.Fatalf("Close: error = %v", err)
|
||||
}
|
||||
|
||||
// check direct validation
|
||||
@@ -51,8 +65,21 @@ func TestHTTPGet(t *testing.T) {
|
||||
wantErrMismatch := &pkg.ChecksumMismatchError{
|
||||
Got: testdataChecksum.Value(),
|
||||
}
|
||||
if _, err := f.Cure(t.Context()); !reflect.DeepEqual(err, wantErrMismatch) {
|
||||
t.Fatalf("Cure: error = %#v, want %#v", err, wantErrMismatch)
|
||||
if rc, err := f.Cure(&r); err != nil {
|
||||
t.Fatalf("Cure: error = %v", err)
|
||||
} else if got, err = io.ReadAll(rc); err != nil {
|
||||
t.Fatalf("ReadAll: error = %v", err)
|
||||
} else if string(got) != testdata {
|
||||
t.Fatalf("Cure: %x, want %x", got, testdata)
|
||||
} else if err = rc.Close(); !reflect.DeepEqual(err, wantErrMismatch) {
|
||||
t.Fatalf("Close: error = %#v, want %#v", err, wantErrMismatch)
|
||||
}
|
||||
|
||||
// check fallback validation
|
||||
if rc, err := f.Cure(&r); err != nil {
|
||||
t.Fatalf("Cure: error = %v", err)
|
||||
} else if err = rc.Close(); !reflect.DeepEqual(err, wantErrMismatch) {
|
||||
t.Fatalf("Close: error = %#v, want %#v", err, wantErrMismatch)
|
||||
}
|
||||
|
||||
// check direct response error
|
||||
@@ -62,12 +89,19 @@ func TestHTTPGet(t *testing.T) {
|
||||
pkg.Checksum{},
|
||||
)
|
||||
wantErrNotFound := pkg.ResponseStatusError(http.StatusNotFound)
|
||||
if _, err := f.Cure(t.Context()); !reflect.DeepEqual(err, wantErrNotFound) {
|
||||
if _, err := f.Cure(&r); !reflect.DeepEqual(err, wantErrNotFound) {
|
||||
t.Fatalf("Cure: error = %#v, want %#v", err, wantErrNotFound)
|
||||
}
|
||||
}, pkg.MustDecode("E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C")},
|
||||
|
||||
{"cure", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
||||
var r pkg.RContext
|
||||
rCacheVal := reflect.ValueOf(&r).Elem().FieldByName("cache")
|
||||
reflect.NewAt(
|
||||
rCacheVal.Type(),
|
||||
unsafe.Pointer(rCacheVal.UnsafeAddr()),
|
||||
).Elem().Set(reflect.ValueOf(c))
|
||||
|
||||
f := pkg.NewHTTPGet(
|
||||
&client,
|
||||
"file:///testdata",
|
||||
@@ -75,7 +109,7 @@ func TestHTTPGet(t *testing.T) {
|
||||
)
|
||||
wantPathname := base.Append(
|
||||
"identifier",
|
||||
"NqVORkT6L9HX6Za7kT2zcibY10qFqBaxEjPiYFrBQX-ZFr3yxCzJxbKOP0zVjeWb",
|
||||
"oM-2pUlk-mOxK1t3aMWZer69UdOQlAXiAgMrpZ1476VoOqpYVP1aGFS9_HYy-D8_",
|
||||
)
|
||||
if pathname, checksum, err := c.Cure(f); err != nil {
|
||||
t.Fatalf("Cure: error = %v", err)
|
||||
@@ -85,10 +119,15 @@ func TestHTTPGet(t *testing.T) {
|
||||
t.Fatalf("Cure: %x, want %x", checksum.Value(), testdataChecksum.Value())
|
||||
}
|
||||
|
||||
if got, err := f.Cure(t.Context()); err != nil {
|
||||
var got []byte
|
||||
if rc, err := f.Cure(&r); err != nil {
|
||||
t.Fatalf("Cure: error = %v", err)
|
||||
} else if got, err = io.ReadAll(rc); err != nil {
|
||||
t.Fatalf("ReadAll: error = %v", err)
|
||||
} else if string(got) != testdata {
|
||||
t.Fatalf("Cure: %x, want %x", got, testdata)
|
||||
} else if err = rc.Close(); err != nil {
|
||||
t.Fatalf("Close: error = %v", err)
|
||||
}
|
||||
|
||||
// check load from cache
|
||||
@@ -97,10 +136,14 @@ func TestHTTPGet(t *testing.T) {
|
||||
"file:///testdata",
|
||||
testdataChecksum.Value(),
|
||||
)
|
||||
if got, err := f.Cure(t.Context()); err != nil {
|
||||
if rc, err := f.Cure(&r); err != nil {
|
||||
t.Fatalf("Cure: error = %v", err)
|
||||
} else if got, err = io.ReadAll(rc); err != nil {
|
||||
t.Fatalf("ReadAll: error = %v", err)
|
||||
} else if string(got) != testdata {
|
||||
t.Fatalf("Cure: %x, want %x", got, testdata)
|
||||
} else if err = rc.Close(); err != nil {
|
||||
t.Fatalf("Close: error = %v", err)
|
||||
}
|
||||
|
||||
// check error passthrough
|
||||
@@ -113,6 +156,6 @@ func TestHTTPGet(t *testing.T) {
|
||||
if _, _, err := c.Cure(f); !reflect.DeepEqual(err, wantErrNotFound) {
|
||||
t.Fatalf("Pathname: error = %#v, want %#v", err, wantErrNotFound)
|
||||
}
|
||||
}, pkg.MustDecode("bqtn69RkV5E7V7GhhgCFjcvbxmaqrO8DywamM4Tyjf10F6EJBHjXiIa_tFRtF4iN")},
|
||||
}, pkg.MustDecode("L_0RFHpr9JUS4Zp14rz2dESSRvfLzpvqsLhR1-YjQt8hYlmEdVl7vI3_-v8UNPKs")},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
package pkg
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/sha512"
|
||||
@@ -64,35 +65,6 @@ func MustDecode(s string) (checksum Checksum) {
|
||||
return
|
||||
}
|
||||
|
||||
// IContext is passed to [Artifact.Params] and provides identifier information
|
||||
// and the target [hash.Hash] for writing params into.
|
||||
//
|
||||
// Methods of IContext are safe for concurrent use. IContext is valid
|
||||
// until [Artifact.Params] returns.
|
||||
type IContext struct {
|
||||
// Address of underlying [Cache], should be zeroed or made unusable after
|
||||
// [Artifact.Params] returns and must not be exposed directly.
|
||||
cache *Cache
|
||||
// Made available for writing, should be zeroed after [Artifact.Params]
|
||||
// returns. Internal state must not be inspected.
|
||||
h hash.Hash
|
||||
}
|
||||
|
||||
// Unwrap returns the underlying [context.Context].
|
||||
func (i *IContext) Unwrap() context.Context { return i.cache.ctx }
|
||||
|
||||
// GetHash returns the underlying [hash.Hash] for writing. Callers must not
|
||||
// attempt to inspect its internal state.
|
||||
func (i *IContext) GetHash() hash.Hash { return i.h }
|
||||
|
||||
// WriteIdent writes the identifier of [Artifact] to the underlying [hash.Hash].
|
||||
func (i *IContext) WriteIdent(a Artifact) {
|
||||
buf := i.cache.getIdentBuf()
|
||||
*(*ID)(buf[wordSize:]) = i.cache.Ident(a).Value()
|
||||
i.h.Write(buf[wordSize:])
|
||||
i.cache.putIdentBuf(buf)
|
||||
}
|
||||
|
||||
// TContext is passed to [TrivialArtifact.Cure] and provides information and
|
||||
// methods required for curing the [TrivialArtifact].
|
||||
//
|
||||
@@ -147,14 +119,15 @@ func (t *TContext) GetWorkDir() *check.Absolute { return t.work }
|
||||
// create it if they wish to use it, using [os.MkdirAll].
|
||||
func (t *TContext) GetTempDir() *check.Absolute { return t.temp }
|
||||
|
||||
// Open tries to open [Artifact] for reading. If a implements [File], its data
|
||||
// might be used directly, eliminating the roundtrip to vfs. Otherwise, it must
|
||||
// cure into a directory containing a single regular file.
|
||||
// Open tries to open [Artifact] for reading. If a implements [FileArtifact],
|
||||
// its reader might be used directly, eliminating the roundtrip to vfs.
|
||||
// Otherwise, it must cure into a directory containing a single regular file.
|
||||
//
|
||||
// If err is nil, the caller is responsible for closing the resulting
|
||||
// [io.ReadCloser].
|
||||
// If err is nil, the caller must close the resulting [io.ReadCloser] and return
|
||||
// its error, if any. Failure to read r to EOF may result in a spurious
|
||||
// [ChecksumMismatchError], or the underlying implementation may block on Close.
|
||||
func (t *TContext) Open(a Artifact) (r io.ReadCloser, err error) {
|
||||
if f, ok := a.(File); ok {
|
||||
if f, ok := a.(FileArtifact); ok {
|
||||
return t.cache.openFile(f)
|
||||
}
|
||||
|
||||
@@ -213,6 +186,20 @@ func (f *FContext) GetArtifact(a Artifact) (
|
||||
panic(InvalidLookupError(f.cache.Ident(a).Value()))
|
||||
}
|
||||
|
||||
// RContext is passed to [FileArtifact.Cure] and provides helper methods useful
|
||||
// for curing the [FileArtifact].
|
||||
//
|
||||
// Methods of RContext are safe for concurrent use. RContext is valid
|
||||
// until [FileArtifact.Cure] returns.
|
||||
type RContext struct {
|
||||
// Address of underlying [Cache], should be zeroed or made unusable after
|
||||
// [FileArtifact.Cure] returns and must not be exposed directly.
|
||||
cache *Cache
|
||||
}
|
||||
|
||||
// Unwrap returns the underlying [context.Context].
|
||||
func (r *RContext) Unwrap() context.Context { return r.cache.ctx }
|
||||
|
||||
// An Artifact is a read-only reference to a piece of data that may be created
|
||||
// deterministically but might not currently be available in memory or on the
|
||||
// filesystem.
|
||||
@@ -222,10 +209,12 @@ type Artifact interface {
|
||||
// [Artifact] is allowed to return the same [Kind] value.
|
||||
Kind() Kind
|
||||
|
||||
// Params writes opaque bytes that describes [Artifact]. Implementations
|
||||
// Params writes deterministic values describing [Artifact]. Implementations
|
||||
// must guarantee that these values are unique among differing instances
|
||||
// of the same implementation with the same dependencies. Callers must not
|
||||
// attempt to interpret these params.
|
||||
// of the same implementation with identical dependencies and conveys enough
|
||||
// information to create another instance of [Artifact] identical to the
|
||||
// instance emitting these values. The new instance created via [IRReadFunc]
|
||||
// from these values must then produce identical IR values.
|
||||
//
|
||||
// Result must remain identical across multiple invocations.
|
||||
Params(ctx *IContext)
|
||||
@@ -237,6 +226,24 @@ type Artifact interface {
|
||||
//
|
||||
// Result must remain identical across multiple invocations.
|
||||
Dependencies() []Artifact
|
||||
|
||||
// IsExclusive returns whether the [Artifact] is exclusive. Exclusive
|
||||
// artifacts might not run in parallel with each other, and are still
|
||||
// subject to the cures limit.
|
||||
//
|
||||
// Some implementations may saturate the CPU for a nontrivial amount of
|
||||
// time. Curing multiple such implementations simultaneously causes
|
||||
// significant CPU scheduler overhead. An exclusive artifact will generally
|
||||
// not be cured alongside another exclusive artifact, thus alleviating this
|
||||
// overhead.
|
||||
//
|
||||
// Note that [Cache] reserves the right to still cure exclusive
|
||||
// artifacts concurrently as this is not a synchronisation primitive but
|
||||
// an optimisation one. Implementations are forbidden from accessing global
|
||||
// state regardless of exclusivity.
|
||||
//
|
||||
// Result must remain identical across multiple invocations.
|
||||
IsExclusive() bool
|
||||
}
|
||||
|
||||
// FloodArtifact refers to an [Artifact] requiring its entire dependency graph
|
||||
@@ -274,7 +281,7 @@ func Flood(a Artifact) iter.Seq[Artifact] {
|
||||
//
|
||||
// TrivialArtifact is unable to cure any other [Artifact] and it cannot access
|
||||
// pathnames. This type of [Artifact] is primarily intended for dependency-less
|
||||
// artifacts or direct dependencies that only consists of [File].
|
||||
// artifacts or direct dependencies that only consists of [FileArtifact].
|
||||
type TrivialArtifact interface {
|
||||
// Cure cures the current [Artifact] to the working directory obtained via
|
||||
// [TContext.GetWorkDir].
|
||||
@@ -309,16 +316,19 @@ type KnownChecksum interface {
|
||||
Checksum() Checksum
|
||||
}
|
||||
|
||||
// A File refers to an [Artifact] backed by a single file.
|
||||
type File interface {
|
||||
// Cure returns the full contents of [File]. If [File] implements
|
||||
// [KnownChecksum], Cure is responsible for validating any data it produces
|
||||
// and must return [ChecksumMismatchError] if validation fails.
|
||||
// FileArtifact refers to an [Artifact] backed by a single file.
|
||||
type FileArtifact interface {
|
||||
// Cure returns [io.ReadCloser] of the full contents of [FileArtifact]. If
|
||||
// [FileArtifact] implements [KnownChecksum], Cure is responsible for
|
||||
// validating any data it produces and must return [ChecksumMismatchError]
|
||||
// if validation fails. This error is conventionally returned during the
|
||||
// first call to Close, but may be returned during any call to Read before
|
||||
// EOF, or by Cure itself.
|
||||
//
|
||||
// Callers must not modify the returned byte slice.
|
||||
// Callers are responsible for closing the resulting [io.ReadCloser].
|
||||
//
|
||||
// Result must remain identical across multiple invocations.
|
||||
Cure(ctx context.Context) ([]byte, error)
|
||||
Cure(r *RContext) (io.ReadCloser, error)
|
||||
|
||||
Artifact
|
||||
}
|
||||
@@ -413,9 +423,9 @@ type pendingArtifactDep struct {
|
||||
// Cache is a support layer that implementations of [Artifact] can use to store
|
||||
// cured [Artifact] data in a content addressed fashion.
|
||||
type Cache struct {
|
||||
// Work for curing dependency [Artifact] is sent here and cured concurrently
|
||||
// while subject to the cures limit. Invalid after the context is canceled.
|
||||
cureDep chan<- *pendingArtifactDep
|
||||
// Cures of any variant of [Artifact] sends to cures before entering the
|
||||
// implementation and receives an equal amount of elements after.
|
||||
cures chan struct{}
|
||||
|
||||
// [context.WithCancel] over caller-supplied context, used by [Artifact] and
|
||||
// all dependency curing goroutines.
|
||||
@@ -430,7 +440,7 @@ type Cache struct {
|
||||
// Directory where all [Cache] related files are placed.
|
||||
base *check.Absolute
|
||||
|
||||
// Whether to validate [File.Cure] for a [KnownChecksum] file. This
|
||||
// Whether to validate [FileArtifact.Cure] for a [KnownChecksum] file. This
|
||||
// significantly reduces performance.
|
||||
strict bool
|
||||
// Maximum size of a dependency graph.
|
||||
@@ -453,6 +463,11 @@ type Cache struct {
|
||||
// Synchronises access to ident and corresponding filesystem entries.
|
||||
identMu sync.RWMutex
|
||||
|
||||
// Synchronises entry into exclusive artifacts for the cure method.
|
||||
exclMu sync.Mutex
|
||||
// Buffered I/O free list, must not be accessed directly.
|
||||
bufioPool sync.Pool
|
||||
|
||||
// Unlocks the on-filesystem cache. Must only be called from Close.
|
||||
unlock func()
|
||||
// Synchronises calls to Close.
|
||||
@@ -522,38 +537,13 @@ func (c *Cache) unsafeIdent(a Artifact, encodeKind bool) (
|
||||
return
|
||||
}
|
||||
|
||||
deps := a.Dependencies()
|
||||
idents := make([]*extIdent, len(deps))
|
||||
for i, d := range deps {
|
||||
dbuf, did := c.unsafeIdent(d, true)
|
||||
if dbuf == nil {
|
||||
dbuf = c.getIdentBuf()
|
||||
binary.LittleEndian.PutUint64(dbuf[:], uint64(d.Kind()))
|
||||
*(*ID)(dbuf[wordSize:]) = did.Value()
|
||||
} else {
|
||||
c.storeIdent(d, dbuf)
|
||||
}
|
||||
defer c.putIdentBuf(dbuf)
|
||||
idents[i] = dbuf
|
||||
}
|
||||
slices.SortFunc(idents, func(a, b *extIdent) int {
|
||||
return bytes.Compare(a[:], b[:])
|
||||
})
|
||||
idents = slices.CompactFunc(idents, func(a, b *extIdent) bool {
|
||||
return *a == *b
|
||||
})
|
||||
|
||||
buf = c.getIdentBuf()
|
||||
h := sha512.New384()
|
||||
binary.LittleEndian.PutUint64(buf[:], uint64(a.Kind()))
|
||||
h.Write(buf[:wordSize])
|
||||
i := IContext{c, h}
|
||||
a.Params(&i)
|
||||
i.cache, i.h = nil, nil
|
||||
for _, dn := range idents {
|
||||
h.Write(dn[:])
|
||||
if err := c.Encode(h, a); err != nil {
|
||||
// unreachable
|
||||
panic(err)
|
||||
}
|
||||
|
||||
binary.LittleEndian.PutUint64(buf[:], uint64(a.Kind()))
|
||||
h.Sum(buf[wordSize:wordSize])
|
||||
return
|
||||
}
|
||||
@@ -573,8 +563,8 @@ func (e *ChecksumMismatchError) Error() string {
|
||||
// found and removed from the underlying storage of [Cache].
|
||||
type ScrubError struct {
|
||||
// Content-addressed entries not matching their checksum. This can happen
|
||||
// if an incorrect [File] implementation was cured against a non-strict
|
||||
// [Cache].
|
||||
// if an incorrect [FileArtifact] implementation was cured against
|
||||
// a non-strict [Cache].
|
||||
ChecksumMismatches []ChecksumMismatchError
|
||||
// Dangling identifier symlinks. This can happen if the content-addressed
|
||||
// entry was removed while scrubbing due to a checksum mismatch.
|
||||
@@ -910,10 +900,11 @@ func (c *Cache) finaliseIdent(
|
||||
close(done)
|
||||
}
|
||||
|
||||
// openFile tries to load [File] from [Cache], and if that fails, obtains it via
|
||||
// [File.Cure] instead. Notably, it does not cure [File]. If err is nil, the
|
||||
// caller is responsible for closing the resulting [io.ReadCloser].
|
||||
func (c *Cache) openFile(f File) (r io.ReadCloser, err error) {
|
||||
// openFile tries to load [FileArtifact] from [Cache], and if that fails,
|
||||
// obtains it via [FileArtifact.Cure] instead. Notably, it does not cure
|
||||
// [FileArtifact] to the filesystem. If err is nil, the caller is responsible
|
||||
// for closing the resulting [io.ReadCloser].
|
||||
func (c *Cache) openFile(f FileArtifact) (r io.ReadCloser, err error) {
|
||||
if kc, ok := f.(KnownChecksum); ok {
|
||||
c.checksumMu.RLock()
|
||||
r, err = os.Open(c.base.Append(
|
||||
@@ -943,17 +934,14 @@ func (c *Cache) openFile(f File) (r io.ReadCloser, err error) {
|
||||
}
|
||||
}()
|
||||
}
|
||||
var data []byte
|
||||
if data, err = f.Cure(c.ctx); err != nil {
|
||||
return
|
||||
}
|
||||
r = io.NopCloser(bytes.NewReader(data))
|
||||
return f.Cure(&RContext{c})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// InvalidFileModeError describes an [Artifact.Cure] that did not result in
|
||||
// a regular file or directory located at the work pathname.
|
||||
// InvalidFileModeError describes a [FloodArtifact.Cure] or
|
||||
// [TrivialArtifact.Cure] that did not result in a regular file or directory
|
||||
// located at the work pathname.
|
||||
type InvalidFileModeError fs.FileMode
|
||||
|
||||
// Error returns a constant string.
|
||||
@@ -961,8 +949,8 @@ func (e InvalidFileModeError) Error() string {
|
||||
return "artifact did not produce a regular file or directory"
|
||||
}
|
||||
|
||||
// NoOutputError describes an [Artifact.Cure] that did not populate its
|
||||
// work pathname despite completing successfully.
|
||||
// NoOutputError describes a [FloodArtifact.Cure] or [TrivialArtifact.Cure]
|
||||
// that did not populate its work pathname despite completing successfully.
|
||||
type NoOutputError struct{}
|
||||
|
||||
// Unwrap returns [os.ErrNotExist].
|
||||
@@ -1102,6 +1090,14 @@ func (c *Cache) Cure(a Artifact) (
|
||||
checksum unique.Handle[Checksum],
|
||||
err error,
|
||||
) {
|
||||
select {
|
||||
case <-c.ctx.Done():
|
||||
err = c.ctx.Err()
|
||||
return
|
||||
|
||||
default:
|
||||
}
|
||||
|
||||
if c.threshold > 0 {
|
||||
var n uintptr
|
||||
for range Flood(a) {
|
||||
@@ -1114,7 +1110,7 @@ func (c *Cache) Cure(a Artifact) (
|
||||
c.msg.Verbosef("visited %d artifacts", n)
|
||||
}
|
||||
|
||||
return c.cure(a)
|
||||
return c.cure(a, true)
|
||||
}
|
||||
|
||||
// CureError wraps a non-nil error returned attempting to cure an [Artifact].
|
||||
@@ -1132,39 +1128,38 @@ func (e *CureError) Error() string { return e.Err.Error() }
|
||||
// A DependencyCureError wraps errors returned while curing dependencies.
|
||||
type DependencyCureError []*CureError
|
||||
|
||||
// sort sorts underlying errors by their identifier.
|
||||
func (e *DependencyCureError) sort() {
|
||||
var identBuf [2]ID
|
||||
slices.SortFunc(*e, func(a, b *CureError) int {
|
||||
identBuf[0], identBuf[1] = a.Ident.Value(), b.Ident.Value()
|
||||
return slices.Compare(identBuf[0][:], identBuf[1][:])
|
||||
})
|
||||
// unwrapM recursively expands underlying errors into a caller-supplied map.
|
||||
func (e *DependencyCureError) unwrapM(me map[unique.Handle[ID]]*CureError) {
|
||||
for _, err := range *e {
|
||||
if _e, ok := err.Err.(*DependencyCureError); ok {
|
||||
_e.unwrapM(me)
|
||||
continue
|
||||
}
|
||||
me[err.Ident] = err
|
||||
}
|
||||
}
|
||||
|
||||
// unwrap recursively expands and deduplicates underlying errors.
|
||||
func (e *DependencyCureError) unwrap() DependencyCureError {
|
||||
errs := make(DependencyCureError, 0, len(*e))
|
||||
for _, err := range *e {
|
||||
if _e, ok := err.Err.(*DependencyCureError); ok {
|
||||
errs = append(errs, _e.unwrap()...)
|
||||
continue
|
||||
}
|
||||
errs = append(errs, err)
|
||||
}
|
||||
me := make(map[unique.Handle[ID]]*CureError, len(errs))
|
||||
for _, err := range errs {
|
||||
me[err.Ident] = err
|
||||
}
|
||||
return slices.AppendSeq(
|
||||
me := make(map[unique.Handle[ID]]*CureError)
|
||||
e.unwrapM(me)
|
||||
errs := slices.AppendSeq(
|
||||
make(DependencyCureError, 0, len(me)),
|
||||
maps.Values(me),
|
||||
)
|
||||
|
||||
var identBuf [2]ID
|
||||
slices.SortFunc(errs, func(a, b *CureError) int {
|
||||
identBuf[0], identBuf[1] = a.Ident.Value(), b.Ident.Value()
|
||||
return slices.Compare(identBuf[0][:], identBuf[1][:])
|
||||
})
|
||||
|
||||
return errs
|
||||
}
|
||||
|
||||
// Unwrap returns a deduplicated slice of underlying errors.
|
||||
func (e *DependencyCureError) Unwrap() []error {
|
||||
errs := e.unwrap()
|
||||
errs.sort()
|
||||
_errs := make([]error, len(errs))
|
||||
for i, err := range errs {
|
||||
_errs[i] = err
|
||||
@@ -1175,7 +1170,6 @@ func (e *DependencyCureError) Unwrap() []error {
|
||||
// Error returns a user-facing multiline error message.
|
||||
func (e *DependencyCureError) Error() string {
|
||||
errs := e.unwrap()
|
||||
errs.sort()
|
||||
if len(errs) == 0 {
|
||||
return "invalid dependency cure outcome"
|
||||
}
|
||||
@@ -1187,8 +1181,133 @@ func (e *DependencyCureError) Error() string {
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// enterCure must be called before entering an [Artifact] implementation.
|
||||
func (c *Cache) enterCure(a Artifact, curesExempt bool) error {
|
||||
if a.IsExclusive() {
|
||||
c.exclMu.Lock()
|
||||
}
|
||||
if curesExempt {
|
||||
return nil
|
||||
}
|
||||
|
||||
select {
|
||||
case c.cures <- struct{}{}:
|
||||
return nil
|
||||
|
||||
case <-c.ctx.Done():
|
||||
if a.IsExclusive() {
|
||||
c.exclMu.Unlock()
|
||||
}
|
||||
return c.ctx.Err()
|
||||
}
|
||||
}
|
||||
|
||||
// exitCure must be called after exiting an [Artifact] implementation.
|
||||
func (c *Cache) exitCure(a Artifact, curesExempt bool) {
|
||||
if a.IsExclusive() {
|
||||
c.exclMu.Unlock()
|
||||
}
|
||||
if curesExempt {
|
||||
return
|
||||
}
|
||||
|
||||
<-c.cures
|
||||
}
|
||||
|
||||
// getWriter is like [bufio.NewWriter] but for bufioPool.
|
||||
func (c *Cache) getWriter(w io.Writer) *bufio.Writer {
|
||||
bw := c.bufioPool.Get().(*bufio.Writer)
|
||||
bw.Reset(w)
|
||||
return bw
|
||||
}
|
||||
|
||||
// measuredReader implements [io.ReadCloser] and measures the checksum during
|
||||
// Close. If the underlying reader is not read to EOF, Close blocks until all
|
||||
// remaining data is consumed and validated.
|
||||
type measuredReader struct {
|
||||
// Underlying reader. Never exposed directly.
|
||||
r io.ReadCloser
|
||||
// For validating checksum. Never exposed directly.
|
||||
h hash.Hash
|
||||
// Buffers writes to h, initialised by [Cache]. Never exposed directly.
|
||||
hbw *bufio.Writer
|
||||
// Expected checksum, compared during Close.
|
||||
want unique.Handle[Checksum]
|
||||
|
||||
// For accessing free lists.
|
||||
c *Cache
|
||||
|
||||
// Set up via [io.TeeReader] by [Cache].
|
||||
io.Reader
|
||||
}
|
||||
|
||||
// Close reads the underlying [io.ReadCloser] to EOF, closes it and measures its
|
||||
// outcome. It returns a [ChecksumMismatchError] for an unexpected checksum.
|
||||
func (mr *measuredReader) Close() (err error) {
|
||||
if mr.hbw == nil || mr.Reader == nil {
|
||||
return os.ErrInvalid
|
||||
}
|
||||
err = mr.hbw.Flush()
|
||||
mr.c.putWriter(mr.hbw)
|
||||
mr.hbw, mr.Reader = nil, nil
|
||||
if err != nil {
|
||||
_ = mr.r.Close()
|
||||
return
|
||||
}
|
||||
var n int64
|
||||
if n, err = io.Copy(mr.h, mr.r); err != nil {
|
||||
_ = mr.r.Close()
|
||||
return
|
||||
}
|
||||
|
||||
if n > 0 {
|
||||
mr.c.msg.Verbosef("missed %d bytes on measured reader", n)
|
||||
}
|
||||
|
||||
if err = mr.r.Close(); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
buf := mr.c.getIdentBuf()
|
||||
mr.h.Sum(buf[:0])
|
||||
|
||||
if got := Checksum(buf[:]); got != mr.want.Value() {
|
||||
err = &ChecksumMismatchError{
|
||||
Got: got,
|
||||
Want: mr.want.Value(),
|
||||
}
|
||||
}
|
||||
|
||||
mr.c.putIdentBuf(buf)
|
||||
return
|
||||
}
|
||||
|
||||
// newMeasuredReader implements [RContext.NewMeasuredReader].
|
||||
func (c *Cache) newMeasuredReader(
|
||||
r io.ReadCloser,
|
||||
checksum unique.Handle[Checksum],
|
||||
) io.ReadCloser {
|
||||
mr := measuredReader{r: r, h: sha512.New384(), want: checksum, c: c}
|
||||
mr.hbw = c.getWriter(mr.h)
|
||||
mr.Reader = io.TeeReader(r, mr.hbw)
|
||||
return &mr
|
||||
}
|
||||
|
||||
// NewMeasuredReader returns an [io.ReadCloser] implementing behaviour required
|
||||
// by [FileArtifact]. The resulting [io.ReadCloser] holds a buffer originating
|
||||
// from [Cache] and must be closed to return this buffer.
|
||||
func (r *RContext) NewMeasuredReader(
|
||||
rc io.ReadCloser,
|
||||
checksum unique.Handle[Checksum],
|
||||
) io.ReadCloser {
|
||||
return r.cache.newMeasuredReader(rc, checksum)
|
||||
}
|
||||
|
||||
// putWriter adds bw to bufioPool.
|
||||
func (c *Cache) putWriter(bw *bufio.Writer) { c.bufioPool.Put(bw) }
|
||||
|
||||
// cure implements Cure without checking the full dependency graph.
|
||||
func (c *Cache) cure(a Artifact) (
|
||||
func (c *Cache) cure(a Artifact, curesExempt bool) (
|
||||
pathname *check.Absolute,
|
||||
checksum unique.Handle[Checksum],
|
||||
err error,
|
||||
@@ -1283,8 +1402,8 @@ func (c *Cache) cure(a Artifact) (
|
||||
}()
|
||||
}
|
||||
|
||||
// cure File outside type switch to skip TContext initialisation
|
||||
if f, ok := a.(File); ok {
|
||||
// cure FileArtifact outside type switch to skip TContext initialisation
|
||||
if f, ok := a.(FileArtifact); ok {
|
||||
if checksumFi != nil {
|
||||
if !checksumFi.Mode().IsRegular() {
|
||||
// unreachable
|
||||
@@ -1293,63 +1412,96 @@ func (c *Cache) cure(a Artifact) (
|
||||
return
|
||||
}
|
||||
|
||||
var data []byte
|
||||
data, err = f.Cure(c.ctx)
|
||||
if err != nil {
|
||||
work := c.base.Append(dirWork, ids)
|
||||
var w *os.File
|
||||
if w, err = os.OpenFile(
|
||||
work.String(),
|
||||
os.O_CREATE|os.O_EXCL|os.O_WRONLY,
|
||||
0400,
|
||||
); err != nil {
|
||||
return
|
||||
}
|
||||
defer func() {
|
||||
closeErr := w.Close()
|
||||
if err == nil {
|
||||
err = closeErr
|
||||
}
|
||||
|
||||
if checksumPathname == nil {
|
||||
removeErr := os.Remove(work.String())
|
||||
if err == nil && !errors.Is(removeErr, os.ErrNotExist) {
|
||||
err = removeErr
|
||||
}
|
||||
}()
|
||||
|
||||
var r io.ReadCloser
|
||||
if err = c.enterCure(a, curesExempt); err != nil {
|
||||
return
|
||||
}
|
||||
r, err = f.Cure(&RContext{c})
|
||||
if err == nil {
|
||||
if checksumPathname == nil || c.IsStrict() {
|
||||
h := sha512.New384()
|
||||
h.Write(data)
|
||||
hbw := c.getWriter(h)
|
||||
_, err = io.Copy(w, io.TeeReader(r, hbw))
|
||||
flushErr := hbw.Flush()
|
||||
c.putWriter(hbw)
|
||||
if err == nil {
|
||||
err = flushErr
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
buf := c.getIdentBuf()
|
||||
h.Sum(buf[:0])
|
||||
|
||||
if checksumPathname == nil {
|
||||
checksum = unique.Make(Checksum(buf[:]))
|
||||
checksums = Encode(Checksum(buf[:]))
|
||||
c.putIdentBuf(buf)
|
||||
checksumPathname = c.base.Append(
|
||||
dirChecksum,
|
||||
checksums,
|
||||
)
|
||||
} else if c.IsStrict() {
|
||||
h := sha512.New384()
|
||||
h.Write(data)
|
||||
if got := Checksum(h.Sum(nil)); got != checksum.Value() {
|
||||
if got := Checksum(buf[:]); got != checksum.Value() {
|
||||
err = &ChecksumMismatchError{
|
||||
Got: got,
|
||||
Want: checksum.Value(),
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
c.putIdentBuf(buf)
|
||||
|
||||
if checksumPathname == nil {
|
||||
checksumPathname = c.base.Append(
|
||||
dirChecksum,
|
||||
checksums,
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
_, err = io.Copy(w, r)
|
||||
}
|
||||
|
||||
closeErr := r.Close()
|
||||
if err == nil {
|
||||
err = closeErr
|
||||
}
|
||||
}
|
||||
c.exitCure(a, curesExempt)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
c.checksumMu.Lock()
|
||||
var w *os.File
|
||||
w, err = os.OpenFile(
|
||||
if err = os.Rename(
|
||||
work.String(),
|
||||
checksumPathname.String(),
|
||||
os.O_CREATE|os.O_EXCL|os.O_WRONLY,
|
||||
0400,
|
||||
)
|
||||
if err != nil {
|
||||
); err != nil {
|
||||
c.checksumMu.Unlock()
|
||||
|
||||
if errors.Is(err, os.ErrExist) {
|
||||
err = nil
|
||||
}
|
||||
return
|
||||
}
|
||||
_, err = w.Write(data)
|
||||
closeErr := w.Close()
|
||||
timeErr := zeroTimes(checksumPathname.String())
|
||||
c.checksumMu.Unlock()
|
||||
|
||||
if err == nil {
|
||||
err = timeErr
|
||||
}
|
||||
if err == nil {
|
||||
err = closeErr
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1365,7 +1517,12 @@ func (c *Cache) cure(a Artifact) (
|
||||
switch ca := a.(type) {
|
||||
case TrivialArtifact:
|
||||
defer t.destroy(&err)
|
||||
if err = ca.Cure(&t); err != nil {
|
||||
if err = c.enterCure(a, curesExempt); err != nil {
|
||||
return
|
||||
}
|
||||
err = ca.Cure(&t)
|
||||
c.exitCure(a, curesExempt)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
break
|
||||
@@ -1381,14 +1538,7 @@ func (c *Cache) cure(a Artifact) (
|
||||
var errsMu sync.Mutex
|
||||
for i, d := range deps {
|
||||
pending := pendingArtifactDep{d, &res[i], &errs, &errsMu, &wg}
|
||||
select {
|
||||
case c.cureDep <- &pending:
|
||||
break
|
||||
|
||||
case <-c.ctx.Done():
|
||||
err = c.ctx.Err()
|
||||
return
|
||||
}
|
||||
go pending.cure(c)
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
@@ -1401,7 +1551,12 @@ func (c *Cache) cure(a Artifact) (
|
||||
}
|
||||
|
||||
defer f.destroy(&err)
|
||||
if err = ca.Cure(&f); err != nil {
|
||||
if err = c.enterCure(a, curesExempt); err != nil {
|
||||
return
|
||||
}
|
||||
err = ca.Cure(&f)
|
||||
c.exitCure(a, curesExempt)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
break
|
||||
@@ -1486,7 +1641,7 @@ func (pending *pendingArtifactDep) cure(c *Cache) {
|
||||
defer pending.Done()
|
||||
|
||||
var err error
|
||||
pending.resP.pathname, pending.resP.checksum, err = c.cure(pending.a)
|
||||
pending.resP.pathname, pending.resP.checksum, err = c.cure(pending.a, false)
|
||||
if err == nil {
|
||||
return
|
||||
}
|
||||
@@ -1501,6 +1656,7 @@ func (c *Cache) Close() {
|
||||
c.closeOnce.Do(func() {
|
||||
c.cancel()
|
||||
c.wg.Wait()
|
||||
close(c.cures)
|
||||
c.unlock()
|
||||
})
|
||||
}
|
||||
@@ -1536,6 +1692,10 @@ func open(
|
||||
base *check.Absolute,
|
||||
lock bool,
|
||||
) (*Cache, error) {
|
||||
if cures < 1 {
|
||||
cures = runtime.NumCPU()
|
||||
}
|
||||
|
||||
for _, name := range []string{
|
||||
dirIdentifier,
|
||||
dirChecksum,
|
||||
@@ -1548,6 +1708,8 @@ func open(
|
||||
}
|
||||
|
||||
c := Cache{
|
||||
cures: make(chan struct{}, cures),
|
||||
|
||||
msg: msg,
|
||||
base: base,
|
||||
|
||||
@@ -1556,9 +1718,8 @@ func open(
|
||||
identPending: make(map[unique.Handle[ID]]<-chan struct{}),
|
||||
}
|
||||
c.ctx, c.cancel = context.WithCancel(ctx)
|
||||
cureDep := make(chan *pendingArtifactDep, cures)
|
||||
c.cureDep = cureDep
|
||||
c.identPool.New = func() any { return new(extIdent) }
|
||||
c.bufioPool.New = func() any { return new(bufio.Writer) }
|
||||
|
||||
if lock || !testing.Testing() {
|
||||
if unlock, err := lockedfile.MutexAt(
|
||||
@@ -1572,23 +1733,5 @@ func open(
|
||||
c.unlock = func() {}
|
||||
}
|
||||
|
||||
if cures < 1 {
|
||||
cures = runtime.NumCPU()
|
||||
}
|
||||
for i := 0; i < cures; i++ {
|
||||
c.wg.Go(func() {
|
||||
for {
|
||||
select {
|
||||
case <-c.ctx.Done():
|
||||
return
|
||||
|
||||
case pending := <-cureDep:
|
||||
pending.cure(&c)
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return &c, nil
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"syscall"
|
||||
"testing"
|
||||
"unique"
|
||||
@@ -47,10 +48,10 @@ type overrideIdent struct {
|
||||
|
||||
func (a overrideIdent) ID() pkg.ID { return a.id }
|
||||
|
||||
// overrideIdentFile overrides the ID method of [File].
|
||||
// overrideIdentFile overrides the ID method of [FileArtifact].
|
||||
type overrideIdentFile struct {
|
||||
id pkg.ID
|
||||
pkg.File
|
||||
pkg.FileArtifact
|
||||
}
|
||||
|
||||
func (a overrideIdentFile) ID() pkg.ID { return a.id }
|
||||
@@ -61,10 +62,10 @@ type knownIdentArtifact interface {
|
||||
pkg.TrivialArtifact
|
||||
}
|
||||
|
||||
// A knownIdentFile implements [pkg.KnownIdent] and [File]
|
||||
// A knownIdentFile implements [pkg.KnownIdent] and [FileArtifact]
|
||||
type knownIdentFile interface {
|
||||
pkg.KnownIdent
|
||||
pkg.File
|
||||
pkg.FileArtifact
|
||||
}
|
||||
|
||||
// overrideChecksum overrides the Checksum method of [Artifact].
|
||||
@@ -75,7 +76,7 @@ type overrideChecksum struct {
|
||||
|
||||
func (a overrideChecksum) Checksum() pkg.Checksum { return a.checksum }
|
||||
|
||||
// overrideChecksumFile overrides the Checksum method of [File].
|
||||
// overrideChecksumFile overrides the Checksum method of [FileArtifact].
|
||||
type overrideChecksumFile struct {
|
||||
checksum pkg.Checksum
|
||||
knownIdentFile
|
||||
@@ -93,25 +94,28 @@ type stubArtifact struct {
|
||||
}
|
||||
|
||||
func (a *stubArtifact) Kind() pkg.Kind { return a.kind }
|
||||
func (a *stubArtifact) Params(ctx *pkg.IContext) { ctx.GetHash().Write(a.params) }
|
||||
func (a *stubArtifact) Params(ctx *pkg.IContext) { ctx.Write(a.params) }
|
||||
func (a *stubArtifact) Dependencies() []pkg.Artifact { return a.deps }
|
||||
func (a *stubArtifact) Cure(t *pkg.TContext) error { return a.cure(t) }
|
||||
func (*stubArtifact) IsExclusive() bool { return false }
|
||||
|
||||
// A stubArtifactF implements [FloodArtifact] with hardcoded behaviour.
|
||||
type stubArtifactF struct {
|
||||
kind pkg.Kind
|
||||
params []byte
|
||||
deps []pkg.Artifact
|
||||
excl bool
|
||||
|
||||
cure func(f *pkg.FContext) error
|
||||
}
|
||||
|
||||
func (a *stubArtifactF) Kind() pkg.Kind { return a.kind }
|
||||
func (a *stubArtifactF) Params(ctx *pkg.IContext) { ctx.GetHash().Write(a.params) }
|
||||
func (a *stubArtifactF) Params(ctx *pkg.IContext) { ctx.Write(a.params) }
|
||||
func (a *stubArtifactF) Dependencies() []pkg.Artifact { return a.deps }
|
||||
func (a *stubArtifactF) Cure(f *pkg.FContext) error { return a.cure(f) }
|
||||
func (a *stubArtifactF) IsExclusive() bool { return a.excl }
|
||||
|
||||
// A stubFile implements [File] with hardcoded behaviour.
|
||||
// A stubFile implements [FileArtifact] with hardcoded behaviour.
|
||||
type stubFile struct {
|
||||
data []byte
|
||||
err error
|
||||
@@ -119,7 +123,9 @@ type stubFile struct {
|
||||
stubArtifact
|
||||
}
|
||||
|
||||
func (a *stubFile) Cure(context.Context) ([]byte, error) { return a.data, a.err }
|
||||
func (a *stubFile) Cure(*pkg.RContext) (io.ReadCloser, error) {
|
||||
return io.NopCloser(bytes.NewReader(a.data)), a.err
|
||||
}
|
||||
|
||||
// newStubFile returns an implementation of [pkg.File] with hardcoded behaviour.
|
||||
func newStubFile(
|
||||
@@ -128,7 +134,7 @@ func newStubFile(
|
||||
sum *pkg.Checksum,
|
||||
data []byte,
|
||||
err error,
|
||||
) pkg.File {
|
||||
) pkg.FileArtifact {
|
||||
f := overrideIdentFile{id, &stubFile{data, err, stubArtifact{
|
||||
kind,
|
||||
nil,
|
||||
@@ -213,7 +219,7 @@ func TestIdent(t *testing.T) {
|
||||
},
|
||||
nil,
|
||||
}, unique.Make[pkg.ID](pkg.MustDecode(
|
||||
"HnySzeLQvSBZuTUcvfmLEX_OmH4yJWWH788NxuLuv7kVn8_uPM6Ks4rqFWM2NZJY",
|
||||
"WKErnjTOVbuH2P9a0gM4OcAAO4p-CoX2HQu7CbZrg8ZOzApvWoO3-ISzPw6av_rN",
|
||||
))},
|
||||
}
|
||||
|
||||
@@ -283,7 +289,7 @@ func checkWithCache(t *testing.T, testCases []cacheTestCase) {
|
||||
msg.SwapVerbose(testing.Verbose())
|
||||
|
||||
var scrubFunc func() error // scrub after hashing
|
||||
if c, err := pkg.Open(t.Context(), msg, 0, base); err != nil {
|
||||
if c, err := pkg.Open(t.Context(), msg, 1<<4, base); err != nil {
|
||||
t.Fatalf("Open: error = %v", err)
|
||||
} else {
|
||||
t.Cleanup(c.Close)
|
||||
@@ -526,7 +532,7 @@ func TestCache(t *testing.T) {
|
||||
kind: pkg.KindExec,
|
||||
params: []byte("artifact overridden to be incomplete"),
|
||||
}}, nil, pkg.Checksum{}, pkg.InvalidArtifactError(pkg.MustDecode(
|
||||
"da4kLKa94g1wN2M0qcKflqgf2-Y2UL36iehhczqsIIW8G0LGvM7S8jjtnBc0ftB0",
|
||||
"E__uZ1sLIvb84vzSm5Uezb03RogsiaeTt1nfIVv8TKnnf4LqwtSi-smdHhlkZrUJ",
|
||||
))},
|
||||
|
||||
{"error passthrough", newStubFile(
|
||||
@@ -561,6 +567,10 @@ func TestCache(t *testing.T) {
|
||||
stub.UniqueError
|
||||
}{UniqueError: 0xbad},
|
||||
)},
|
||||
|
||||
cure: func(f *pkg.FContext) error {
|
||||
panic("attempting to cure impossible artifact")
|
||||
},
|
||||
}, nil, pkg.Checksum{}, &pkg.DependencyCureError{
|
||||
{
|
||||
Ident: unique.Make(pkg.ID{0xff, 3}),
|
||||
@@ -944,6 +954,17 @@ func TestErrors(t *testing.T) {
|
||||
{"NoOutputError", pkg.NoOutputError{
|
||||
// empty struct
|
||||
}, "artifact cured successfully but did not produce any output"},
|
||||
|
||||
{"IRKindError", &pkg.IRKindError{
|
||||
Got: pkg.IRKindEnd,
|
||||
Want: pkg.IRKindIdent,
|
||||
Ancillary: 0xcafebabe,
|
||||
}, "got terminator IR value (0xcafebabe) instead of ident"},
|
||||
{"IRKindError invalid", &pkg.IRKindError{
|
||||
Got: 0xbeef,
|
||||
Want: pkg.IRKindIdent,
|
||||
Ancillary: 0xcafe,
|
||||
}, "got invalid kind 48879 IR value (0xcafe) instead of ident"},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
@@ -1128,6 +1149,40 @@ func TestDependencyCureError(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// earlyFailureF is a [FloodArtifact] with a large dependency graph resulting in
|
||||
// a large [DependencyCureError].
|
||||
type earlyFailureF int
|
||||
|
||||
func (earlyFailureF) Kind() pkg.Kind { return pkg.KindExec }
|
||||
func (earlyFailureF) Params(*pkg.IContext) {}
|
||||
func (earlyFailureF) IsExclusive() bool { return false }
|
||||
|
||||
func (a earlyFailureF) Dependencies() []pkg.Artifact {
|
||||
deps := make([]pkg.Artifact, a)
|
||||
for i := range deps {
|
||||
deps[i] = a - 1
|
||||
}
|
||||
return deps
|
||||
}
|
||||
|
||||
func (a earlyFailureF) Cure(*pkg.FContext) error {
|
||||
if a != 0 {
|
||||
panic("unexpected cure on " + strconv.Itoa(int(a)))
|
||||
}
|
||||
return stub.UniqueError(0xcafe)
|
||||
}
|
||||
|
||||
func TestDependencyCureErrorEarly(t *testing.T) {
|
||||
checkWithCache(t, []cacheTestCase{
|
||||
{"early", nil, func(t *testing.T, _ *check.Absolute, c *pkg.Cache) {
|
||||
_, _, err := c.Cure(earlyFailureF(8))
|
||||
if !errors.Is(err, stub.UniqueError(0xcafe)) {
|
||||
t.Fatalf("Cure: error = %v", err)
|
||||
}
|
||||
}, pkg.MustDecode("E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C")},
|
||||
})
|
||||
}
|
||||
|
||||
func TestNew(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"archive/tar"
|
||||
"compress/bzip2"
|
||||
"compress/gzip"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
@@ -24,12 +23,12 @@ const (
|
||||
TarBzip2
|
||||
)
|
||||
|
||||
// A tarArtifact is an [Artifact] unpacking a tarball backed by a [File].
|
||||
// A tarArtifact is an [Artifact] unpacking a tarball backed by a [FileArtifact].
|
||||
type tarArtifact struct {
|
||||
// Caller-supplied backing tarball.
|
||||
f Artifact
|
||||
// Compression on top of the tarball.
|
||||
compression uint64
|
||||
compression uint32
|
||||
}
|
||||
|
||||
// tarArtifactNamed embeds tarArtifact for a [fmt.Stringer] tarball.
|
||||
@@ -47,7 +46,7 @@ func (a *tarArtifactNamed) String() string { return a.name + "-unpack" }
|
||||
// NewTar returns a new [Artifact] backed by the supplied [Artifact] and
|
||||
// compression method. The source [Artifact] must be compatible with
|
||||
// [TContext.Open].
|
||||
func NewTar(a Artifact, compression uint64) Artifact {
|
||||
func NewTar(a Artifact, compression uint32) Artifact {
|
||||
ta := tarArtifact{a, compression}
|
||||
if s, ok := a.(fmt.Stringer); ok {
|
||||
if name := s.String(); name != "" {
|
||||
@@ -62,7 +61,7 @@ func NewHTTPGetTar(
|
||||
hc *http.Client,
|
||||
url string,
|
||||
checksum Checksum,
|
||||
compression uint64,
|
||||
compression uint32,
|
||||
) Artifact {
|
||||
return NewTar(NewHTTPGet(hc, url, checksum), compression)
|
||||
}
|
||||
@@ -71,8 +70,16 @@ func NewHTTPGetTar(
|
||||
func (a *tarArtifact) Kind() Kind { return KindTar }
|
||||
|
||||
// Params writes compression encoded in little endian.
|
||||
func (a *tarArtifact) Params(ctx *IContext) {
|
||||
ctx.GetHash().Write(binary.LittleEndian.AppendUint64(nil, a.compression))
|
||||
func (a *tarArtifact) Params(ctx *IContext) { ctx.WriteUint32(a.compression) }
|
||||
|
||||
func init() {
|
||||
register(KindTar, func(r *IRReader) Artifact {
|
||||
a := NewTar(r.Next(), r.ReadUint32())
|
||||
if _, ok := r.Finalise(); ok {
|
||||
panic(ErrUnexpectedChecksum)
|
||||
}
|
||||
return a
|
||||
})
|
||||
}
|
||||
|
||||
// Dependencies returns a slice containing the backing file.
|
||||
@@ -80,6 +87,9 @@ func (a *tarArtifact) Dependencies() []Artifact {
|
||||
return []Artifact{a.f}
|
||||
}
|
||||
|
||||
// IsExclusive returns false: decompressor and tar reader are fully sequential.
|
||||
func (a *tarArtifact) IsExclusive() bool { return false }
|
||||
|
||||
// A DisallowedTypeflagError describes a disallowed typeflag encountered while
|
||||
// unpacking a tarball.
|
||||
type DisallowedTypeflagError byte
|
||||
@@ -97,12 +107,11 @@ func (a *tarArtifact) Cure(t *TContext) (err error) {
|
||||
}
|
||||
|
||||
defer func(f io.ReadCloser) {
|
||||
closeErr := tr.Close()
|
||||
if err == nil {
|
||||
err = closeErr
|
||||
err = tr.Close()
|
||||
}
|
||||
|
||||
closeErr = f.Close()
|
||||
closeErr := f.Close()
|
||||
if err == nil {
|
||||
err = closeErr
|
||||
}
|
||||
@@ -175,7 +184,10 @@ func (a *tarArtifact) Cure(t *TContext) (err error) {
|
||||
break
|
||||
|
||||
case tar.TypeLink:
|
||||
if err = os.Link(header.Linkname, pathname.String()); err != nil {
|
||||
if err = os.Link(
|
||||
temp.Append(header.Linkname).String(),
|
||||
pathname.String(),
|
||||
); err != nil {
|
||||
return
|
||||
}
|
||||
break
|
||||
|
||||
@@ -40,7 +40,7 @@ func TestTar(t *testing.T) {
|
||||
}, pkg.MustDecode(
|
||||
"cTw0h3AmYe7XudSoyEMByduYXqGi-N5ZkTZ0t9K5elsu3i_jNIVF5T08KR1roBFM",
|
||||
))
|
||||
}, pkg.MustDecode("sxbgyX-bPoezbha214n2lbQhiVfTUBkhZ0EX6zI7mmkMdrCdwuMwhMBJphLQsy94")},
|
||||
}, pkg.MustDecode("NQTlc466JmSVLIyWklm_u8_g95jEEb98PxJU-kjwxLpfdjwMWJq0G8ze9R4Vo1Vu")},
|
||||
|
||||
{"http expand", nil, func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
|
||||
checkTarHTTP(t, base, c, fstest.MapFS{
|
||||
@@ -51,7 +51,7 @@ func TestTar(t *testing.T) {
|
||||
}, pkg.MustDecode(
|
||||
"CH3AiUrCCcVOjOYLaMKKK1Da78989JtfHeIsxMzWOQFiN4mrCLDYpoDxLWqJWCUN",
|
||||
))
|
||||
}, pkg.MustDecode("4I8wx_h7NSJTlG5lbuz-GGEXrOg0GYC3M_503LYEBhv5XGWXfNIdIY9Q3eVSYldX")},
|
||||
}, pkg.MustDecode("hSoSSgCYTNonX3Q8FjvjD1fBl-E-BQyA6OTXro2OadXqbST4tZ-akGXszdeqphRe")},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -98,14 +98,37 @@ func checkTarHTTP(
|
||||
|
||||
wantIdent := func() pkg.ID {
|
||||
h := sha512.New384()
|
||||
h.Write([]byte{byte(pkg.KindTar), 0, 0, 0, 0, 0, 0, 0})
|
||||
h.Write([]byte{pkg.TarGzip, 0, 0, 0, 0, 0, 0, 0})
|
||||
h.Write([]byte{byte(pkg.KindHTTPGet), 0, 0, 0, 0, 0, 0, 0})
|
||||
|
||||
// kind uint64
|
||||
h.Write([]byte{byte(pkg.KindTar), 0, 0, 0, 0, 0, 0, 0})
|
||||
// deps_sz uint64
|
||||
h.Write([]byte{1, 0, 0, 0, 0, 0, 0, 0})
|
||||
|
||||
// kind uint64
|
||||
h.Write([]byte{byte(pkg.KindHTTPGet), 0, 0, 0, 0, 0, 0, 0})
|
||||
// ident ID
|
||||
h0 := sha512.New384()
|
||||
// kind uint64
|
||||
h0.Write([]byte{byte(pkg.KindHTTPGet), 0, 0, 0, 0, 0, 0, 0})
|
||||
// deps_sz uint64
|
||||
h0.Write([]byte{0, 0, 0, 0, 0, 0, 0, 0})
|
||||
// url string
|
||||
h0.Write([]byte{byte(pkg.IRKindString), 0, 0, 0})
|
||||
h0.Write([]byte{0x10, 0, 0, 0})
|
||||
h0.Write([]byte("file:///testdata"))
|
||||
// end(KnownChecksum)
|
||||
h0.Write([]byte{byte(pkg.IRKindEnd), 0, 0, 0})
|
||||
h0.Write([]byte{byte(pkg.IREndKnownChecksum), 0, 0, 0})
|
||||
// checksum Checksum
|
||||
h0.Write(testdataChecksum[:])
|
||||
h.Write(h0.Sum(nil))
|
||||
// compression uint32
|
||||
h.Write([]byte{byte(pkg.IRKindUint32), 0, 0, 0})
|
||||
h.Write([]byte{pkg.TarGzip, 0, 0, 0})
|
||||
// end
|
||||
h.Write([]byte{byte(pkg.IRKindEnd), 0, 0, 0})
|
||||
h.Write([]byte{0, 0, 0, 0})
|
||||
|
||||
return pkg.ID(h.Sum(nil))
|
||||
}()
|
||||
|
||||
|
||||
12
internal/pkg/testdata/main.go
vendored
12
internal/pkg/testdata/main.go
vendored
@@ -142,12 +142,12 @@ func main() {
|
||||
}
|
||||
|
||||
const checksumEmptyDir = "MGWmEfjut2QE2xPJwTsmUzpff4BN_FEnQ7T0j7gvUCCiugJQNwqt9m151fm9D1yU"
|
||||
ident := "U2cbgVgEtjfRuvHfE1cQnZ3t8yoexULQyo_VLgvxAVJSsobMcNaFIsuDWtmt7kzK"
|
||||
ident := "dztPS6jRjiZtCF4_p8AzfnxGp6obkhrgFVsxdodbKWUoAEVtDz3MykepJB4kI_ks"
|
||||
log.Println(m)
|
||||
next := func() { m = m.Next; log.Println(m) }
|
||||
|
||||
if overlayRoot {
|
||||
ident = "5ey2wpmMpj483YYa7ZZQciYLA2cx3_l167JCqWW4Pd-5DVp81dj9EsBtVTwYptF6"
|
||||
ident = "RdMA-mubnrHuu3Ky1wWyxauSYCO0ZH_zCPUj3uDHqkfwv5sGcByoF_g5PjlGiClb"
|
||||
|
||||
if m.Root != "/" || m.Target != "/" ||
|
||||
m.Source != "overlay" || m.FsType != "overlay" {
|
||||
@@ -165,7 +165,7 @@ func main() {
|
||||
log.Fatal("unexpected artifact checksum")
|
||||
}
|
||||
} else {
|
||||
ident = "tfjrsVuBuFgzWgwz-yPppFtylYuC1VFWnKhyBiHbWTGkyz8lt7Ee9QXWaIHPXs4x"
|
||||
ident = "p1t_drXr34i-jZNuxDMLaMOdL6tZvQqhavNafGynGqxOZoXAUTSn7kqNh3Ovv3DT"
|
||||
|
||||
lowerdirsEscaped := strings.Split(lowerdir, ":")
|
||||
lowerdirs := lowerdirsEscaped[:0]
|
||||
@@ -194,7 +194,7 @@ func main() {
|
||||
}
|
||||
} else {
|
||||
if hostNet {
|
||||
ident = "QdsJhGgnk5N2xdUNGcndXQxFKifxf1V_2t9X8CQ-pDcg24x6mGJC_BiLfGbs6Qml"
|
||||
ident = "G8qPxD9puvvoOVV7lrT80eyDeIl3G_CCFoKw12c8mCjMdG1zF7NEPkwYpNubClK3"
|
||||
}
|
||||
|
||||
if m.Root != "/sysroot" || m.Target != "/" {
|
||||
@@ -213,14 +213,14 @@ func main() {
|
||||
}
|
||||
|
||||
if promote {
|
||||
ident = "O-6VjlIUxc4PYLf5v35uhIeL8kkYCbHYklqlmDjFPXe0m4j6GkUDg5qwTzBRESnf"
|
||||
ident = "xXTIYcXmgJWNLC91c417RRrNM9cjELwEZHpGvf8Fk_GNP5agRJp_SicD0w9aMeLJ"
|
||||
}
|
||||
|
||||
next() // testtool artifact
|
||||
|
||||
next()
|
||||
if overlayWork {
|
||||
ident = "acaDzHZv40dZaz4cGAXayqbRMgbEOuiuiUijZL8IgDQvyeCNMFE3onBMYfny-kXA"
|
||||
ident = "5hlaukCirnXE4W_RSLJFOZN47Z5RiHnacXzdFp_70cLgiJUGR6cSb_HaFftkzi0-"
|
||||
if m.Root != "/" || m.Target != "/work" ||
|
||||
m.Source != "overlay" || m.FsType != "overlay" {
|
||||
log.Fatal("unexpected work mount entry")
|
||||
|
||||
90
internal/rosa/acl.go
Normal file
90
internal/rosa/acl.go
Normal file
@@ -0,0 +1,90 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newAttr() pkg.Artifact {
|
||||
const (
|
||||
version = "2.5.2"
|
||||
checksum = "YWEphrz6vg1sUMmHHVr1CRo53pFXRhq_pjN-AlG8UgwZK1y6m7zuDhxqJhD0SV0l"
|
||||
)
|
||||
return t.NewViaMake("attr", version, t.NewPatchedSource(
|
||||
"attr", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://download.savannah.nongnu.org/releases/attr/"+
|
||||
"attr-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), true, [2]string{"libgen-basename", `From 8a80d895dfd779373363c3a4b62ecce5a549efb2 Mon Sep 17 00:00:00 2001
|
||||
From: "Haelwenn (lanodan) Monnier" <contact@hacktivis.me>
|
||||
Date: Sat, 30 Mar 2024 10:17:10 +0100
|
||||
Subject: tools/attr.c: Add missing libgen.h include for basename(3)
|
||||
|
||||
Fixes compilation issue with musl and modern C99 compilers.
|
||||
|
||||
See: https://bugs.gentoo.org/926294
|
||||
---
|
||||
tools/attr.c | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
diff --git a/tools/attr.c b/tools/attr.c
|
||||
index f12e4af..6a3c1e9 100644
|
||||
--- a/tools/attr.c
|
||||
+++ b/tools/attr.c
|
||||
@@ -28,6 +28,7 @@
|
||||
#include <errno.h>
|
||||
#include <string.h>
|
||||
#include <locale.h>
|
||||
+#include <libgen.h>
|
||||
|
||||
#include <attr/attributes.h>
|
||||
|
||||
--
|
||||
cgit v1.1`}, [2]string{"musl-errno", `diff --git a/test/attr.test b/test/attr.test
|
||||
index 6ce2f9b..e9bde92 100644
|
||||
--- a/test/attr.test
|
||||
+++ b/test/attr.test
|
||||
@@ -11,7 +11,7 @@ Try various valid and invalid names
|
||||
|
||||
$ touch f
|
||||
$ setfattr -n user -v value f
|
||||
- > setfattr: f: Operation not supported
|
||||
+ > setfattr: f: Not supported
|
||||
|
||||
$ setfattr -n user. -v value f
|
||||
> setfattr: f: Invalid argument
|
||||
`},
|
||||
), &MakeAttr{
|
||||
ScriptEarly: `
|
||||
ln -s ../../system/bin/perl /usr/bin
|
||||
`,
|
||||
Configure: [][2]string{
|
||||
{"enable-static"},
|
||||
},
|
||||
},
|
||||
t.Load(Perl),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Attr] = Toolchain.newAttr }
|
||||
|
||||
func (t Toolchain) newACL() pkg.Artifact {
|
||||
const (
|
||||
version = "2.3.2"
|
||||
checksum = "-fY5nwH4K8ZHBCRXrzLdguPkqjKI6WIiGu4dBtrZ1o0t6AIU73w8wwJz_UyjIS0P"
|
||||
)
|
||||
return t.NewViaMake("acl", version, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://download.savannah.nongnu.org/releases/acl/"+
|
||||
"acl-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Configure: [][2]string{
|
||||
{"enable-static"},
|
||||
},
|
||||
|
||||
// makes assumptions about uid_map/gid_map
|
||||
SkipCheck: true,
|
||||
},
|
||||
t.Load(Attr),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[ACL] = Toolchain.newACL }
|
||||
175
internal/rosa/all.go
Normal file
175
internal/rosa/all.go
Normal file
@@ -0,0 +1,175 @@
|
||||
package rosa
|
||||
|
||||
import (
|
||||
"sync"
|
||||
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
// PArtifact is a lazily-initialised [pkg.Artifact] preset.
|
||||
type PArtifact int
|
||||
|
||||
const (
|
||||
ACL PArtifact = iota
|
||||
Attr
|
||||
Autoconf
|
||||
Automake
|
||||
Bash
|
||||
Binutils
|
||||
CMake
|
||||
Coreutils
|
||||
Curl
|
||||
Diffutils
|
||||
Findutils
|
||||
Fuse
|
||||
Gawk
|
||||
GMP
|
||||
Gettext
|
||||
Git
|
||||
Go
|
||||
Gperf
|
||||
Grep
|
||||
Gzip
|
||||
Hakurei
|
||||
HakureiDist
|
||||
IniConfig
|
||||
KernelHeaders
|
||||
LibXau
|
||||
Libexpat
|
||||
Libpsl
|
||||
Libffi
|
||||
Libgd
|
||||
Libtool
|
||||
Libseccomp
|
||||
Libucontext
|
||||
Libxml2
|
||||
M4
|
||||
MPC
|
||||
MPFR
|
||||
Make
|
||||
Meson
|
||||
Mksh
|
||||
NSS
|
||||
NSSCACert
|
||||
Ninja
|
||||
OpenSSL
|
||||
Packaging
|
||||
Patch
|
||||
Perl
|
||||
PkgConfig
|
||||
Pluggy
|
||||
PyTest
|
||||
Pygments
|
||||
Python
|
||||
Rsync
|
||||
Sed
|
||||
Setuptools
|
||||
Toybox
|
||||
toyboxEarly
|
||||
Unzip
|
||||
utilMacros
|
||||
Wayland
|
||||
WaylandProtocols
|
||||
XCB
|
||||
XCBProto
|
||||
Xproto
|
||||
XZ
|
||||
Zlib
|
||||
|
||||
buildcatrust
|
||||
|
||||
// gcc is a hacked-to-pieces GCC toolchain meant for use in intermediate
|
||||
// stages only. This preset and its direct output must never be exposed.
|
||||
gcc
|
||||
|
||||
// _presetEnd is the total number of presets and does not denote a preset.
|
||||
_presetEnd
|
||||
)
|
||||
|
||||
var (
|
||||
// artifactsF is an array of functions for the result of [PArtifact].
|
||||
artifactsF [_presetEnd]func(t Toolchain) pkg.Artifact
|
||||
|
||||
// artifacts stores the result of artifactsF.
|
||||
artifacts [_toolchainEnd][len(artifactsF)]pkg.Artifact
|
||||
// artifactsOnce is for lazy initialisation of artifacts.
|
||||
artifactsOnce [_toolchainEnd][len(artifactsF)]sync.Once
|
||||
)
|
||||
|
||||
// Load returns the resulting [pkg.Artifact] of [PArtifact].
|
||||
func (t Toolchain) Load(p PArtifact) pkg.Artifact {
|
||||
artifactsOnce[t][p].Do(func() {
|
||||
artifacts[t][p] = artifactsF[p](t)
|
||||
})
|
||||
return artifacts[t][p]
|
||||
}
|
||||
|
||||
// ResolveName returns a [PArtifact] by name.
|
||||
func ResolveName(name string) (p PArtifact, ok bool) {
|
||||
p, ok = map[string]PArtifact{
|
||||
"acl": ACL,
|
||||
"attr": Attr,
|
||||
"autoconf": Autoconf,
|
||||
"automake": Automake,
|
||||
"bash": Bash,
|
||||
"binutils": Binutils,
|
||||
"cmake": CMake,
|
||||
"coreutils": Coreutils,
|
||||
"curl": Curl,
|
||||
"diffutils": Diffutils,
|
||||
"findutils": Findutils,
|
||||
"fuse": Fuse,
|
||||
"gawk": Gawk,
|
||||
"gmp": GMP,
|
||||
"gettext": Gettext,
|
||||
"git": Git,
|
||||
"go": Go,
|
||||
"gperf": Gperf,
|
||||
"grep": Grep,
|
||||
"gzip": Gzip,
|
||||
"hakurei": Hakurei,
|
||||
"hakurei-dist": HakureiDist,
|
||||
"iniconfig": IniConfig,
|
||||
"kernel-headers": KernelHeaders,
|
||||
"libXau": LibXau,
|
||||
"libexpat": Libexpat,
|
||||
"libpsl": Libpsl,
|
||||
"libseccomp": Libseccomp,
|
||||
"libucontext": Libucontext,
|
||||
"libxml2": Libxml2,
|
||||
"libffi": Libffi,
|
||||
"libgd": Libgd,
|
||||
"libtool": Libtool,
|
||||
"m4": M4,
|
||||
"mpc": MPC,
|
||||
"mpfr": MPFR,
|
||||
"make": Make,
|
||||
"meson": Meson,
|
||||
"mksh": Mksh,
|
||||
"nss": NSS,
|
||||
"nss-cacert": NSSCACert,
|
||||
"ninja": Ninja,
|
||||
"openssl": OpenSSL,
|
||||
"packaging": Packaging,
|
||||
"patch": Patch,
|
||||
"perl": Perl,
|
||||
"pkg-config": PkgConfig,
|
||||
"pluggy": Pluggy,
|
||||
"pytest": PyTest,
|
||||
"pygments": Pygments,
|
||||
"python": Python,
|
||||
"rsync": Rsync,
|
||||
"sed": Sed,
|
||||
"setuptools": Setuptools,
|
||||
"toybox": Toybox,
|
||||
"unzip": Unzip,
|
||||
"wayland": Wayland,
|
||||
"wayland-protocols": WaylandProtocols,
|
||||
"xcb": XCB,
|
||||
"xcb-proto": XCBProto,
|
||||
"xproto": Xproto,
|
||||
"xz": XZ,
|
||||
"zlib": Zlib,
|
||||
}[name]
|
||||
return
|
||||
}
|
||||
@@ -5,8 +5,7 @@ import (
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
"runtime"
|
||||
"time"
|
||||
|
||||
"hakurei.app/container/fhs"
|
||||
@@ -16,7 +15,7 @@ import (
|
||||
// busyboxBin is a busybox binary distribution installed under bin/busybox.
|
||||
type busyboxBin struct {
|
||||
// Underlying busybox binary.
|
||||
bin pkg.File
|
||||
bin pkg.FileArtifact
|
||||
}
|
||||
|
||||
// Kind returns the hardcoded [pkg.Kind] value.
|
||||
@@ -25,11 +24,24 @@ func (a busyboxBin) Kind() pkg.Kind { return kindBusyboxBin }
|
||||
// Params is a noop.
|
||||
func (a busyboxBin) Params(*pkg.IContext) {}
|
||||
|
||||
// IsExclusive returns false: Cure performs a trivial filesystem write.
|
||||
func (busyboxBin) IsExclusive() bool { return false }
|
||||
|
||||
// Dependencies returns the underlying busybox [pkg.File].
|
||||
func (a busyboxBin) Dependencies() []pkg.Artifact {
|
||||
return []pkg.Artifact{a.bin}
|
||||
}
|
||||
|
||||
func init() {
|
||||
pkg.Register(kindBusyboxBin, func(r *pkg.IRReader) pkg.Artifact {
|
||||
a := busyboxBin{r.Next().(pkg.FileArtifact)}
|
||||
if _, ok := r.Finalise(); ok {
|
||||
panic(pkg.ErrUnexpectedChecksum)
|
||||
}
|
||||
return a
|
||||
})
|
||||
}
|
||||
|
||||
// String returns the reporting name of the underlying file prefixed with expand.
|
||||
func (a busyboxBin) String() string {
|
||||
return "expand-" + a.bin.(fmt.Stringer).String()
|
||||
@@ -75,12 +87,26 @@ func (a busyboxBin) Cure(t *pkg.TContext) (err error) {
|
||||
// newBusyboxBin returns a [pkg.Artifact] containing a busybox installation from
|
||||
// the https://busybox.net/downloads/binaries/ binary release.
|
||||
func newBusyboxBin() pkg.Artifact {
|
||||
const (
|
||||
var version, url, checksum string
|
||||
switch runtime.GOARCH {
|
||||
case "amd64":
|
||||
version = "1.35.0"
|
||||
url = "https://busybox.net/downloads/binaries/" +
|
||||
version + "-" + linuxArch() + "-linux-musl/busybox"
|
||||
checksum = "L7OBIsPu9enNHn7FqpBT1kOg_mCLNmetSeNMA3i4Y60Z5jTgnlX3qX3zcQtLx5AB"
|
||||
)
|
||||
case "arm64":
|
||||
version = "1.31.0"
|
||||
url = "https://busybox.net/downloads/binaries/" +
|
||||
version + "-defconfig-multiarch-musl/busybox-armv8l"
|
||||
checksum = "npJjBO7iwhjW6Kx2aXeSxf8kXhVgTCDChOZTTsI8ZfFfa3tbsklxRiidZQdrVERg"
|
||||
|
||||
default:
|
||||
panic("unsupported target " + runtime.GOARCH)
|
||||
}
|
||||
|
||||
return pkg.NewExec(
|
||||
"busybox-bin-"+version, nil, pkg.ExecTimeoutMax, fhs.AbsRoot, []string{
|
||||
"busybox-bin-"+version, nil, pkg.ExecTimeoutMax, false,
|
||||
fhs.AbsRoot, []string{
|
||||
"PATH=/system/bin",
|
||||
},
|
||||
AbsSystem.Append("bin", "busybox"),
|
||||
@@ -92,269 +118,8 @@ func newBusyboxBin() pkg.Artifact {
|
||||
&http.Client{Transport: &http.Transport{
|
||||
// busybox website is really slow to respond
|
||||
TLSHandshakeTimeout: 2 * time.Minute,
|
||||
}},
|
||||
"https://busybox.net/downloads/binaries/"+
|
||||
version+"-"+linuxArch()+"-linux-musl/busybox",
|
||||
}}, url,
|
||||
mustDecode(checksum),
|
||||
)}),
|
||||
)
|
||||
}
|
||||
|
||||
// NewBusybox returns a [pkg.Artifact] containing a dynamically linked busybox
|
||||
// installation usable within the [Toolchain] it is compiled against.
|
||||
func (t Toolchain) NewBusybox() pkg.Artifact {
|
||||
const (
|
||||
version = "1.37.0"
|
||||
checksum = "Ial94Tnt7esJ_YEeb0AxunVL6MGYFyOw7Rtu2o87CXCi1TLrc6rlznVsN1rZk7it"
|
||||
)
|
||||
|
||||
extra := []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
t.NewKernelHeaders(),
|
||||
}
|
||||
var env []string
|
||||
|
||||
if t == toolchainStage3 {
|
||||
extra = nil
|
||||
env = append(env, "EXTRA_LDFLAGS=-static")
|
||||
}
|
||||
|
||||
return t.New("busybox-"+version, extra, nil, slices.Concat([]string{
|
||||
"ROSA_BUSYBOX_ENABLE=" + strings.Join([]string{
|
||||
"STATIC",
|
||||
"PIE",
|
||||
}, " "),
|
||||
"ROSA_BUSYBOX_DISABLE=" + strings.Join([]string{
|
||||
"FEATURE_IPV6",
|
||||
"FEATURE_PREFER_IPV4_ADDRESS",
|
||||
"FEATURE_HWIB",
|
||||
"ARP",
|
||||
"ARPING",
|
||||
"BRCTL",
|
||||
"FEATURE_BRCTL_FANCY",
|
||||
"FEATURE_BRCTL_SHOW",
|
||||
"DNSD",
|
||||
"ETHER_WAKE",
|
||||
"FTPD",
|
||||
"FEATURE_FTPD_WRITE",
|
||||
"FEATURE_FTPD_ACCEPT_BROKEN_LIST",
|
||||
"FEATURE_FTPD_AUTHENTICATION",
|
||||
"FTPGET",
|
||||
"FTPPUT",
|
||||
"FEATURE_FTPGETPUT_LONG_OPTIONS",
|
||||
"HOSTNAME",
|
||||
"DNSDOMAINNAME",
|
||||
"HTTPD",
|
||||
"FEATURE_HTTPD_PORT_DEFAULT",
|
||||
"FEATURE_HTTPD_RANGES",
|
||||
"FEATURE_HTTPD_SETUID",
|
||||
"FEATURE_HTTPD_BASIC_AUTH",
|
||||
"FEATURE_HTTPD_AUTH_MD5",
|
||||
"FEATURE_HTTPD_CGI",
|
||||
"FEATURE_HTTPD_CONFIG_WITH_SCRIPT_INTERPR",
|
||||
"FEATURE_HTTPD_SET_REMOTE_PORT_TO_ENV",
|
||||
"FEATURE_HTTPD_ENCODE_URL_STR",
|
||||
"FEATURE_HTTPD_ERROR_PAGES",
|
||||
"FEATURE_HTTPD_PROXY",
|
||||
"FEATURE_HTTPD_GZIP",
|
||||
"FEATURE_HTTPD_ETAG",
|
||||
"FEATURE_HTTPD_LAST_MODIFIED",
|
||||
"FEATURE_HTTPD_DATE",
|
||||
"FEATURE_HTTPD_ACL_IP",
|
||||
"IFCONFIG",
|
||||
"FEATURE_IFCONFIG_STATUS",
|
||||
"FEATURE_IFCONFIG_SLIP",
|
||||
"FEATURE_IFCONFIG_MEMSTART_IOADDR_IRQ",
|
||||
"FEATURE_IFCONFIG_HW",
|
||||
"FEATURE_IFCONFIG_BROADCAST_PLUS",
|
||||
"IFENSLAVE",
|
||||
"IFPLUGD",
|
||||
"IFUP",
|
||||
"IFDOWN",
|
||||
"IFUPDOWN_IFSTATE_PATH",
|
||||
"FEATURE_IFUPDOWN_IP",
|
||||
"FEATURE_IFUPDOWN_IPV4",
|
||||
"FEATURE_IFUPDOWN_IPV6",
|
||||
"FEATURE_IFUPDOWN_MAPPING",
|
||||
"INETD",
|
||||
"FEATURE_INETD_SUPPORT_BUILTIN_ECHO",
|
||||
"FEATURE_INETD_SUPPORT_BUILTIN_DISCARD",
|
||||
"FEATURE_INETD_SUPPORT_BUILTIN_TIME",
|
||||
"FEATURE_INETD_SUPPORT_BUILTIN_DAYTIME",
|
||||
"FEATURE_INETD_SUPPORT_BUILTIN_CHARGEN",
|
||||
"IP",
|
||||
"IPADDR",
|
||||
"IPLINK",
|
||||
"IPROUTE",
|
||||
"IPTUNNEL",
|
||||
"IPRULE",
|
||||
"IPNEIGH",
|
||||
"FEATURE_IP_ADDRESS",
|
||||
"FEATURE_IP_LINK",
|
||||
"FEATURE_IP_LINK_CAN",
|
||||
"FEATURE_IP_ROUTE",
|
||||
"FEATURE_IP_ROUTE_DIR",
|
||||
"FEATURE_IP_TUNNEL",
|
||||
"FEATURE_IP_RULE",
|
||||
"FEATURE_IP_NEIGH",
|
||||
"IPCALC",
|
||||
"FEATURE_IPCALC_LONG_OPTIONS",
|
||||
"FEATURE_IPCALC_FANCY",
|
||||
"FAKEIDENTD",
|
||||
"NAMEIF",
|
||||
"FEATURE_NAMEIF_EXTENDED",
|
||||
"NBDCLIENT",
|
||||
"NC",
|
||||
"NC_SERVER",
|
||||
"NC_EXTRA",
|
||||
"NC_110_COMPAT",
|
||||
"NETSTAT",
|
||||
"FEATURE_NETSTAT_WIDE",
|
||||
"FEATURE_NETSTAT_PRG",
|
||||
"NSLOOKUP",
|
||||
"FEATURE_NSLOOKUP_BIG",
|
||||
"FEATURE_NSLOOKUP_LONG_OPTIONS",
|
||||
"NTPD",
|
||||
"FEATURE_NTPD_SERVER",
|
||||
"FEATURE_NTPD_CONF",
|
||||
"FEATURE_NTP_AUTH",
|
||||
"PING",
|
||||
"PING6",
|
||||
"FEATURE_FANCY_PING",
|
||||
"PSCAN",
|
||||
"ROUTE",
|
||||
"SLATTACH",
|
||||
"SSL_CLIENT",
|
||||
"TC",
|
||||
"FEATURE_TC_INGRESS",
|
||||
"TCPSVD",
|
||||
"UDPSVD",
|
||||
"TELNET",
|
||||
"FEATURE_TELNET_TTYPE",
|
||||
"FEATURE_TELNET_AUTOLOGIN",
|
||||
"FEATURE_TELNET_WIDTH",
|
||||
"TELNETD",
|
||||
"FEATURE_TELNETD_STANDALONE",
|
||||
"FEATURE_TELNETD_PORT_DEFAULT",
|
||||
"FEATURE_TELNETD_INETD_WAIT",
|
||||
"TFTP",
|
||||
"FEATURE_TFTP_PROGRESS_BAR",
|
||||
"FEATURE_TFTP_HPA_COMPAT",
|
||||
"TFTPD",
|
||||
"FEATURE_TFTP_GET",
|
||||
"FEATURE_TFTP_PUT",
|
||||
"FEATURE_TFTP_BLOCKSIZE",
|
||||
"TLS",
|
||||
"TRACEROUTE",
|
||||
"TRACEROUTE6",
|
||||
"FEATURE_TRACEROUTE_VERBOSE",
|
||||
"FEATURE_TRACEROUTE_USE_ICMP",
|
||||
"TUNCTL",
|
||||
"FEATURE_TUNCTL_UG",
|
||||
"VCONFIG",
|
||||
"WGET",
|
||||
"FEATURE_WGET_LONG_OPTIONS",
|
||||
"FEATURE_WGET_STATUSBAR",
|
||||
"FEATURE_WGET_FTP",
|
||||
"FEATURE_WGET_AUTHENTICATION",
|
||||
"FEATURE_WGET_TIMEOUT",
|
||||
"FEATURE_WGET_HTTPS",
|
||||
"FEATURE_WGET_OPENSSL",
|
||||
"WHOIS",
|
||||
"ZCIP",
|
||||
"UDHCPD",
|
||||
"FEATURE_UDHCPD_BOOTP",
|
||||
"FEATURE_UDHCPD_WRITE_LEASES_EARLY",
|
||||
"DHCPD_LEASES_FILE",
|
||||
"DUMPLEASES",
|
||||
"DHCPRELAY",
|
||||
"UDHCPC",
|
||||
"FEATURE_UDHCPC_ARPING",
|
||||
"FEATURE_UDHCPC_SANITIZEOPT",
|
||||
"UDHCPC_DEFAULT_SCRIPT",
|
||||
"UDHCPC6_DEFAULT_SCRIPT",
|
||||
"UDHCPC6",
|
||||
"FEATURE_UDHCPC6_RFC3646",
|
||||
"FEATURE_UDHCPC6_RFC4704",
|
||||
"FEATURE_UDHCPC6_RFC4833",
|
||||
"FEATURE_UDHCPC6_RFC5970",
|
||||
}, " "),
|
||||
}, env), `
|
||||
config_enable() {
|
||||
for ent in "$@"; do
|
||||
sed "s/^# CONFIG_${ent}.*/CONFIG_${ent}=y/" -i .config
|
||||
shift
|
||||
done
|
||||
}
|
||||
|
||||
config_disable() {
|
||||
for ent in "$@"; do
|
||||
sed "s/^CONFIG_${ent}=y/# CONFIG_${ent} is not set/" -i .config
|
||||
shift
|
||||
done
|
||||
}
|
||||
|
||||
cat > /bin/gcc << EOF
|
||||
exec clang \
|
||||
-Wno-ignored-optimization-argument \
|
||||
${ROSA_CFLAGS} \
|
||||
${LDFLAGS} \
|
||||
\$@
|
||||
EOF
|
||||
chmod +x /bin/gcc
|
||||
|
||||
cd /usr/src/busybox
|
||||
chmod +w editors editors/awk.c
|
||||
patch -p 1 < /usr/src/patches/awk-fix-literal-backslash.patch
|
||||
|
||||
cd "$(mktemp -d)"
|
||||
make \
|
||||
KBUILD_SRC=/usr/src/busybox \
|
||||
-f /usr/src/busybox/Makefile \
|
||||
defconfig
|
||||
|
||||
config_enable $ROSA_BUSYBOX_ENABLE
|
||||
config_disable $ROSA_BUSYBOX_DISABLE
|
||||
ln -s ../system/bin/pwd /bin/pwd || true
|
||||
make CFLAGS_busybox="${LDFLAGS} ${EXTRA_LDFLAGS}" "-j$(nproc)"
|
||||
|
||||
mkdir -p /system/bin/ /work/bin/
|
||||
cp busybox /system/bin/
|
||||
|
||||
mkdir -pv /work/system/bin/
|
||||
busybox --install -s /work/system/bin/
|
||||
cp -v busybox /work/system/bin/
|
||||
ln -vs ../system/bin/hush /work/bin/sh
|
||||
mkdir -vp /work/usr/bin/
|
||||
ln -vs ../../system/bin/busybox /work/usr/bin/env
|
||||
`, pkg.Path(AbsUsrSrc.Append("busybox"), true, pkg.NewHTTPGetTar(
|
||||
&http.Client{Transport: &http.Transport{
|
||||
// busybox website is really slow to respond
|
||||
TLSHandshakeTimeout: 2 * time.Minute,
|
||||
}},
|
||||
"https://busybox.net/downloads/busybox-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
)), pkg.Path(
|
||||
AbsUsrSrc.Append("patches", "awk-fix-literal-backslash.patch"), false,
|
||||
pkg.NewFile("awk-fix-literal-backslash.patch", []byte(`diff --git a/editors/awk.c b/editors/awk.c
|
||||
index 64e752f4b..40f5ba7f7 100644
|
||||
--- a/editors/awk.c
|
||||
+++ b/editors/awk.c
|
||||
@@ -2636,8 +2636,13 @@ static int awk_sub(node *rn, const char *repl, int nm, var *src, var *dest /*,in
|
||||
resbuf = qrealloc(resbuf, residx + replen + n, &resbufsize);
|
||||
memcpy(resbuf + residx, sp + pmatch[j].rm_so - start_ofs, n);
|
||||
residx += n;
|
||||
- } else
|
||||
+ } else {
|
||||
+/* '\\' and '&' following a backslash keep its original meaning, any other
|
||||
+ * occurrence of a '\\' should be treated as literal */
|
||||
+ if (bslash && c != '\\' && c != '&')
|
||||
+ resbuf[residx++] = '\\';
|
||||
resbuf[residx++] = c;
|
||||
+ }
|
||||
bslash = 0;
|
||||
}
|
||||
}`)),
|
||||
))
|
||||
}
|
||||
|
||||
@@ -8,19 +8,15 @@ import (
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
// NewCMake returns a [pkg.Artifact] containing an installation of CMake.
|
||||
func (t Toolchain) NewCMake() pkg.Artifact {
|
||||
func (t Toolchain) newCMake() pkg.Artifact {
|
||||
const (
|
||||
version = "4.2.1"
|
||||
checksum = "Y3OdbMsob6Xk2y1DCME6z4Fryb5_TkFD7knRT8dTNIRtSqbiCJyyDN9AxggN_I75"
|
||||
)
|
||||
return t.New("cmake-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
t.NewKernelHeaders(),
|
||||
return t.New("cmake-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Make),
|
||||
t.Load(KernelHeaders),
|
||||
}, nil, nil, `
|
||||
# expected to be writable in the copy made during bootstrap
|
||||
chmod -R +w /usr/src/cmake/Tests
|
||||
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/cmake/bootstrap \
|
||||
--prefix=/system \
|
||||
@@ -29,24 +25,25 @@ cd "$(mktemp -d)"
|
||||
-DCMAKE_USE_OPENSSL=OFF
|
||||
make "-j$(nproc)"
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("cmake"), true,
|
||||
pkg.NewHTTPGetTar(
|
||||
`, pkg.Path(AbsUsrSrc.Append("cmake"), true, t.NewPatchedSource(
|
||||
// expected to be writable in the copy made during bootstrap
|
||||
"cmake", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/Kitware/CMake/releases/download/"+
|
||||
"v"+version+"/cmake-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), false,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[CMake] = Toolchain.newCMake }
|
||||
|
||||
// CMakeAttr holds the project-specific attributes that will be applied to a new
|
||||
// [pkg.Artifact] compiled via CMake.
|
||||
// [pkg.Artifact] compiled via [CMake].
|
||||
type CMakeAttr struct {
|
||||
// Path elements joined with source.
|
||||
Append []string
|
||||
// Use source tree as scratch space.
|
||||
Writable bool
|
||||
// Dependencies concatenated with the build system itself.
|
||||
Extra []pkg.Artifact
|
||||
|
||||
// CMake CACHE entries.
|
||||
Cache [][2]string
|
||||
@@ -54,18 +51,26 @@ type CMakeAttr struct {
|
||||
Env []string
|
||||
// Runs before cmake.
|
||||
ScriptEarly string
|
||||
// Runs after cmake.
|
||||
// Runs after cmake, replaces default.
|
||||
ScriptConfigured string
|
||||
// Runs after install.
|
||||
Script string
|
||||
|
||||
// Override the default installation prefix [AbsSystem].
|
||||
Prefix *check.Absolute
|
||||
|
||||
// Passed through to [Toolchain.New].
|
||||
Paths []pkg.ExecPath
|
||||
// Passed through to [Toolchain.New].
|
||||
Flag int
|
||||
}
|
||||
|
||||
// NewViaCMake returns a [pkg.Artifact] for compiling and installing via CMake.
|
||||
// NewViaCMake returns a [pkg.Artifact] for compiling and installing via [CMake].
|
||||
func (t Toolchain) NewViaCMake(
|
||||
name, version, variant string,
|
||||
source pkg.Artifact,
|
||||
attr *CMakeAttr,
|
||||
extra ...pkg.Artifact,
|
||||
) pkg.Artifact {
|
||||
if name == "" || version == "" || variant == "" {
|
||||
panic("names must be non-empty")
|
||||
@@ -81,19 +86,9 @@ func (t Toolchain) NewViaCMake(
|
||||
panic("CACHE must be non-empty")
|
||||
}
|
||||
|
||||
cmakeExtras := []pkg.Artifact{
|
||||
t.NewCMake(),
|
||||
t.NewNinja(),
|
||||
}
|
||||
if t == toolchainStage3 {
|
||||
cmakeExtras = nil
|
||||
}
|
||||
|
||||
scriptEarly := attr.ScriptEarly
|
||||
if attr.Writable {
|
||||
scriptEarly = `
|
||||
chmod -R +w "${ROSA_SOURCE}"
|
||||
` + scriptEarly
|
||||
scriptConfigured := "cmake --build .\ncmake --install .\n"
|
||||
if attr.ScriptConfigured != "" {
|
||||
scriptConfigured = attr.ScriptConfigured
|
||||
}
|
||||
|
||||
prefix := attr.Prefix
|
||||
@@ -102,14 +97,14 @@ chmod -R +w "${ROSA_SOURCE}"
|
||||
}
|
||||
|
||||
sourcePath := AbsUsrSrc.Append(name)
|
||||
return t.New(name+"-"+variant+"-"+version, slices.Concat(
|
||||
attr.Extra,
|
||||
cmakeExtras,
|
||||
return t.New(name+"-"+variant+"-"+version, attr.Flag, stage3Concat(t, extra,
|
||||
t.Load(CMake),
|
||||
t.Load(Ninja),
|
||||
), nil, slices.Concat([]string{
|
||||
"ROSA_SOURCE=" + sourcePath.String(),
|
||||
"ROSA_CMAKE_SOURCE=" + sourcePath.Append(attr.Append...).String(),
|
||||
"ROSA_INSTALL_PREFIX=/work" + prefix.String(),
|
||||
}, attr.Env), scriptEarly+`
|
||||
}, attr.Env), attr.ScriptEarly+`
|
||||
mkdir /cure && cd /cure
|
||||
cmake -G Ninja \
|
||||
-DCMAKE_C_COMPILER_TARGET="${ROSA_TRIPLE}" \
|
||||
@@ -124,7 +119,7 @@ cmake -G Ninja \
|
||||
}), " \\\n\t")+` \
|
||||
-DCMAKE_INSTALL_PREFIX="${ROSA_INSTALL_PREFIX}" \
|
||||
"${ROSA_CMAKE_SOURCE}"
|
||||
cmake --build .
|
||||
cmake --install .
|
||||
`+attr.Script, pkg.Path(sourcePath, attr.Writable, source))
|
||||
`+scriptConfigured+attr.Script, slices.Concat([]pkg.ExecPath{
|
||||
pkg.Path(sourcePath, attr.Writable, source),
|
||||
}, attr.Paths)...)
|
||||
}
|
||||
|
||||
32
internal/rosa/curl.go
Normal file
32
internal/rosa/curl.go
Normal file
@@ -0,0 +1,32 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newCurl() pkg.Artifact {
|
||||
const (
|
||||
version = "8.18.0"
|
||||
checksum = "YpOolP_sx1DIrCEJ3elgVAu0wTLDS-EZMZFvOP0eha7FaLueZUlEpuMwDzJNyi7i"
|
||||
)
|
||||
return t.NewViaMake("curl", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://curl.se/download/curl-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), &MakeAttr{
|
||||
Env: []string{
|
||||
"TFLAGS=-j256",
|
||||
},
|
||||
Configure: [][2]string{
|
||||
{"with-openssl"},
|
||||
{"with-ca-bundle", "/system/etc/ssl/certs/ca-bundle.crt"},
|
||||
},
|
||||
ScriptConfigured: `
|
||||
make "-j$(nproc)"
|
||||
`,
|
||||
},
|
||||
t.Load(Perl),
|
||||
|
||||
t.Load(Libpsl),
|
||||
t.Load(OpenSSL),
|
||||
)
|
||||
}
|
||||
// Register newCurl as the [Curl] artifact factory used by [Toolchain.Load].
func init() { artifactsF[Curl] = Toolchain.newCurl }
|
||||
@@ -4,8 +4,10 @@ import (
|
||||
"errors"
|
||||
"io"
|
||||
"os"
|
||||
"sync"
|
||||
"syscall"
|
||||
|
||||
"hakurei.app/container/fhs"
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
@@ -86,8 +88,30 @@ nobody:x:65534:
|
||||
// Kind returns the hardcoded [pkg.Kind] value.
|
||||
func (cureEtc) Kind() pkg.Kind { return kindEtc }
|
||||
|
||||
// Params is a noop.
|
||||
func (cureEtc) Params(*pkg.IContext) {}
|
||||
// Params writes whether iana-etc is populated.
|
||||
func (a cureEtc) Params(ctx *pkg.IContext) {
|
||||
if a.iana != nil {
|
||||
ctx.WriteUint32(1)
|
||||
} else {
|
||||
ctx.WriteUint32(0)
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
	// Register the decoder for kindEtc artifacts.
	pkg.Register(kindEtc, func(r *pkg.IRReader) pkg.Artifact {
		a := cureEtc{}
		// Mirrors cureEtc.Params: a nonzero flag means an iana-etc
		// artifact follows next in the stream.
		if r.ReadUint32() != 0 {
			a.iana = r.Next()
		}
		// The stream must end exactly here; anything left over
		// indicates a malformed or mismatched encoding.
		if _, ok := r.Finalise(); ok {
			panic(pkg.ErrUnexpectedChecksum)
		}
		return a
	})
}
|
||||
|
||||
// IsExclusive returns false: Cure performs a few trivial filesystem writes.
|
||||
func (cureEtc) IsExclusive() bool { return false }
|
||||
|
||||
// Dependencies returns a slice containing the backing iana-etc release.
|
||||
func (a cureEtc) Dependencies() []pkg.Artifact {
|
||||
@@ -98,7 +122,12 @@ func (a cureEtc) Dependencies() []pkg.Artifact {
|
||||
}
|
||||
|
||||
// String returns a hardcoded reporting name.
|
||||
func (cureEtc) String() string { return "cure-etc" }
|
||||
func (a cureEtc) String() string {
|
||||
if a.iana == nil {
|
||||
return "cure-etc-minimal"
|
||||
}
|
||||
return "cure-etc"
|
||||
}
|
||||
|
||||
// newIANAEtc returns an unpacked iana-etc release.
|
||||
func newIANAEtc() pkg.Artifact {
|
||||
@@ -113,3 +142,22 @@ func newIANAEtc() pkg.Artifact {
|
||||
pkg.TarGzip,
|
||||
)
|
||||
}
|
||||
|
||||
var (
|
||||
resolvconfPath pkg.ExecPath
|
||||
resolvconfOnce sync.Once
|
||||
)
|
||||
|
||||
// resolvconf returns a hardcoded /etc/resolv.conf file.
|
||||
func resolvconf() pkg.ExecPath {
|
||||
resolvconfOnce.Do(func() {
|
||||
resolvconfPath = pkg.Path(
|
||||
fhs.AbsEtc.Append("resolv.conf"), false,
|
||||
pkg.NewFile("resolv.conf", []byte(`
|
||||
nameserver 1.1.1.1
|
||||
nameserver 1.0.0.1
|
||||
`)),
|
||||
)
|
||||
})
|
||||
return resolvconfPath
|
||||
}
|
||||
|
||||
45
internal/rosa/fuse.go
Normal file
45
internal/rosa/fuse.go
Normal file
@@ -0,0 +1,45 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
// newFuse returns a [pkg.Artifact] containing an installation of
// libfuse, built with meson/ninja and validated by its pytest-based
// test suite.
func (t Toolchain) newFuse() pkg.Artifact {
	const (
		version  = "3.18.1"
		checksum = "COb-BgJRWXLbt9XUkNeuiroQizpMifXqxgieE1SlkMXhs_WGSyJStrmyewAw2hd6"
	)
	return t.New("fuse-"+version, 0, []pkg.Artifact{
		// Build system.
		t.Load(Python),
		t.Load(Meson),
		t.Load(Ninja),

		// Python packages required to run the test suite.
		t.Load(IniConfig),
		t.Load(Packaging),
		t.Load(Pluggy),
		t.Load(Pygments),
		t.Load(PyTest),

		t.Load(KernelHeaders),
	}, nil, nil, `
cd "$(mktemp -d)"
meson setup \
	--reconfigure \
	--buildtype=release \
	--prefix=/system \
	--prefer-static \
	-Dtests=true \
	-Duseroot=false \
	-Dinitscriptdir=/system/init.d \
	-Ddefault_library=both \
	. /usr/src/fuse
meson compile
python3 -m pytest test/
meson install \
	--destdir=/work
`, pkg.Path(AbsUsrSrc.Append("fuse"), false, pkg.NewHTTPGetTar(
		nil, "https://github.com/libfuse/libfuse/releases/download/"+
			"fuse-"+version+"/fuse-"+version+".tar.gz",
		mustDecode(checksum),
		pkg.TarGzip,
	)))
}
|
||||
// Register newFuse as the [Fuse] artifact factory used by [Toolchain.Load].
func init() { artifactsF[Fuse] = Toolchain.newFuse }
|
||||
@@ -4,34 +4,98 @@ import (
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
// NewGit returns a [pkg.Artifact] containing an installation of git.
|
||||
func (t Toolchain) NewGit() pkg.Artifact {
|
||||
func (t Toolchain) newGit() pkg.Artifact {
|
||||
const (
|
||||
version = "2.52.0"
|
||||
checksum = "uH3J1HAN_c6PfGNJd2OBwW4zo36n71wmkdvityYnrh8Ak0D1IifiAvEWz9Vi9DmS"
|
||||
)
|
||||
extra := []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
t.NewPerl(),
|
||||
t.NewM4(),
|
||||
t.NewAutoconf(),
|
||||
t.NewGettext(),
|
||||
|
||||
t.NewZlib(),
|
||||
}
|
||||
if t == toolchainStage3 {
|
||||
extra = nil
|
||||
}
|
||||
return t.New("git-"+version, extra, nil, nil, `
|
||||
chmod -R +w /usr/src/git && cd /usr/src/git
|
||||
make configure
|
||||
./configure --prefix=/system
|
||||
make "-j$(nproc)" all
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("git"), true, pkg.NewHTTPGetTar(
|
||||
return t.NewViaMake("git", version, t.NewPatchedSource(
|
||||
"git", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://www.kernel.org/pub/software/scm/git/"+
|
||||
"git-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
), false,
|
||||
), &MakeAttr{
|
||||
// uses source tree as scratch space
|
||||
Writable: true,
|
||||
InPlace: true,
|
||||
|
||||
// test suite in subdirectory
|
||||
SkipCheck: true,
|
||||
|
||||
Make: []string{"all"},
|
||||
ScriptEarly: `
|
||||
cd /usr/src/git
|
||||
|
||||
make configure
|
||||
`,
|
||||
Script: `
|
||||
ln -s ../../system/bin/perl /usr/bin/ || true
|
||||
|
||||
function disable_test {
|
||||
local test=$1 pattern=$2
|
||||
if [ $# -eq 1 ]; then
|
||||
rm "t/${test}.sh"
|
||||
else
|
||||
sed -i "t/${test}.sh" \
|
||||
-e "/^\s*test_expect_.*$pattern/,/^\s*' *\$/{s/^/: #/}"
|
||||
fi
|
||||
}
|
||||
|
||||
disable_test t5319-multi-pack-index
|
||||
disable_test t1305-config-include
|
||||
disable_test t3900-i18n-commit
|
||||
disable_test t3507-cherry-pick-conflict
|
||||
disable_test t4201-shortlog
|
||||
disable_test t5303-pack-corruption-resilience
|
||||
disable_test t4301-merge-tree-write-tree
|
||||
disable_test t8005-blame-i18n
|
||||
disable_test t9350-fast-export
|
||||
disable_test t9300-fast-import
|
||||
disable_test t0211-trace2-perf
|
||||
disable_test t1517-outside-repo
|
||||
disable_test t2200-add-update
|
||||
|
||||
make \
|
||||
-C t \
|
||||
GIT_PROVE_OPTS="--jobs 32 --failures" \
|
||||
prove
|
||||
`,
|
||||
},
|
||||
t.Load(Perl),
|
||||
t.Load(Diffutils),
|
||||
t.Load(M4),
|
||||
t.Load(Autoconf),
|
||||
t.Load(Gettext),
|
||||
|
||||
t.Load(Zlib),
|
||||
t.Load(Curl),
|
||||
t.Load(OpenSSL),
|
||||
t.Load(Libexpat),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Git] = Toolchain.newGit }
|
||||
|
||||
// NewViaGit returns a [pkg.Artifact] for cloning a git repository.
//
// name and rev form the artifact's reporting name; checksum pins the
// expected content of the produced tree. The clone detaches HEAD at
// rev and strips the .git directory, so the output is a bare source
// tree with no repository metadata.
//
// NOTE(review): url and rev are interpolated into the shell script
// without any quoting or escaping — callers must pass only trusted,
// shell-safe values. Confirm all call sites use hardcoded inputs.
func (t Toolchain) NewViaGit(
	name, url, rev string,
	checksum pkg.Checksum,
) pkg.Artifact {
	// Dependencies providing the TLS trust chain and HTTPS transport
	// that git needs for network clones.
	return t.New(name+"-"+rev, 0, []pkg.Artifact{
		t.Load(NSSCACert),
		t.Load(OpenSSL),
		t.Load(Libpsl),
		t.Load(Curl),
		t.Load(Libexpat),
		t.Load(Git),
	}, &checksum, nil, `
git \
	-c advice.detachedHead=false \
	clone \
	--revision=`+rev+` \
	`+url+` \
	/work
rm -rf /work/.git
`, resolvconf())
}
|
||||
|
||||
@@ -2,88 +2,147 @@ package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
// NewMake returns a [pkg.Artifact] containing an installation of GNU Make.
|
||||
func (t Toolchain) NewMake() pkg.Artifact {
|
||||
const (
|
||||
version = "4.4.1"
|
||||
checksum = "YS_B07ZcAy9PbaK5_vKGj64SrxO2VMpnMKfc9I0Q9IC1rn0RwOH7802pJoj2Mq4a"
|
||||
)
|
||||
return t.New("make-"+version, nil, nil, nil, `
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/make/configure \
|
||||
--prefix=/system \
|
||||
--build="${ROSA_TRIPLE}" \
|
||||
--disable-dependency-tracking
|
||||
./build.sh
|
||||
./make DESTDIR=/work install check
|
||||
`, pkg.Path(AbsUsrSrc.Append("make"), false, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://ftp.gnu.org/gnu/make/make-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
}
|
||||
|
||||
// NewM4 returns a [pkg.Artifact] containing an installation of GNU M4.
|
||||
func (t Toolchain) NewM4() pkg.Artifact {
|
||||
func (t Toolchain) newM4() pkg.Artifact {
|
||||
const (
|
||||
version = "1.4.20"
|
||||
checksum = "RT0_L3m4Co86bVBY3lCFAEs040yI1WdeNmRylFpah8IZovTm6O4wI7qiHJN3qsW9"
|
||||
)
|
||||
return t.New("m4-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
}, nil, nil, `
|
||||
cd /usr/src/m4
|
||||
chmod +w tests/test-c32ispunct.sh && echo '#!/bin/sh' > tests/test-c32ispunct.sh
|
||||
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/m4/configure \
|
||||
--prefix=/system \
|
||||
--build="${ROSA_TRIPLE}"
|
||||
make "-j$(nproc)" check
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("m4"), true, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://ftp.gnu.org/gnu/m4/m4-"+version+".tar.bz2",
|
||||
return t.NewViaMake("m4", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/m4/m4-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
)))
|
||||
), &MakeAttr{
|
||||
Writable: true,
|
||||
ScriptEarly: `
|
||||
cd /usr/src/m4
|
||||
chmod +w tests/test-c32ispunct.sh && echo '#!/bin/sh' > tests/test-c32ispunct.sh
|
||||
`,
|
||||
},
|
||||
t.Load(Diffutils),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[M4] = Toolchain.newM4 }
|
||||
|
||||
// NewAutoconf returns a [pkg.Artifact] containing an installation of GNU Autoconf.
|
||||
func (t Toolchain) NewAutoconf() pkg.Artifact {
|
||||
func (t Toolchain) newSed() pkg.Artifact {
|
||||
const (
|
||||
version = "4.9"
|
||||
checksum = "pe7HWH4PHNYrazOTlUoE1fXmhn2GOPFN_xE62i0llOr3kYGrH1g2_orDz0UtZ9Nt"
|
||||
)
|
||||
return t.NewViaMake("sed", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/sed/sed-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), nil,
|
||||
t.Load(Diffutils),
|
||||
)
|
||||
}
|
||||
// Register newSed as the [Sed] artifact factory used by [Toolchain.Load].
func init() { artifactsF[Sed] = Toolchain.newSed }
|
||||
|
||||
func (t Toolchain) newAutoconf() pkg.Artifact {
|
||||
const (
|
||||
version = "2.72"
|
||||
checksum = "-c5blYkC-xLDer3TWEqJTyh1RLbOd1c5dnRLKsDnIrg_wWNOLBpaqMY8FvmUFJ33"
|
||||
)
|
||||
return t.New("autoconf-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
t.NewM4(),
|
||||
t.NewPerl(),
|
||||
}, nil, nil, `
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/autoconf/configure \
|
||||
--prefix=/system \
|
||||
--build="${ROSA_TRIPLE}"
|
||||
make "-j$(nproc)" check
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("autoconf"), false, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://ftp.gnu.org/gnu/autoconf/autoconf-"+version+".tar.gz",
|
||||
return t.NewViaMake("autoconf", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/autoconf/autoconf-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
}
|
||||
|
||||
// NewGettext returns a [pkg.Artifact] containing an installation of GNU gettext.
|
||||
func (t Toolchain) NewGettext() pkg.Artifact {
|
||||
const (
|
||||
version = "0.26"
|
||||
checksum = "IMu7yDZX7xL5UO1ZxXc-iBMbY9LLEUlOroyuSlHMZwg9MKtxG7HIm8F2LheDua0y"
|
||||
), &MakeAttr{
|
||||
Make: []string{
|
||||
`TESTSUITEFLAGS="-j$(nproc)"`,
|
||||
},
|
||||
Flag: TExclusive,
|
||||
},
|
||||
t.Load(M4),
|
||||
t.Load(Perl),
|
||||
t.Load(Bash),
|
||||
t.Load(Diffutils),
|
||||
)
|
||||
return t.New("gettext-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
}, nil, nil, `
|
||||
}
|
||||
func init() { artifactsF[Autoconf] = Toolchain.newAutoconf }
|
||||
|
||||
func (t Toolchain) newAutomake() pkg.Artifact {
|
||||
const (
|
||||
version = "1.18.1"
|
||||
checksum = "FjvLG_GdQP7cThTZJLDMxYpRcKdpAVG-YDs1Fj1yaHlSdh_Kx6nRGN14E0r_BjcG"
|
||||
)
|
||||
return t.NewViaMake("automake", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/automake/automake-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Writable: true,
|
||||
ScriptEarly: `
|
||||
cd /usr/src/automake
|
||||
|
||||
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
|
||||
|
||||
test_disable '#!/bin/sh' t/objcxx-minidemo.sh
|
||||
test_disable '#!/bin/sh' t/objcxx-deps.sh
|
||||
|
||||
test_disable '#!/bin/sh' t/dist-no-built-sources.sh
|
||||
test_disable '#!/bin/sh' t/distname.sh
|
||||
test_disable '#!/bin/sh' t/pr9.sh
|
||||
`,
|
||||
},
|
||||
t.Load(M4),
|
||||
t.Load(Perl),
|
||||
t.Load(Grep),
|
||||
t.Load(Gzip),
|
||||
t.Load(Autoconf),
|
||||
t.Load(Diffutils),
|
||||
)
|
||||
}
|
||||
// Register newAutomake as the [Automake] artifact factory used by [Toolchain.Load].
func init() { artifactsF[Automake] = Toolchain.newAutomake }
|
||||
|
||||
func (t Toolchain) newLibtool() pkg.Artifact {
|
||||
const (
|
||||
version = "2.5.4"
|
||||
checksum = "pa6LSrQggh8mSJHQfwGjysAApmZlGJt8wif2cCLzqAAa2jpsTY0jZ-6stS3BWZ2Q"
|
||||
)
|
||||
return t.NewViaMake("libtool", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/libtool/libtool-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Make: []string{
|
||||
`TESTSUITEFLAGS=32`,
|
||||
},
|
||||
},
|
||||
t.Load(M4),
|
||||
t.Load(Diffutils),
|
||||
)
|
||||
}
|
||||
// Register newLibtool as the [Libtool] artifact factory used by [Toolchain.Load].
func init() { artifactsF[Libtool] = Toolchain.newLibtool }
|
||||
|
||||
func (t Toolchain) newGzip() pkg.Artifact {
|
||||
const (
|
||||
version = "1.14"
|
||||
checksum = "NWhjUavnNfTDFkZJyAUonL9aCOak8GVajWX2OMlzpFnuI0ErpBFyj88mz2xSjz0q"
|
||||
)
|
||||
return t.NewViaMake("gzip", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/gzip/gzip-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
// dependency loop
|
||||
SkipCheck: true,
|
||||
})
|
||||
}
|
||||
// Register newGzip as the [Gzip] artifact factory used by [Toolchain.Load].
func init() { artifactsF[Gzip] = Toolchain.newGzip }
|
||||
|
||||
func (t Toolchain) newGettext() pkg.Artifact {
|
||||
const (
|
||||
version = "1.0"
|
||||
checksum = "3MasKeEdPeFEgWgzsBKk7JqWqql1wEMbgPmzAfs-mluyokoW0N8oQVxPQoOnSdgC"
|
||||
)
|
||||
return t.NewViaMake("gettext", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/gettext/gettext-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Writable: true,
|
||||
ScriptEarly: `
|
||||
cd /usr/src/gettext
|
||||
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
|
||||
|
||||
@@ -97,100 +156,468 @@ test_disable '#!/bin/sh' gettext-tools/tests/format-c-5
|
||||
test_disable '#!/bin/sh' gettext-tools/gnulib-tests/test-c32ispunct.sh
|
||||
test_disable 'int main(){return 0;}' gettext-tools/gnulib-tests/test-stdcountof-h.c
|
||||
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/gettext/configure \
|
||||
--prefix=/system \
|
||||
--build="${ROSA_TRIPLE}"
|
||||
make "-j$(nproc)" check
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("gettext"), true, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://ftp.gnu.org/pub/gnu/gettext/gettext-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
}
|
||||
touch gettext-tools/autotools/archive.dir.tar
|
||||
`,
|
||||
},
|
||||
t.Load(Diffutils),
|
||||
t.Load(Gzip),
|
||||
t.Load(Sed),
|
||||
|
||||
// NewDiffutils returns a [pkg.Artifact] containing an installation of GNU diffutils.
|
||||
func (t Toolchain) NewDiffutils() pkg.Artifact {
|
||||
t.Load(KernelHeaders),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Gettext] = Toolchain.newGettext }
|
||||
|
||||
func (t Toolchain) newDiffutils() pkg.Artifact {
|
||||
const (
|
||||
version = "3.12"
|
||||
checksum = "9J5VAq5oA7eqwzS1Yvw-l3G5o-TccUrNQR3PvyB_lgdryOFAfxtvQfKfhdpquE44"
|
||||
)
|
||||
return t.New("diffutils-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
}, nil, nil, `
|
||||
return t.NewViaMake("diffutils", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/diffutils/diffutils-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Writable: true,
|
||||
ScriptEarly: `
|
||||
cd /usr/src/diffutils
|
||||
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
|
||||
|
||||
test_disable '#!/bin/sh' gnulib-tests/test-c32ispunct.sh
|
||||
test_disable 'int main(){return 0;}' gnulib-tests/test-c32ispunct.c
|
||||
test_disable '#!/bin/sh' tests/cmp
|
||||
`,
|
||||
Flag: TEarly,
|
||||
})
|
||||
}
|
||||
func init() { artifactsF[Diffutils] = Toolchain.newDiffutils }
|
||||
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/diffutils/configure \
|
||||
--prefix=/system \
|
||||
--build="${ROSA_TRIPLE}"
|
||||
make "-j$(nproc)" check
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("diffutils"), true, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://ftp.gnu.org/gnu/diffutils/diffutils-"+version+".tar.gz",
|
||||
func (t Toolchain) newPatch() pkg.Artifact {
|
||||
const (
|
||||
version = "2.8"
|
||||
checksum = "MA0BQc662i8QYBD-DdGgyyfTwaeALZ1K0yusV9rAmNiIsQdX-69YC4t9JEGXZkeR"
|
||||
)
|
||||
return t.NewViaMake("patch", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/patch/patch-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
}
|
||||
), &MakeAttr{
|
||||
Writable: true,
|
||||
ScriptEarly: `
|
||||
cd /usr/src/patch
|
||||
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
|
||||
|
||||
// NewBash returns a [pkg.Artifact] containing an installation of GNU Bash.
|
||||
func (t Toolchain) NewBash() pkg.Artifact {
|
||||
test_disable '#!/bin/sh' tests/ed-style
|
||||
test_disable '#!/bin/sh' tests/need-filename
|
||||
`,
|
||||
Flag: TEarly,
|
||||
})
|
||||
}
|
||||
func init() { artifactsF[Patch] = Toolchain.newPatch }
|
||||
|
||||
func (t Toolchain) newBash() pkg.Artifact {
|
||||
const (
|
||||
version = "5.3"
|
||||
checksum = "4LQ_GRoB_ko-Ih8QPf_xRKA02xAm_TOxQgcJLmFDT6udUPxTAWrsj-ZNeuTusyDq"
|
||||
)
|
||||
return t.New("bash-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
}, nil, nil, `
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/bash/configure \
|
||||
--prefix=/system \
|
||||
--build="${ROSA_TRIPLE}" \
|
||||
--without-bash-malloc
|
||||
make "-j$(nproc)" check
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("bash"), true, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://ftp.gnu.org/gnu/bash/bash-"+version+".tar.gz",
|
||||
return t.NewViaMake("bash", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/bash/bash-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
), &MakeAttr{
|
||||
Script: "ln -s bash /work/system/bin/sh\n",
|
||||
Configure: [][2]string{
|
||||
{"without-bash-malloc"},
|
||||
},
|
||||
Flag: TEarly,
|
||||
})
|
||||
}
|
||||
func init() { artifactsF[Bash] = Toolchain.newBash }
|
||||
|
||||
// NewCoreutils returns a [pkg.Artifact] containing an installation of GNU Coreutils.
|
||||
func (t Toolchain) NewCoreutils() pkg.Artifact {
|
||||
func (t Toolchain) newCoreutils() pkg.Artifact {
|
||||
const (
|
||||
version = "9.9"
|
||||
checksum = "B1_TaXj1j5aiVIcazLWu8Ix03wDV54uo2_iBry4qHG6Y-9bjDpUPlkNLmU_3Nvw6"
|
||||
)
|
||||
return t.New("coreutils-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
t.NewPerl(),
|
||||
|
||||
t.NewKernelHeaders(),
|
||||
}, nil, nil, `
|
||||
return t.NewViaMake("coreutils", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/coreutils/coreutils-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Writable: true,
|
||||
ScriptEarly: `
|
||||
cd /usr/src/coreutils
|
||||
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
|
||||
|
||||
test_disable '#!/bin/sh' gnulib-tests/test-c32ispunct.sh
|
||||
test_disable '#!/bin/sh' tests/split/line-bytes.sh
|
||||
test_disable '#!/bin/sh' tests/dd/no-allocate.sh
|
||||
test_disable 'int main(){return 0;}' gnulib-tests/test-chown.c
|
||||
test_disable 'int main(){return 0;}' gnulib-tests/test-fchownat.c
|
||||
test_disable 'int main(){return 0;}' gnulib-tests/test-lchown.c
|
||||
`,
|
||||
Flag: TEarly,
|
||||
},
|
||||
t.Load(Perl),
|
||||
t.Load(Bash),
|
||||
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/coreutils/configure \
|
||||
--prefix=/system \
|
||||
--build="${ROSA_TRIPLE}"
|
||||
make "-j$(nproc)" check
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("coreutils"), true, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://ftp.gnu.org/gnu/coreutils/coreutils-"+version+".tar.gz",
|
||||
t.Load(KernelHeaders),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Coreutils] = Toolchain.newCoreutils }
|
||||
|
||||
func (t Toolchain) newGperf() pkg.Artifact {
|
||||
const (
|
||||
version = "3.3"
|
||||
checksum = "RtIy9pPb_Bb8-31J2Nw-rRGso2JlS-lDlVhuNYhqR7Nt4xM_nObznxAlBMnarJv7"
|
||||
)
|
||||
return t.NewViaMake("gperf", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gperf/gperf-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
), nil,
|
||||
t.Load(Diffutils),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Gperf] = Toolchain.newGperf }
|
||||
|
||||
func (t Toolchain) newGawk() pkg.Artifact {
|
||||
const (
|
||||
version = "5.3.2"
|
||||
checksum = "uIs0d14h_d2DgMGYwrPtegGNyt_bxzG3D6Fe-MmExx_pVoVkQaHzrtmiXVr6NHKk"
|
||||
)
|
||||
return t.NewViaMake("gawk", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/gawk/gawk-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Flag: TEarly,
|
||||
|
||||
// dependency loop
|
||||
SkipCheck: true,
|
||||
})
|
||||
}
|
||||
func init() { artifactsF[Gawk] = Toolchain.newGawk }
|
||||
|
||||
func (t Toolchain) newGrep() pkg.Artifact {
|
||||
const (
|
||||
version = "3.12"
|
||||
checksum = "qMB4RjaPNRRYsxix6YOrjE8gyAT1zVSTy4nW4wKW9fqa0CHYAuWgPwDTirENzm_1"
|
||||
)
|
||||
return t.NewViaMake("grep", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/grep/grep-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Writable: true,
|
||||
ScriptEarly: `
|
||||
cd /usr/src/grep
|
||||
|
||||
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
|
||||
|
||||
test_disable '#!/bin/sh' gnulib-tests/test-c32ispunct.sh
|
||||
test_disable 'int main(){return 0;}' gnulib-tests/test-c32ispunct.c
|
||||
`,
|
||||
},
|
||||
t.Load(Diffutils),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Grep] = Toolchain.newGrep }
|
||||
|
||||
func (t Toolchain) newFindutils() pkg.Artifact {
|
||||
const (
|
||||
version = "4.10.0"
|
||||
checksum = "ZXABdNBQXL7QjTygynRRTdXYWxQKZ0Wn5eMd3NUnxR0xaS0u0VfcKoTlbo50zxv6"
|
||||
)
|
||||
return t.NewViaMake("findutils", version, pkg.NewHTTPGet(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/findutils/findutils-"+version+".tar.xz",
|
||||
mustDecode(checksum),
|
||||
), &MakeAttr{
|
||||
SourceSuffix: ".tar.xz",
|
||||
ScriptEarly: `
|
||||
cd /usr/src/
|
||||
tar xf findutils.tar.xz
|
||||
mv findutils-` + version + ` findutils
|
||||
|
||||
cd findutils
|
||||
echo '#!/bin/sh' > gnulib-tests/test-c32ispunct.sh
|
||||
echo 'int main(){return 0;}' > tests/xargs/test-sigusr.c
|
||||
`,
|
||||
},
|
||||
t.Load(Diffutils),
|
||||
t.Load(XZ),
|
||||
t.Load(Sed),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Findutils] = Toolchain.newFindutils }
|
||||
|
||||
func (t Toolchain) newBinutils() pkg.Artifact {
|
||||
const (
|
||||
version = "2.45"
|
||||
checksum = "hlLtqqHDmzAT2OQVHaKEd_io2DGFvJkaeS-igBuK8bRRir7LUKGHgHYNkDVKaHTT"
|
||||
)
|
||||
return t.NewViaMake("binutils", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/binutils/binutils-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), &MakeAttr{
|
||||
ScriptConfigured: `
|
||||
make "-j$(nproc)"
|
||||
`,
|
||||
},
|
||||
t.Load(Bash),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Binutils] = Toolchain.newBinutils }
|
||||
|
||||
func (t Toolchain) newGMP() pkg.Artifact {
|
||||
const (
|
||||
version = "6.3.0"
|
||||
checksum = "yrgbgEDWKDdMWVHh7gPbVl56-sRtVVhfvv0M_LX7xMUUk_mvZ1QOJEAnt7g4i3k5"
|
||||
)
|
||||
return t.NewViaMake("gmp", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://gcc.gnu.org/pub/gcc/infrastructure/"+
|
||||
"gmp-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), &MakeAttr{
|
||||
ScriptConfigured: `
|
||||
make "-j$(nproc)"
|
||||
`,
|
||||
},
|
||||
t.Load(M4),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[GMP] = Toolchain.newGMP }
|
||||
|
||||
func (t Toolchain) newMPFR() pkg.Artifact {
|
||||
const (
|
||||
version = "4.2.2"
|
||||
checksum = "wN3gx0zfIuCn9r3VAn_9bmfvAYILwrRfgBjYSD1IjLqyLrLojNN5vKyQuTE9kA-B"
|
||||
)
|
||||
return t.NewViaMake("mpfr", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://gcc.gnu.org/pub/gcc/infrastructure/"+
|
||||
"mpfr-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), nil,
|
||||
t.Load(GMP),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[MPFR] = Toolchain.newMPFR }
|
||||
|
||||
func (t Toolchain) newMPC() pkg.Artifact {
|
||||
const (
|
||||
version = "1.3.1"
|
||||
checksum = "o8r8K9R4x7PuRx0-JE3-bC5jZQrtxGV2nkB773aqJ3uaxOiBDCID1gKjPaaDxX4V"
|
||||
)
|
||||
return t.NewViaMake("mpc", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://gcc.gnu.org/pub/gcc/infrastructure/"+
|
||||
"mpc-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), nil,
|
||||
t.Load(GMP),
|
||||
t.Load(MPFR),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[MPC] = Toolchain.newMPC }
|
||||
|
||||
func (t Toolchain) newGCC() pkg.Artifact {
|
||||
const (
|
||||
version = "15.2.0"
|
||||
checksum = "TXJ5WrbXlGLzy1swghQTr4qxgDCyIZFgJry51XEPTBZ8QYbVmFeB4lZbSMtPJ-a1"
|
||||
)
|
||||
return t.NewViaMake("gcc", version, t.NewPatchedSource(
|
||||
"gcc", version,
|
||||
pkg.NewHTTPGetTar(
|
||||
nil, "https://ftp.tsukuba.wide.ad.jp/software/gcc/releases/"+
|
||||
"gcc-"+version+"/gcc-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), true, [2]string{"musl-off64_t-loff_t", `diff --git a/libgo/sysinfo.c b/libgo/sysinfo.c
|
||||
index 180f5c31d74..44d7ea73f7d 100644
|
||||
--- a/libgo/sysinfo.c
|
||||
+++ b/libgo/sysinfo.c
|
||||
@@ -365,11 +365,7 @@ enum {
|
||||
typedef loff_t libgo_loff_t_type;
|
||||
#endif
|
||||
|
||||
-#if defined(HAVE_OFF64_T)
|
||||
-typedef off64_t libgo_off_t_type;
|
||||
-#else
|
||||
typedef off_t libgo_off_t_type;
|
||||
-#endif
|
||||
|
||||
// The following section introduces explicit references to types and
|
||||
// constants of interest to support bootstrapping libgo using a
|
||||
`}, [2]string{"musl-legacy-lfs", `diff --git a/libgo/go/internal/syscall/unix/at_largefile.go b/libgo/go/internal/syscall/unix/at_largefile.go
|
||||
index 82e0dcfd074..16151ecad1b 100644
|
||||
--- a/libgo/go/internal/syscall/unix/at_largefile.go
|
||||
+++ b/libgo/go/internal/syscall/unix/at_largefile.go
|
||||
@@ -10,5 +10,5 @@ import (
|
||||
"syscall"
|
||||
)
|
||||
|
||||
-//extern fstatat64
|
||||
+//extern fstatat
|
||||
func fstatat(int32, *byte, *syscall.Stat_t, int32) int32
|
||||
diff --git a/libgo/go/os/dir_largefile.go b/libgo/go/os/dir_largefile.go
|
||||
index 1fc5ee0771f..0c6dffe1a75 100644
|
||||
--- a/libgo/go/os/dir_largefile.go
|
||||
+++ b/libgo/go/os/dir_largefile.go
|
||||
@@ -11,5 +11,5 @@ package os
|
||||
|
||||
import "syscall"
|
||||
|
||||
-//extern readdir64
|
||||
+//extern readdir
|
||||
func libc_readdir(*syscall.DIR) *syscall.Dirent
|
||||
diff --git a/libgo/go/syscall/libcall_glibc.go b/libgo/go/syscall/libcall_glibc.go
|
||||
index 5c1ec483c75..5a1245ed44b 100644
|
||||
--- a/libgo/go/syscall/libcall_glibc.go
|
||||
+++ b/libgo/go/syscall/libcall_glibc.go
|
||||
@@ -114,7 +114,7 @@ func Pipe2(p []int, flags int) (err error) {
|
||||
}
|
||||
|
||||
//sys sendfile(outfd int, infd int, offset *Offset_t, count int) (written int, err error)
|
||||
-//sendfile64(outfd _C_int, infd _C_int, offset *Offset_t, count Size_t) Ssize_t
|
||||
+//sendfile(outfd _C_int, infd _C_int, offset *Offset_t, count Size_t) Ssize_t
|
||||
|
||||
func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) {
|
||||
if race.Enabled {
|
||||
diff --git a/libgo/go/syscall/libcall_linux.go b/libgo/go/syscall/libcall_linux.go
|
||||
index 03ca7261b59..ad21fd0b3ac 100644
|
||||
--- a/libgo/go/syscall/libcall_linux.go
|
||||
+++ b/libgo/go/syscall/libcall_linux.go
|
||||
@@ -158,7 +158,7 @@ func Reboot(cmd int) (err error) {
|
||||
//adjtimex(buf *Timex) _C_int
|
||||
|
||||
//sys Fstatfs(fd int, buf *Statfs_t) (err error)
|
||||
-//fstatfs64(fd _C_int, buf *Statfs_t) _C_int
|
||||
+//fstatfs(fd _C_int, buf *Statfs_t) _C_int
|
||||
|
||||
func Gettid() (tid int) {
|
||||
r1, _, _ := Syscall(SYS_GETTID, 0, 0, 0)
|
||||
@@ -245,7 +245,7 @@ func Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n i
|
||||
}
|
||||
|
||||
//sys Statfs(path string, buf *Statfs_t) (err error)
|
||||
-//statfs64(path *byte, buf *Statfs_t) _C_int
|
||||
+//statfs(path *byte, buf *Statfs_t) _C_int
|
||||
|
||||
//sysnb Sysinfo(info *Sysinfo_t) (err error)
|
||||
//sysinfo(info *Sysinfo_t) _C_int
|
||||
diff --git a/libgo/go/syscall/libcall_posix_largefile.go b/libgo/go/syscall/libcall_posix_largefile.go
|
||||
index f90055bb29a..334212f0af1 100644
|
||||
--- a/libgo/go/syscall/libcall_posix_largefile.go
|
||||
+++ b/libgo/go/syscall/libcall_posix_largefile.go
|
||||
@@ -10,40 +10,40 @@
|
||||
package syscall
|
||||
|
||||
//sys Creat(path string, mode uint32) (fd int, err error)
|
||||
-//creat64(path *byte, mode Mode_t) _C_int
|
||||
+//creat(path *byte, mode Mode_t) _C_int
|
||||
|
||||
//sys Fstat(fd int, stat *Stat_t) (err error)
|
||||
-//fstat64(fd _C_int, stat *Stat_t) _C_int
|
||||
+//fstat(fd _C_int, stat *Stat_t) _C_int
|
||||
|
||||
//sys Ftruncate(fd int, length int64) (err error)
|
||||
-//ftruncate64(fd _C_int, length Offset_t) _C_int
|
||||
+//ftruncate(fd _C_int, length Offset_t) _C_int
|
||||
|
||||
//sysnb Getrlimit(resource int, rlim *Rlimit) (err error)
|
||||
-//getrlimit64(resource _C_int, rlim *Rlimit) _C_int
|
||||
+//getrlimit(resource _C_int, rlim *Rlimit) _C_int
|
||||
|
||||
//sys Lstat(path string, stat *Stat_t) (err error)
|
||||
-//lstat64(path *byte, stat *Stat_t) _C_int
|
||||
+//lstat(path *byte, stat *Stat_t) _C_int
|
||||
|
||||
//sys mmap(addr uintptr, length uintptr, prot int, flags int, fd int, offset int64) (xaddr uintptr, err error)
|
||||
-//mmap64(addr *byte, length Size_t, prot _C_int, flags _C_int, fd _C_int, offset Offset_t) *byte
|
||||
+//mmap(addr *byte, length Size_t, prot _C_int, flags _C_int, fd _C_int, offset Offset_t) *byte
|
||||
|
||||
//sys Open(path string, mode int, perm uint32) (fd int, err error)
|
||||
-//__go_open64(path *byte, mode _C_int, perm Mode_t) _C_int
|
||||
+//__go_open(path *byte, mode _C_int, perm Mode_t) _C_int
|
||||
|
||||
//sys Pread(fd int, p []byte, offset int64) (n int, err error)
|
||||
-//pread64(fd _C_int, buf *byte, count Size_t, offset Offset_t) Ssize_t
|
||||
+//pread(fd _C_int, buf *byte, count Size_t, offset Offset_t) Ssize_t
|
||||
|
||||
//sys Pwrite(fd int, p []byte, offset int64) (n int, err error)
|
||||
-//pwrite64(fd _C_int, buf *byte, count Size_t, offset Offset_t) Ssize_t
|
||||
+//pwrite(fd _C_int, buf *byte, count Size_t, offset Offset_t) Ssize_t
|
||||
|
||||
//sys Seek(fd int, offset int64, whence int) (off int64, err error)
|
||||
-//lseek64(fd _C_int, offset Offset_t, whence _C_int) Offset_t
|
||||
+//lseek(fd _C_int, offset Offset_t, whence _C_int) Offset_t
|
||||
|
||||
//sysnb Setrlimit(resource int, rlim *Rlimit) (err error)
|
||||
-//setrlimit64(resource int, rlim *Rlimit) _C_int
|
||||
+//setrlimit(resource int, rlim *Rlimit) _C_int
|
||||
|
||||
//sys Stat(path string, stat *Stat_t) (err error)
|
||||
-//stat64(path *byte, stat *Stat_t) _C_int
|
||||
+//stat(path *byte, stat *Stat_t) _C_int
|
||||
|
||||
//sys Truncate(path string, length int64) (err error)
|
||||
-//truncate64(path *byte, length Offset_t) _C_int
|
||||
+//truncate(path *byte, length Offset_t) _C_int
|
||||
diff --git a/libgo/runtime/go-varargs.c b/libgo/runtime/go-varargs.c
|
||||
index f84860891e6..7efc9615985 100644
|
||||
--- a/libgo/runtime/go-varargs.c
|
||||
+++ b/libgo/runtime/go-varargs.c
|
||||
@@ -84,7 +84,7 @@ __go_ioctl_ptr (int d, int request, void *arg)
|
||||
int
|
||||
__go_open64 (char *path, int mode, mode_t perm)
|
||||
{
|
||||
- return open64 (path, mode, perm);
|
||||
+ return open (path, mode, perm);
|
||||
}
|
||||
|
||||
#endif
|
||||
`}), &MakeAttr{
|
||||
ScriptEarly: `
|
||||
ln -s system/lib /
|
||||
ln -s system/lib /work/
|
||||
`,
|
||||
Configure: [][2]string{
|
||||
{"disable-multilib"},
|
||||
{"with-multilib-list", `""`},
|
||||
{"enable-default-pie"},
|
||||
{"disable-nls"},
|
||||
{"with-gnu-as"},
|
||||
{"with-gnu-ld"},
|
||||
{"with-system-zlib"},
|
||||
{"enable-languages", "c,c++,go"},
|
||||
{"with-native-system-header-dir", "/system/include"},
|
||||
},
|
||||
Make: []string{
|
||||
"BOOT_CFLAGS='-O2 -g'",
|
||||
"bootstrap",
|
||||
},
|
||||
|
||||
// This toolchain is hacked to pieces, it is not expected to ever work
|
||||
// well in its current state. That does not matter as long as the
|
||||
// toolchain it produces passes its own test suite.
|
||||
SkipCheck: true,
|
||||
|
||||
// GCC spends most of its time in its many configure scripts, however
|
||||
// it also saturates the CPU for a consequential amount of time.
|
||||
Flag: TExclusive,
|
||||
},
|
||||
t.Load(Binutils),
|
||||
|
||||
t.Load(GMP),
|
||||
t.Load(MPFR),
|
||||
t.Load(MPC),
|
||||
|
||||
t.Load(Zlib),
|
||||
t.Load(Libucontext),
|
||||
t.Load(KernelHeaders),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[gcc] = Toolchain.newGCC }
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package rosa
|
||||
|
||||
import (
|
||||
"runtime"
|
||||
"slices"
|
||||
|
||||
"hakurei.app/internal/pkg"
|
||||
@@ -9,30 +10,18 @@ import (
|
||||
// newGoBootstrap returns the Go bootstrap toolchain.
|
||||
func (t Toolchain) newGoBootstrap() pkg.Artifact {
|
||||
const checksum = "8o9JL_ToiQKadCTb04nvBDkp8O1xiWOolAxVEqaTGodieNe4lOFEjlOxN3bwwe23"
|
||||
return t.New("go1.4-bootstrap", []pkg.Artifact{
|
||||
t.NewBash(),
|
||||
return t.New("go1.4-bootstrap", 0, []pkg.Artifact{
|
||||
t.Load(Bash),
|
||||
}, nil, []string{
|
||||
"CGO_ENABLED=0",
|
||||
}, `
|
||||
mkdir -p /var/tmp
|
||||
cp -r /usr/src/go1.4-bootstrap /work
|
||||
cd /work/go1.4-bootstrap/src
|
||||
mkdir -p /var/tmp/ /work/system/
|
||||
cp -r /usr/src/go /work/system/
|
||||
cd /work/system/go/src
|
||||
chmod -R +w ..
|
||||
|
||||
ln -s ../system/bin/busybox /bin/pwd
|
||||
cat << EOF > /bin/hostname
|
||||
#!/bin/sh
|
||||
echo cure
|
||||
EOF
|
||||
chmod +x /bin/hostname
|
||||
|
||||
rm \
|
||||
cmd/objdump/objdump_test.go \
|
||||
syscall/creds_test.go \
|
||||
net/multicast_test.go
|
||||
|
||||
CC="${CC} ${LDFLAGS}" ./all.bash
|
||||
`, pkg.Path(AbsUsrSrc.Append("go1.4-bootstrap"), false, pkg.NewHTTPGetTar(
|
||||
./make.bash
|
||||
`, pkg.Path(AbsUsrSrc.Append("go"), false, pkg.NewHTTPGetTar(
|
||||
nil, "https://dl.google.com/go/go1.4-bootstrap-20171003.tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
@@ -42,22 +31,30 @@ CC="${CC} ${LDFLAGS}" ./all.bash
|
||||
// newGo returns a specific version of the Go toolchain.
|
||||
func (t Toolchain) newGo(
|
||||
version, checksum string,
|
||||
boot pkg.Artifact,
|
||||
env ...string,
|
||||
env []string,
|
||||
script string,
|
||||
extra ...pkg.Artifact,
|
||||
) pkg.Artifact {
|
||||
return t.New("go"+version, []pkg.Artifact{
|
||||
boot,
|
||||
}, nil, slices.Concat([]string{
|
||||
return t.New("go"+version, 0, slices.Concat([]pkg.Artifact{
|
||||
t.Load(Bash),
|
||||
}, extra), nil, slices.Concat([]string{
|
||||
"CC=cc",
|
||||
"GOCACHE=/tmp/gocache",
|
||||
"GOROOT_BOOTSTRAP=/system/go",
|
||||
"CGO_" + ldflags(false) + " -O2 -g",
|
||||
"TMPDIR=/dev/shm/go",
|
||||
}, env), `
|
||||
mkdir /work/system
|
||||
mkdir /work/system "${TMPDIR}"
|
||||
cp -r /usr/src/go /work/system
|
||||
cd /work/system/go/src
|
||||
chmod -R +w ..
|
||||
sed -i 's/bash run.bash/sh run.bash/' all.bash
|
||||
sh make.bash
|
||||
`+script+`
|
||||
./all.bash
|
||||
|
||||
mkdir /work/system/bin
|
||||
ln -s \
|
||||
../go/bin/go \
|
||||
../go/bin/gofmt \
|
||||
/work/system/bin
|
||||
`, pkg.Path(AbsUsrSrc.Append("go"), false, pkg.NewHTTPGetTar(
|
||||
nil, "https://go.dev/dl/go"+version+".src.tar.gz",
|
||||
mustDecode(checksum),
|
||||
@@ -65,28 +62,97 @@ sh make.bash
|
||||
)))
|
||||
}
|
||||
|
||||
// NewGo returns a [pkg.Artifact] containing the Go toolchain.
|
||||
func (t Toolchain) NewGo() pkg.Artifact {
|
||||
func (t Toolchain) newGoLatest() pkg.Artifact {
|
||||
var (
|
||||
bootstrapEnv []string
|
||||
bootstrapExtra []pkg.Artifact
|
||||
|
||||
finalEnv []string
|
||||
)
|
||||
switch runtime.GOARCH {
|
||||
case "amd64":
|
||||
bootstrapExtra = append(bootstrapExtra, t.newGoBootstrap())
|
||||
|
||||
case "arm64":
|
||||
bootstrapEnv = append(bootstrapEnv,
|
||||
"GOROOT_BOOTSTRAP=/system",
|
||||
)
|
||||
bootstrapExtra = append(bootstrapExtra,
|
||||
t.Load(Binutils),
|
||||
|
||||
t.Load(GMP),
|
||||
t.Load(MPFR),
|
||||
t.Load(MPC),
|
||||
|
||||
t.Load(Zlib),
|
||||
t.Load(Libucontext),
|
||||
|
||||
t.Load(gcc),
|
||||
)
|
||||
|
||||
finalEnv = append(finalEnv, "CGO_ENABLED=0")
|
||||
|
||||
default:
|
||||
panic("unsupported target " + runtime.GOARCH)
|
||||
}
|
||||
|
||||
go119 := t.newGo(
|
||||
"1.19",
|
||||
"9_e0aFHsIkVxWVGsp9T2RvvjOc3p4n9o9S8tkNe9Cvgzk_zI2FhRQB7ioQkeAAro",
|
||||
t.newGoBootstrap(),
|
||||
"GOROOT_BOOTSTRAP=/go1.4-bootstrap",
|
||||
)
|
||||
append(bootstrapEnv, "CGO_ENABLED=0"), `
|
||||
rm \
|
||||
crypto/tls/handshake_client_test.go \
|
||||
cmd/pprof/pprof_test.go \
|
||||
os/os_unix_test.go
|
||||
sed -i \
|
||||
's/os\.Getenv("GCCGO")$/"nonexistent"/' \
|
||||
go/internal/gccgoimporter/importer_test.go
|
||||
echo \
|
||||
'type syscallDescriptor = int' >> \
|
||||
os/rawconn_test.go
|
||||
`, bootstrapExtra...)
|
||||
|
||||
go121 := t.newGo(
|
||||
"1.21.13",
|
||||
"YtrDka402BOAEwywx03Vz4QlVwoBiguJHzG7PuythMCPHXS8CVMLvzmvgEbu4Tzu",
|
||||
go119,
|
||||
[]string{"CGO_ENABLED=0"}, `
|
||||
sed -i \
|
||||
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
|
||||
cmd/link/internal/`+runtime.GOARCH+`/obj.go
|
||||
|
||||
rm \
|
||||
crypto/tls/handshake_client_test.go \
|
||||
crypto/tls/handshake_server_test.go \
|
||||
os/os_unix_test.go
|
||||
echo \
|
||||
'type syscallDescriptor = int' >> \
|
||||
os/rawconn_test.go
|
||||
`, go119,
|
||||
)
|
||||
|
||||
go123 := t.newGo(
|
||||
"1.23.12",
|
||||
"wcI32bl1tkqbgcelGtGWPI4RtlEddd-PTd76Eb-k7nXA5LbE9yTNdIL9QSOOxMOs",
|
||||
go121,
|
||||
[]string{"CGO_ENABLED=0"}, `
|
||||
sed -i \
|
||||
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
|
||||
cmd/link/internal/`+runtime.GOARCH+`/obj.go
|
||||
`, go121,
|
||||
)
|
||||
|
||||
go125 := t.newGo(
|
||||
"1.25.6",
|
||||
"x0z430qoDvQbbw_fftjW0rh_GSoh0VJhPzttWk_0hj9yz9AKOjuwRMupF_Q0dbt7",
|
||||
go123,
|
||||
"1.25.7",
|
||||
"fyylHdBVRUobnBjYj3NKBaYPUw3kGmo2mEELiZonOYurPfbarNU1x77B99Fjut7Q",
|
||||
finalEnv, `
|
||||
sed -i \
|
||||
's,/lib/ld-musl-`+linuxArch()+`.so.1,/system/bin/linker,' \
|
||||
cmd/link/internal/`+runtime.GOARCH+`/obj.go
|
||||
|
||||
rm \
|
||||
os/root_unix_test.go
|
||||
`, go123,
|
||||
)
|
||||
|
||||
return go125
|
||||
}
|
||||
func init() { artifactsF[Go] = Toolchain.newGoLatest }
|
||||
|
||||
303
internal/rosa/hakurei.go
Normal file
303
internal/rosa/hakurei.go
Normal file
@@ -0,0 +1,303 @@
|
||||
package rosa
|
||||
|
||||
import (
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
func (t Toolchain) newHakurei(suffix, script string) pkg.Artifact {
|
||||
const (
|
||||
version = "0.3.4"
|
||||
checksum = "wVwSLo75a2OnH5tgxNWXR_YhiOJUFnYM_9-sJtxAEOKhcPE0BJafs6PU8o5JzyCT"
|
||||
)
|
||||
return t.New("hakurei"+suffix+"-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Go),
|
||||
|
||||
t.Load(Gzip),
|
||||
t.Load(PkgConfig),
|
||||
|
||||
t.Load(KernelHeaders),
|
||||
t.Load(Libseccomp),
|
||||
t.Load(ACL),
|
||||
t.Load(Attr),
|
||||
t.Load(Fuse),
|
||||
|
||||
t.Load(Xproto),
|
||||
t.Load(LibXau),
|
||||
t.Load(XCBProto),
|
||||
t.Load(XCB),
|
||||
|
||||
t.Load(Libffi),
|
||||
t.Load(Libexpat),
|
||||
t.Load(Libxml2),
|
||||
t.Load(Wayland),
|
||||
t.Load(WaylandProtocols),
|
||||
}, nil, []string{
|
||||
"CGO_ENABLED=1",
|
||||
"GOCACHE=/tmp/gocache",
|
||||
"CC=clang -O3 -Werror",
|
||||
}, `
|
||||
echo '# Building test helper (hostname).'
|
||||
go build -v -o /bin/hostname /usr/src/hostname/main.go
|
||||
echo
|
||||
|
||||
chmod -R +w /usr/src/hakurei
|
||||
cd /usr/src/hakurei
|
||||
|
||||
HAKUREI_VERSION='v`+version+`'
|
||||
`+script, pkg.Path(AbsUsrSrc.Append("hakurei"), true, t.NewPatchedSource("hakurei", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://git.gensokyo.uk/security/hakurei/archive/"+
|
||||
"v"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), true, [2]string{"dist-00-tests", `From 67e453f5c4de915de23ecbe5980e595758f0f2fb Mon Sep 17 00:00:00 2001
|
||||
From: Ophestra <cat@gensokyo.uk>
|
||||
Date: Tue, 27 Jan 2026 06:49:48 +0900
|
||||
Subject: [PATCH] dist: run tests
|
||||
|
||||
This used to be impossible due to nix jank which has been addressed.
|
||||
|
||||
Signed-off-by: Ophestra <cat@gensokyo.uk>
|
||||
---
|
||||
dist/release.sh | 21 ++++++++++++++++-----
|
||||
flake.nix | 32 ++++++++++++++++++++------------
|
||||
internal/acl/acl_test.go | 2 +-
|
||||
package.nix | 2 +-
|
||||
4 files changed, 38 insertions(+), 19 deletions(-)
|
||||
|
||||
diff --git a/dist/release.sh b/dist/release.sh
|
||||
index 4dcb278..0ba9104 100755
|
||||
--- a/dist/release.sh
|
||||
+++ b/dist/release.sh
|
||||
@@ -2,19 +2,30 @@
|
||||
cd "$(dirname -- "$0")/.."
|
||||
VERSION="${HAKUREI_VERSION:-untagged}"
|
||||
pname="hakurei-${VERSION}"
|
||||
-out="dist/${pname}"
|
||||
+out="${DESTDIR:-dist}/${pname}"
|
||||
|
||||
+echo '# Preparing distribution files.'
|
||||
mkdir -p "${out}"
|
||||
cp -v "README.md" "dist/hsurc.default" "dist/install.sh" "${out}"
|
||||
cp -rv "dist/comp" "${out}"
|
||||
+echo
|
||||
|
||||
+echo '# Building hakurei.'
|
||||
go generate ./...
|
||||
-go build -trimpath -v -o "${out}/bin/" -ldflags "-s -w -buildid= -extldflags '-static'
|
||||
+go build -trimpath -v -o "${out}/bin/" -ldflags "-s -w
|
||||
+ -buildid= -extldflags '-static'
|
||||
-X hakurei.app/internal/info.buildVersion=${VERSION}
|
||||
-X hakurei.app/internal/info.hakureiPath=/usr/bin/hakurei
|
||||
-X hakurei.app/internal/info.hsuPath=/usr/bin/hsu
|
||||
-X main.hakureiPath=/usr/bin/hakurei" ./...
|
||||
+echo
|
||||
|
||||
-rm -f "./${out}.tar.gz" && tar -C dist -czf "${out}.tar.gz" "${pname}"
|
||||
-rm -rf "./${out}"
|
||||
-(cd dist && sha512sum "${pname}.tar.gz" > "${pname}.tar.gz.sha512")
|
||||
+echo '# Testing hakurei.'
|
||||
+go test -ldflags='-buildid= -extldflags=-static' ./...
|
||||
+echo
|
||||
+
|
||||
+echo '# Creating distribution.'
|
||||
+rm -f "${out}.tar.gz" && tar -C "${out}/.." -vczf "${out}.tar.gz" "${pname}"
|
||||
+rm -rf "${out}"
|
||||
+(cd "${out}/.." && sha512sum "${pname}.tar.gz" > "${pname}.tar.gz.sha512")
|
||||
+echo
|
||||
diff --git a/flake.nix b/flake.nix
|
||||
index 9e09c61..2340b92 100644
|
||||
--- a/flake.nix
|
||||
+++ b/flake.nix
|
||||
@@ -143,19 +143,27 @@
|
||||
"bin/mount.fuse.sharefs" = "${hakurei}/libexec/sharefs";
|
||||
};
|
||||
|
||||
- dist = pkgs.runCommand "${hakurei.name}-dist" { buildInputs = hakurei.targetPkgs ++ [ pkgs.pkgsStatic.musl ]; } ''
|
||||
- # go requires XDG_CACHE_HOME for the build cache
|
||||
- export XDG_CACHE_HOME="$(mktemp -d)"
|
||||
+ dist =
|
||||
+ pkgs.runCommand "${hakurei.name}-dist"
|
||||
+ {
|
||||
+ buildInputs = hakurei.targetPkgs ++ [
|
||||
+ pkgs.pkgsStatic.musl
|
||||
+ ];
|
||||
+ }
|
||||
+ ''
|
||||
+ cd $(mktemp -d) \
|
||||
+ && cp -r ${hakurei.src}/. . \
|
||||
+ && chmod +w cmd && cp -r ${hsu.src}/. cmd/hsu/ \
|
||||
+ && chmod -R +w .
|
||||
|
||||
- # get a different workdir as go does not like /build
|
||||
- cd $(mktemp -d) \
|
||||
- && cp -r ${hakurei.src}/. . \
|
||||
- && chmod +w cmd && cp -r ${hsu.src}/. cmd/hsu/ \
|
||||
- && chmod -R +w .
|
||||
-
|
||||
- export HAKUREI_VERSION="v${hakurei.version}"
|
||||
- CC="clang -O3 -Werror" ./dist/release.sh && mkdir $out && cp -v "dist/hakurei-$HAKUREI_VERSION.tar.gz"* $out
|
||||
- '';
|
||||
+ CC="musl-clang -O3 -Werror -Qunused-arguments" \
|
||||
+ GOCACHE="$(mktemp -d)" \
|
||||
+ HAKUREI_TEST_SKIP_ACL=1 \
|
||||
+ PATH="${pkgs.pkgsStatic.musl.bin}/bin:$PATH" \
|
||||
+ DESTDIR="$out" \
|
||||
+ HAKUREI_VERSION="v${hakurei.version}" \
|
||||
+ ./dist/release.sh
|
||||
+ '';
|
||||
}
|
||||
);
|
||||
|
||||
diff --git a/internal/acl/acl_test.go b/internal/acl/acl_test.go
|
||||
index af6da55..19ce45a 100644
|
||||
--- a/internal/acl/acl_test.go
|
||||
+++ b/internal/acl/acl_test.go
|
||||
@@ -24,7 +24,7 @@ var (
|
||||
)
|
||||
|
||||
func TestUpdate(t *testing.T) {
|
||||
- if os.Getenv("GO_TEST_SKIP_ACL") == "1" {
|
||||
+ if os.Getenv("HAKUREI_TEST_SKIP_ACL") == "1" {
|
||||
t.Skip("acl test skipped")
|
||||
}
|
||||
|
||||
diff --git a/package.nix b/package.nix
|
||||
index 00c4401..2eaa2ec 100644
|
||||
--- a/package.nix
|
||||
+++ b/package.nix
|
||||
@@ -89,7 +89,7 @@ buildGoModule rec {
|
||||
CC = "clang -O3 -Werror";
|
||||
|
||||
# nix build environment does not allow acls
|
||||
- GO_TEST_SKIP_ACL = 1;
|
||||
+ HAKUREI_TEST_SKIP_ACL = 1;
|
||||
};
|
||||
|
||||
buildInputs = [`}, [2]string{"container-tests", `From bf14a412e47344fff2681f4b24d1ecc7415bfcb0 Mon Sep 17 00:00:00 2001
|
||||
From: Ophestra <cat@gensokyo.uk>
|
||||
Date: Sat, 31 Jan 2026 10:59:56 +0900
|
||||
Subject: [PATCH] container: fix host-dependent test cases
|
||||
|
||||
These are not fully controlled by hakurei and may change depending on host configuration.
|
||||
|
||||
Signed-off-by: Ophestra <cat@gensokyo.uk>
|
||||
---
|
||||
container/container_test.go | 27 +++++++++++++++------------
|
||||
1 file changed, 15 insertions(+), 12 deletions(-)
|
||||
|
||||
diff --git a/container/container_test.go b/container/container_test.go
|
||||
index d737a18..98713cb 100644
|
||||
--- a/container/container_test.go
|
||||
+++ b/container/container_test.go
|
||||
@@ -275,12 +275,12 @@ var containerTestCases = []struct {
|
||||
),
|
||||
earlyMnt(
|
||||
ent("/", "/dev", "ro,nosuid,nodev,relatime", "tmpfs", "devtmpfs", ignore),
|
||||
- ent("/null", "/dev/null", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
- ent("/zero", "/dev/zero", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
- ent("/full", "/dev/full", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
- ent("/random", "/dev/random", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
- ent("/urandom", "/dev/urandom", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
- ent("/tty", "/dev/tty", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/null", "/dev/null", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/zero", "/dev/zero", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/full", "/dev/full", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/random", "/dev/random", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/urandom", "/dev/urandom", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/tty", "/dev/tty", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/", "/dev/pts", "rw,nosuid,noexec,relatime", "devpts", "devpts", "rw,mode=620,ptmxmode=666"),
|
||||
ent("/", "/dev/mqueue", "rw,nosuid,nodev,noexec,relatime", "mqueue", "mqueue", "rw"),
|
||||
ent("/", "/dev/shm", "rw,nosuid,nodev,relatime", "tmpfs", "tmpfs", ignore),
|
||||
@@ -293,12 +293,12 @@ var containerTestCases = []struct {
|
||||
),
|
||||
earlyMnt(
|
||||
ent("/", "/dev", "ro,nosuid,nodev,relatime", "tmpfs", "devtmpfs", ignore),
|
||||
- ent("/null", "/dev/null", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
- ent("/zero", "/dev/zero", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
- ent("/full", "/dev/full", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
- ent("/random", "/dev/random", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
- ent("/urandom", "/dev/urandom", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
- ent("/tty", "/dev/tty", "rw,nosuid", "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/null", "/dev/null", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/zero", "/dev/zero", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/full", "/dev/full", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/random", "/dev/random", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/urandom", "/dev/urandom", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
+ ent("/tty", "/dev/tty", ignore, "devtmpfs", "devtmpfs", ignore),
|
||||
ent("/", "/dev/pts", "rw,nosuid,noexec,relatime", "devpts", "devpts", "rw,mode=620,ptmxmode=666"),
|
||||
ent("/", "/dev/shm", "rw,nosuid,nodev,relatime", "tmpfs", "tmpfs", ignore),
|
||||
),
|
||||
@@ -696,6 +696,9 @@ func init() {
|
||||
mnt[i].VfsOptstr = strings.TrimSuffix(mnt[i].VfsOptstr, ",relatime")
|
||||
mnt[i].VfsOptstr = strings.TrimSuffix(mnt[i].VfsOptstr, ",noatime")
|
||||
|
||||
+ cur.FsOptstr = strings.Replace(cur.FsOptstr, ",seclabel", "", 1)
|
||||
+ mnt[i].FsOptstr = strings.Replace(mnt[i].FsOptstr, ",seclabel", "", 1)
|
||||
+
|
||||
if !cur.EqualWithIgnore(mnt[i], "\x00") {
|
||||
fail = true
|
||||
log.Printf("[FAIL] %s", cur)`}, [2]string{"dist-01-tarball-name", `diff --git a/dist/release.sh b/dist/release.sh
|
||||
index 0ba9104..2990ee1 100755
|
||||
--- a/dist/release.sh
|
||||
+++ b/dist/release.sh
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/bin/sh -e
|
||||
cd "$(dirname -- "$0")/.."
|
||||
VERSION="${HAKUREI_VERSION:-untagged}"
|
||||
-pname="hakurei-${VERSION}"
|
||||
+pname="hakurei-${VERSION}-$(go env GOARCH)"
|
||||
out="${DESTDIR:-dist}/${pname}"
|
||||
|
||||
echo '# Preparing distribution files.'
|
||||
`}),
|
||||
), pkg.Path(AbsUsrSrc.Append("hostname", "main.go"), false, pkg.NewFile(
|
||||
"hostname.go",
|
||||
[]byte(`
|
||||
package main
|
||||
|
||||
import "os"
|
||||
|
||||
func main() {
|
||||
if name, err := os.Hostname(); err != nil {
|
||||
panic(err)
|
||||
} else {
|
||||
os.Stdout.WriteString(name)
|
||||
}
|
||||
}
|
||||
`),
|
||||
)))
|
||||
}
|
||||
func init() {
|
||||
artifactsF[Hakurei] = func(t Toolchain) pkg.Artifact {
|
||||
return t.newHakurei("", `
|
||||
mkdir -p /work/system/libexec/hakurei/
|
||||
|
||||
echo '# Building hakurei.'
|
||||
go generate -v ./...
|
||||
go build -trimpath -v -o /work/system/libexec/hakurei -ldflags="-s -w
|
||||
-buildid=
|
||||
-extldflags=-static
|
||||
-X hakurei.app/internal/info.buildVersion="$HAKUREI_VERSION"
|
||||
-X hakurei.app/internal/info.hakureiPath=/system/bin/hakurei
|
||||
-X hakurei.app/internal/info.hsuPath=/system/bin/hsu
|
||||
-X main.hakureiPath=/system/bin/hakurei" ./...
|
||||
echo
|
||||
|
||||
echo '# Testing hakurei.'
|
||||
go test -ldflags='-buildid= -extldflags=-static' ./...
|
||||
echo
|
||||
|
||||
mkdir -p /work/system/bin/
|
||||
(cd /work/system/libexec/hakurei && mv \
|
||||
hakurei \
|
||||
sharefs \
|
||||
../../bin/)
|
||||
`)
|
||||
}
|
||||
artifactsF[HakureiDist] = func(t Toolchain) pkg.Artifact {
|
||||
return t.newHakurei("-dist", `
|
||||
export HAKUREI_VERSION
|
||||
DESTDIR=/work /usr/src/hakurei/dist/release.sh
|
||||
`)
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,8 @@ import (
|
||||
|
||||
// newKernel is a helper for interacting with Kbuild.
|
||||
func (t Toolchain) newKernel(
|
||||
flag int,
|
||||
patches [][2]string,
|
||||
script string,
|
||||
extra ...pkg.Artifact,
|
||||
) pkg.Artifact {
|
||||
@@ -15,27 +17,28 @@ func (t Toolchain) newKernel(
|
||||
version = "6.18.5"
|
||||
checksum = "-V1e1WWl7HuePkmm84sSKF7nLuHfUs494uNMzMqXEyxcNE_PUE0FICL0oGWn44mM"
|
||||
)
|
||||
return t.New("kernel-"+version, slices.Concat([]pkg.Artifact{
|
||||
t.NewMake(),
|
||||
return t.New("kernel-"+version, flag, slices.Concat([]pkg.Artifact{
|
||||
t.Load(Make),
|
||||
}, extra), nil, nil, `
|
||||
export LLVM=1
|
||||
export HOSTCFLAGS="${ROSA_CFLAGS}"
|
||||
export HOSTLDFLAGS="${LDFLAGS}"
|
||||
chmod -R +w /usr/src/linux && cd /usr/src/linux
|
||||
`+script, pkg.Path(AbsUsrSrc.Append("linux"), true, pkg.NewHTTPGetTar(
|
||||
cd /usr/src/linux
|
||||
`+script, pkg.Path(AbsUsrSrc.Append("linux"), true, t.NewPatchedSource(
|
||||
"kernel", version, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/"+
|
||||
"snapshot/linux-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), false, patches...,
|
||||
)))
|
||||
}
|
||||
|
||||
// NewKernelHeaders returns a [pkg.Artifact] containing kernel headers.
|
||||
func (t Toolchain) NewKernelHeaders() pkg.Artifact {
|
||||
return t.newKernel(`
|
||||
func (t Toolchain) newKernelHeaders() pkg.Artifact {
|
||||
return t.newKernel(TEarly, nil, `
|
||||
make "-j$(nproc)" \
|
||||
INSTALL_HDR_PATH=/work/system \
|
||||
headers_install
|
||||
`, t.NewRsync())
|
||||
`, t.Load(Rsync))
|
||||
}
|
||||
func init() { artifactsF[KernelHeaders] = Toolchain.newKernelHeaders }
|
||||
|
||||
28
internal/rosa/libexpat.go
Normal file
28
internal/rosa/libexpat.go
Normal file
@@ -0,0 +1,28 @@
|
||||
package rosa
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
func (t Toolchain) newLibexpat() pkg.Artifact {
|
||||
const (
|
||||
version = "2.7.3"
|
||||
checksum = "GmkoD23nRi9cMT0cgG1XRMrZWD82UcOMzkkvP1gkwSFWCBgeSXMuoLpa8-v8kxW-"
|
||||
)
|
||||
return t.NewViaMake("libexpat", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/libexpat/libexpat/releases/download/"+
|
||||
"R_"+strings.ReplaceAll(version, ".", "_")+"/"+
|
||||
"expat-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), &MakeAttr{
|
||||
Configure: [][2]string{
|
||||
{"enable-static"},
|
||||
},
|
||||
},
|
||||
t.Load(Bash),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Libexpat] = Toolchain.newLibexpat }
|
||||
@@ -2,27 +2,22 @@ package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
// NewLibffi returns a [pkg.Artifact] containing an installation of libffi.
|
||||
func (t Toolchain) NewLibffi() pkg.Artifact {
|
||||
func (t Toolchain) newLibffi() pkg.Artifact {
|
||||
const (
|
||||
version = "3.4.5"
|
||||
checksum = "apIJzypF4rDudeRoI_n3K7N-zCeBLTbQlHRn9NSAZqdLAWA80mR0gXPTpHsL7oMl"
|
||||
)
|
||||
return t.New("libffi-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
t.NewKernelHeaders(),
|
||||
}, nil, nil, `
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/libffi/configure \
|
||||
--prefix=/system \
|
||||
--build="${ROSA_TRIPLE}"
|
||||
make "-j$(nproc)" check
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("libffi"), false, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://github.com/libffi/libffi/releases/download/"+
|
||||
return t.NewViaMake("libffi", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/libffi/libffi/releases/download/"+
|
||||
"v"+version+"/libffi-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
), &MakeAttr{
|
||||
Configure: [][2]string{
|
||||
{"enable-static"},
|
||||
},
|
||||
},
|
||||
t.Load(KernelHeaders),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Libffi] = Toolchain.newLibffi }
|
||||
|
||||
30
internal/rosa/libgd.go
Normal file
30
internal/rosa/libgd.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newLibgd() pkg.Artifact {
|
||||
const (
|
||||
version = "2.3.3"
|
||||
checksum = "8T-sh1_FJT9K9aajgxzh8ot6vWIF-xxjcKAHvTak9MgGUcsFfzP8cAvvv44u2r36"
|
||||
)
|
||||
return t.NewViaMake("libgd", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/libgd/libgd/releases/download/"+
|
||||
"gd-"+version+"/libgd-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
OmitDefaults: true,
|
||||
Env: []string{
|
||||
"TMPDIR=/dev/shm/gd",
|
||||
},
|
||||
ScriptEarly: `
|
||||
mkdir /dev/shm/gd
|
||||
`,
|
||||
Configure: [][2]string{
|
||||
{"enable-static"},
|
||||
},
|
||||
},
|
||||
t.Load(Zlib),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Libgd] = Toolchain.newLibgd }
|
||||
28
internal/rosa/libpsl.go
Normal file
28
internal/rosa/libpsl.go
Normal file
@@ -0,0 +1,28 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newLibpsl() pkg.Artifact {
|
||||
const (
|
||||
version = "0.21.5"
|
||||
checksum = "XjfxSzh7peG2Vg4vJlL8z4JZJLcXqbuP6pLWkrGCmRxlnYUFTKNBqWGHCxEOlCad"
|
||||
)
|
||||
return t.NewViaMake("libpsl", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/rockdaboot/libpsl/releases/download/"+
|
||||
version+"/libpsl-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Writable: true,
|
||||
ScriptEarly: `
|
||||
cd /usr/src/libpsl
|
||||
|
||||
test_disable() { chmod +w "$2" && echo "$1" > "$2"; }
|
||||
|
||||
test_disable 'int main(){return 0;}' tests/test-is-public-builtin.c
|
||||
`,
|
||||
},
|
||||
t.Load(Python),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Libpsl] = Toolchain.newLibpsl }
|
||||
33
internal/rosa/libseccomp.go
Normal file
33
internal/rosa/libseccomp.go
Normal file
@@ -0,0 +1,33 @@
|
||||
package rosa
|
||||
|
||||
import (
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
func (t Toolchain) newLibseccomp() pkg.Artifact {
|
||||
const (
|
||||
version = "2.6.0"
|
||||
checksum = "mMu-iR71guPjFbb31u-YexBaanKE_nYPjPux-vuBiPfS_0kbwJdfCGlkofaUm-EY"
|
||||
)
|
||||
return t.NewViaMake("libseccomp", version, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://github.com/seccomp/libseccomp/releases/download/"+
|
||||
"v"+version+"/libseccomp-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
ScriptEarly: `
|
||||
ln -s ../system/bin/bash /bin/
|
||||
`,
|
||||
Configure: [][2]string{
|
||||
{"enable-static"},
|
||||
},
|
||||
},
|
||||
t.Load(Bash),
|
||||
t.Load(Diffutils),
|
||||
t.Load(Gperf),
|
||||
|
||||
t.Load(KernelHeaders),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Libseccomp] = Toolchain.newLibseccomp }
|
||||
40
internal/rosa/libucontext.go
Normal file
40
internal/rosa/libucontext.go
Normal file
@@ -0,0 +1,40 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newLibucontext() pkg.Artifact {
|
||||
const (
|
||||
version = "1.5"
|
||||
checksum = "Ggk7FMmDNBdCx1Z9PcNWWW6LSpjGYssn2vU0GK5BLXJYw7ZxZbA2m_eSgT9TFnIG"
|
||||
)
|
||||
return t.New("libucontext", 0, []pkg.Artifact{
|
||||
t.Load(Make),
|
||||
}, nil, []string{
|
||||
"ARCH=" + linuxArch(),
|
||||
}, `
|
||||
cd /usr/src/libucontext
|
||||
make check
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("libucontext"), true,
|
||||
t.NewPatchedSource("libucontext", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/kaniini/libucontext/archive/refs/tags/"+
|
||||
"libucontext-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), true, [2]string{"rosa-prefix", `diff --git a/Makefile b/Makefile
|
||||
index c80e574..4a8c1d3 100644
|
||||
--- a/Makefile
|
||||
+++ b/Makefile
|
||||
@@ -17,7 +17,7 @@ ifeq ($(ARCH),$(filter $(ARCH),arm64))
|
||||
override ARCH = aarch64
|
||||
endif
|
||||
|
||||
-prefix = /usr
|
||||
+prefix = /system
|
||||
libdir = ${prefix}/lib
|
||||
shared_libdir = ${libdir}
|
||||
static_libdir = ${libdir}
|
||||
`}),
|
||||
))
|
||||
}
|
||||
func init() { artifactsF[Libucontext] = Toolchain.newLibucontext }
|
||||
34
internal/rosa/libxml2.go
Normal file
34
internal/rosa/libxml2.go
Normal file
@@ -0,0 +1,34 @@
|
||||
package rosa
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
func (t Toolchain) newLibxml2() pkg.Artifact {
|
||||
const (
|
||||
version = "2.15.1"
|
||||
checksum = "pYzAR3cNrEHezhEMirgiq7jbboLzwMj5GD7SQp0jhSIMdgoU4G9oU9Gxun3zzUIU"
|
||||
)
|
||||
return t.NewViaMake("libxml2", version, pkg.NewHTTPGet(
|
||||
nil, "https://download.gnome.org/sources/libxml2/"+
|
||||
strings.Join(strings.Split(version, ".")[:2], ".")+
|
||||
"/libxml2-"+version+".tar.xz",
|
||||
mustDecode(checksum),
|
||||
), &MakeAttr{
|
||||
ScriptEarly: `
|
||||
cd /usr/src/
|
||||
tar xf libxml2.tar.xz
|
||||
mv libxml2-` + version + ` libxml2
|
||||
`,
|
||||
Configure: [][2]string{
|
||||
{"enable-static"},
|
||||
},
|
||||
SourceSuffix: ".tar.xz",
|
||||
},
|
||||
t.Load(Diffutils),
|
||||
t.Load(XZ),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Libxml2] = Toolchain.newLibxml2 }
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"hakurei.app/container/check"
|
||||
"hakurei.app/internal/pkg"
|
||||
@@ -21,12 +22,18 @@ type llvmAttr struct {
|
||||
cmake [][2]string
|
||||
// Override CMakeAttr.Append.
|
||||
append []string
|
||||
// Concatenated with default dependencies for CMakeAttr.Extra.
|
||||
// Concatenated with default dependencies for Toolchain.NewViaCMake.
|
||||
extra []pkg.Artifact
|
||||
// Passed through to CMakeAttr.Paths.
|
||||
paths []pkg.ExecPath
|
||||
// Passed through to CMakeAttr.ScriptConfigured.
|
||||
scriptConfigured string
|
||||
// Concatenated with default fixup for CMakeAttr.Script.
|
||||
script string
|
||||
// Passed through to CMakeAttr.Prefix.
|
||||
prefix *check.Absolute
|
||||
// Passed through to CMakeAttr.Writable.
|
||||
writable bool
|
||||
|
||||
// Patch name and body pairs.
|
||||
patches [][2]string
|
||||
@@ -72,8 +79,8 @@ func llvmFlagName(flag int) string {
|
||||
}
|
||||
}
|
||||
|
||||
// newLLVM returns a [pkg.Artifact] containing a LLVM variant.
|
||||
func (t Toolchain) newLLVM(variant string, attr *llvmAttr) pkg.Artifact {
|
||||
// newLLVMVariant returns a [pkg.Artifact] containing a LLVM variant.
|
||||
func (t Toolchain) newLLVMVariant(variant string, attr *llvmAttr) pkg.Artifact {
|
||||
const (
|
||||
version = "21.1.8"
|
||||
checksum = "8SUpqDkcgwOPsqHVtmf9kXfFeVmjVxl4LMn-qSE1AI_Xoeju-9HaoPNGtidyxyka"
|
||||
@@ -124,24 +131,12 @@ func (t Toolchain) newLLVM(variant string, attr *llvmAttr) pkg.Artifact {
|
||||
)
|
||||
}
|
||||
|
||||
extra := []pkg.Artifact{
|
||||
t.NewLibffi(),
|
||||
t.NewPython(),
|
||||
t.NewPerl(),
|
||||
t.NewDiffutils(),
|
||||
t.NewBash(),
|
||||
t.NewCoreutils(),
|
||||
|
||||
t.NewKernelHeaders(),
|
||||
}
|
||||
if t == toolchainStage3 {
|
||||
extra = nil
|
||||
}
|
||||
|
||||
if attr.flags&llvmProjectClang != 0 {
|
||||
cache = append(cache,
|
||||
[2]string{"CLANG_DEFAULT_LINKER", "lld"},
|
||||
[2]string{"CLANG_DEFAULT_CXX_STDLIB", "libc++"},
|
||||
[2]string{"CLANG_DEFAULT_RTLIB", "compiler-rt"},
|
||||
[2]string{"CLANG_DEFAULT_UNWINDLIB", "libunwind"},
|
||||
)
|
||||
}
|
||||
if attr.flags&llvmProjectLld != 0 {
|
||||
@@ -181,51 +176,51 @@ cp -r /system/include /usr/include && rm -rf /system/include
|
||||
)
|
||||
}
|
||||
|
||||
source := pkg.NewHTTPGetTar(
|
||||
return t.NewViaCMake("llvm", version, variant, t.NewPatchedSource(
|
||||
"llvmorg", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/llvm/llvm-project/archive/refs/tags/"+
|
||||
"llvmorg-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)
|
||||
|
||||
patches := make([]pkg.ExecPath, len(attr.patches)+1)
|
||||
for i, p := range attr.patches {
|
||||
patches[i+1] = pkg.Path(
|
||||
AbsUsrSrc.Append("llvm-patches", p[0]+".patch"), false,
|
||||
pkg.NewFile(p[0], []byte(p[1])),
|
||||
)
|
||||
}
|
||||
patches[0] = pkg.Path(AbsUsrSrc.Append("llvmorg"), false, source)
|
||||
if len(patches) > 1 {
|
||||
source = t.New(
|
||||
"llvmorg-patched", nil, nil, nil, `
|
||||
cp -r /usr/src/llvmorg/. /work/.
|
||||
chmod -R +w /work && cd /work
|
||||
cat /usr/src/llvm-patches/* | patch -p 1
|
||||
`, patches...,
|
||||
)
|
||||
}
|
||||
|
||||
return t.NewViaCMake("llvm", version, variant, source, &CMakeAttr{
|
||||
), true, attr.patches...,
|
||||
), &CMakeAttr{
|
||||
Cache: slices.Concat(cache, attr.cmake),
|
||||
Append: cmakeAppend,
|
||||
Extra: slices.Concat(attr.extra, extra),
|
||||
Prefix: attr.prefix,
|
||||
|
||||
Env: slices.Concat([]string{
|
||||
"ROSA_LLVM_PROJECTS=" + strings.Join(projects, ";"),
|
||||
"ROSA_LLVM_RUNTIMES=" + strings.Join(runtimes, ";"),
|
||||
}, attr.env),
|
||||
ScriptEarly: scriptEarly, Script: script + attr.script,
|
||||
})
|
||||
ScriptEarly: scriptEarly,
|
||||
ScriptConfigured: attr.scriptConfigured,
|
||||
Script: script + attr.script,
|
||||
Writable: attr.writable,
|
||||
|
||||
Paths: attr.paths,
|
||||
Flag: TExclusive,
|
||||
}, stage3Concat(t, attr.extra,
|
||||
t.Load(Libffi),
|
||||
t.Load(Python),
|
||||
t.Load(Perl),
|
||||
t.Load(Diffutils),
|
||||
t.Load(Bash),
|
||||
t.Load(Gawk),
|
||||
t.Load(Coreutils),
|
||||
t.Load(Findutils),
|
||||
|
||||
t.Load(KernelHeaders),
|
||||
)...)
|
||||
}
|
||||
|
||||
// NewLLVM returns LLVM toolchain across multiple [pkg.Artifact].
|
||||
func (t Toolchain) NewLLVM() (musl, compilerRT, runtimes, clang pkg.Artifact) {
|
||||
// newLLVM returns LLVM toolchain across multiple [pkg.Artifact].
|
||||
func (t Toolchain) newLLVM() (musl, compilerRT, runtimes, clang pkg.Artifact) {
|
||||
var target string
|
||||
switch runtime.GOARCH {
|
||||
case "386", "amd64":
|
||||
target = "X86"
|
||||
case "arm64":
|
||||
target = "AArch64"
|
||||
|
||||
default:
|
||||
panic("unsupported target " + runtime.GOARCH)
|
||||
@@ -237,16 +232,17 @@ func (t Toolchain) NewLLVM() (musl, compilerRT, runtimes, clang pkg.Artifact) {
|
||||
{"LLVM_ENABLE_LIBXML2", "OFF"},
|
||||
}
|
||||
|
||||
compilerRT = t.newLLVM("compiler-rt", &llvmAttr{
|
||||
env: []string{
|
||||
ldflags(false),
|
||||
},
|
||||
compilerRT = t.newLLVMVariant("compiler-rt", &llvmAttr{
|
||||
env: stage3ExclConcat(t, []string{},
|
||||
"LDFLAGS="+earlyLDFLAGS(false),
|
||||
),
|
||||
cmake: [][2]string{
|
||||
// libc++ not yet available
|
||||
{"CMAKE_CXX_COMPILER_TARGET", ""},
|
||||
|
||||
{"COMPILER_RT_BUILD_BUILTINS", "ON"},
|
||||
{"COMPILER_RT_DEFAULT_TARGET_ONLY", "ON"},
|
||||
{"COMPILER_RT_SANITIZERS_TO_BUILD", "asan"},
|
||||
{"LLVM_ENABLE_PER_TARGET_RUNTIME_DIR", "ON"},
|
||||
|
||||
// does not work without libunwind
|
||||
@@ -254,7 +250,6 @@ func (t Toolchain) NewLLVM() (musl, compilerRT, runtimes, clang pkg.Artifact) {
|
||||
{"COMPILER_RT_BUILD_LIBFUZZER", "OFF"},
|
||||
{"COMPILER_RT_BUILD_MEMPROF", "OFF"},
|
||||
{"COMPILER_RT_BUILD_PROFILE", "OFF"},
|
||||
{"COMPILER_RT_BUILD_SANITIZERS", "OFF"},
|
||||
{"COMPILER_RT_BUILD_XRAY", "OFF"},
|
||||
},
|
||||
append: []string{"compiler-rt"},
|
||||
@@ -271,30 +266,31 @@ ln -s \
|
||||
"${ROSA_INSTALL_PREFIX}/lib/clang/21/lib/"
|
||||
|
||||
ln -s \
|
||||
"clang_rt.crtbegin-$(uname -m).o" \
|
||||
"clang_rt.crtbegin-` + linuxArch() + `.o" \
|
||||
"${ROSA_INSTALL_PREFIX}/lib/${ROSA_TRIPLE}/crtbeginS.o"
|
||||
ln -s \
|
||||
"clang_rt.crtend-$(uname -m).o" \
|
||||
"clang_rt.crtend-` + linuxArch() + `.o" \
|
||||
"${ROSA_INSTALL_PREFIX}/lib/${ROSA_TRIPLE}/crtendS.o"
|
||||
`,
|
||||
})
|
||||
|
||||
musl = t.NewMusl(&MuslAttr{
|
||||
Extra: []pkg.Artifact{compilerRT},
|
||||
Env: []string{
|
||||
ldflags(false),
|
||||
Env: stage3ExclConcat(t, []string{
|
||||
"CC=clang",
|
||||
"LIBCC=/system/lib/clang/21/lib/" +
|
||||
triplet() + "/libclang_rt.builtins.a",
|
||||
"AR=ar",
|
||||
"RANLIB=ranlib",
|
||||
},
|
||||
"LDFLAGS="+earlyLDFLAGS(false),
|
||||
),
|
||||
})
|
||||
|
||||
runtimes = t.newLLVM("runtimes", &llvmAttr{
|
||||
env: []string{
|
||||
ldflags(false),
|
||||
},
|
||||
runtimes = t.newLLVMVariant("runtimes", &llvmAttr{
|
||||
env: stage3ExclConcat(t, []string{},
|
||||
"LDFLAGS="+earlyLDFLAGS(false),
|
||||
),
|
||||
flags: llvmRuntimeLibunwind | llvmRuntimeLibcxx | llvmRuntimeLibcxxABI,
|
||||
cmake: slices.Concat([][2]string{
|
||||
// libc++ not yet available
|
||||
@@ -310,13 +306,13 @@ ln -s \
|
||||
},
|
||||
})
|
||||
|
||||
clang = t.newLLVM("clang", &llvmAttr{
|
||||
clang = t.newLLVMVariant("clang", &llvmAttr{
|
||||
flags: llvmProjectClang | llvmProjectLld,
|
||||
env: []string{
|
||||
"CFLAGS=" + cflags,
|
||||
"CXXFLAGS=" + cxxflags(),
|
||||
ldflags(false),
|
||||
},
|
||||
env: stage3ExclConcat(t, []string{},
|
||||
"CFLAGS="+earlyCFLAGS,
|
||||
"CXXFLAGS="+earlyCXXFLAGS(),
|
||||
"LDFLAGS="+earlyLDFLAGS(false),
|
||||
),
|
||||
cmake: slices.Concat([][2]string{
|
||||
{"LLVM_TARGETS_TO_BUILD", target},
|
||||
{"CMAKE_CROSSCOMPILING", "OFF"},
|
||||
@@ -326,24 +322,50 @@ ln -s \
|
||||
musl,
|
||||
compilerRT,
|
||||
runtimes,
|
||||
t.NewGit(),
|
||||
},
|
||||
script: `
|
||||
ln -s clang /work/system/bin/cc
|
||||
ln -s clang++ /work/system/bin/c++
|
||||
|
||||
ninja check-all
|
||||
`,
|
||||
|
||||
patches: [][2]string{
|
||||
{"xfail-broken-tests", `diff --git a/clang/test/Driver/hexagon-toolchain-linux.c b/clang/test/Driver/hexagon-toolchain-linux.c
|
||||
index e791353cca07..4efaf3948054 100644
|
||||
--- a/clang/test/Driver/hexagon-toolchain-linux.c
|
||||
+++ b/clang/test/Driver/hexagon-toolchain-linux.c
|
||||
@@ -1,3 +1,5 @@
|
||||
+// XFAIL: target={{.*-rosa-linux-musl}}
|
||||
+
|
||||
// UNSUPPORTED: system-windows
|
||||
{"add-rosa-vendor", `diff --git a/llvm/include/llvm/TargetParser/Triple.h b/llvm/include/llvm/TargetParser/Triple.h
|
||||
index 657f4230379e..12c305756184 100644
|
||||
--- a/llvm/include/llvm/TargetParser/Triple.h
|
||||
+++ b/llvm/include/llvm/TargetParser/Triple.h
|
||||
@@ -185,6 +185,7 @@ public:
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
diff --git a/clang/test/Modules/timestamps.c b/clang/test/Modules/timestamps.c
|
||||
Apple,
|
||||
PC,
|
||||
+ Rosa,
|
||||
SCEI,
|
||||
Freescale,
|
||||
IBM,
|
||||
diff --git a/llvm/lib/TargetParser/Triple.cpp b/llvm/lib/TargetParser/Triple.cpp
|
||||
index 0584c941d2e6..e4d6ef963cc7 100644
|
||||
--- a/llvm/lib/TargetParser/Triple.cpp
|
||||
+++ b/llvm/lib/TargetParser/Triple.cpp
|
||||
@@ -269,6 +269,7 @@ StringRef Triple::getVendorTypeName(VendorType Kind) {
|
||||
case NVIDIA: return "nvidia";
|
||||
case OpenEmbedded: return "oe";
|
||||
case PC: return "pc";
|
||||
+ case Rosa: return "rosa";
|
||||
case SCEI: return "scei";
|
||||
case SUSE: return "suse";
|
||||
}
|
||||
@@ -669,6 +670,7 @@ static Triple::VendorType parseVendor(StringRef VendorName) {
|
||||
.Case("suse", Triple::SUSE)
|
||||
.Case("oe", Triple::OpenEmbedded)
|
||||
.Case("intel", Triple::Intel)
|
||||
+ .Case("rosa", Triple::Rosa)
|
||||
.Default(Triple::UnknownVendor);
|
||||
}
|
||||
|
||||
`},
|
||||
|
||||
{"xfail-broken-tests", `diff --git a/clang/test/Modules/timestamps.c b/clang/test/Modules/timestamps.c
|
||||
index 50fdce630255..4b4465a75617 100644
|
||||
--- a/clang/test/Modules/timestamps.c
|
||||
+++ b/clang/test/Modules/timestamps.c
|
||||
@@ -353,9 +375,150 @@ index 50fdce630255..4b4465a75617 100644
|
||||
/// Verify timestamps that gets embedded in the module
|
||||
#include <c-header.h>
|
||||
|
||||
`},
|
||||
|
||||
{"path-system-include", `diff --git a/clang/lib/Driver/ToolChains/Linux.cpp b/clang/lib/Driver/ToolChains/Linux.cpp
|
||||
index cdbf21fb9026..dd052858700d 100644
|
||||
--- a/clang/lib/Driver/ToolChains/Linux.cpp
|
||||
+++ b/clang/lib/Driver/ToolChains/Linux.cpp
|
||||
@@ -773,6 +773,12 @@ void Linux::AddClangSystemIncludeArgs(const ArgList &DriverArgs,
|
||||
addExternCSystemInclude(
|
||||
DriverArgs, CC1Args,
|
||||
concat(SysRoot, "/usr/include", MultiarchIncludeDir));
|
||||
+ if (!MultiarchIncludeDir.empty() &&
|
||||
+ D.getVFS().exists(concat(SysRoot, "/system/include", MultiarchIncludeDir)))
|
||||
+ addExternCSystemInclude(
|
||||
+ DriverArgs, CC1Args,
|
||||
+ concat(SysRoot, "/system/include", MultiarchIncludeDir));
|
||||
+
|
||||
|
||||
if (getTriple().getOS() == llvm::Triple::RTEMS)
|
||||
return;
|
||||
@@ -783,6 +789,7 @@ void Linux::AddClangSystemIncludeArgs(const ArgList &DriverArgs,
|
||||
addExternCSystemInclude(DriverArgs, CC1Args, concat(SysRoot, "/include"));
|
||||
|
||||
addExternCSystemInclude(DriverArgs, CC1Args, concat(SysRoot, "/usr/include"));
|
||||
+ addExternCSystemInclude(DriverArgs, CC1Args, concat(SysRoot, "/system/include"));
|
||||
|
||||
if (!DriverArgs.hasArg(options::OPT_nobuiltininc) && getTriple().isMusl())
|
||||
addSystemInclude(DriverArgs, CC1Args, ResourceDirInclude);
|
||||
`},
|
||||
|
||||
{"path-system-libraries", `diff --git a/clang/lib/Driver/ToolChains/Linux.cpp b/clang/lib/Driver/ToolChains/Linux.cpp
|
||||
index 8ac8d4eb9181..f4d1347ab64d 100644
|
||||
--- a/clang/lib/Driver/ToolChains/Linux.cpp
|
||||
+++ b/clang/lib/Driver/ToolChains/Linux.cpp
|
||||
@@ -282,6 +282,7 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
|
||||
const bool IsHexagon = Arch == llvm::Triple::hexagon;
|
||||
const bool IsRISCV = Triple.isRISCV();
|
||||
const bool IsCSKY = Triple.isCSKY();
|
||||
+ const bool IsRosa = Triple.getVendor() == llvm::Triple::Rosa;
|
||||
|
||||
if (IsCSKY && !SelectedMultilibs.empty())
|
||||
SysRoot = SysRoot + SelectedMultilibs.back().osSuffix();
|
||||
@@ -318,12 +319,23 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
|
||||
const std::string OSLibDir = std::string(getOSLibDir(Triple, Args));
|
||||
const std::string MultiarchTriple = getMultiarchTriple(D, Triple, SysRoot);
|
||||
|
||||
+ if (IsRosa) {
|
||||
+ ExtraOpts.push_back("-rpath");
|
||||
+ ExtraOpts.push_back("/system/lib");
|
||||
+ ExtraOpts.push_back("-rpath");
|
||||
+ ExtraOpts.push_back(concat("/system/lib", MultiarchTriple));
|
||||
+ }
|
||||
+
|
||||
// mips32: Debian multilib, we use /libo32, while in other case, /lib is
|
||||
// used. We need add both libo32 and /lib.
|
||||
if (Arch == llvm::Triple::mips || Arch == llvm::Triple::mipsel) {
|
||||
Generic_GCC::AddMultilibPaths(D, SysRoot, "libo32", MultiarchTriple, Paths);
|
||||
- addPathIfExists(D, concat(SysRoot, "/libo32"), Paths);
|
||||
- addPathIfExists(D, concat(SysRoot, "/usr/libo32"), Paths);
|
||||
+ if (!IsRosa) {
|
||||
+ addPathIfExists(D, concat(SysRoot, "/libo32"), Paths);
|
||||
+ addPathIfExists(D, concat(SysRoot, "/usr/libo32"), Paths);
|
||||
+ } else {
|
||||
+ addPathIfExists(D, concat(SysRoot, "/system/libo32"), Paths);
|
||||
+ }
|
||||
}
|
||||
Generic_GCC::AddMultilibPaths(D, SysRoot, OSLibDir, MultiarchTriple, Paths);
|
||||
|
||||
@@ -341,18 +353,30 @@ Linux::Linux(const Driver &D, const llvm::Triple &Triple, const ArgList &Args)
|
||||
Paths);
|
||||
}
|
||||
|
||||
- addPathIfExists(D, concat(SysRoot, "/usr/lib", MultiarchTriple), Paths);
|
||||
- addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir), Paths);
|
||||
+ if (!IsRosa) {
|
||||
+ addPathIfExists(D, concat(SysRoot, "/usr/lib", MultiarchTriple), Paths);
|
||||
+ addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir), Paths);
|
||||
+ } else {
|
||||
+ addPathIfExists(D, concat(SysRoot, "/system/lib", MultiarchTriple), Paths);
|
||||
+ addPathIfExists(D, concat(SysRoot, "/system", OSLibDir), Paths);
|
||||
+ }
|
||||
if (IsRISCV) {
|
||||
StringRef ABIName = tools::riscv::getRISCVABI(Args, Triple);
|
||||
addPathIfExists(D, concat(SysRoot, "/", OSLibDir, ABIName), Paths);
|
||||
- addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir, ABIName), Paths);
|
||||
+ if (!IsRosa)
|
||||
+ addPathIfExists(D, concat(SysRoot, "/usr", OSLibDir, ABIName), Paths);
|
||||
+ else
|
||||
+ addPathIfExists(D, concat(SysRoot, "/system", OSLibDir, ABIName), Paths);
|
||||
}
|
||||
|
||||
Generic_GCC::AddMultiarchPaths(D, SysRoot, OSLibDir, Paths);
|
||||
|
||||
- addPathIfExists(D, concat(SysRoot, "/lib"), Paths);
|
||||
- addPathIfExists(D, concat(SysRoot, "/usr/lib"), Paths);
|
||||
+ if (!IsRosa) {
|
||||
+ addPathIfExists(D, concat(SysRoot, "/lib"), Paths);
|
||||
+ addPathIfExists(D, concat(SysRoot, "/usr/lib"), Paths);
|
||||
+ } else {
|
||||
+ addPathIfExists(D, concat(SysRoot, "/system/lib"), Paths);
|
||||
+ }
|
||||
}
|
||||
|
||||
ToolChain::RuntimeLibType Linux::GetDefaultRuntimeLibType() const {
|
||||
@@ -457,6 +481,9 @@ std::string Linux::getDynamicLinker(const ArgList &Args) const {
|
||||
return Triple.isArch64Bit() ? "/system/bin/linker64" : "/system/bin/linker";
|
||||
}
|
||||
if (Triple.isMusl()) {
|
||||
+ if (Triple.getVendor() == llvm::Triple::Rosa)
|
||||
+ return "/system/bin/linker";
|
||||
+
|
||||
std::string ArchName;
|
||||
bool IsArm = false;
|
||||
|
||||
diff --git a/clang/tools/clang-installapi/Options.cpp b/clang/tools/clang-installapi/Options.cpp
|
||||
index 64324a3f8b01..15ce70b68217 100644
|
||||
--- a/clang/tools/clang-installapi/Options.cpp
|
||||
+++ b/clang/tools/clang-installapi/Options.cpp
|
||||
@@ -515,7 +515,7 @@ bool Options::processFrontendOptions(InputArgList &Args) {
|
||||
FEOpts.FwkPaths = std::move(FrameworkPaths);
|
||||
|
||||
// Add default framework/library paths.
|
||||
- PathSeq DefaultLibraryPaths = {"/usr/lib", "/usr/local/lib"};
|
||||
+ PathSeq DefaultLibraryPaths = {"/usr/lib", "/system/lib", "/usr/local/lib"};
|
||||
PathSeq DefaultFrameworkPaths = {"/Library/Frameworks",
|
||||
"/System/Library/Frameworks"};
|
||||
|
||||
`},
|
||||
},
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
var (
|
||||
// llvm stores the result of Toolchain.newLLVM.
|
||||
llvm [_toolchainEnd][4]pkg.Artifact
|
||||
// llvmOnce is for lazy initialisation of llvm.
|
||||
llvmOnce [_toolchainEnd]sync.Once
|
||||
)
|
||||
|
||||
// NewLLVM returns LLVM toolchain across multiple [pkg.Artifact].
|
||||
func (t Toolchain) NewLLVM() (musl, compilerRT, runtimes, clang pkg.Artifact) {
|
||||
llvmOnce[t].Do(func() {
|
||||
llvm[t][0], llvm[t][1], llvm[t][2], llvm[t][3] = t.newLLVM()
|
||||
})
|
||||
return llvm[t][0], llvm[t][1], llvm[t][2], llvm[t][3]
|
||||
}
|
||||
|
||||
162
internal/rosa/make.go
Normal file
162
internal/rosa/make.go
Normal file
@@ -0,0 +1,162 @@
|
||||
package rosa
|
||||
|
||||
import (
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
func (t Toolchain) newMake() pkg.Artifact {
|
||||
const (
|
||||
version = "4.4.1"
|
||||
checksum = "YS_B07ZcAy9PbaK5_vKGj64SrxO2VMpnMKfc9I0Q9IC1rn0RwOH7802pJoj2Mq4a"
|
||||
)
|
||||
return t.New("make-"+version, TEarly, nil, nil, nil, `
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/make/configure \
|
||||
--prefix=/system \
|
||||
--build="${ROSA_TRIPLE}" \
|
||||
--disable-dependency-tracking
|
||||
./build.sh
|
||||
./make DESTDIR=/work install check
|
||||
`, pkg.Path(AbsUsrSrc.Append("make"), false, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/make/make-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[Make] = Toolchain.newMake }
|
||||
|
||||
// MakeAttr holds the project-specific attributes that will be applied to a new
|
||||
// [pkg.Artifact] compiled via [Make].
|
||||
type MakeAttr struct {
|
||||
// Mount the source tree writable.
|
||||
Writable bool
|
||||
|
||||
// Do not include default extras.
|
||||
OmitDefaults bool
|
||||
// Dependencies not provided by stage3.
|
||||
NonStage3 []pkg.Artifact
|
||||
|
||||
// Additional environment variables.
|
||||
Env []string
|
||||
// Runs before configure.
|
||||
ScriptEarly string
|
||||
// Runs after configure.
|
||||
ScriptConfigured string
|
||||
// Runs after install.
|
||||
Script string
|
||||
|
||||
// Remain in working directory set up during ScriptEarly.
|
||||
InPlace bool
|
||||
|
||||
// Flags passed to the configure script.
|
||||
Configure [][2]string
|
||||
// Extra make targets.
|
||||
Make []string
|
||||
// Target triple, zero value is equivalent to the Rosa OS triple.
|
||||
Build string
|
||||
// Whether to skip the check target.
|
||||
SkipCheck bool
|
||||
// Name of the check target, zero value is equivalent to "check".
|
||||
CheckName string
|
||||
|
||||
// Suffix appended to the source pathname.
|
||||
SourceSuffix string
|
||||
|
||||
// Passed through to [Toolchain.New].
|
||||
Flag int
|
||||
}
|
||||
|
||||
// NewViaMake returns a [pkg.Artifact] for compiling and installing via [Make].
|
||||
func (t Toolchain) NewViaMake(
|
||||
name, version string,
|
||||
source pkg.Artifact,
|
||||
attr *MakeAttr,
|
||||
extra ...pkg.Artifact,
|
||||
) pkg.Artifact {
|
||||
if name == "" || version == "" {
|
||||
panic("names must be non-empty")
|
||||
}
|
||||
if attr == nil {
|
||||
attr = new(MakeAttr)
|
||||
}
|
||||
build := `"${ROSA_TRIPLE}"`
|
||||
if attr.Build != "" {
|
||||
build = attr.Build
|
||||
}
|
||||
|
||||
var configureFlags string
|
||||
if len(attr.Configure) > 0 {
|
||||
const sep = " \\\n\t"
|
||||
configureFlags += sep + strings.Join(
|
||||
slices.Collect(func(yield func(string) bool) {
|
||||
for _, v := range attr.Configure {
|
||||
s := v[0]
|
||||
if v[1] == "" || (v[0] != "" &&
|
||||
v[0][0] >= 'a' &&
|
||||
v[0][0] <= 'z') {
|
||||
s = "--" + s
|
||||
}
|
||||
if v[1] != "" {
|
||||
s += "=" + v[1]
|
||||
}
|
||||
if !yield(s) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}),
|
||||
sep,
|
||||
)
|
||||
}
|
||||
|
||||
var buildFlag string
|
||||
if attr.Build != `""` {
|
||||
buildFlag = ` \
|
||||
--build=` + build
|
||||
}
|
||||
|
||||
makeTargets := make([]string, 1, 2+len(attr.Make))
|
||||
if !attr.SkipCheck {
|
||||
if attr.CheckName == "" {
|
||||
makeTargets = append(makeTargets, "check")
|
||||
} else {
|
||||
makeTargets = append(makeTargets, attr.CheckName)
|
||||
}
|
||||
}
|
||||
makeTargets = append(makeTargets, attr.Make...)
|
||||
if len(makeTargets) == 1 {
|
||||
makeTargets = nil
|
||||
}
|
||||
|
||||
finalExtra := []pkg.Artifact{
|
||||
t.Load(Make),
|
||||
}
|
||||
if attr.OmitDefaults || attr.Flag&TEarly == 0 {
|
||||
finalExtra = append(finalExtra,
|
||||
t.Load(Gawk),
|
||||
t.Load(Coreutils),
|
||||
)
|
||||
}
|
||||
finalExtra = append(finalExtra, extra...)
|
||||
|
||||
scriptEarly := attr.ScriptEarly
|
||||
if !attr.InPlace {
|
||||
scriptEarly += "\ncd \"$(mktemp -d)\""
|
||||
} else if scriptEarly == "" {
|
||||
panic("cannot remain in root")
|
||||
}
|
||||
|
||||
return t.New(name+"-"+version, attr.Flag, stage3Concat(t,
|
||||
attr.NonStage3,
|
||||
finalExtra...,
|
||||
), nil, attr.Env, scriptEarly+`
|
||||
/usr/src/`+name+`/configure \
|
||||
--prefix=/system`+buildFlag+configureFlags+attr.ScriptConfigured+`
|
||||
make "-j$(nproc)"`+strings.Join(makeTargets, " ")+`
|
||||
make DESTDIR=/work install
|
||||
`+attr.Script, pkg.Path(AbsUsrSrc.Append(
|
||||
name+attr.SourceSuffix,
|
||||
), attr.Writable, source))
|
||||
}
|
||||
27
internal/rosa/meson.go
Normal file
27
internal/rosa/meson.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newMeson() pkg.Artifact {
|
||||
const (
|
||||
version = "1.10.1"
|
||||
checksum = "w895BXF_icncnXatT_OLCFe2PYEtg4KrKooMgUYdN-nQVvbFX3PvYWHGEpogsHtd"
|
||||
)
|
||||
return t.New("meson-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Python),
|
||||
t.Load(Setuptools),
|
||||
}, nil, nil, `
|
||||
cd /usr/src/meson
|
||||
chmod -R +w meson.egg-info
|
||||
python3 setup.py \
|
||||
install \
|
||||
--prefix=/system \
|
||||
--root=/work
|
||||
`, pkg.Path(AbsUsrSrc.Append("meson"), true, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/mesonbuild/meson/releases/download/"+
|
||||
version+"/meson-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[Meson] = Toolchain.newMeson }
|
||||
36
internal/rosa/mksh.go
Normal file
36
internal/rosa/mksh.go
Normal file
@@ -0,0 +1,36 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newMksh() pkg.Artifact {
|
||||
const (
|
||||
version = "59c"
|
||||
checksum = "0Zj-k4nXEu3IuJY4lvwD2OrC2t27GdZj8SPy4DoaeuBRH1padWb7oREpYgwY8JNq"
|
||||
)
|
||||
return t.New("mksh-"+version, 0, stage3Concat(t, []pkg.Artifact{},
|
||||
t.Load(Perl),
|
||||
t.Load(Coreutils),
|
||||
), nil, []string{
|
||||
"LDSTATIC=-static",
|
||||
"CPPFLAGS=-DMKSH_DEFAULT_PROFILEDIR=\\\"/system/etc\\\"",
|
||||
}, `
|
||||
cd "$(mktemp -d)"
|
||||
sh /usr/src/mksh/Build.sh -r
|
||||
CPPFLAGS="${CPPFLAGS} -DMKSH_BINSHPOSIX -DMKSH_BINSHREDUCED" \
|
||||
sh /usr/src/mksh/Build.sh -r -L
|
||||
./test.sh -C regress:no-ctty
|
||||
|
||||
mkdir -p /work/system/bin/
|
||||
cp -v mksh /work/system/bin/
|
||||
cp -v lksh /work/system/bin/sh
|
||||
|
||||
mkdir -p /work/bin/
|
||||
ln -vs ../system/bin/sh /work/bin/
|
||||
`, pkg.Path(AbsUsrSrc.Append("mksh"), false, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://mbsd.evolvis.org/MirOS/dist/mir/mksh/mksh-R"+version+".tgz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[Mksh] = Toolchain.newMksh }
|
||||
@@ -29,39 +29,36 @@ func (t Toolchain) NewMusl(attr *MuslAttr) pkg.Artifact {
|
||||
|
||||
target := "install"
|
||||
script := `
|
||||
mv -v /work/lib/* /work/system/lib
|
||||
rmdir -v /work/lib/
|
||||
mkdir -p /work/system/bin
|
||||
COMPAT_LINKER_NAME="ld-musl-` + linuxArch() + `.so.1"
|
||||
ln -vs ../lib/libc.so /work/system/bin/linker
|
||||
ln -vs ../lib/libc.so /work/system/bin/ldd
|
||||
ln -vs libc.so "/work/system/lib/${COMPAT_LINKER_NAME}"
|
||||
rm -v "/work/lib/${COMPAT_LINKER_NAME}"
|
||||
rmdir -v /work/lib
|
||||
`
|
||||
if attr.Headers {
|
||||
target = "install-headers"
|
||||
script = ""
|
||||
}
|
||||
|
||||
extra := []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
}
|
||||
if t == toolchainStage3 {
|
||||
extra = nil
|
||||
}
|
||||
|
||||
return t.New("musl-"+version, slices.Concat(
|
||||
attr.Extra,
|
||||
extra,
|
||||
return t.New("musl-"+version, 0, stage3Concat(t, attr.Extra,
|
||||
t.Load(Make),
|
||||
t.Load(Coreutils),
|
||||
), nil, slices.Concat([]string{
|
||||
"ROSA_MUSL_TARGET=" + target,
|
||||
}, attr.Env), `
|
||||
# expected to be writable in copies
|
||||
chmod -R +w /usr/src/musl/
|
||||
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/musl/configure \
|
||||
--prefix=/system \
|
||||
--target="${ROSA_TRIPLE}"
|
||||
make "-j$(nproc)" DESTDIR=/work "${ROSA_MUSL_TARGET}"
|
||||
`+script, pkg.Path(AbsUsrSrc.Append("musl"), true, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://musl.libc.org/releases/musl-"+version+".tar.gz",
|
||||
`+script, pkg.Path(AbsUsrSrc.Append("musl"), false, t.NewPatchedSource(
|
||||
// expected to be writable in copies
|
||||
"musl", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://musl.libc.org/releases/musl-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), false,
|
||||
)))
|
||||
}
|
||||
|
||||
@@ -2,34 +2,38 @@ package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
// NewNinja returns a [pkg.Artifact] containing an installation of Ninja.
|
||||
func (t Toolchain) NewNinja() pkg.Artifact {
|
||||
func (t Toolchain) newNinja() pkg.Artifact {
|
||||
const (
|
||||
version = "1.13.2"
|
||||
checksum = "ygKWMa0YV2lWKiFro5hnL-vcKbc_-RACZuPu0Io8qDvgQlZ0dxv7hPNSFkt4214v"
|
||||
)
|
||||
return t.New("ninja-"+version, []pkg.Artifact{
|
||||
t.NewCMake(),
|
||||
t.NewPython(),
|
||||
return t.New("ninja-"+version, 0, []pkg.Artifact{
|
||||
t.Load(CMake),
|
||||
t.Load(Python),
|
||||
t.Load(Bash),
|
||||
}, nil, nil, `
|
||||
chmod -R +w /usr/src/ninja/
|
||||
mkdir -p /work/system/bin/ && cd /work/system/bin/
|
||||
cd "$(mktemp -d)"
|
||||
python3 /usr/src/ninja/configure.py \
|
||||
--bootstrap \
|
||||
--gtest-source-dir=/usr/src/googletest
|
||||
./ninja all
|
||||
./ninja_test
|
||||
|
||||
mkdir -p /work/system/bin/
|
||||
cp ninja /work/system/bin/
|
||||
`, pkg.Path(AbsUsrSrc.Append("googletest"), false,
|
||||
pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/google/googletest/releases/download/"+
|
||||
"v1.16.0/googletest-1.16.0.tar.gz",
|
||||
mustDecode("NjLGvSbgPy_B-y-o1hdanlzEzaYeStFcvFGxpYV3KYlhrWWFRcugYhM3ZMzOA9B_"),
|
||||
pkg.TarGzip,
|
||||
)), pkg.Path(AbsUsrSrc.Append("ninja"), true,
|
||||
pkg.NewHTTPGetTar(
|
||||
)), pkg.Path(AbsUsrSrc.Append("ninja"), true, t.NewPatchedSource(
|
||||
"ninja", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/ninja-build/ninja/archive/refs/tags/"+
|
||||
"v"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), false,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[Ninja] = Toolchain.newNinja }
|
||||
|
||||
36
internal/rosa/openssl.go
Normal file
36
internal/rosa/openssl.go
Normal file
@@ -0,0 +1,36 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newOpenSSL() pkg.Artifact {
|
||||
const (
|
||||
version = "3.5.5"
|
||||
checksum = "I2Hp1LxcTR8j4G6LFEQMVy6EJH-Na1byI9Ti-ThBot6EMLNRnjGXGq-WXrim3Fkz"
|
||||
)
|
||||
return t.New("openssl-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Perl),
|
||||
t.Load(Make),
|
||||
|
||||
t.Load(Zlib),
|
||||
t.Load(KernelHeaders),
|
||||
}, nil, []string{
|
||||
"CC=cc",
|
||||
}, `
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/openssl/Configure \
|
||||
--prefix=/system \
|
||||
--libdir=lib \
|
||||
--openssldir=etc/ssl
|
||||
make \
|
||||
"-j$(nproc)" \
|
||||
HARNESS_JOBS=256 \
|
||||
test
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("openssl"), false, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/openssl/openssl/releases/download/"+
|
||||
"openssl-"+version+"/openssl-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[OpenSSL] = Toolchain.newOpenSSL }
|
||||
@@ -2,31 +2,38 @@ package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
// NewPerl returns a [pkg.Artifact] containing an installation of perl.
|
||||
func (t Toolchain) NewPerl() pkg.Artifact {
|
||||
func (t Toolchain) newPerl() pkg.Artifact {
|
||||
const (
|
||||
version = "5.42.0"
|
||||
checksum = "2KR7Jbpk-ZVn1a30LQRwbgUvg2AXlPQZfzrqCr31qD5-yEsTwVQ_W76eZH-EdxM9"
|
||||
)
|
||||
return t.New("perl-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
return t.New("perl-"+version, TEarly, []pkg.Artifact{
|
||||
t.Load(Make),
|
||||
}, nil, nil, `
|
||||
chmod -R +w /usr/src/perl && cd /usr/src/perl
|
||||
cd /usr/src/perl
|
||||
|
||||
echo 'print STDOUT "1..0 # Skip broken test\n";' > ext/Pod-Html/t/htmldir3.t
|
||||
rm -f /system/bin/ps # perl does not like toybox ps
|
||||
|
||||
./Configure \
|
||||
-des \
|
||||
-Dprefix=/system \
|
||||
-Dcc="${CC}" \
|
||||
-Dcc="clang" \
|
||||
-Dcflags='--std=gnu99' \
|
||||
-Dldflags="${LDFLAGS}" \
|
||||
-Doptimize='-O2 -fno-strict-aliasing' \
|
||||
-Duseithreads
|
||||
make "-j$(nproc)" # test
|
||||
make \
|
||||
"-j$(nproc)" \
|
||||
TEST_JOBS=256 \
|
||||
test_harness
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("perl"), true, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://www.cpan.org/src/5.0/perl-"+version+".tar.gz",
|
||||
`, pkg.Path(AbsUsrSrc.Append("perl"), true, t.NewPatchedSource(
|
||||
"perl", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://www.cpan.org/src/5.0/perl-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), false,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[Perl] = Toolchain.newPerl }
|
||||
|
||||
23
internal/rosa/pkg-config.go
Normal file
23
internal/rosa/pkg-config.go
Normal file
@@ -0,0 +1,23 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newPkgConfig() pkg.Artifact {
|
||||
const (
|
||||
version = "0.29.2"
|
||||
checksum = "gi7yAvkwo20Inys1tHbeYZ3Wjdm5VPkrnO0Q6_QZPCAwa1zrA8F4a63cdZDd-717"
|
||||
)
|
||||
return t.NewViaMake("pkg-config", version, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://pkgconfig.freedesktop.org/releases/"+
|
||||
"pkg-config-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Configure: [][2]string{
|
||||
{"CFLAGS", "'-Wno-int-conversion'"},
|
||||
{"with-internal-glib"},
|
||||
},
|
||||
})
|
||||
}
|
||||
func init() { artifactsF[PkgConfig] = Toolchain.newPkgConfig }
|
||||
@@ -1,18 +1,28 @@
|
||||
package rosa
|
||||
|
||||
import (
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
// NewPython returns a [pkg.Artifact] containing an installation of Python.
|
||||
func (t Toolchain) NewPython() pkg.Artifact {
|
||||
func (t Toolchain) newPython() pkg.Artifact {
|
||||
const (
|
||||
version = "3.14.2"
|
||||
checksum = "7nZunVMGj0viB-CnxpcRego2C90X5wFsMTgsoewd5z-KSZY2zLuqaBwG-14zmKys"
|
||||
)
|
||||
skipTests := []string{
|
||||
return t.NewViaMake("python", version, t.NewPatchedSource("python", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://www.python.org/ftp/python/"+version+
|
||||
"/Python-"+version+".tgz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), false), &MakeAttr{
|
||||
// test_synopsis_sourceless assumes this is writable and checks __pycache__
|
||||
Writable: true,
|
||||
|
||||
Env: []string{
|
||||
"EXTRATESTOPTS=-j0 -x " + strings.Join([]string{
|
||||
// requires internet access (http://www.pythontest.net/)
|
||||
"test_asyncio",
|
||||
"test_socket",
|
||||
@@ -32,29 +42,110 @@ func (t Toolchain) NewPython() pkg.Artifact {
|
||||
|
||||
// breaks on llvm
|
||||
"test_dbm_gnu",
|
||||
}
|
||||
return t.New("python-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
t.NewZlib(),
|
||||
t.NewLibffi(),
|
||||
}, nil, []string{
|
||||
"EXTRATESTOPTS=-j0 -x " + strings.Join(skipTests, " -x "),
|
||||
}, `
|
||||
# test_synopsis_sourceless assumes this is writable and checks __pycache__
|
||||
chmod -R +w /usr/src/python/
|
||||
}, " -x "),
|
||||
|
||||
// _ctypes appears to infer something from the linker name
|
||||
"LDFLAGS=-Wl,--dynamic-linker=/system/lib/" +
|
||||
"ld-musl-" + linuxArch() + ".so.1",
|
||||
},
|
||||
|
||||
ScriptEarly: `
|
||||
export HOME="$(mktemp -d)"
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/python/configure \
|
||||
--prefix=/system
|
||||
make "-j$(nproc)"
|
||||
make test
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("python"), true,
|
||||
pkg.NewHTTPGetTar(
|
||||
nil, "https://www.python.org/ftp/python/"+version+
|
||||
"/Python-"+version+".tgz",
|
||||
`,
|
||||
|
||||
CheckName: "test",
|
||||
},
|
||||
t.Load(Zlib),
|
||||
t.Load(Libffi),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Python] = Toolchain.newPython }
|
||||
|
||||
// newViaPip is a helper for installing python dependencies via pip.
|
||||
func (t Toolchain) newViaPip(
|
||||
name, version, abi, platform, checksum, prefix string,
|
||||
extra ...pkg.Artifact,
|
||||
) pkg.Artifact {
|
||||
wname := name + "-" + version + "-py3-" + abi + "-" + platform + ".whl"
|
||||
return t.New(name+"-"+version, 0, slices.Concat([]pkg.Artifact{
|
||||
t.Load(Python),
|
||||
}, extra), nil, nil, `
|
||||
pip3 install \
|
||||
--no-index \
|
||||
--prefix=/system \
|
||||
--root=/work \
|
||||
/usr/src/`+wname+`
|
||||
`, pkg.Path(AbsUsrSrc.Append(wname), false, pkg.NewHTTPGet(
|
||||
nil, prefix+wname,
|
||||
mustDecode(checksum),
|
||||
)))
|
||||
}
|
||||
|
||||
func (t Toolchain) newSetuptools() pkg.Artifact {
|
||||
const (
|
||||
version = "80.10.1"
|
||||
checksum = "p3rlwEmy1krcUH1KabprQz1TCYjJ8ZUjOQknQsWh3q-XEqLGEd3P4VrCc7ouHGXU"
|
||||
)
|
||||
return t.New("setuptools-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Python),
|
||||
}, nil, nil, `
|
||||
pip3 install \
|
||||
--no-index \
|
||||
--prefix=/system \
|
||||
--root=/work \
|
||||
/usr/src/setuptools
|
||||
`, pkg.Path(AbsUsrSrc.Append("setuptools"), true, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/pypa/setuptools/archive/refs/tags/"+
|
||||
"v"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[Setuptools] = Toolchain.newSetuptools }
|
||||
|
||||
func (t Toolchain) newPygments() pkg.Artifact {
|
||||
return t.newViaPip("pygments", "2.19.2", "none", "any",
|
||||
"ak_lwTalmSr7W4Mjy2XBZPG9I6a0gwSy2pS87N8x4QEuZYif0ie9z0OcfRfi9msd",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/")
|
||||
}
|
||||
func init() { artifactsF[Pygments] = Toolchain.newPygments }
|
||||
|
||||
func (t Toolchain) newPluggy() pkg.Artifact {
|
||||
return t.newViaPip("pluggy", "1.6.0", "none", "any",
|
||||
"2HWYBaEwM66-y1hSUcWI1MyE7dVVuNNRW24XD6iJBey4YaUdAK8WeXdtFMQGC-4J",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/")
|
||||
}
|
||||
func init() { artifactsF[Pluggy] = Toolchain.newPluggy }
|
||||
|
||||
func (t Toolchain) newPackaging() pkg.Artifact {
|
||||
return t.newViaPip("packaging", "26.0", "none", "any",
|
||||
"iVVXcqdwHDskPKoCFUlh2x8J0Gyq-bhO4ns9DvUJ7oJjeOegRYtSIvLV33Bki-pP",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/")
|
||||
}
|
||||
func init() { artifactsF[Packaging] = Toolchain.newPackaging }
|
||||
|
||||
func (t Toolchain) newIniConfig() pkg.Artifact {
|
||||
const version = "2.3.0"
|
||||
return t.newViaPip("iniconfig", version, "none", "any",
|
||||
"SDgs4S5bXi77aVOeKTPv2TUrS3M9rduiK4DpU0hCmDsSBWqnZcWInq9lsx6INxut",
|
||||
"https://github.com/pytest-dev/iniconfig/releases/download/"+
|
||||
"v"+version+"/")
|
||||
}
|
||||
func init() { artifactsF[IniConfig] = Toolchain.newIniConfig }
|
||||
|
||||
func (t Toolchain) newPyTest() pkg.Artifact {
|
||||
const version = "9.0.2"
|
||||
return t.newViaPip("pytest", version, "none", "any",
|
||||
"IM2wDbLke1EtZhF92zvAjUl_Hms1uKDtM7U8Dt4acOaChMnDg1pW7ib8U0wYGDLH",
|
||||
"https://github.com/pytest-dev/pytest/releases/download/"+
|
||||
version+"/",
|
||||
t.Load(IniConfig),
|
||||
t.Load(Packaging),
|
||||
t.Load(Pluggy),
|
||||
t.Load(Pygments),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[PyTest] = Toolchain.newPyTest }
|
||||
|
||||
@@ -49,6 +49,8 @@ func linuxArch() string {
|
||||
switch runtime.GOARCH {
|
||||
case "amd64":
|
||||
return "x86_64"
|
||||
case "arm64":
|
||||
return "aarch64"
|
||||
|
||||
default:
|
||||
panic("unsupported target " + runtime.GOARCH)
|
||||
@@ -63,16 +65,11 @@ func triplet() string {
|
||||
const (
|
||||
// EnvTriplet holds the return value of triplet.
|
||||
EnvTriplet = "ROSA_TRIPLE"
|
||||
// EnvRefCFLAGS holds toolchain-specific reference CFLAGS.
|
||||
EnvRefCFLAGS = "ROSA_CFLAGS"
|
||||
// EnvRefCXXFLAGS holds toolchain-specific reference CXXFLAGS.
|
||||
EnvRefCXXFLAGS = "ROSA_CXXFLAGS"
|
||||
)
|
||||
|
||||
// ldflags returns LDFLAGS corresponding to triplet.
|
||||
func ldflags(static bool) string {
|
||||
s := "LDFLAGS=" +
|
||||
"-fuse-ld=lld " +
|
||||
// earlyLDFLAGS returns LDFLAGS corresponding to triplet.
|
||||
func earlyLDFLAGS(static bool) string {
|
||||
s := "-fuse-ld=lld " +
|
||||
"-L/system/lib -Wl,-rpath=/system/lib " +
|
||||
"-L/system/lib/" + triplet() + " " +
|
||||
"-Wl,-rpath=/system/lib/" + triplet() + " " +
|
||||
@@ -80,18 +77,18 @@ func ldflags(static bool) string {
|
||||
"-unwindlib=libunwind " +
|
||||
"-Wl,--as-needed"
|
||||
if !static {
|
||||
s += " -Wl,--dynamic-linker=/system/lib/ld-musl-x86_64.so.1"
|
||||
s += " -Wl,--dynamic-linker=/system/bin/linker"
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// cflags is reference CFLAGS for the Rosa OS toolchain.
|
||||
const cflags = "-Qunused-arguments " +
|
||||
// earlyCFLAGS is reference CFLAGS for the stage3 toolchain.
|
||||
const earlyCFLAGS = "-Qunused-arguments " +
|
||||
"-isystem/system/include"
|
||||
|
||||
// cxxflags returns reference CXXFLAGS for the Rosa OS toolchain corresponding
|
||||
// to [runtime.GOARCH].
|
||||
func cxxflags() string {
|
||||
// earlyCXXFLAGS returns reference CXXFLAGS for the stage3 toolchain
|
||||
// corresponding to [runtime.GOARCH].
|
||||
func earlyCXXFLAGS() string {
|
||||
return "--start-no-unused-arguments " +
|
||||
"-stdlib=libc++ " +
|
||||
"--end-no-unused-arguments " +
|
||||
@@ -119,8 +116,30 @@ const (
|
||||
|
||||
// Std denotes the standard Rosa OS toolchain.
|
||||
Std
|
||||
|
||||
// _toolchainEnd is the total number of toolchains available and does not
|
||||
// denote a valid toolchain.
|
||||
_toolchainEnd
|
||||
)
|
||||
|
||||
// stage3Concat concatenates s and values. If the current toolchain is
|
||||
// toolchainStage3, stage3Concat returns s as is.
|
||||
func stage3Concat[S ~[]E, E any](t Toolchain, s S, values ...E) S {
|
||||
if t == toolchainStage3 {
|
||||
return s
|
||||
}
|
||||
return slices.Concat(s, values)
|
||||
}
|
||||
|
||||
// stage3ExclConcat concatenates s and values. If the current toolchain is not
|
||||
// toolchainStage3, stage3ExclConcat returns s as is.
|
||||
func stage3ExclConcat[S ~[]E, E any](t Toolchain, s S, values ...E) S {
|
||||
if t == toolchainStage3 {
|
||||
return slices.Concat(s, values)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// lastIndexFunc is like [strings.LastIndexFunc] but for [slices].
|
||||
func lastIndexFunc[S ~[]E, E any](s S, f func(E) bool) (i int) {
|
||||
if i = slices.IndexFunc(s, f); i < 0 {
|
||||
@@ -156,9 +175,17 @@ func fixupEnviron(env, extras []string, paths ...string) []string {
|
||||
// build script under.
|
||||
var absCureScript = fhs.AbsUsrBin.Append(".cure-script")
|
||||
|
||||
const (
|
||||
// TExclusive denotes an exclusive [pkg.Artifact].
|
||||
TExclusive = 1 << iota
|
||||
// TEarly hints for an early variant of [Toybox] to be used when available.
|
||||
TEarly
|
||||
)
|
||||
|
||||
// New returns a [pkg.Artifact] compiled on this toolchain.
|
||||
func (t Toolchain) New(
|
||||
name string,
|
||||
flag int,
|
||||
extra []pkg.Artifact,
|
||||
checksum *pkg.Checksum,
|
||||
env []string,
|
||||
@@ -169,71 +196,78 @@ func (t Toolchain) New(
|
||||
const lcMessages = "LC_MESSAGES=C.UTF-8"
|
||||
|
||||
var (
|
||||
path = AbsSystem.Append("bin", "busybox")
|
||||
args = []string{"hush", absCureScript.String()}
|
||||
path = AbsSystem.Append("bin", "sh")
|
||||
args = []string{"sh", absCureScript.String()}
|
||||
support []pkg.Artifact
|
||||
)
|
||||
switch t {
|
||||
case toolchainBusybox:
|
||||
name += "-early"
|
||||
support = slices.Concat([]pkg.Artifact{newBusyboxBin()}, extra)
|
||||
path = AbsSystem.Append("bin", "busybox")
|
||||
args[0] = "hush"
|
||||
env = fixupEnviron(env, nil, "/system/bin")
|
||||
|
||||
case toolchainStage3:
|
||||
const (
|
||||
version = "20260111T160052Z"
|
||||
checksum = "c5_FwMnRN8RZpTdBLGYkL4RR8ampdaZN2JbkgrFLe8-QHQAVQy08APVvIL6eT7KW"
|
||||
)
|
||||
name += "-boot"
|
||||
var seed string
|
||||
switch runtime.GOARCH {
|
||||
case "amd64":
|
||||
seed = "c5_FwMnRN8RZpTdBLGYkL4RR8ampdaZN2JbkgrFLe8-QHQAVQy08APVvIL6eT7KW"
|
||||
case "arm64":
|
||||
seed = "79uRbRI44PyknQQ9RlFUQrwqplup7vImiIk6klefL8TN-fT42TXMS_v4XszwexCb"
|
||||
|
||||
default:
|
||||
panic("unsupported target " + runtime.GOARCH)
|
||||
}
|
||||
path = fhs.AbsRoot.Append("bin", "bash")
|
||||
args[0] = "bash"
|
||||
support = slices.Concat([]pkg.Artifact{
|
||||
cureEtc{},
|
||||
toolchainBusybox.New("stage3-"+version, nil, nil, nil, `
|
||||
toolchainBusybox.New("stage3", 0, nil, nil, nil, `
|
||||
tar -C /work -xf /usr/src/stage3.tar.xz
|
||||
rm -rf /work/dev/ /work/proc/
|
||||
ln -vs ../usr/bin /work/bin
|
||||
`, pkg.Path(AbsUsrSrc.Append("stage3.tar.xz"), false,
|
||||
pkg.NewHTTPGet(
|
||||
nil, "https://distfiles.gentoo.org/releases/"+
|
||||
runtime.GOARCH+"/autobuilds/"+version+
|
||||
"/stage3-"+runtime.GOARCH+"-musl-llvm-"+version+".tar.xz",
|
||||
mustDecode(checksum),
|
||||
nil, "https://basement.gensokyo.uk/seed/"+seed,
|
||||
mustDecode(seed),
|
||||
),
|
||||
)),
|
||||
}, extra)
|
||||
env = fixupEnviron(env, []string{
|
||||
EnvTriplet + "=" + triplet(),
|
||||
lcMessages,
|
||||
|
||||
EnvRefCFLAGS + "=" + cflags,
|
||||
EnvRefCXXFLAGS + "=" + cxxflags(),
|
||||
ldflags(true),
|
||||
"LDFLAGS=" + earlyLDFLAGS(true),
|
||||
}, "/system/bin",
|
||||
"/usr/bin",
|
||||
"/usr/lib/llvm/21/bin",
|
||||
)
|
||||
|
||||
case toolchainIntermediate, Std:
|
||||
if t < Std {
|
||||
name += "-std"
|
||||
}
|
||||
|
||||
boot := t - 1
|
||||
musl, compilerRT, runtimes, clang := boot.NewLLVM()
|
||||
toybox := Toybox
|
||||
if flag&TEarly != 0 {
|
||||
toybox = toyboxEarly
|
||||
}
|
||||
support = slices.Concat(extra, []pkg.Artifact{
|
||||
cureEtc{newIANAEtc()},
|
||||
musl,
|
||||
compilerRT,
|
||||
runtimes,
|
||||
clang,
|
||||
boot.NewBusybox(),
|
||||
boot.Load(Mksh),
|
||||
boot.Load(toybox),
|
||||
})
|
||||
env = fixupEnviron(env, []string{
|
||||
EnvTriplet + "=" + triplet(),
|
||||
lcMessages,
|
||||
|
||||
// autotools projects act up with CFLAGS
|
||||
"CC=clang " + cflags,
|
||||
EnvRefCFLAGS + "=" + cflags,
|
||||
"CXX=clang++ " + cxxflags(),
|
||||
EnvRefCXXFLAGS + "=" + cxxflags(),
|
||||
ldflags(false),
|
||||
|
||||
"AR=ar",
|
||||
"RANLIB=ranlib",
|
||||
"LIBCC=/system/lib/clang/21/lib/" + triplet() +
|
||||
@@ -245,7 +279,7 @@ ln -vs ../usr/bin /work/bin
|
||||
}
|
||||
|
||||
return pkg.NewExec(
|
||||
name, checksum, pkg.ExecTimeoutMax,
|
||||
name, checksum, pkg.ExecTimeoutMax, flag&TExclusive != 0,
|
||||
fhs.AbsRoot, env,
|
||||
path, args,
|
||||
|
||||
@@ -258,3 +292,43 @@ ln -vs ../usr/bin /work/bin
|
||||
)}, paths)...,
|
||||
)
|
||||
}
|
||||
|
||||
// NewPatchedSource returns [pkg.Artifact] of source with patches applied. If
|
||||
// passthrough is true, source is returned as is for zero length patches.
|
||||
func (t Toolchain) NewPatchedSource(
|
||||
name, version string,
|
||||
source pkg.Artifact,
|
||||
passthrough bool,
|
||||
patches ...[2]string,
|
||||
) pkg.Artifact {
|
||||
if passthrough && len(patches) == 0 {
|
||||
return source
|
||||
}
|
||||
|
||||
paths := make([]pkg.ExecPath, len(patches)+1)
|
||||
for i, p := range patches {
|
||||
paths[i+1] = pkg.Path(
|
||||
AbsUsrSrc.Append(name+"-patches", p[0]+".patch"), false,
|
||||
pkg.NewFile(p[0]+".patch", []byte(p[1])),
|
||||
)
|
||||
}
|
||||
paths[0] = pkg.Path(AbsUsrSrc.Append(name), false, source)
|
||||
|
||||
aname := name + "-" + version + "-src"
|
||||
script := `
|
||||
cp -r /usr/src/` + name + `/. /work/.
|
||||
chmod -R +w /work && cd /work
|
||||
`
|
||||
if len(paths) > 1 {
|
||||
script += `
|
||||
cat /usr/src/` + name + `-patches/* | \
|
||||
patch \
|
||||
-p 1 \
|
||||
--ignore-whitespace
|
||||
`
|
||||
aname += "-patched"
|
||||
}
|
||||
return t.New(aname, 0, stage3Concat(t, []pkg.Artifact{},
|
||||
t.Load(Patch),
|
||||
), nil, nil, script, paths...)
|
||||
}
|
||||
|
||||
@@ -2,28 +2,30 @@ package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
// NewRsync returns a [pkg.Artifact] containing an installation of rsync.
|
||||
func (t Toolchain) NewRsync() pkg.Artifact {
|
||||
func (t Toolchain) newRsync() pkg.Artifact {
|
||||
const (
|
||||
version = "3.4.1"
|
||||
checksum = "VBlTsBWd9z3r2-ex7GkWeWxkUc5OrlgDzikAC0pK7ufTjAJ0MbmC_N04oSVTGPiv"
|
||||
)
|
||||
return t.New("rsync-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
}, nil, nil, `
|
||||
cd "$(mktemp -d)"
|
||||
/usr/src/rsync/configure --prefix=/system \
|
||||
--build="${ROSA_TRIPLE}" \
|
||||
--disable-openssl \
|
||||
--disable-xxhash \
|
||||
--disable-zstd \
|
||||
--disable-lz4
|
||||
make "-j${nproc}"
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("rsync"), false, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://download.samba.org/pub/rsync/src/rsync-"+version+".tar.gz",
|
||||
return t.NewViaMake("rsync", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://download.samba.org/pub/rsync/src/"+
|
||||
"rsync-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
), &MakeAttr{
|
||||
Configure: [][2]string{
|
||||
{"disable-openssl"},
|
||||
{"disable-xxhash"},
|
||||
{"disable-zstd"},
|
||||
{"disable-lz4"},
|
||||
},
|
||||
|
||||
// circular dependency
|
||||
SkipCheck: true,
|
||||
|
||||
Flag: TEarly,
|
||||
},
|
||||
t.Load(Gawk),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Rsync] = Toolchain.newRsync }
|
||||
|
||||
84
internal/rosa/ssl.go
Normal file
84
internal/rosa/ssl.go
Normal file
@@ -0,0 +1,84 @@
|
||||
package rosa
|
||||
|
||||
import (
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
func (t Toolchain) newNSS() pkg.Artifact {
|
||||
const (
|
||||
version = "3_120"
|
||||
checksum = "9M0SNMrj9BJp6RH2rQnMm6bZWtP0Kgj64D5JNPHF7Cxr2_8kfy3msubIcvEPwC35"
|
||||
|
||||
version0 = "4_38_2"
|
||||
checksum0 = "25x2uJeQnOHIiq_zj17b4sYqKgeoU8-IsySUptoPcdHZ52PohFZfGuIisBreWzx0"
|
||||
)
|
||||
return t.New("nss-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Perl),
|
||||
t.Load(Python),
|
||||
t.Load(Unzip),
|
||||
t.Load(Make),
|
||||
t.Load(Gawk),
|
||||
t.Load(Coreutils),
|
||||
|
||||
t.Load(Zlib),
|
||||
t.Load(KernelHeaders),
|
||||
}, nil, nil, `
|
||||
unzip /usr/src/nspr.zip -d /usr/src
|
||||
mv '/usr/src/nspr-NSPR_`+version0+`_RTM' /usr/src/nspr
|
||||
cd /usr/src/nss
|
||||
|
||||
make \
|
||||
"-j$(nproc)" \
|
||||
CCC="clang++" \
|
||||
NSDISTMODE=copy \
|
||||
BUILD_OPT=1 \
|
||||
USE_64=1 \
|
||||
nss_build_all
|
||||
mkdir -p /work/system/nss
|
||||
cp -r \
|
||||
/usr/src/dist/. \
|
||||
lib/ckfw/builtins/certdata.txt \
|
||||
/work/system/nss
|
||||
`, pkg.Path(AbsUsrSrc.Append("nss"), true, t.NewPatchedSource(
|
||||
"nss", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/nss-dev/nss/archive/refs/tags/"+
|
||||
"NSS_"+version+"_RTM.tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), false,
|
||||
)), pkg.Path(AbsUsrSrc.Append("nspr.zip"), false, pkg.NewHTTPGet(
|
||||
nil, "https://hg-edge.mozilla.org/projects/nspr/archive/"+
|
||||
"NSPR_"+version0+"_RTM.zip",
|
||||
mustDecode(checksum0),
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[NSS] = Toolchain.newNSS }
|
||||
|
||||
func (t Toolchain) newBuildCATrust() pkg.Artifact {
|
||||
const version = "0.4.0"
|
||||
return t.newViaPip("buildcatrust", version, "none", "any",
|
||||
"k_FGzkRCLjbTWBkuBLzQJ1S8FPAz19neJZlMHm0t10F2Y0hElmvVwdSBRc03Rjo1",
|
||||
"https://github.com/nix-community/buildcatrust/"+
|
||||
"releases/download/v"+version+"/")
|
||||
}
|
||||
func init() { artifactsF[buildcatrust] = Toolchain.newBuildCATrust }
|
||||
|
||||
func (t Toolchain) newNSSCACert() pkg.Artifact {
|
||||
return t.New("nss-cacert", 0, []pkg.Artifact{
|
||||
t.Load(Bash),
|
||||
t.Load(Python),
|
||||
|
||||
t.Load(NSS),
|
||||
t.Load(buildcatrust),
|
||||
}, nil, nil, `
|
||||
mkdir -p /work/system/etc/ssl/{certs/unbundled,certs/hashed,trust-source}
|
||||
buildcatrust \
|
||||
--certdata_input /system/nss/certdata.txt \
|
||||
--ca_bundle_output /work/system/etc/ssl/certs/ca-bundle.crt \
|
||||
--ca_standard_bundle_output /work/system/etc/ssl/certs/ca-no-trust-rules-bundle.crt \
|
||||
--ca_unpacked_output /work/system/etc/ssl/certs/unbundled \
|
||||
--ca_hashed_unpacked_output /work/system/etc/ssl/certs/hashed \
|
||||
--p11kit_output /work/system/etc/ssl/trust-source/ca-bundle.trust.p11-kit
|
||||
`)
|
||||
}
|
||||
func init() { artifactsF[NSSCACert] = Toolchain.newNSSCACert }
|
||||
64
internal/rosa/toybox.go
Normal file
64
internal/rosa/toybox.go
Normal file
@@ -0,0 +1,64 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newToybox(suffix, script string) pkg.Artifact {
|
||||
const (
|
||||
version = "0.8.13"
|
||||
checksum = "rZ1V1ATDte2WeQZanxLVoiRGdfPXhMlEo5-exX-e-ml8cGn9qOv0ABEUVZpX3wTI"
|
||||
)
|
||||
return t.New("toybox-"+version+suffix, TEarly, stage3Concat(t, []pkg.Artifact{},
|
||||
t.Load(Make),
|
||||
t.Load(Bash),
|
||||
t.Load(Gzip),
|
||||
|
||||
t.Load(KernelHeaders),
|
||||
), nil, stage3Concat(t, []string{},
|
||||
"ROSA_CHECK=make USER=cure tests",
|
||||
), `
|
||||
ln -s ../system/bin/bash /bin/ || true
|
||||
cd /usr/src/toybox
|
||||
chmod +w kconfig tests
|
||||
rm \
|
||||
tests/du.test \
|
||||
tests/sed.test \
|
||||
tests/tar.test \
|
||||
tests/ls.test \
|
||||
tests/taskset.test
|
||||
|
||||
make defconfig
|
||||
sed -i \
|
||||
's/^CONFIG_TOYBOX_ZHELP=y$/CONFIG_TOYBOX_ZHELP=0/' \
|
||||
.config
|
||||
`+script+`
|
||||
make \
|
||||
"-j$(nproc)" \
|
||||
LDFLAGS="${LDFLAGS} -static"
|
||||
${ROSA_CHECK}
|
||||
PREFIX=/work/system/bin make install_flat
|
||||
|
||||
mkdir -p /work/usr/bin
|
||||
ln -s ../../system/bin/env /work/usr/bin
|
||||
`, pkg.Path(AbsUsrSrc.Append("toybox"), true, pkg.NewHTTPGetTar(
|
||||
nil,
|
||||
"https://landley.net/toybox/downloads/toybox-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
}
|
||||
func init() {
|
||||
artifactsF[Toybox] = func(t Toolchain) pkg.Artifact {
|
||||
return t.newToybox("", "")
|
||||
}
|
||||
|
||||
artifactsF[toyboxEarly] = func(t Toolchain) pkg.Artifact {
|
||||
return t.newToybox("-early", `
|
||||
echo '
|
||||
CONFIG_EXPR=y
|
||||
CONFIG_TR=y
|
||||
CONFIG_AWK=y
|
||||
CONFIG_DIFF=y
|
||||
' >> .config
|
||||
`)
|
||||
}
|
||||
}
|
||||
34
internal/rosa/unzip.go
Normal file
34
internal/rosa/unzip.go
Normal file
@@ -0,0 +1,34 @@
|
||||
package rosa
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"hakurei.app/internal/pkg"
|
||||
)
|
||||
|
||||
func (t Toolchain) newUnzip() pkg.Artifact {
|
||||
const (
|
||||
version = "6.0"
|
||||
checksum = "fcqjB1IOVRNJ16K5gTGEDt3zCJDVBc7EDSra9w3H93stqkNwH1vaPQs_QGOpQZu1"
|
||||
)
|
||||
return t.New("unzip-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Make),
|
||||
t.Load(Coreutils),
|
||||
}, nil, nil, `
|
||||
cd /usr/src/unzip/
|
||||
unix/configure
|
||||
make -f unix/Makefile generic1
|
||||
|
||||
mkdir -p /work/system/bin/
|
||||
mv unzip /work/system/bin/
|
||||
`, pkg.Path(AbsUsrSrc.Append("unzip"), true, t.NewPatchedSource(
|
||||
"unzip", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://downloads.sourceforge.net/project/infozip/"+
|
||||
"UnZip%206.x%20%28latest%29/UnZip%20"+version+"/"+
|
||||
"unzip"+strings.ReplaceAll(version, ".", "")+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), false,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[Unzip] = Toolchain.newUnzip }
|
||||
84
internal/rosa/wayland.go
Normal file
84
internal/rosa/wayland.go
Normal file
@@ -0,0 +1,84 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newWayland() pkg.Artifact {
|
||||
const (
|
||||
version = "1.24.0"
|
||||
checksum = "JxgLiFRRGw2D3uhVw8ZeDbs3V7K_d4z_ypDog2LBqiA_5y2vVbUAk5NT6D5ozm0m"
|
||||
)
|
||||
return t.New("wayland-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Python),
|
||||
t.Load(Meson),
|
||||
t.Load(PkgConfig),
|
||||
t.Load(CMake),
|
||||
t.Load(Ninja),
|
||||
t.Load(Gawk),
|
||||
t.Load(Diffutils),
|
||||
|
||||
t.Load(Libffi),
|
||||
t.Load(Libexpat),
|
||||
t.Load(Libxml2),
|
||||
}, nil, nil, `
|
||||
cd /usr/src/wayland
|
||||
chmod +w tests tests/sanity-test.c
|
||||
echo 'int main(){}' > tests/sanity-test.c
|
||||
|
||||
cd "$(mktemp -d)"
|
||||
meson setup \
|
||||
--reconfigure \
|
||||
--buildtype=release \
|
||||
--prefix=/system \
|
||||
--prefer-static \
|
||||
-Ddocumentation=false \
|
||||
-Dtests=true \
|
||||
-Ddefault_library=both \
|
||||
. /usr/src/wayland
|
||||
meson compile
|
||||
meson test
|
||||
meson install \
|
||||
--destdir=/work
|
||||
`, pkg.Path(AbsUsrSrc.Append("wayland"), true, pkg.NewHTTPGetTar(
|
||||
nil, "https://gitlab.freedesktop.org/wayland/wayland/"+
|
||||
"-/archive/"+version+"/wayland-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[Wayland] = Toolchain.newWayland }
|
||||
|
||||
func (t Toolchain) newWaylandProtocols() pkg.Artifact {
|
||||
const (
|
||||
version = "1.47"
|
||||
checksum = "B_NodZ7AQfCstcx7kgbaVjpkYOzbAQq0a4NOk-SA8bQixAE20FY3p1-6gsbPgHn9"
|
||||
)
|
||||
return t.New("wayland-protocols-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Python),
|
||||
t.Load(Meson),
|
||||
t.Load(PkgConfig),
|
||||
t.Load(CMake),
|
||||
t.Load(Ninja),
|
||||
|
||||
t.Load(Wayland),
|
||||
t.Load(Libffi),
|
||||
t.Load(Libexpat),
|
||||
t.Load(Libxml2),
|
||||
}, nil, nil, `
|
||||
cd "$(mktemp -d)"
|
||||
meson setup \
|
||||
--reconfigure \
|
||||
--buildtype=release \
|
||||
--prefix=/system \
|
||||
--prefer-static \
|
||||
. /usr/src/wayland-protocols
|
||||
meson compile
|
||||
meson install \
|
||||
--destdir=/work
|
||||
`, pkg.Path(AbsUsrSrc.Append("wayland-protocols"), false, pkg.NewHTTPGetTar(
|
||||
nil, "https://gitlab.freedesktop.org/wayland/wayland-protocols/"+
|
||||
"-/archive/"+version+"/wayland-protocols-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
)))
|
||||
}
|
||||
func init() { artifactsF[WaylandProtocols] = Toolchain.newWaylandProtocols }
|
||||
83
internal/rosa/x.go
Normal file
83
internal/rosa/x.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newUtilMacros() pkg.Artifact {
|
||||
const (
|
||||
version = "1.17"
|
||||
checksum = "vYPO4Qq3B_WGcsBjG0-lfwZ6DZ7ayyrOLqfDrVOgTDcyLChuMGOAAVAa_UXLu5tD"
|
||||
)
|
||||
return t.NewViaMake("util-macros", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://www.x.org/releases/X11R7.7/src/util/"+
|
||||
"util-macros-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), nil)
|
||||
}
|
||||
func init() { artifactsF[utilMacros] = Toolchain.newUtilMacros }
|
||||
|
||||
func (t Toolchain) newXproto() pkg.Artifact {
|
||||
const (
|
||||
version = "7.0.23"
|
||||
checksum = "goxwWxV0jZ_3pNczXFltZWHAhq92x-aEreUGyp5Ns8dBOoOmgbpeNIu1nv0Zx07z"
|
||||
)
|
||||
return t.NewViaMake("xproto", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://www.x.org/releases/X11R7.7/src/proto/"+
|
||||
"xproto-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), &MakeAttr{
|
||||
Writable: true,
|
||||
|
||||
// ancient configure script
|
||||
ScriptEarly: `
|
||||
cd /usr/src/xproto
|
||||
autoreconf -if
|
||||
`,
|
||||
},
|
||||
t.Load(M4),
|
||||
t.Load(Perl),
|
||||
t.Load(Autoconf),
|
||||
t.Load(Automake),
|
||||
t.Load(PkgConfig),
|
||||
|
||||
t.Load(utilMacros),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[Xproto] = Toolchain.newXproto }
|
||||
|
||||
func (t Toolchain) newLibXau() pkg.Artifact {
|
||||
const (
|
||||
version = "1.0.7"
|
||||
checksum = "bm768RoZZnHRe9VjNU1Dw3BhfE60DyS9D_bgSR-JLkEEyUWT_Hb_lQripxrXto8j"
|
||||
)
|
||||
return t.NewViaMake("libXau", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://www.x.org/releases/X11R7.7/src/lib/"+
|
||||
"libXau-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), &MakeAttr{
|
||||
Writable: true,
|
||||
|
||||
// ancient configure script
|
||||
ScriptEarly: `
|
||||
cd /usr/src/libXau
|
||||
autoreconf -if
|
||||
`,
|
||||
|
||||
Configure: [][2]string{
|
||||
{"enable-static"},
|
||||
},
|
||||
},
|
||||
t.Load(M4),
|
||||
t.Load(Perl),
|
||||
t.Load(Autoconf),
|
||||
t.Load(Automake),
|
||||
t.Load(Libtool),
|
||||
t.Load(PkgConfig),
|
||||
|
||||
t.Load(utilMacros),
|
||||
t.Load(Xproto),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[LibXau] = Toolchain.newLibXau }
|
||||
46
internal/rosa/xcb.go
Normal file
46
internal/rosa/xcb.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newXCBProto() pkg.Artifact {
|
||||
const (
|
||||
version = "1.17.0"
|
||||
checksum = "_NtbKaJ_iyT7XiJz25mXQ7y-niTzE8sHPvLXZPcqtNoV_-vTzqkezJ8Hp2U1enCv"
|
||||
)
|
||||
return t.NewViaMake("xcb-proto", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://xcb.freedesktop.org/dist/xcb-proto-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Configure: [][2]string{
|
||||
{"enable-static"},
|
||||
},
|
||||
},
|
||||
t.Load(Python),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[XCBProto] = Toolchain.newXCBProto }
|
||||
|
||||
func (t Toolchain) newXCB() pkg.Artifact {
|
||||
const (
|
||||
version = "1.17.0"
|
||||
checksum = "hjjsc79LpWM_hZjNWbDDS6qRQUXREjjekS6UbUsDq-RR1_AjgNDxhRvZf-1_kzDd"
|
||||
)
|
||||
return t.NewViaMake("xcb", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://xcb.freedesktop.org/dist/libxcb-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &MakeAttr{
|
||||
Configure: [][2]string{
|
||||
{"enable-static"},
|
||||
},
|
||||
},
|
||||
t.Load(Python),
|
||||
t.Load(PkgConfig),
|
||||
|
||||
t.Load(XCBProto),
|
||||
t.Load(Xproto),
|
||||
t.Load(LibXau),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[XCB] = Toolchain.newXCB }
|
||||
19
internal/rosa/xz.go
Normal file
19
internal/rosa/xz.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newXZ() pkg.Artifact {
|
||||
const (
|
||||
version = "5.8.2"
|
||||
checksum = "rXT-XCp9R2q6cXqJ5qenp0cmGPfiENQiU3BWtUVeVgArfRmSsISeUJgvCR3zI0a0"
|
||||
)
|
||||
return t.NewViaMake("xz", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://github.com/tukaani-project/xz/releases/download/"+
|
||||
"v"+version+"/xz-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), nil,
|
||||
t.Load(Diffutils),
|
||||
)
|
||||
}
|
||||
func init() { artifactsF[XZ] = Toolchain.newXZ }
|
||||
@@ -2,24 +2,21 @@ package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
// NewZlib returns a new [pkg.Artifact] containing an installation of zlib.
|
||||
func (t Toolchain) NewZlib() pkg.Artifact {
|
||||
func (t Toolchain) newZlib() pkg.Artifact {
|
||||
const (
|
||||
version = "1.3.1"
|
||||
checksum = "E-eIpNzE8oJ5DsqH4UuA_0GDKuQF5csqI8ooDx2w7Vx-woJ2mb-YtSbEyIMN44mH"
|
||||
)
|
||||
return t.New("zlib-"+version, []pkg.Artifact{
|
||||
t.NewMake(),
|
||||
}, nil, nil, `
|
||||
cd "$(mktemp -d)"
|
||||
CFLAGS="${CFLAGS} -fPIC" /usr/src/zlib/configure \
|
||||
--prefix /system
|
||||
make "-j$(nproc)" test
|
||||
make DESTDIR=/work install
|
||||
`, pkg.Path(AbsUsrSrc.Append("zlib"), true,
|
||||
pkg.NewHTTPGetTar(
|
||||
return t.NewViaMake("zlib", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://zlib.net/zlib-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
)))
|
||||
), &MakeAttr{
|
||||
OmitDefaults: true,
|
||||
Env: []string{
|
||||
"CC=clang -fPIC",
|
||||
},
|
||||
Build: `""`,
|
||||
})
|
||||
}
|
||||
func init() { artifactsF[Zlib] = Toolchain.newZlib }
|
||||
|
||||
@@ -35,7 +35,7 @@ package
|
||||
|
||||
|
||||
*Default:*
|
||||
` <derivation hakurei-static-x86_64-unknown-linux-musl-0.3.3> `
|
||||
` <derivation hakurei-static-x86_64-unknown-linux-musl-0.3.5> `
|
||||
|
||||
|
||||
|
||||
@@ -805,7 +805,7 @@ package
|
||||
|
||||
|
||||
*Default:*
|
||||
` <derivation hakurei-hsu-0.3.3> `
|
||||
` <derivation hakurei-hsu-0.3.5> `
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -35,7 +35,7 @@
|
||||
|
||||
buildGoModule rec {
|
||||
pname = "hakurei";
|
||||
version = "0.3.3";
|
||||
version = "0.3.5";
|
||||
|
||||
srcFiltered = builtins.path {
|
||||
name = "${pname}-src";
|
||||
@@ -89,7 +89,7 @@ buildGoModule rec {
|
||||
CC = "clang -O3 -Werror";
|
||||
|
||||
# nix build environment does not allow acls
|
||||
GO_TEST_SKIP_ACL = 1;
|
||||
HAKUREI_TEST_SKIP_ACL = 1;
|
||||
};
|
||||
|
||||
buildInputs = [
|
||||
|
||||
Reference in New Issue
Block a user