cmd/hpkg: rename from planterette

Planterette is now developed in another repository, so rename this proof of concept to avoid confusion.

Signed-off-by: Ophestra <cat@gensokyo.uk>
commit a8a79a8664 (parent 3ae0cec000)
2025-07-31 23:57:11 +09:00
15 changed files with 23 additions and 22 deletions

cmd/hpkg/app.go Normal file

@@ -0,0 +1,154 @@
package main
import (
"encoding/json"
"log"
"os"
"path"
"hakurei.app/container/seccomp"
"hakurei.app/hst"
"hakurei.app/system"
"hakurei.app/system/dbus"
)
type appInfo struct {
Name string `json:"name"`
Version string `json:"version"`
// passed through to [hst.Config]
ID string `json:"id"`
// passed through to [hst.Config]
Identity int `json:"identity"`
// passed through to [hst.Config]
Groups []string `json:"groups,omitempty"`
// passed through to [hst.Config]
Devel bool `json:"devel,omitempty"`
// passed through to [hst.Config]
Userns bool `json:"userns,omitempty"`
// passed through to [hst.Config]
Net bool `json:"net,omitempty"`
// passed through to [hst.Config]
Device bool `json:"dev,omitempty"`
// passed through to [hst.Config]
Tty bool `json:"tty,omitempty"`
// passed through to [hst.Config]
MapRealUID bool `json:"map_real_uid,omitempty"`
// passed through to [hst.Config]
DirectWayland bool `json:"direct_wayland,omitempty"`
// passed through to [hst.Config]
SystemBus *dbus.Config `json:"system_bus,omitempty"`
// passed through to [hst.Config]
SessionBus *dbus.Config `json:"session_bus,omitempty"`
// passed through to [hst.Config]
Enablements system.Enablement `json:"enablements"`
// passed through to [hst.Config]
Multiarch bool `json:"multiarch,omitempty"`
// passed through to [hst.Config]
Bluetooth bool `json:"bluetooth,omitempty"`
// allow gpu access within sandbox
GPU bool `json:"gpu"`
// store path to nixGL mesa wrappers
Mesa string `json:"mesa,omitempty"`
// store path to nixGL source
NixGL string `json:"nix_gl,omitempty"`
// store path to activate-and-exec script
Launcher string `json:"launcher"`
// store path to /run/current-system
CurrentSystem string `json:"current_system"`
// store path to home-manager activation package
ActivationPackage string `json:"activation_package"`
}
func (app *appInfo) toFst(pathSet *appPathSet, argv []string, flagDropShell bool) *hst.Config {
config := &hst.Config{
ID: app.ID,
Path: argv[0],
Args: argv,
Enablements: app.Enablements,
SystemBus: app.SystemBus,
SessionBus: app.SessionBus,
DirectWayland: app.DirectWayland,
Username: "hakurei",
Shell: shellPath,
Data: pathSet.homeDir,
Dir: path.Join("/data/data", app.ID),
Identity: app.Identity,
Groups: app.Groups,
Container: &hst.ContainerConfig{
Hostname: formatHostname(app.Name),
Devel: app.Devel,
Userns: app.Userns,
Net: app.Net,
Device: app.Device,
Tty: app.Tty || flagDropShell,
MapRealUID: app.MapRealUID,
Filesystem: []*hst.FilesystemConfig{
{Src: path.Join(pathSet.nixPath, "store"), Dst: "/nix/store", Must: true},
{Src: pathSet.metaPath, Dst: path.Join(hst.Tmp, "app"), Must: true},
{Src: "/etc/resolv.conf"},
{Src: "/sys/block"},
{Src: "/sys/bus"},
{Src: "/sys/class"},
{Src: "/sys/dev"},
{Src: "/sys/devices"},
},
Link: [][2]string{
{app.CurrentSystem, "/run/current-system"},
{"/run/current-system/sw/bin", "/bin"},
{"/run/current-system/sw/bin", "/usr/bin"},
},
Etc: path.Join(pathSet.cacheDir, "etc"),
AutoEtc: true,
},
ExtraPerms: []*hst.ExtraPermConfig{
{Path: dataHome, Execute: true},
{Ensure: true, Path: pathSet.baseDir, Read: true, Write: true, Execute: true},
},
}
if app.Multiarch {
config.Container.SeccompFlags |= seccomp.AllowMultiarch
}
if app.Bluetooth {
config.Container.SeccompFlags |= seccomp.AllowBluetooth
}
return config
}
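// loadAppInfo decodes appInfo from the JSON file at name, calling beforeFail before terminating on error.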
func loadAppInfo(name string, beforeFail func()) *appInfo {
bundle := new(appInfo)
if f, err := os.Open(name); err != nil {
beforeFail()
log.Fatalf("cannot open bundle: %v", err)
} else if err = json.NewDecoder(f).Decode(&bundle); err != nil {
beforeFail()
log.Fatalf("cannot parse bundle metadata: %v", err)
} else if err = f.Close(); err != nil {
log.Printf("cannot close bundle metadata: %v", err)
// not fatal
}
if bundle.ID == "" {
beforeFail()
log.Fatal("application identifier must not be empty")
}
return bundle
}
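// formatHostname prefixes name with the machine hostname, falling back to the "hakurei-" prefix if the hostname cannot be determined.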
func formatHostname(name string) string {
if h, err := os.Hostname(); err != nil {
log.Printf("cannot get hostname: %v", err)
return "hakurei-" + name
} else {
return h + "-" + name
}
}

cmd/hpkg/build.nix Normal file

@@ -0,0 +1,251 @@
{
nixpkgsFor,
system,
nixpkgs,
home-manager,
}:
{
lib,
stdenv,
closureInfo,
writeScript,
runtimeShell,
writeText,
symlinkJoin,
vmTools,
runCommand,
fetchFromGitHub,
zstd,
nix,
sqlite,
name ? throw "name is required",
version ? throw "version is required",
pname ? "${name}-${version}",
modules ? [ ],
nixosModules ? [ ],
script ? ''
exec "$SHELL" "$@"
'',
id ? name,
identity ? throw "identity is required",
groups ? [ ],
userns ? false,
net ? true,
dev ? false,
no_new_session ? false,
map_real_uid ? false,
direct_wayland ? false,
system_bus ? null,
session_bus ? null,
allow_wayland ? true,
allow_x11 ? false,
allow_dbus ? true,
allow_pulse ? true,
gpu ? allow_wayland || allow_x11,
}:
let
inherit (lib) optionals;
homeManagerConfiguration = home-manager.lib.homeManagerConfiguration {
pkgs = nixpkgsFor.${system};
modules = modules ++ [
{
home = {
username = "hakurei";
homeDirectory = "/data/data/${id}";
stateVersion = "22.11";
};
}
];
};
launcher = writeScript "hakurei-${pname}" ''
#!${runtimeShell} -el
${script}
'';
extraNixOSConfig =
{ pkgs, ... }:
{
environment = {
etc.nixpkgs.source = nixpkgs.outPath;
systemPackages = [ pkgs.nix ];
};
imports = nixosModules;
};
nixos = nixpkgs.lib.nixosSystem {
inherit system;
modules = [
extraNixOSConfig
{ nix.settings.experimental-features = [ "flakes" ]; }
{ nix.settings.experimental-features = [ "nix-command" ]; }
{ boot.isContainer = true; }
{ system.stateVersion = "22.11"; }
];
};
etc = vmTools.runInLinuxVM (
runCommand "etc" { } ''
mkdir -p /etc
${nixos.config.system.build.etcActivationCommands}
# remove unused files
rm -rf /etc/sudoers
mkdir -p $out
tar -C /etc -cf "$out/etc.tar" .
''
);
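# extendSessionDefault merges the default session bus policy (talk to Notifications, own the app's bus names) into ext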
extendSessionDefault = id: ext: {
filter = true;
talk = [ "org.freedesktop.Notifications" ] ++ ext.talk;
own =
(optionals (id != null) [
"${id}.*"
"org.mpris.MediaPlayer2.${id}.*"
])
++ ext.own;
inherit (ext) call broadcast;
};
nixGL = fetchFromGitHub {
owner = "nix-community";
repo = "nixGL";
rev = "310f8e49a149e4c9ea52f1adf70cdc768ec53f8a";
hash = "sha256-lnzZQYG0+EXl/6NkGpyIz+FEOc/DSEG57AP1VsdeNrM=";
};
mesaWrappers =
let
isIntelX86Platform = system == "x86_64-linux";
nixGLPackages = import (nixGL + "/default.nix") {
pkgs = nixpkgs.legacyPackages.${system};
enable32bits = isIntelX86Platform;
enableIntelX86Extensions = isIntelX86Platform;
};
in
symlinkJoin {
name = "nixGL-mesa";
paths = with nixGLPackages; [
nixGLIntel
nixVulkanIntel
];
};
info = builtins.toJSON {
inherit
name
version
id
identity
launcher
groups
userns
net
dev
no_new_session
map_real_uid
direct_wayland
system_bus
gpu
;
session_bus =
if session_bus != null then
(session_bus (extendSessionDefault id))
else
(extendSessionDefault id {
talk = [ ];
own = [ ];
call = { };
broadcast = { };
});
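# enablement bits: wayland = 1, x11 = 2, dbus = 4, pulse = 8 (see system.Enablement)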
enablements = (if allow_wayland then 1 else 0) + (if allow_x11 then 2 else 0) + (if allow_dbus then 4 else 0) + (if allow_pulse then 8 else 0);
mesa = if gpu then mesaWrappers else null;
nix_gl = if gpu then nixGL else null;
current_system = nixos.config.system.build.toplevel;
activation_package = homeManagerConfiguration.activationPackage;
};
in
stdenv.mkDerivation {
name = "${pname}.pkg";
inherit version;
__structuredAttrs = true;
nativeBuildInputs = [
zstd
nix
sqlite
];
buildCommand = ''
NIX_ROOT="$(mktemp -d)"
export USER="nobody"
# create bootstrap store
bootstrapClosureInfo="${
closureInfo {
rootPaths = [
nix
nixos.config.system.build.toplevel
];
}
}"
echo "copying bootstrap store paths..."
mkdir -p "$NIX_ROOT/nix/store"
xargs -n 1 -a "$bootstrapClosureInfo/store-paths" cp -at "$NIX_ROOT/nix/store/"
NIX_REMOTE="local?root=$NIX_ROOT" nix-store --load-db < "$bootstrapClosureInfo/registration"
NIX_REMOTE="local?root=$NIX_ROOT" nix-store --optimise
sqlite3 "$NIX_ROOT/nix/var/nix/db/db.sqlite" "UPDATE ValidPaths SET registrationTime = ''${SOURCE_DATE_EPOCH}"
chmod -R +r "$NIX_ROOT/nix/var"
# create binary cache
closureInfo="${
closureInfo {
rootPaths = [
homeManagerConfiguration.activationPackage
launcher
]
++ optionals gpu [
mesaWrappers
nixGL
];
}
}"
echo "copying application paths..."
TMP_STORE="$(mktemp -d)"
mkdir -p "$TMP_STORE/nix/store"
xargs -n 1 -a "$closureInfo/store-paths" cp -at "$TMP_STORE/nix/store/"
NIX_REMOTE="local?root=$TMP_STORE" nix-store --load-db < "$closureInfo/registration"
sqlite3 "$TMP_STORE/nix/var/nix/db/db.sqlite" "UPDATE ValidPaths SET registrationTime = ''${SOURCE_DATE_EPOCH}"
NIX_REMOTE="local?root=$TMP_STORE" nix --offline --extra-experimental-features nix-command \
--verbose --log-format raw-with-logs \
copy --all --no-check-sigs --to \
"file://$NIX_ROOT/res?compression=zstd&compression-level=19&parallel-compression=true"
# package /etc
mkdir -p "$NIX_ROOT/etc"
tar -C "$NIX_ROOT/etc" -xf "${etc}/etc.tar"
# write metadata
cp "${writeText "bundle.json" info}" "$NIX_ROOT/bundle.json"
# create an intermediate file to improve zstd performance
INTER="$(mktemp)"
tar -C "$NIX_ROOT" -cf "$INTER" .
zstd -T0 -19 -fo "$out" "$INTER"
'';
}

cmd/hpkg/main.go Normal file

@@ -0,0 +1,333 @@
package main
import (
"context"
"encoding/json"
"errors"
"log"
"os"
"os/signal"
"path"
"syscall"
"hakurei.app/command"
"hakurei.app/hst"
"hakurei.app/internal"
"hakurei.app/internal/hlog"
)
const shellPath = "/run/current-system/sw/bin/bash"
var (
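// errSuccess indicates successful completion; the MustParse error handler converts it to exit status 0.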
errSuccess = errors.New("success")
)
func init() {
hlog.Prepare("hpkg")
if err := os.Setenv("SHELL", shellPath); err != nil {
log.Fatalf("cannot set $SHELL: %v", err)
}
}
func main() {
if os.Geteuid() == 0 {
log.Fatal("this program must not run as root")
}
ctx, stop := signal.NotifyContext(context.Background(),
syscall.SIGINT, syscall.SIGTERM)
defer stop() // unreachable
var (
flagVerbose bool
flagDropShell bool
)
c := command.New(os.Stderr, log.Printf, "hpkg", func([]string) error { internal.InstallOutput(flagVerbose); return nil }).
Flag(&flagVerbose, "v", command.BoolFlag(false), "Print debug messages to the console").
Flag(&flagDropShell, "s", command.BoolFlag(false), "Drop to a shell in place of next hakurei action")
{
var (
flagDropShellActivate bool
)
c.NewCommand("install", "Install an application from its package", func(args []string) error {
if len(args) != 1 {
log.Println("invalid argument")
return syscall.EINVAL
}
pkgPath := args[0]
if !path.IsAbs(pkgPath) {
if dir, err := os.Getwd(); err != nil {
log.Printf("cannot get current directory: %v", err)
return err
} else {
pkgPath = path.Join(dir, pkgPath)
}
}
/*
Look up paths to programs started by hpkg.
This is done here to ease error handling as cleanup is not yet required.
*/
var (
_ = lookPath("zstd")
tar = lookPath("tar")
chmod = lookPath("chmod")
rm = lookPath("rm")
)
/*
Extract package and set up for cleanup.
*/
var workDir string
if p, err := os.MkdirTemp("", "hpkg.*"); err != nil {
log.Printf("cannot create temporary directory: %v", err)
return err
} else {
workDir = p
}
cleanup := func() {
// should be faster than a native implementation
mustRun(chmod, "-R", "+w", workDir)
mustRun(rm, "-rf", workDir)
}
beforeRunFail.Store(&cleanup)
mustRun(tar, "-C", workDir, "-xf", pkgPath)
/*
Parse bundle and app metadata, do pre-install checks.
*/
bundle := loadAppInfo(path.Join(workDir, "bundle.json"), cleanup)
pathSet := pathSetByApp(bundle.ID)
a := bundle
if s, err := os.Stat(pathSet.metaPath); err != nil {
if !os.IsNotExist(err) {
cleanup()
log.Printf("cannot access %q: %v", pathSet.metaPath, err)
return err
}
// did not modify app, clean installation condition met later
} else if s.IsDir() {
cleanup()
log.Printf("metadata path %q is not a file", pathSet.metaPath)
return syscall.EBADMSG
} else {
a = loadAppInfo(pathSet.metaPath, cleanup)
if a.ID != bundle.ID {
cleanup()
log.Printf("app %q claims to have identifier %q",
bundle.ID, a.ID)
return syscall.EBADE
}
// sec: should verify credentials
}
if a != bundle {
// do not try to re-install
if a.NixGL == bundle.NixGL &&
a.CurrentSystem == bundle.CurrentSystem &&
a.Launcher == bundle.Launcher &&
a.ActivationPackage == bundle.ActivationPackage {
cleanup()
log.Printf("package %q is identical to local application %q",
pkgPath, a.ID)
return errSuccess
}
// identity determines uid
if a.Identity != bundle.Identity {
cleanup()
log.Printf("package %q identity %d differs from installed %d",
pkgPath, bundle.Identity, a.Identity)
return syscall.EBADE
}
// sec: should compare version string
hlog.Verbosef("installing application %q version %q over local %q",
bundle.ID, bundle.Version, a.Version)
} else {
hlog.Verbosef("application %q clean installation", bundle.ID)
// sec: should install credentials
}
/*
Setup steps for files owned by the target user.
*/
withCacheDir(ctx, "install", []string{
// export inner bundle path in the environment
"export BUNDLE=" + hst.Tmp + "/bundle",
// replace inner /etc
"mkdir -p etc",
"chmod -R +w etc",
"rm -rf etc",
"cp -dRf $BUNDLE/etc etc",
// replace inner /nix
"mkdir -p nix",
"chmod -R +w nix",
"rm -rf nix",
"cp -dRf /nix nix",
// copy from binary cache
"nix copy --offline --no-check-sigs --all --from file://$BUNDLE/res --to $PWD",
// deduplicate nix store
"nix store --offline --store $PWD optimise",
// make cache directory world-readable for autoetc
"chmod 0755 .",
}, workDir, bundle, pathSet, flagDropShell, cleanup)
if bundle.GPU {
withCacheDir(ctx, "mesa-wrappers", []string{
// link nixGL mesa wrappers
"mkdir -p nix/.nixGL",
"ln -s " + bundle.Mesa + "/bin/nixGLIntel nix/.nixGL/nixGL",
"ln -s " + bundle.Mesa + "/bin/nixVulkanIntel nix/.nixGL/nixVulkan",
}, workDir, bundle, pathSet, false, cleanup)
}
/*
Activate home-manager generation.
*/
withNixDaemon(ctx, "activate", []string{
// clean up broken links
"mkdir -p .local/state/{nix,home-manager}",
"chmod -R +w .local/state/{nix,home-manager}",
"rm -rf .local/state/{nix,home-manager}",
// run activation script
bundle.ActivationPackage + "/activate",
}, false, func(config *hst.Config) *hst.Config { return config },
bundle, pathSet, flagDropShellActivate, cleanup)
/*
Installation complete. Write metadata to block re-installs or downgrades.
*/
// serialise metadata to ensure consistency
if f, err := os.OpenFile(pathSet.metaPath+"~", os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644); err != nil {
cleanup()
log.Printf("cannot create metadata file: %v", err)
return err
} else if err = json.NewEncoder(f).Encode(bundle); err != nil {
cleanup()
log.Printf("cannot write metadata: %v", err)
return err
} else if err = f.Close(); err != nil {
log.Printf("cannot close metadata file: %v", err)
// not fatal
}
if err := os.Rename(pathSet.metaPath+"~", pathSet.metaPath); err != nil {
cleanup()
log.Printf("cannot rename metadata file: %v", err)
return err
}
cleanup()
return errSuccess
}).
Flag(&flagDropShellActivate, "s", command.BoolFlag(false), "Drop to a shell on activation")
}
{
var (
flagDropShellNixGL bool
flagAutoDrivers bool
)
c.NewCommand("start", "Start an application", func(args []string) error {
if len(args) < 1 {
log.Println("invalid argument")
return syscall.EINVAL
}
/*
Parse app metadata.
*/
id := args[0]
pathSet := pathSetByApp(id)
a := loadAppInfo(pathSet.metaPath, func() {})
if a.ID != id {
log.Printf("app %q claims to have identifier %q", id, a.ID)
return syscall.EBADE
}
/*
Prepare nixGL.
*/
if a.GPU && flagAutoDrivers {
withNixDaemon(ctx, "nix-gl", []string{
"mkdir -p /nix/.nixGL/auto",
"rm -rf /nix/.nixGL/auto",
"export NIXPKGS_ALLOW_UNFREE=1",
"nix build --impure " +
"--out-link /nix/.nixGL/auto/opengl " +
"--override-input nixpkgs path:/etc/nixpkgs " +
"path:" + a.NixGL,
"nix build --impure " +
"--out-link /nix/.nixGL/auto/vulkan " +
"--override-input nixpkgs path:/etc/nixpkgs " +
"path:" + a.NixGL + "#nixVulkanNvidia",
}, true, func(config *hst.Config) *hst.Config {
config.Container.Filesystem = append(config.Container.Filesystem, []*hst.FilesystemConfig{
{Src: "/etc/resolv.conf"},
{Src: "/sys/block"},
{Src: "/sys/bus"},
{Src: "/sys/class"},
{Src: "/sys/dev"},
{Src: "/sys/devices"},
}...)
appendGPUFilesystem(config)
return config
}, a, pathSet, flagDropShellNixGL, func() {})
}
/*
Create app configuration.
*/
argv := make([]string, 1, len(args))
if !flagDropShell {
argv[0] = a.Launcher
} else {
argv[0] = shellPath
}
argv = append(argv, args[1:]...)
config := a.toFst(pathSet, argv, flagDropShell)
/*
Expose GPU devices.
*/
if a.GPU {
config.Container.Filesystem = append(config.Container.Filesystem,
&hst.FilesystemConfig{Src: path.Join(pathSet.nixPath, ".nixGL"), Dst: path.Join(hst.Tmp, "nixGL")})
appendGPUFilesystem(config)
}
/*
Spawn app.
*/
mustRunApp(ctx, config, func() {})
return errSuccess
}).
Flag(&flagDropShellNixGL, "s", command.BoolFlag(false), "Drop to a shell on nixGL build").
Flag(&flagAutoDrivers, "auto-drivers", command.BoolFlag(false), "Attempt automatic opengl driver detection")
}
c.MustParse(os.Args[1:], func(err error) {
hlog.Verbosef("command returned %v", err)
if errors.Is(err, errSuccess) {
hlog.BeforeExit()
os.Exit(0)
}
})
log.Fatal("unreachable")
}

cmd/hpkg/paths.go Normal file

@@ -0,0 +1,101 @@
package main
import (
"log"
"os"
"os/exec"
"path"
"strconv"
"sync/atomic"
"hakurei.app/hst"
"hakurei.app/internal/hlog"
)
var (
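// dataHome is the root of per-user hakurei application state, from $HAKUREI_DATA_HOME or /var/lib/hakurei/<uid>.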
dataHome string
)
func init() {
// dataHome
if p, ok := os.LookupEnv("HAKUREI_DATA_HOME"); ok {
dataHome = p
} else {
dataHome = "/var/lib/hakurei/" + strconv.Itoa(os.Getuid())
}
}
func lookPath(file string) string {
if p, err := exec.LookPath(file); err != nil {
log.Fatalf("%s: command not found", file)
return ""
} else {
return p
}
}
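// beforeRunFail holds a cleanup function invoked once before exiting due to a failed subprocess.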
var beforeRunFail = new(atomic.Pointer[func()])
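// mustRun runs the command with inherited standard streams and terminates the program on failure, invoking the stored cleanup first if set.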
func mustRun(name string, arg ...string) {
hlog.Verbosef("spawning process: %q %q", name, arg)
cmd := exec.Command(name, arg...)
cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr
if err := cmd.Run(); err != nil {
if f := beforeRunFail.Swap(nil); f != nil {
(*f)()
}
log.Fatalf("%s: %v", name, err)
}
}
type appPathSet struct {
// ${dataHome}/${id}
baseDir string
// ${baseDir}/app
metaPath string
// ${baseDir}/files
homeDir string
// ${baseDir}/cache
cacheDir string
// ${baseDir}/cache/nix
nixPath string
}
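// pathSetByApp returns the set of state paths for the app identified by id, rooted at dataHome.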
func pathSetByApp(id string) *appPathSet {
pathSet := new(appPathSet)
pathSet.baseDir = path.Join(dataHome, id)
pathSet.metaPath = path.Join(pathSet.baseDir, "app")
pathSet.homeDir = path.Join(pathSet.baseDir, "files")
pathSet.cacheDir = path.Join(pathSet.baseDir, "cache")
pathSet.nixPath = path.Join(pathSet.cacheDir, "nix")
return pathSet
}
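// appendGPUFilesystem binds GPU device nodes (dri, mali, nvidia) into the container filesystem.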
func appendGPUFilesystem(config *hst.Config) {
config.Container.Filesystem = append(config.Container.Filesystem, []*hst.FilesystemConfig{
// flatpak commit 763a686d874dd668f0236f911de00b80766ffe79
{Src: "/dev/dri", Device: true},
// mali
{Src: "/dev/mali", Device: true},
{Src: "/dev/mali0", Device: true},
{Src: "/dev/umplock", Device: true},
// nvidia
{Src: "/dev/nvidiactl", Device: true},
{Src: "/dev/nvidia-modeset", Device: true},
// nvidia OpenCL/CUDA
{Src: "/dev/nvidia-uvm", Device: true},
{Src: "/dev/nvidia-uvm-tools", Device: true},
// flatpak commit d2dff2875bb3b7e2cd92d8204088d743fd07f3ff
{Src: "/dev/nvidia0", Device: true}, {Src: "/dev/nvidia1", Device: true},
{Src: "/dev/nvidia2", Device: true}, {Src: "/dev/nvidia3", Device: true},
{Src: "/dev/nvidia4", Device: true}, {Src: "/dev/nvidia5", Device: true},
{Src: "/dev/nvidia6", Device: true}, {Src: "/dev/nvidia7", Device: true},
{Src: "/dev/nvidia8", Device: true}, {Src: "/dev/nvidia9", Device: true},
{Src: "/dev/nvidia10", Device: true}, {Src: "/dev/nvidia11", Device: true},
{Src: "/dev/nvidia12", Device: true}, {Src: "/dev/nvidia13", Device: true},
{Src: "/dev/nvidia14", Device: true}, {Src: "/dev/nvidia15", Device: true},
{Src: "/dev/nvidia16", Device: true}, {Src: "/dev/nvidia17", Device: true},
{Src: "/dev/nvidia18", Device: true}, {Src: "/dev/nvidia19", Device: true},
}...)
}

cmd/hpkg/proc.go Normal file

@@ -0,0 +1,60 @@
package main
import (
"context"
"encoding/json"
"errors"
"io"
"log"
"os"
"os/exec"
"hakurei.app/hst"
"hakurei.app/internal"
"hakurei.app/internal/hlog"
)
var hakureiPath = internal.MustHakureiPath()
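// mustRunApp invokes hakurei with config streamed over an extra pipe and waits for it, propagating a nonzero exit status.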
func mustRunApp(ctx context.Context, config *hst.Config, beforeFail func()) {
var (
cmd *exec.Cmd
st io.WriteCloser
)
if r, w, err := os.Pipe(); err != nil {
beforeFail()
log.Fatalf("cannot pipe: %v", err)
} else {
if hlog.Load() {
cmd = exec.CommandContext(ctx, hakureiPath, "-v", "app", "3")
} else {
cmd = exec.CommandContext(ctx, hakureiPath, "app", "3")
}
cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr
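// the read end is passed as fd 3 in the child (ExtraFiles), matching the "app 3" argument; config is written to w through st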
cmd.ExtraFiles = []*os.File{r}
st = w
}
go func() {
if err := json.NewEncoder(st).Encode(config); err != nil {
beforeFail()
log.Fatalf("cannot send configuration: %v", err)
}
}()
if err := cmd.Start(); err != nil {
beforeFail()
log.Fatalf("cannot start hakurei: %v", err)
}
if err := cmd.Wait(); err != nil {
var exitError *exec.ExitError
if errors.As(err, &exitError) {
beforeFail()
internal.Exit(exitError.ExitCode())
} else {
beforeFail()
log.Fatalf("cannot wait: %v", err)
}
}
}

cmd/hpkg/test/configuration.nix Normal file

@@ -0,0 +1,62 @@
{ pkgs, ... }:
{
users.users = {
alice = {
isNormalUser = true;
description = "Alice Foobar";
password = "foobar";
uid = 1000;
};
};
home-manager.users.alice.home.stateVersion = "24.11";
# Automatically login on tty1 as a normal user:
services.getty.autologinUser = "alice";
environment = {
variables = {
SWAYSOCK = "/tmp/sway-ipc.sock";
WLR_RENDERER = "pixman";
};
};
# Automatically configure and start Sway when logging in on tty1:
programs.bash.loginShellInit = ''
if [ "$(tty)" = "/dev/tty1" ]; then
set -e
mkdir -p ~/.config/sway
(sed s/Mod4/Mod1/ /etc/sway/config &&
echo 'output * bg ${pkgs.nixos-artwork.wallpapers.simple-light-gray.gnomeFilePath} fill' &&
echo 'output Virtual-1 res 1680x1050') > ~/.config/sway/config
sway --validate
systemd-cat --identifier=session sway && touch /tmp/sway-exit-ok
fi
'';
programs.sway.enable = true;
virtualisation = {
diskSize = 6 * 1024;
qemu.options = [
# Need to switch to a different GPU driver than the default one (-vga std) so that Sway can launch:
"-vga none -device virtio-gpu-pci"
# Increase zstd performance:
"-smp 8"
];
};
environment.hakurei = {
enable = true;
stateDir = "/var/lib/hakurei";
users.alice = 0;
extraHomeConfig = {
home.stateVersion = "23.05";
};
};
}

cmd/hpkg/test/default.nix Normal file

@@ -0,0 +1,34 @@
{
nixosTest,
callPackage,
system,
self,
}:
let
buildPackage = self.buildPackage.${system};
in
nixosTest {
name = "hpkg";
nodes.machine = {
environment.etc = {
"foot.pkg".source = callPackage ./foot.nix { inherit buildPackage; };
};
imports = [
./configuration.nix
self.nixosModules.hakurei
self.inputs.home-manager.nixosModules.home-manager
];
};
# adapted from nixos sway integration tests
# testScriptWithTypes:49: error: Cannot call function of unknown type
# (machine.succeed if succeed else machine.execute)(
# ^
# Found 1 error in 1 file (checked 1 source file)
skipTypeCheck = true;
testScript = builtins.readFile ./test.py;
}

cmd/hpkg/test/foot.nix Normal file

@@ -0,0 +1,48 @@
{
lib,
buildPackage,
foot,
wayland-utils,
inconsolata,
}:
buildPackage {
name = "foot";
inherit (foot) version;
identity = 2;
id = "org.codeberg.dnkl.foot";
modules = [
{
home.packages = [
foot
# For wayland-info:
wayland-utils
];
}
];
nixosModules = [
{
# To help with OCR:
environment.etc."xdg/foot/foot.ini".text = lib.generators.toINI { } {
main = {
font = "inconsolata:size=14";
};
colors = rec {
foreground = "000000";
background = "ffffff";
regular2 = foreground;
};
};
fonts.packages = [ inconsolata ];
}
];
script = ''
exec foot "$@"
'';
}

cmd/hpkg/test/test.py Normal file

@@ -0,0 +1,108 @@
import json
import shlex
q = shlex.quote
NODE_GROUPS = ["nodes", "floating_nodes"]
def swaymsg(command: str = "", succeed=True, type="command"):
assert command != "" or type != "command", "Must specify command or type"
shell = q(f"swaymsg -t {q(type)} -- {q(command)}")
with machine.nested(
f"sending swaymsg {shell!r}" + " (allowed to fail)" * (not succeed)
):
ret = (machine.succeed if succeed else machine.execute)(
f"su - alice -c {shell}"
)
# execute also returns a status code, but disregard.
if not succeed:
_, ret = ret
if not succeed and not ret:
return None
parsed = json.loads(ret)
return parsed
def walk(tree):
yield tree
for group in NODE_GROUPS:
for node in tree.get(group, []):
yield from walk(node)
def wait_for_window(pattern):
def func(last_chance):
nodes = (node["name"] for node in walk(swaymsg(type="get_tree")))
if last_chance:
nodes = list(nodes)
machine.log(f"Last call! Current list of windows: {nodes}")
return any(pattern in name for name in nodes)
retry(func)
def collect_state_ui(name):
swaymsg(f"exec hakurei ps > '/tmp/{name}.ps'")
machine.copy_from_vm(f"/tmp/{name}.ps", "")
swaymsg(f"exec hakurei --json ps > '/tmp/{name}.json'")
machine.copy_from_vm(f"/tmp/{name}.json", "")
machine.screenshot(name)
def check_state(name, enablements):
instances = json.loads(machine.succeed("sudo -u alice -i XDG_RUNTIME_DIR=/run/user/1000 hakurei --json ps"))
if len(instances) != 1:
raise Exception(f"unexpected state length {len(instances)}")
instance = next(iter(instances.values()))
config = instance['config']
if len(config['args']) != 1 or not (config['args'][0].startswith("/nix/store/")) or f"hakurei-{name}-" not in (config['args'][0]):
raise Exception(f"unexpected args {instance['config']['args']}")
if config['enablements'] != enablements:
raise Exception(f"unexpected enablements {instance['config']['enablements']}")
start_all()
machine.wait_for_unit("multi-user.target")
# To check hakurei's version:
print(machine.succeed("sudo -u alice -i hakurei version"))
# Wait for Sway to complete startup:
machine.wait_for_file("/run/user/1000/wayland-1")
machine.wait_for_file("/tmp/sway-ipc.sock")
# Prepare hpkg directory:
machine.succeed("install -dm 0700 -o alice -g users /var/lib/hakurei/1000")
# Install hpkg app:
swaymsg("exec hpkg -v install /etc/foot.pkg && touch /tmp/hpkg-install-ok")
machine.wait_for_file("/tmp/hpkg-install-ok")
# Start app (foot) with Wayland enablement:
swaymsg("exec hpkg -v start org.codeberg.dnkl.foot")
wait_for_window("hakurei@machine-foot")
machine.send_chars("clear; wayland-info && touch /tmp/success-client\n")
machine.wait_for_file("/tmp/hakurei.1000/tmpdir/2/success-client")
collect_state_ui("app_wayland")
check_state("foot", 13)
# Verify acl on XDG_RUNTIME_DIR:
print(machine.succeed("getfacl --absolute-names --omit-header --numeric /run/user/1000 | grep 1000002"))
machine.send_chars("exit\n")
machine.wait_until_fails("pgrep foot")
# Verify acl cleanup on XDG_RUNTIME_DIR:
machine.wait_until_fails("getfacl --absolute-names --omit-header --numeric /run/user/1000 | grep 1000002")
# Exit Sway and verify process exit status 0:
swaymsg("exit", succeed=False)
machine.wait_for_file("/tmp/sway-exit-ok")
# Print hakurei runDir contents:
print(machine.succeed("find /run/user/1000/hakurei"))

cmd/hpkg/with.go Normal file

@@ -0,0 +1,114 @@
package main
import (
"context"
"path"
"strings"
"hakurei.app/container/seccomp"
"hakurei.app/hst"
"hakurei.app/internal"
)
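// withNixDaemon runs command inside the app container with a temporary nix-daemon and the app's /nix mounted writable.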
func withNixDaemon(
ctx context.Context,
action string, command []string, net bool, updateConfig func(config *hst.Config) *hst.Config,
app *appInfo, pathSet *appPathSet, dropShell bool, beforeFail func(),
) {
mustRunAppDropShell(ctx, updateConfig(&hst.Config{
ID: app.ID,
Path: shellPath,
Args: []string{shellPath, "-lc", "rm -f /nix/var/nix/daemon-socket/socket && " +
// start nix-daemon
"nix-daemon --store / & " +
// wait for socket to appear
"(while [ ! -S /nix/var/nix/daemon-socket/socket ]; do sleep 0.01; done) && " +
// create directory so nix stops complaining
"mkdir -p /nix/var/nix/profiles/per-user/root/channels && " +
strings.Join(command, " && ") +
// terminate nix-daemon
" && pkill nix-daemon",
},
Username: "hakurei",
Shell: shellPath,
Data: pathSet.homeDir,
Dir: path.Join("/data/data", app.ID),
ExtraPerms: []*hst.ExtraPermConfig{
{Path: dataHome, Execute: true},
{Ensure: true, Path: pathSet.baseDir, Read: true, Write: true, Execute: true},
},
Identity: app.Identity,
Container: &hst.ContainerConfig{
Hostname: formatHostname(app.Name) + "-" + action,
Userns: true, // nix sandbox requires userns
Net: net,
SeccompFlags: seccomp.AllowMultiarch,
Tty: dropShell,
Filesystem: []*hst.FilesystemConfig{
{Src: pathSet.nixPath, Dst: "/nix", Write: true, Must: true},
},
Link: [][2]string{
{app.CurrentSystem, "/run/current-system"},
{"/run/current-system/sw/bin", "/bin"},
{"/run/current-system/sw/bin", "/usr/bin"},
},
Etc: path.Join(pathSet.cacheDir, "etc"),
AutoEtc: true,
},
}), dropShell, beforeFail)
}
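// withCacheDir runs command inside the app container with the extracted bundle mounted under hst.Tmp and the app cache directory as its data directory.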
func withCacheDir(
ctx context.Context,
action string, command []string, workDir string,
app *appInfo, pathSet *appPathSet, dropShell bool, beforeFail func()) {
mustRunAppDropShell(ctx, &hst.Config{
ID: app.ID,
Path: shellPath,
Args: []string{shellPath, "-lc", strings.Join(command, " && ")},
Username: "nixos",
Shell: shellPath,
Data: pathSet.cacheDir, // this also ensures cacheDir via shim
Dir: path.Join("/data/data", app.ID, "cache"),
ExtraPerms: []*hst.ExtraPermConfig{
{Path: dataHome, Execute: true},
{Ensure: true, Path: pathSet.baseDir, Read: true, Write: true, Execute: true},
{Path: workDir, Execute: true},
},
Identity: app.Identity,
Container: &hst.ContainerConfig{
Hostname: formatHostname(app.Name) + "-" + action,
SeccompFlags: seccomp.AllowMultiarch,
Tty: dropShell,
Filesystem: []*hst.FilesystemConfig{
{Src: path.Join(workDir, "nix"), Dst: "/nix", Must: true},
{Src: workDir, Dst: path.Join(hst.Tmp, "bundle"), Must: true},
},
Link: [][2]string{
{app.CurrentSystem, "/run/current-system"},
{"/run/current-system/sw/bin", "/bin"},
{"/run/current-system/sw/bin", "/usr/bin"},
},
Etc: path.Join(workDir, "etc"),
AutoEtc: true,
},
}, dropShell, beforeFail)
}
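// mustRunAppDropShell runs config through hakurei, substituting a login shell for the command and exiting immediately when dropShell is set.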
func mustRunAppDropShell(ctx context.Context, config *hst.Config, dropShell bool, beforeFail func()) {
if dropShell {
config.Args = []string{shellPath, "-l"}
mustRunApp(ctx, config, beforeFail)
beforeFail()
internal.Exit(0)
}
mustRunApp(ctx, config, beforeFail)
}