Compare commits

...

11 Commits

Author SHA1 Message Date
b298c5f572
nix: static linking via nix 2025-07-24 18:12:54 +09:00
4d9d4bcef2
exec: append custom store
This is primarily for chroot stores. It might not be useful, however, since the chroot store breaks builds.
2025-07-23 12:09:32 +09:00
b6961508e8
store: local store
This just returns the verbatim string.
2025-07-23 11:49:23 +09:00
e0278a6d7d
copy: generalise store 2025-07-23 11:43:14 +09:00
e448541464
exec: increase default wait delay
Should avoid killing the Nix command whenever possible.
2025-07-23 11:31:57 +09:00
4787f51d84
exec: interrupt with delayed kill
This gives nix a chance to gracefully exit.
2025-07-21 00:33:04 +09:00
e2299a57d2
cmd/nix-tool: flag for idle priority
Mainly for usability. This is often run on a workstation.
2025-07-20 19:17:08 +09:00
0d5e7f61fc
cmd/nix-tool: disable binary cache for chroot store 2025-07-20 14:16:00 +09:00
5f8b5bcb3d
treewide: remove sample store paths
This should finally build on nix.
2025-07-20 04:22:28 +09:00
66a4079114
treewide: compress testdata
This significantly improves handling by reducing file size. It is also part of an effort to get this building on nix itself.
2025-07-20 04:12:26 +09:00
55933ccfb3
cmd/nix-tool: pass --store via extraArgs
Sleepy me forgot about passing extraArgs :c
2025-07-20 03:27:19 +09:00
54 changed files with 398 additions and 3276207 deletions


@ -85,7 +85,7 @@ func TestBuildBadCommand(t *testing.T) {
wantErr := os.ErrNotExist
breakNixCommand(t)
if err := nix.Build(
nix.New(t.Context(), nil, nil, nil),
nix.New(t.Context(), nil, nil, nil, nil),
nil,
); !errors.Is(err, wantErr) {
t.Errorf("Build: error = %v, want %v", err, wantErr)


@ -1,17 +0,0 @@
package main
import (
"context"
"os/exec"
"gensokyo.uk/nix"
)
type chrootStoreContext struct {
store string
nix.Context
}
func (c *chrootStoreContext) Nix(ctx context.Context, arg ...string) *exec.Cmd {
return c.Context.Nix(ctx, append([]string{"--store", c.store}, arg...)...)
}
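
The wrapper deleted above injected --store by wrapping Context. With the generalised Store introduced in this comparison, the same effect comes from passing a store to nix.New, which folds it into the shared argument prefix. A minimal sketch, assuming a hypothetical chroot path /mnt/chroot (imports omitted):

ctx := nix.New(context.Background(), nix.Local("/mnt/chroot"), nil, os.Stdout, os.Stderr)
cmd := ctx.Nix(context.Background(), nix.FlagVersion)
// cmd.Args is now ["nix", "--store", "/mnt/chroot", "--version"],
// the same shape as the expectation in the exec_test.go "store" case below.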


@ -9,6 +9,7 @@ import (
"log"
"os"
"os/signal"
"runtime"
"slices"
"strings"
"syscall"
@ -29,6 +30,7 @@ func main() {
var (
flagStore string
flagIdle bool
flagNixOS bool
flagVerbose bool
flagJSON bool
@ -36,20 +38,39 @@ func main() {
c := command.New(os.Stderr, log.Printf, "nix-tool", func(args []string) error {
log.SetFlags(0)
if flagIdle {
runtime.LockOSThread()
if err := schedSetPolicy(0, SCHED_IDLE); err != nil {
return commandHandlerError(fmt.Sprintf("cannot set policy: %v", err))
}
}
var (
store nix.Store
extraArgs []string
)
flagStore = strings.TrimSpace(flagStore)
if flagStore != string(os.PathSeparator) {
store = nix.Local(flagStore)
extraArgs = append(extraArgs,
// do not use any binary cache
nix.FlagOption, nix.OptionBuildUseSubstitutes, nix.ValueFalse,
nix.FlagOption, nix.OptionSubstituters, "",
nix.FlagOption, nix.OptionTrustedSubstituters, "",
nix.FlagOption, nix.OptionTrustedPublicKeys, "",
)
}
var stderr io.Writer
if flagVerbose {
stderr = os.Stderr
}
ctx = nix.New(nixCtx, nil, os.Stdout, stderr)
flagStore = strings.TrimSpace(flagStore)
if flagStore != string(os.PathSeparator) {
ctx = &chrootStoreContext{flagStore, ctx}
}
ctx = nix.New(nixCtx, store, extraArgs, os.Stdout, stderr)
return nil
}).
Flag(&flagStore, "store", command.StringFlag("/"), "Path to the nix root").
Flag(&flagIdle, "idle", command.BoolFlag(false), "Whether to set SCHED_IDLE policy").
Flag(&flagNixOS, "nixos", command.BoolFlag(false), "Interpret input as NixOS flake installable").
Flag(&flagVerbose, "v", command.BoolFlag(false), "Connect nix stderr").
Flag(&flagJSON, "json", command.BoolFlag(false), "Serialise output in JSON when applicable")
@ -93,7 +114,7 @@ func main() {
}
log.Println("copying to binary cache...")
if err := nix.Copy(ctx, flagCacheKeyPath, &nix.BinaryCache{
if err := nix.Copy(ctx, &nix.BinaryCache{
Compression: flagCacheComp,
ParallelCompression: flagCachePComp,
Bucket: flagCacheBucket,
@ -101,6 +122,7 @@ func main() {
Region: flagCacheRegion,
Scheme: flagCacheScheme,
CredentialsPath: flagCacheCredPath,
KeyPath: flagCacheKeyPath,
}, slices.Values(collective)); err != nil {
return commandHandlerError(fmt.Sprintf("cannot copy: %v", err))
}

25
cmd/nix-tool/sched.go Normal file

@ -0,0 +1,25 @@
package main
/*
#ifndef _GNU_SOURCE
#define _GNU_SOURCE // SCHED_IDLE
#endif
#include <sched.h>
const struct sched_param param = { .sched_priority = 0 };
const int NIX_TOOL_SCHED_IDLE = SCHED_IDLE;
*/
import "C"
var (
SCHED_IDLE = int(C.NIX_TOOL_SCHED_IDLE)
)
func schedSetPolicy(pid, policy int) error {
r, err := C.sched_setscheduler(C.pid_t(pid), C.int(policy), &C.param)
if r != 0 {
return err
}
return nil
}
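
sched_setscheduler(2) applies to a single thread, and pid 0 refers to the caller, which is presumably why cmd/nix-tool pins the goroutine with runtime.LockOSThread before calling the helper. A minimal usage sketch mirroring main.go (imports omitted):

runtime.LockOSThread() // keep the goroutine on the thread whose policy is changed
if err := schedSetPolicy(0, SCHED_IDLE); err != nil {
	log.Fatalf("cannot set policy: %v", err)
}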

39
copy.go

@ -2,43 +2,12 @@ package nix
import (
"context"
"fmt"
"iter"
"os"
)
const (
EnvAwsSharedCredentialsFile = "AWS_SHARED_CREDENTIALS_FILE"
)
// A BinaryCache holds credentials and parameters to a s3 binary cache.
type BinaryCache struct {
// Compression is the name of the compression algorithm to use. Example: "zstd".
Compression string `json:"compression"`
// ParallelCompression determines whether parallel compression is enabled.
ParallelCompression bool `json:"parallel_compression,omitempty"`
// Bucket is the s3 bucket name.
Bucket string `json:"bucket"`
// Endpoint is the s3 endpoint. Example: "s3.example.org".
Endpoint string `json:"endpoint,omitempty"`
// Region is the s3 region. Example: "ap-northeast-1".
Region string `json:"region"`
// Scheme is the s3 protocol. Example: "https".
Scheme string `json:"scheme"`
// CredentialsPath is the path to the s3 shared credentials file.
CredentialsPath string `json:"credentials_path"`
}
func (store *BinaryCache) String() string {
return fmt.Sprintf(
"s3://%s?compression=%s&parallel-compression=%t&region=%s&scheme=%s&endpoint=%s",
store.Bucket, store.Compression, store.ParallelCompression, store.Region, store.Scheme, store.Endpoint,
)
}
// Copy copies installables to the binary cache store, signing all paths using the key at keyPath.
func Copy(ctx Context, keyPath string, store *BinaryCache, installables iter.Seq[string]) error {
// Copy copies installables to the binary cache store.
func Copy(ctx Context, store Store, installables iter.Seq[string]) error {
if store == nil {
return os.ErrInvalid
}
@ -47,9 +16,9 @@ func Copy(ctx Context, keyPath string, store *BinaryCache, installables iter.Seq
defer cancel()
cmd := ctx.Nix(c, CommandCopy,
FlagTo, store.String()+"&secret-key="+keyPath,
FlagTo, store.String(),
FlagStdin)
cmd.Env = append(os.Environ(), EnvAwsSharedCredentialsFile+"="+store.CredentialsPath)
cmd.Env = append(os.Environ(), store.Environ()...)
cmd.Stdout, cmd.Stderr = ctx.Streams()
_, err := ctx.WriteStdin(cmd, installables, nil)
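
With the signing key folded into BinaryCache.KeyPath, Copy now takes only a context, a store, and the installables. A minimal sketch, with hypothetical credential, key, and store paths (imports omitted):

ctx := nix.New(context.Background(), nil, nil, os.Stdout, os.Stderr)
store := &nix.BinaryCache{
	Compression:     "zstd",
	Bucket:          "nix-cache",
	Endpoint:        "s3.example.org",
	Region:          "us-east-1",
	Scheme:          "https",
	CredentialsPath: "/etc/nix/s3-credentials", // exported as AWS_SHARED_CREDENTIALS_FILE
	KeyPath:         "/etc/nix/cache-key",      // appended to the store URL as &secret-key=...
}
installables := slices.Values([]string{"/nix/store/aaaaaaaa-example.drv"}) // placeholder path
if err := nix.Copy(ctx, store, installables); err != nil {
	log.Fatalf("cannot copy: %v", err)
}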


@ -36,46 +36,10 @@ func init() {
})
}
func TestBinaryCache(t *testing.T) {
testCases := []struct {
name string
store *nix.BinaryCache
want string
}{
{"example", &nix.BinaryCache{
Compression: "none",
ParallelCompression: false,
Bucket: "example",
Endpoint: "s3.example.org",
Region: "us-east-1",
Scheme: "http",
CredentialsPath: "/dev/null",
}, "s3://example?compression=none&parallel-compression=false&region=us-east-1&scheme=http&endpoint=s3.example.org"},
{"gensokyo", &nix.BinaryCache{
Compression: "zstd",
ParallelCompression: true,
Bucket: "nix-cache",
Endpoint: "s3.gensokyo.uk",
Region: "ap-northeast-1",
Scheme: "https",
CredentialsPath: "/var/lib/persist/cache/s3",
}, "s3://nix-cache?compression=zstd&parallel-compression=true&region=ap-northeast-1&scheme=https&endpoint=s3.gensokyo.uk"},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
if got := tc.store.String(); got != tc.want {
t.Errorf("String: %q, want %q", got, tc.want)
}
})
}
}
func TestCopy(t *testing.T) {
stubNixCommand(t)
if err := nix.Copy(
newStubContext(t.Context(), nil, os.Stdout, os.Stderr),
nonexistent,
&nix.BinaryCache{
Compression: "none",
ParallelCompression: false,
@ -84,6 +48,7 @@ func TestCopy(t *testing.T) {
Region: "us-east-1",
Scheme: "http",
CredentialsPath: "/dev/null",
KeyPath: nonexistent,
},
slices.Values(instWant["pluiedev pappardelle"]),
); err != nil {
@ -93,7 +58,6 @@ func TestCopy(t *testing.T) {
t.Run("nil store", func(t *testing.T) {
if err := nix.Copy(
newStubContext(t.Context(), nil, os.Stdout, os.Stderr),
nonexistent,
nil,
nil,
); !errors.Is(err, os.ErrInvalid) {

1
default.nix Normal file

@ -0,0 +1 @@
with import <nixpkgs> { }; pkgsStatic.callPackage ./package.nix { }

28
exec.go

@ -5,7 +5,13 @@ import (
"errors"
"io"
"iter"
"os"
"os/exec"
"time"
)
const (
defaultWaitDelay = 30 * time.Second
)
// Nix is the name of the nix program.
@ -13,6 +19,7 @@ var Nix = "nix"
type nix struct {
name string
store Store
ctx context.Context
extra []string
@ -34,12 +41,17 @@ A non-nil stderr implies verbose.
Streams will not be connected for commands outputting JSON.
*/
func New(ctx context.Context, extraArgs []string, stdout, stderr io.Writer) Context {
func New(ctx context.Context, store Store, extraArgs []string, stdout, stderr io.Writer) Context {
extra := []string{ExtraExperimentalFeatures, ExperimentalFeaturesFlakes}
if store != nil {
extra = append(extraArgs, FlagStore, store.String())
}
return &nix{
name: Nix,
ctx: ctx,
name: Nix,
store: store,
ctx: ctx,
// since flakes are supposedly experimental
extra: append(extraArgs, ExtraExperimentalFeatures, ExperimentalFeaturesFlakes),
extra: append(extraArgs, extra...),
stdout: stdout,
stderr: stderr,
@ -47,7 +59,13 @@ func New(ctx context.Context, extraArgs []string, stdout, stderr io.Writer) Cont
}
func (n *nix) Nix(ctx context.Context, arg ...string) *exec.Cmd {
return exec.CommandContext(ctx, n.name, append(n.extra, arg...)...)
cmd := exec.CommandContext(ctx, n.name, append(n.extra, arg...)...)
cmd.Cancel = func() error { return cmd.Process.Signal(os.Interrupt) }
cmd.WaitDelay = defaultWaitDelay
if n.store != nil {
cmd.Env = append(cmd.Env, n.store.Environ()...)
}
return cmd
}
func (n *nix) WriteStdin(cmd *exec.Cmd, installables iter.Seq[string], f func() error) (int, error) {
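
Cancel and WaitDelay change how a nix child reacts to context cancellation: it first receives os.Interrupt, and only after defaultWaitDelay (30 s) is it forcibly killed. A minimal sketch of the behaviour, assuming the nix binary is on PATH (imports omitted):

ctx := nix.New(context.Background(), nil, nil, os.Stdout, os.Stderr)
c, cancel := context.WithCancel(context.Background())
cmd := ctx.Nix(c, nix.FlagVersion)
if err := cmd.Start(); err != nil {
	log.Fatal(err)
}
cancel()       // cmd.Cancel sends os.Interrupt to the running nix process
_ = cmd.Wait() // SIGKILL follows only if nix is still alive after 30 s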


@ -54,7 +54,7 @@ func stubNixCommand(t *testing.T) {
// newStubContext creates a context for use with the nix command stub.
func newStubContext(ctx context.Context, extraArgs []string, stdout, stderr io.Writer) nix.Context {
return nix.New(ctx, append(stubExtraArgs, extraArgs...), stdout, stderr)
return nix.New(ctx, nil, append(stubExtraArgs, extraArgs...), stdout, stderr)
}
type stubContextCommand struct {
@ -107,7 +107,7 @@ func TestNixStub(t *testing.T) {
Flag(&flagExtraExperimentalFeatures, trimFlagName(nix.ExtraExperimentalFeatures), command.StringFlag(""),
fmt.Sprintf("expects exactly %q", nix.ExperimentalFeaturesFlakes))
c.Command("true", command.UsageInternal, func([]string) error { return nil })
c.Command(nix.ValueTrue, command.UsageInternal, func([]string) error { return nil })
for _, f := range stubCommandInit {
f(c)


@ -1,6 +1,7 @@
package nix_test
import (
"context"
"errors"
"os"
"os/exec"
@ -12,8 +13,36 @@ import (
)
func TestNixWriteStdin(t *testing.T) {
t.Run("store", func(t *testing.T) {
ctx := nix.New(t.Context(), &nix.BinaryCache{
Compression: "none",
ParallelCompression: false,
Bucket: "example",
Endpoint: "s3.example.org",
Region: "us-east-1",
Scheme: "http",
CredentialsPath: "/dev/null",
KeyPath: nonexistent,
}, nil, nil, nil)
cmd := ctx.Nix(t.Context(), nix.FlagVersion)
wantArgs := []string{
nix.Nix,
nix.FlagStore,
"s3://example?compression=none&parallel-compression=false&region=us-east-1&scheme=http&endpoint=s3.example.org&secret-key=/proc/nonexistent",
nix.FlagVersion}
if !slices.Equal(cmd.Args, wantArgs) {
t.Errorf("Args = %#v, want %#v", cmd.Args, wantArgs)
}
wantEnv := []string{nix.EnvAwsSharedCredentialsFile + "=/dev/null"}
if !slices.Equal(cmd.Env, wantEnv) {
t.Errorf("Env = %#v, want %#v", cmd.Env, wantEnv)
}
})
t.Run("already set", func(t *testing.T) {
ctx := nix.New(t.Context(), nil, os.Stdout, os.Stderr)
ctx := nix.New(t.Context(), nil, nil, os.Stdout, os.Stderr)
cmd := exec.CommandContext(t.Context(), nonexistent)
cmd.Stdin = os.Stdin
if _, err := ctx.WriteStdin(cmd, nil, nil); err == nil {
@ -33,4 +62,23 @@ func TestNixWriteStdin(t *testing.T) {
t.Fatalf("WriteStdinCommand: error = %v, want %v", err, syscall.ENOSYS)
}
})
t.Run("exit before cancel", func(t *testing.T) {
stubNixCommand(t)
ctx := newStubContext(t.Context(), nil, os.Stdout, os.Stderr)
c, cancel := context.WithCancel(t.Context())
defer cancel()
cmd := ctx.Nix(c, "true")
if err := cmd.Start(); err != nil {
t.Fatalf("Start: error = %v", err)
}
// Cancel is skipped after exec.Cmd.Wait completes
if _, err := cmd.Process.Wait(); err != nil {
t.Fatalf("Wait: error = %v", err)
}
cancel()
if cmd.Err != nil {
t.Fatalf("Err = %v", cmd.Err)
}
})
}


@ -80,6 +80,9 @@ const (
// FlagVersion show version information.
FlagVersion = "--version"
// FlagStore is a loosely documented flag for specifying the store url to operate on.
FlagStore = "--store"
// FlagKeepGoing keep going in case of failed builds, to the greatest extent possible.
// That is, if building an input of some derivation fails, Nix will still build the other inputs,
// but not the derivation itself.
@ -91,6 +94,17 @@ const (
// Certain commands won't have to evaluate when invoked for the second time with a particular version of a flake.
// Intermediate results are not cached.
OptionEvalCache = "eval-cache"
// OptionBuildUseSubstitutes if set to true (default), Nix will use binary substitutes if available.
// This option can be disabled to force building from source.
OptionBuildUseSubstitutes = "build-use-substitutes"
// OptionSubstituters a list of URLs of Nix stores to be used as substituters, separated by whitespace.
// A substituter is an additional store from which Nix can obtain store objects instead of building them.
OptionSubstituters = "substituters"
// OptionTrustedSubstituters a list of Nix store URLs, separated by whitespace.
// These are not used by default, but users of the Nix daemon can enable them by specifying substituters.
OptionTrustedSubstituters = "trusted-substituters"
// OptionTrustedPublicKeys a whitespace-separated list of public keys.
OptionTrustedPublicKeys = "trusted-public-keys"
ValueTrue = "true"
ValueFalse = "false"
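
The new option constants are passed as FlagOption / name / value triples (FlagOption presumably being nix's --option flag). cmd/nix-tool uses them, as shown in main.go above, to keep a chroot store from touching any binary cache; a sketch of the same wiring with a hypothetical store path (imports omitted):

extraArgs := []string{
	nix.FlagOption, nix.OptionBuildUseSubstitutes, nix.ValueFalse,
	nix.FlagOption, nix.OptionSubstituters, "",
	nix.FlagOption, nix.OptionTrustedSubstituters, "",
	nix.FlagOption, nix.OptionTrustedPublicKeys, "",
}
ctx := nix.New(context.Background(), nix.Local("/mnt/chroot"), extraArgs, os.Stdout, nil)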


@ -26,14 +26,7 @@ func TestInstantiated(t *testing.T) {
{"unexpected quotes short", nil, &nix.MalformedInstantiatedError{Type: nix.InstantiatedUnexpectedQuotes}},
{"not absolute", nil, &nix.MalformedInstantiatedError{Type: nix.InstantiatedNotAbsolute}},
{"good segment", []string{
"/nix/store/3zilrlmq7r6rpzfd94mwss32b62yinj5-bootstrap-stage0-stdenv-linux.drv",
"/nix/store/7yfwy95p6lcdpljdajs5aw10h6q0sfx0-update-autotools-gnu-config-scripts-hook.drv",
"/nix/store/bamwxswxacs3cjdcydv0z7bj22d7g2kc-config.guess-948ae97.drv",
"/nix/store/gyks6vvl7x0gq214ldjhi3w4rg37nh8i-zlib-1.3.1.tar.gz.drv",
"/nix/store/nbsdqpfzh1jlpmh95s69b3iivfcvv3lh-config.sub-948ae97.drv",
"/nix/store/ysp83x9nrks28zkblqmnc1s1kb68dr69-gnu-config-2024-01-01.drv",
}, nil},
{"good segment", segmentWant, nil},
{"getchoo atlas", getchooAtlasInstantiated, nil},
{"getchoo glados", getchooGladosInstantiated, nil},
@ -93,12 +86,6 @@ func stubInstantiatedEvaluator(args []string) error {
}
func TestInstantiatedDecoderStopEarly(t *testing.T) {
want := []string{
"/nix/store/gyks6vvl7x0gq214ldjhi3w4rg37nh8i-zlib-1.3.1.tar.gz.drv",
"/nix/store/bamwxswxacs3cjdcydv0z7bj22d7g2kc-config.guess-948ae97.drv",
"/nix/store/nbsdqpfzh1jlpmh95s69b3iivfcvv3lh-config.sub-948ae97.drv",
}
decoder := nix.NewInstantiatedDecoder(strings.NewReader(segmentPrefix+segmentBody+segmentSuffix), os.Stderr)
counter := 3
got := make([]string, 0, counter)
@ -110,8 +97,8 @@ func TestInstantiatedDecoderStopEarly(t *testing.T) {
}
}
if !slices.Equal(got, want) {
t.Errorf("Instantiated: %#v, want %#v", got, want)
if !slices.Equal(got, segmentEarly) {
t.Errorf("Instantiated: %#v, want %#v", got, segmentEarly)
}
}
@ -120,7 +107,7 @@ func TestInstantiatedEvaluatorBadCommand(t *testing.T) {
breakNixCommand(t)
if _, err := nix.EvalInstantiated(
nix.New(t.Context(), nil, os.Stdout, os.Stderr),
nix.New(t.Context(), nil, nil, os.Stdout, os.Stderr),
"",
); !errors.Is(err, wantErr) {
t.Errorf("EvalInstantiated: error = %v, want %v", err, wantErr)

24
package.nix Normal file

@ -0,0 +1,24 @@
{
lib,
stdenv,
buildGoModule,
pkg-config,
}:
buildGoModule {
pname = "nix-tool";
version = "0.1.4";
src = ./.;
vendorHash = "sha256-SVSrY9SZnS6NLin+apdN4efqCzHgBBY8LIjXcUCXbUo=";
ldflags =
[ "-s -w" ]
++ lib.optionals stdenv.hostPlatform.isStatic [
"-linkmode external"
"-extldflags \"-static\""
];
nativeBuildInputs = [
pkg-config
];
}


@ -1,24 +0,0 @@
package nix_test
import _ "embed"
// github:getchoo/borealis#atlas
var (
//go:embed testdata/getchoo_atlas
getchooAtlasOut string
getchooAtlasInstantiated = sampleSplitPaths(getchooAtlasInstantiatedRaw)
//go:embed testdata/instantiated/getchoo_atlas
getchooAtlasInstantiatedRaw string
//go:embed testdata/derivation/show_getchoo_atlas.json
getchooAtlasShow []byte
getchooAtlasCollective = sampleSplitPaths(getchooAtlasCollectiveRaw)
//go:embed testdata/derivation/collect_getchoo_atlas
getchooAtlasCollectiveRaw string
//go:embed testdata/format/stdin_getchoo_atlas
getchooAtlasStdin string
)


@ -1,24 +0,0 @@
package nix_test
import _ "embed"
// github:getchoo/borealis#glados
var (
//go:embed testdata/getchoo_glados
getchooGladosOut string
getchooGladosInstantiated = sampleSplitPaths(getchooGladosInstantiatedRaw)
//go:embed testdata/instantiated/getchoo_glados
getchooGladosInstantiatedRaw string
//go:embed testdata/derivation/show_getchoo_glados.json
getchooGladosShow []byte
getchooGladosCollective = sampleSplitPaths(getchooGladosCollectiveRaw)
//go:embed testdata/derivation/collect_getchoo_glados
getchooGladosCollectiveRaw string
//go:embed testdata/format/stdin_getchoo_glados
getchooGladosStdin string
)


@ -1,26 +0,0 @@
package nix_test
import (
_ "embed"
)
// git+https://tangled.sh/@pluie.me/flake#pappardelle
var (
//go:embed testdata/pluiedev_pappardelle
pluiedevPappardelleOut string
pluiedevPappardelleInstantiated = sampleSplitPaths(pluiedevPappardelleInstantiatedRaw)
//go:embed testdata/instantiated/pluiedev_pappardelle
pluiedevPappardelleInstantiatedRaw string
//go:embed testdata/derivation/show_pluiedev_pappardelle.json
pluiedevPappardelleShow []byte
pluiedevPappardelleCollective = sampleSplitPaths(pluiedevPappardelleCollectiveRaw)
//go:embed testdata/derivation/collect_pluiedev_pappardelle
pluiedevPappardelleCollectiveRaw string
//go:embed testdata/format/stdin_pluiedev_pappardelle
pluiedevPappardelleStdin string
)


@ -1,6 +1,12 @@
package nix_test
import "strings"
import (
"bytes"
"compress/gzip"
_ "embed"
"io"
"strings"
)
var instSample = map[string]string{
"bad fields": segmentPrefix + `instantiated 'config.sub-948ae97' ` + segmentSuffix,
@ -33,33 +39,107 @@ var drvShow = map[string][]byte{
"pluiedev pappardelle": pluiedevPappardelleShow,
}
const (
segmentPrefix = `evaluating file '/nix/store/vdzlppvrdkz9rv14q4j02g9kpjbww2ww-source/pkgs/development/libraries/zlib/default.nix'
evaluating file '/nix/store/vdzlppvrdkz9rv14q4j02g9kpjbww2ww-source/pkgs/build-support/fetchurl/boot.nix'
performing daemon worker op: 7
instantiated 'zlib-1.3.1.tar.gz' -> '/nix/store/gyks6vvl7x0gq214ldjhi3w4rg37nh8i-zlib-1.3.1.tar.gz.drv'
source path '/nix/store/vdzlppvrdkz9rv14q4j02g9kpjbww2ww-source/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh' is uncacheable
copying '/nix/store/vdzlppvrdkz9rv14q4j02g9kpjbww2ww-source/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh' to the store...
performing daemon worker op: 7
copied '/nix/store/vdzlppvrdkz9rv14q4j02g9kpjbww2ww-source/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh' to '/nix/store/96rvfw5vlv1hwwm9sdxhdkkpjyym6p2x-update-autotools-gnu-config-scripts.sh'
copied source '/nix/store/vdzlppvrdkz9rv14q4j02g9kpjbww2ww-source/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh' -> '/nix/store/96rvfw5vlv1hwwm9sdxhdkkpjyym6p2x-update-autotools-gnu-config-scripts.sh'
evaluating file '/nix/store/vdzlppvrdkz9rv14q4j02g9kpjbww2ww-source/pkgs/by-name/gn/gnu-config/package.nix'
performing daemon worker op: 7
instantiated 'config.guess-948ae97' -> '/nix/store/bamwxswxacs3cjdcydv0z7bj22d7g2kc-config.guess-948ae97.drv'
performing daemon worker op: 7
`
var (
segmentPrefix = string(sampleMustGunzip(segmentPrefixGzip))
segmentBody = string(sampleMustGunzip(segmentBodyGzip))
segmentSuffix = string(sampleMustGunzip(segmentSuffixGzip))
segmentWant = sampleSplitPaths(sampleMustGunzip(segmentWantGzip))
segmentEarly = sampleSplitPaths(sampleMustGunzip(segmentEarlyGzip))
segmentBody = `instantiated 'config.sub-948ae97' -> '/nix/store/nbsdqpfzh1jlpmh95s69b3iivfcvv3lh-config.sub-948ae97.drv'`
segmentSuffix = `
performing daemon worker op: 7
instantiated 'gnu-config-2024-01-01' -> '/nix/store/ysp83x9nrks28zkblqmnc1s1kb68dr69-gnu-config-2024-01-01.drv'
performing daemon worker op: 7
instantiated 'bootstrap-stage0-stdenv-linux' -> '/nix/store/3zilrlmq7r6rpzfd94mwss32b62yinj5-bootstrap-stage0-stdenv-linux.drv'
performing daemon worker op: 7
instantiated 'update-autotools-gnu-config-scripts-hook' -> '/nix/store/7yfwy95p6lcdpljdajs5aw10h6q0sfx0-update-autotools-gnu-config-scripts-hook.drv'
source path '/nix/store/vdzlppvrdkz9rv14q4j02g9kpjbww2ww-source/pkgs/build-support/bintools-wrapper/ld-wrapper.sh' is uncacheable
copying '/nix/store/vdzlppvrdkz9rv14q4j02g9kpjbww2ww-source/pkgs/build-support/bintools-wrapper/ld-wrapper.sh' to the store...`
//go:embed testdata/segment/prefix.gz
segmentPrefixGzip []byte
//go:embed testdata/segment/body.gz
segmentBodyGzip []byte
//go:embed testdata/segment/suffix.gz
segmentSuffixGzip []byte
//go:embed testdata/segment/want.gz
segmentWantGzip []byte
//go:embed testdata/segment/early.gz
segmentEarlyGzip []byte
)
func sampleSplitPaths(s string) []string { return strings.Split(strings.TrimSpace(s), "\n") }
func sampleSplitPaths(data []byte) []string {
return strings.Split(strings.TrimSpace(string(data)), "\n")
}
func sampleMustGunzip(data []byte) []byte {
var u []byte
if r, err := gzip.NewReader(bytes.NewReader(data)); err != nil {
panic(err.Error())
} else if u, err = io.ReadAll(r); err != nil {
panic(err.Error())
}
return u
}
// github:getchoo/borealis#atlas
var (
getchooAtlasOut = string(sampleMustGunzip(getchooAtlasOutGzip))
//go:embed testdata/getchoo_atlas.gz
getchooAtlasOutGzip []byte
getchooAtlasInstantiated = sampleSplitPaths(sampleMustGunzip(getchooAtlasInstantiatedGzip))
//go:embed testdata/instantiated/getchoo_atlas.gz
getchooAtlasInstantiatedGzip []byte
getchooAtlasShow = sampleMustGunzip(getchooAtlasShowGzip)
//go:embed testdata/derivation/show_getchoo_atlas.json.gz
getchooAtlasShowGzip []byte
getchooAtlasCollective = sampleSplitPaths(sampleMustGunzip(getchooAtlasCollectiveGzip))
//go:embed testdata/derivation/collect_getchoo_atlas.gz
getchooAtlasCollectiveGzip []byte
getchooAtlasStdin = string(sampleMustGunzip(getchooAtlasStdinGzip))
//go:embed testdata/format/stdin_getchoo_atlas.gz
getchooAtlasStdinGzip []byte
)
// github:getchoo/borealis#glados
var (
getchooGladosOut = string(sampleMustGunzip(getchooGladosOutGzip))
//go:embed testdata/getchoo_glados.gz
getchooGladosOutGzip []byte
getchooGladosInstantiated = sampleSplitPaths(sampleMustGunzip(getchooGladosInstantiatedGzip))
//go:embed testdata/instantiated/getchoo_glados.gz
getchooGladosInstantiatedGzip []byte
getchooGladosShow = sampleMustGunzip(getchooGladosShowGzip)
//go:embed testdata/derivation/show_getchoo_glados.json.gz
getchooGladosShowGzip []byte
getchooGladosCollective = sampleSplitPaths(sampleMustGunzip(getchooGladosCollectiveGzip))
//go:embed testdata/derivation/collect_getchoo_glados.gz
getchooGladosCollectiveGzip []byte
getchooGladosStdin = string(sampleMustGunzip(getchooGladosStdinGzip))
//go:embed testdata/format/stdin_getchoo_glados.gz
getchooGladosStdinGzip []byte
)
// git+https://tangled.sh/@pluie.me/flake#pappardelle
var (
pluiedevPappardelleOut = string(sampleMustGunzip(pluiedevPappardelleGzip))
//go:embed testdata/pluiedev_pappardelle.gz
pluiedevPappardelleGzip []byte
pluiedevPappardelleInstantiated = sampleSplitPaths(sampleMustGunzip(pluiedevPappardelleInstantiatedGzip))
//go:embed testdata/instantiated/pluiedev_pappardelle.gz
pluiedevPappardelleInstantiatedGzip []byte
pluiedevPappardelleShow = sampleMustGunzip(pluiedevPappardelleShowGzip)
//go:embed testdata/derivation/show_pluiedev_pappardelle.json.gz
pluiedevPappardelleShowGzip []byte
pluiedevPappardelleCollective = sampleSplitPaths(sampleMustGunzip(pluiedevPappardelleCollectiveGzip))
//go:embed testdata/derivation/collect_pluiedev_pappardelle.gz
pluiedevPappardelleCollectiveGzip []byte
pluiedevPappardelleStdin = string(sampleMustGunzip(pluiedevPappardelleStdinGzip))
//go:embed testdata/format/stdin_pluiedev_pappardelle.gz
pluiedevPappardelleStdinGzip []byte
)
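
Fixtures are now embedded as gzip streams and inflated once at package init through sampleMustGunzip. A hypothetical helper for regenerating such a fixture, not part of the repository, using only bytes, compress/gzip, and os:

// gzipFixture writes the gzip form of a raw testdata file, producing the
// .gz layout the //go:embed directives above expect. Hypothetical helper.
func gzipFixture(src, dst string) error {
	raw, err := os.ReadFile(src)
	if err != nil {
		return err
	}
	var buf bytes.Buffer
	w := gzip.NewWriter(&buf)
	if _, err = w.Write(raw); err != nil {
		return err
	}
	if err = w.Close(); err != nil {
		return err
	}
	return os.WriteFile(dst, buf.Bytes(), 0o644)
}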

56
store.go Normal file

@ -0,0 +1,56 @@
package nix
import (
"fmt"
"strings"
)
type Store interface {
// Environ returns extra environment variables specified by Store.
Environ() []string
fmt.Stringer
}
// Local points to a local filesystem path containing a nix store.
type Local string
func (Local) Environ() []string { return nil }
func (store Local) String() string { return string(store) }
const (
EnvAwsSharedCredentialsFile = "AWS_SHARED_CREDENTIALS_FILE"
)
// A BinaryCache holds credentials and parameters to an s3 binary cache.
type BinaryCache struct {
// Compression is the name of the compression algorithm to use. Example: "zstd".
Compression string `json:"compression"`
// ParallelCompression determines whether parallel compression is enabled.
ParallelCompression bool `json:"parallel_compression,omitempty"`
// Bucket is the s3 bucket name.
Bucket string `json:"bucket"`
// Endpoint is the s3 endpoint. Example: "s3.example.org".
Endpoint string `json:"endpoint,omitempty"`
// Region is the s3 region. Example: "ap-northeast-1".
Region string `json:"region"`
// Scheme is the s3 protocol. Example: "https".
Scheme string `json:"scheme"`
// CredentialsPath is the path to the s3 shared credentials file.
CredentialsPath string `json:"credentials_path"`
// KeyPath is the path to the nix secret key for signing all newly copied paths.
KeyPath string `json:"key_path"`
}
func (store *BinaryCache) Environ() []string {
return []string{EnvAwsSharedCredentialsFile + "=" + strings.TrimSpace(store.CredentialsPath)}
}
func (store *BinaryCache) String() string {
return fmt.Sprintf(
"s3://%s?compression=%s&parallel-compression=%t&region=%s&scheme=%s&endpoint=%s&secret-key=%s",
store.Bucket, store.Compression, store.ParallelCompression, store.Region, store.Scheme, store.Endpoint, store.KeyPath,
)
}
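
Store is deliberately small: Environ supplies extra process environment and fmt.Stringer yields the store URL, so anything satisfying both can be handed to New or Copy. A sketch of a hypothetical SSH-backed store, not part of this change:

// sshStore is a hypothetical Store addressing a remote daemon over SSH.
type sshStore string

func (sshStore) Environ() []string { return nil }
func (s sshStore) String() string  { return "ssh-ng://" + string(s) }

// usage: nix.New(context.Background(), sshStore("builder.example.org"), nil, os.Stdout, nil)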

57
store_test.go Normal file

@ -0,0 +1,57 @@
package nix_test
import (
"testing"
"gensokyo.uk/nix"
)
func TestLocal(t *testing.T) {
if got := nix.Local("/").String(); got != "/" {
t.Errorf("String: %v, want %v", got, "/")
}
if got := nix.Local("").Environ(); got != nil {
t.Errorf("Environ: %v, want %v", got, nil)
}
}
func TestBinaryCache(t *testing.T) {
testCases := []struct {
name string
store *nix.BinaryCache
want string
wantEnv []string
}{
{"example", &nix.BinaryCache{
Compression: "none",
ParallelCompression: false,
Bucket: "example",
Endpoint: "s3.example.org",
Region: "us-east-1",
Scheme: "http",
CredentialsPath: "/dev/null",
KeyPath: nonexistent,
}, "s3://example?compression=none&parallel-compression=false&region=us-east-1&scheme=http&endpoint=s3.example.org&secret-key=/proc/nonexistent",
[]string{nix.EnvAwsSharedCredentialsFile + "=/dev/null"}},
{"gensokyo", &nix.BinaryCache{
Compression: "zstd",
ParallelCompression: true,
Bucket: "nix-cache",
Endpoint: "s3.gensokyo.uk",
Region: "ap-northeast-1",
Scheme: "https",
CredentialsPath: "/var/lib/persist/cache/s3",
KeyPath: "/var/lib/persist/cache/key",
}, "s3://nix-cache?compression=zstd&parallel-compression=true&region=ap-northeast-1&scheme=https&endpoint=s3.gensokyo.uk&secret-key=/var/lib/persist/cache/key",
[]string{nix.EnvAwsSharedCredentialsFile + "=/var/lib/persist/cache/s3"}},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
if got := tc.store.String(); got != tc.want {
t.Errorf("String: %q, want %q", got, tc.want)
}
})
}
}

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because it is too large

BIN
testdata/format/stdin_getchoo_atlas.gz vendored Normal file

Binary file not shown.

File diff suppressed because it is too large

BIN
testdata/format/stdin_getchoo_glados.gz vendored Normal file

Binary file not shown.

File diff suppressed because it is too large

Binary file not shown.

37432
testdata/getchoo_atlas vendored

File diff suppressed because it is too large

BIN
testdata/getchoo_atlas.gz vendored Normal file

Binary file not shown.

84832
testdata/getchoo_glados vendored

File diff suppressed because it is too large

BIN
testdata/getchoo_glados.gz vendored Normal file

Binary file not shown.

File diff suppressed because it is too large

BIN
testdata/instantiated/getchoo_atlas.gz vendored Normal file

Binary file not shown.

File diff suppressed because it is too large

BIN
testdata/instantiated/getchoo_glados.gz vendored Normal file

Binary file not shown.

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because it is too large

BIN
testdata/pluiedev_pappardelle.gz vendored Normal file

Binary file not shown.

BIN
testdata/segment/body.gz vendored Normal file

Binary file not shown.

BIN
testdata/segment/early.gz vendored Normal file

Binary file not shown.

BIN
testdata/segment/prefix.gz vendored Normal file

Binary file not shown.

BIN
testdata/segment/suffix.gz vendored Normal file

Binary file not shown.

BIN
testdata/segment/want.gz vendored Normal file

Binary file not shown.