hakurei/internal/pkg/ir_test.go
Ophestra e0c720681b internal/pkg: standardise artifact IR
This should provide a clean separation between the artifact curing backend implementation and the (still work-in-progress) language. Making the IR parseable also guarantees that the representation is unique.

Signed-off-by: Ophestra <cat@gensokyo.uk>
2026-02-05 08:24:09 +09:00

package pkg_test

import (
	"bytes"
	"io"
	"reflect"
	"testing"

	"hakurei.app/container/check"
	"hakurei.app/internal/pkg"
)

func TestIRRoundtrip(t *testing.T) {
	t.Parallel()
	testCases := []struct {
		name string
		a    pkg.Artifact
	}{
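		// HTTP fetch artifacts, with and without tar extraction.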
{"http get aligned", pkg.NewHTTPGet(
nil, "file:///testdata",
pkg.Checksum(bytes.Repeat([]byte{0xfd}, len(pkg.Checksum{}))),
)},
{"http get unaligned", pkg.NewHTTPGet(
nil, "https://hakurei.app",
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
)},
{"http get tar", pkg.NewHTTPGetTar(
nil, "file:///testdata",
pkg.Checksum(bytes.Repeat([]byte{0xff}, len(pkg.Checksum{}))),
pkg.TarBzip2,
)},
{"http get tar unaligned", pkg.NewHTTPGetTar(
nil, "https://hakurei.app",
pkg.Checksum(bytes.Repeat([]byte{0xfe}, len(pkg.Checksum{}))),
pkg.TarUncompressed,
)},
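		// Exec artifacts combining environment, working directory, argv and
		// nested path artifacts.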
{"exec offline", pkg.NewExec(
"exec-offline", nil, 0, false,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1"},
check.MustAbs("/opt/bin/testtool"),
[]string{"testtool"},
pkg.MustPath("/file", false, pkg.NewFile("file", []byte(
"stub file",
))), pkg.MustPath("/.hakurei", false, pkg.NewHTTPGetTar(
nil, "file:///hakurei.tar",
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
pkg.TarUncompressed,
)), pkg.MustPath("/opt", false, pkg.NewHTTPGetTar(
nil, "file:///testtool.tar.gz",
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
pkg.TarGzip,
)),
)},
{"exec net", pkg.NewExec(
"exec-net",
(*pkg.Checksum)(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
0, false,
pkg.AbsWork,
[]string{"HAKUREI_TEST=1"},
check.MustAbs("/opt/bin/testtool"),
[]string{"testtool", "net"},
pkg.MustPath("/file", false, pkg.NewFile("file", []byte(
"stub file",
))), pkg.MustPath("/.hakurei", false, pkg.NewHTTPGetTar(
nil, "file:///hakurei.tar",
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
pkg.TarUncompressed,
)), pkg.MustPath("/opt", false, pkg.NewHTTPGetTar(
nil, "file:///testtool.tar.gz",
pkg.Checksum(bytes.Repeat([]byte{0xfc}, len(pkg.Checksum{}))),
pkg.TarGzip,
)),
)},
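		// Inline file artifacts, anonymous and named.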
{"file anonymous", pkg.NewFile("", []byte{0})},
{"file", pkg.NewFile("stub", []byte("stub"))},
}
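	// Wrap each artifact in a cache test case that encodes it through an
	// in-memory pipe, decodes it back, and requires a deeply equal result.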
	testCasesCache := make([]cacheTestCase, len(testCases))
	for i, tc := range testCases {
		want := tc.a
		testCasesCache[i] = cacheTestCase{tc.name, nil,
			func(t *testing.T, base *check.Absolute, c *pkg.Cache) {
				r, w := io.Pipe()
				done := make(chan error, 1)
				go func() {
					t.Helper()
					done <- c.EncodeAll(w, want)
					_ = w.Close()
				}()
				if got, err := c.NewDecoder(r).Decode(); err != nil {
					t.Fatalf("Decode: error = %v", err)
				} else if !reflect.DeepEqual(got, want) {
					t.Fatalf("Decode: %#v, want %#v", got, want)
				}
				if err := <-done; err != nil {
					t.Fatalf("EncodeAll: error = %v", err)
				}
			}, pkg.MustDecode(
				"E4vEZKhCcL2gPZ2Tt59FS3lDng-d_2SKa2i5G_RbDfwGn6EemptFaGLPUDiOa94C",
			),
		}
	}
	checkWithCache(t, testCasesCache)
}