forked from rosa/hakurei
Compare commits
54 Commits
develop
...
pkgserver-
| Author | SHA1 | Date | |
|---|---|---|---|
|
6f78444b11
|
|||
|
2a3f6f5384
|
|||
|
ef8663461b
|
|||
|
0b3be27b9a
|
|||
|
61a25c88ae
|
|||
| c7e195fe64 | |||
| d5db9add98 | |||
| ab8abdc82b | |||
| 770fd46510 | |||
| 99f1c6aab4 | |||
| 9ee629d402 | |||
| f475dde8b9 | |||
| c43a0c41b6 | |||
| 55827f1a85 | |||
| 721bdddfa1 | |||
| fb18e599dd | |||
| ec9005c794 | |||
| c6d35b4003 | |||
| 6401533cc2 | |||
| 5d6c401beb | |||
| 0a2d6aec14 | |||
| 67b11335d6 | |||
| ef3bd1b60a | |||
| beae7c89db | |||
| ed26d1a1c2 | |||
| faa0006d47 | |||
| 796ddbc977 | |||
| 98ab020160 | |||
| 26a346036d | |||
| 4ac9c72132 | |||
| c39c07d440 | |||
| b3fa0fe271 | |||
| 92a90582bb | |||
| 2e5ac56bdf | |||
| 75133e0234 | |||
| c120d4de4f | |||
| d6af8edb4a | |||
| da25d609d5 | |||
| 95ceed0de0 | |||
|
74c213264a
|
|||
|
345cffddc2
|
|||
|
49163758c8
|
|||
|
ad22c15fb1
|
|||
|
9c774f7e0a
|
|||
|
707f0a349f
|
|||
|
7c35be066a
|
|||
|
f91d55fa5e
|
|||
|
5862cc1966
|
|||
|
b3f0360a05
|
|||
|
8938994036
|
|||
|
96d382f805
|
|||
|
5c785c135c
|
|||
|
0130f8ea6d
|
|||
|
faac5c4a83
|
4
.gitignore
vendored
4
.gitignore
vendored
@@ -27,6 +27,10 @@ go.work.sum
|
|||||||
|
|
||||||
# go generate
|
# go generate
|
||||||
/cmd/hakurei/LICENSE
|
/cmd/hakurei/LICENSE
|
||||||
|
/cmd/pkgserver/.sass-cache
|
||||||
|
/cmd/pkgserver/ui/static/*.js
|
||||||
|
/cmd/pkgserver/ui/static/*.css*
|
||||||
|
/cmd/pkgserver/ui/static/*.css.map
|
||||||
/internal/pkg/testdata/testtool
|
/internal/pkg/testdata/testtool
|
||||||
/internal/rosa/hakurei_current.tar.gz
|
/internal/rosa/hakurei_current.tar.gz
|
||||||
|
|
||||||
|
|||||||
@@ -175,6 +175,17 @@ func main() {
|
|||||||
fmt.Println("website : " +
|
fmt.Println("website : " +
|
||||||
strings.TrimSuffix(meta.Website, "/"))
|
strings.TrimSuffix(meta.Website, "/"))
|
||||||
}
|
}
|
||||||
|
if len(meta.Dependencies) > 0 {
|
||||||
|
fmt.Print("depends on :")
|
||||||
|
for _, d := range meta.Dependencies {
|
||||||
|
s := rosa.GetMetadata(d).Name
|
||||||
|
if version := rosa.Std.Version(d); version != rosa.Unversioned {
|
||||||
|
s += "-" + version
|
||||||
|
}
|
||||||
|
fmt.Print(" " + s)
|
||||||
|
}
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
|
||||||
const statusPrefix = "status : "
|
const statusPrefix = "status : "
|
||||||
if flagStatus {
|
if flagStatus {
|
||||||
@@ -424,6 +435,7 @@ func main() {
|
|||||||
{
|
{
|
||||||
var (
|
var (
|
||||||
flagDump string
|
flagDump string
|
||||||
|
flagExport string
|
||||||
)
|
)
|
||||||
c.NewCommand(
|
c.NewCommand(
|
||||||
"cure",
|
"cure",
|
||||||
@@ -436,10 +448,34 @@ func main() {
|
|||||||
return fmt.Errorf("unknown artifact %q", args[0])
|
return fmt.Errorf("unknown artifact %q", args[0])
|
||||||
} else if flagDump == "" {
|
} else if flagDump == "" {
|
||||||
pathname, _, err := cache.Cure(rosa.Std.Load(p))
|
pathname, _, err := cache.Cure(rosa.Std.Load(p))
|
||||||
if err == nil {
|
if err != nil {
|
||||||
log.Println(pathname)
|
|
||||||
}
|
|
||||||
return err
|
return err
|
||||||
|
}
|
||||||
|
log.Println(pathname)
|
||||||
|
|
||||||
|
if flagExport != "" {
|
||||||
|
msg.Verbosef("exporting %s to %s...", args[0], flagExport)
|
||||||
|
|
||||||
|
var f *os.File
|
||||||
|
if f, err = os.OpenFile(
|
||||||
|
flagExport,
|
||||||
|
os.O_WRONLY|os.O_CREATE|os.O_EXCL,
|
||||||
|
0400,
|
||||||
|
); err != nil {
|
||||||
|
return err
|
||||||
|
} else if _, err = pkg.Flatten(
|
||||||
|
os.DirFS(pathname.String()),
|
||||||
|
".",
|
||||||
|
f,
|
||||||
|
); err != nil {
|
||||||
|
_ = f.Close()
|
||||||
|
return err
|
||||||
|
} else if err = f.Close(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
} else {
|
} else {
|
||||||
f, err := os.OpenFile(
|
f, err := os.OpenFile(
|
||||||
flagDump,
|
flagDump,
|
||||||
@@ -463,6 +499,11 @@ func main() {
|
|||||||
&flagDump,
|
&flagDump,
|
||||||
"dump", command.StringFlag(""),
|
"dump", command.StringFlag(""),
|
||||||
"Write IR to specified pathname and terminate",
|
"Write IR to specified pathname and terminate",
|
||||||
|
).
|
||||||
|
Flag(
|
||||||
|
&flagExport,
|
||||||
|
"export", command.StringFlag(""),
|
||||||
|
"Export cured artifact to specified pathname",
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -477,17 +518,19 @@ func main() {
|
|||||||
"shell",
|
"shell",
|
||||||
"Interactive shell in the specified Rosa OS environment",
|
"Interactive shell in the specified Rosa OS environment",
|
||||||
func(args []string) error {
|
func(args []string) error {
|
||||||
root := make([]pkg.Artifact, 0, 6+len(args))
|
presets := make([]rosa.PArtifact, len(args))
|
||||||
for _, arg := range args {
|
for i, arg := range args {
|
||||||
p, ok := rosa.ResolveName(arg)
|
p, ok := rosa.ResolveName(arg)
|
||||||
if !ok {
|
if !ok {
|
||||||
return fmt.Errorf("unknown artifact %q", arg)
|
return fmt.Errorf("unknown artifact %q", arg)
|
||||||
}
|
}
|
||||||
root = append(root, rosa.Std.Load(p))
|
presets[i] = p
|
||||||
}
|
}
|
||||||
|
root := make(rosa.Collect, 0, 6+len(args))
|
||||||
|
root = rosa.Std.AppendPresets(root, presets...)
|
||||||
|
|
||||||
if flagWithToolchain {
|
if flagWithToolchain {
|
||||||
musl, compilerRT, runtimes, clang := rosa.Std.NewLLVM()
|
musl, compilerRT, runtimes, clang := (rosa.Std - 1).NewLLVM()
|
||||||
root = append(root, musl, compilerRT, runtimes, clang)
|
root = append(root, musl, compilerRT, runtimes, clang)
|
||||||
} else {
|
} else {
|
||||||
root = append(root, rosa.Std.Load(rosa.Musl))
|
root = append(root, rosa.Std.Load(rosa.Musl))
|
||||||
@@ -497,6 +540,12 @@ func main() {
|
|||||||
rosa.Std.Load(rosa.Toybox),
|
rosa.Std.Load(rosa.Toybox),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if _, _, err := cache.Cure(&root); err == nil {
|
||||||
|
return errors.New("unreachable")
|
||||||
|
} else if !errors.Is(err, rosa.Collected{}) {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
type cureRes struct {
|
type cureRes struct {
|
||||||
pathname *check.Absolute
|
pathname *check.Absolute
|
||||||
checksum unique.Handle[pkg.Checksum]
|
checksum unique.Handle[pkg.Checksum]
|
||||||
|
|||||||
176
cmd/pkgserver/api.go
Normal file
176
cmd/pkgserver/api.go
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"path"
|
||||||
|
"strconv"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"hakurei.app/internal/info"
|
||||||
|
"hakurei.app/internal/rosa"
|
||||||
|
)
|
||||||
|
|
||||||
|
// for lazy initialisation of serveInfo
|
||||||
|
var (
|
||||||
|
infoPayload struct {
|
||||||
|
// Current package count.
|
||||||
|
Count int `json:"count"`
|
||||||
|
// Hakurei version, set at link time.
|
||||||
|
HakureiVersion string `json:"hakurei_version"`
|
||||||
|
}
|
||||||
|
infoPayloadOnce sync.Once
|
||||||
|
)
|
||||||
|
|
||||||
|
// handleInfo writes constant system information.
|
||||||
|
func handleInfo(w http.ResponseWriter, _ *http.Request) {
|
||||||
|
infoPayloadOnce.Do(func() {
|
||||||
|
infoPayload.Count = int(rosa.PresetUnexportedStart)
|
||||||
|
infoPayload.HakureiVersion = info.Version()
|
||||||
|
})
|
||||||
|
// TODO(mae): cache entire response if no additional fields are planned
|
||||||
|
writeAPIPayload(w, infoPayload)
|
||||||
|
}
|
||||||
|
|
||||||
|
// newStatusHandler returns a [http.HandlerFunc] that offers status files for
|
||||||
|
// viewing or download, if available.
|
||||||
|
func (index *packageIndex) newStatusHandler(disposition bool) http.HandlerFunc {
|
||||||
|
return func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
m, ok := index.names[path.Base(r.URL.Path)]
|
||||||
|
if !ok || !m.HasReport {
|
||||||
|
http.NotFound(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
contentType := "text/plain; charset=utf-8"
|
||||||
|
if disposition {
|
||||||
|
contentType = "application/octet-stream"
|
||||||
|
|
||||||
|
// quoting like this is unsound, but okay, because metadata is hardcoded
|
||||||
|
contentDisposition := `attachment; filename="`
|
||||||
|
contentDisposition += m.Name + "-"
|
||||||
|
if m.Version != "" {
|
||||||
|
contentDisposition += m.Version + "-"
|
||||||
|
}
|
||||||
|
contentDisposition += m.ids + `.log"`
|
||||||
|
w.Header().Set("Content-Disposition", contentDisposition)
|
||||||
|
}
|
||||||
|
w.Header().Set("Content-Type", contentType)
|
||||||
|
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||||
|
if err := func() (err error) {
|
||||||
|
defer index.handleAccess(&err)()
|
||||||
|
_, err = w.Write(m.status)
|
||||||
|
return
|
||||||
|
}(); err != nil {
|
||||||
|
log.Println(err)
|
||||||
|
http.Error(
|
||||||
|
w, "cannot deliver status, contact maintainers",
|
||||||
|
http.StatusInternalServerError,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleGet writes a slice of metadata with specified order.
|
||||||
|
func (index *packageIndex) handleGet(w http.ResponseWriter, r *http.Request) {
|
||||||
|
q := r.URL.Query()
|
||||||
|
limit, err := strconv.Atoi(q.Get("limit"))
|
||||||
|
if err != nil || limit > 100 || limit < 1 {
|
||||||
|
http.Error(
|
||||||
|
w, "limit must be an integer between 1 and 100",
|
||||||
|
http.StatusBadRequest,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
i, err := strconv.Atoi(q.Get("index"))
|
||||||
|
if err != nil || i >= len(index.sorts[0]) || i < 0 {
|
||||||
|
http.Error(
|
||||||
|
w, "index must be an integer between 0 and "+
|
||||||
|
strconv.Itoa(int(rosa.PresetUnexportedStart-1)),
|
||||||
|
http.StatusBadRequest,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
sort, err := strconv.Atoi(q.Get("sort"))
|
||||||
|
if err != nil || sort >= len(index.sorts) || sort < 0 {
|
||||||
|
http.Error(
|
||||||
|
w, "sort must be an integer between 0 and "+
|
||||||
|
strconv.Itoa(sortOrderEnd),
|
||||||
|
http.StatusBadRequest,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
values := index.sorts[sort][i:min(i+limit, len(index.sorts[sort]))]
|
||||||
|
writeAPIPayload(w, &struct {
|
||||||
|
Values []*metadata `json:"values"`
|
||||||
|
}{values})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (index *packageIndex) handleSearch(w http.ResponseWriter, r *http.Request) {
|
||||||
|
q := r.URL.Query()
|
||||||
|
limit, err := strconv.Atoi(q.Get("limit"))
|
||||||
|
if err != nil || limit > 100 || limit < 1 {
|
||||||
|
http.Error(
|
||||||
|
w, "limit must be an integer between 1 and 100",
|
||||||
|
http.StatusBadRequest,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
i, err := strconv.Atoi(q.Get("index"))
|
||||||
|
if err != nil || i >= len(index.sorts[0]) || i < 0 {
|
||||||
|
http.Error(
|
||||||
|
w, "index must be an integer between 0 and "+
|
||||||
|
strconv.Itoa(int(rosa.PresetUnexportedStart-1)),
|
||||||
|
http.StatusBadRequest,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
search, err := url.PathUnescape(q.Get("search"))
|
||||||
|
if len(search) > 100 || err != nil {
|
||||||
|
http.Error(
|
||||||
|
w, "search must be a string between 0 and 100 characters long",
|
||||||
|
http.StatusBadRequest,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
desc := q.Get("desc") == "true"
|
||||||
|
n, res, err := index.performSearchQuery(limit, i, search, desc)
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
writeAPIPayload(w, &struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
Results []searchResult `json:"results"`
|
||||||
|
}{n, res})
|
||||||
|
}
|
||||||
|
|
||||||
|
// apiVersion is the name of the current API revision, as part of the pattern.
|
||||||
|
const apiVersion = "v1"
|
||||||
|
|
||||||
|
// registerAPI registers API handler functions.
|
||||||
|
func (index *packageIndex) registerAPI(mux *http.ServeMux) {
|
||||||
|
mux.HandleFunc("GET /api/"+apiVersion+"/info", handleInfo)
|
||||||
|
mux.HandleFunc("GET /api/"+apiVersion+"/get", index.handleGet)
|
||||||
|
mux.HandleFunc("GET /api/"+apiVersion+"/search", index.handleSearch)
|
||||||
|
mux.HandleFunc("GET /api/"+apiVersion+"/status/", index.newStatusHandler(false))
|
||||||
|
mux.HandleFunc("GET /status/", index.newStatusHandler(true))
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeAPIPayload sets headers common to API responses and encodes payload as
|
||||||
|
// JSON for the response body.
|
||||||
|
func writeAPIPayload(w http.ResponseWriter, payload any) {
|
||||||
|
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||||
|
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||||
|
w.Header().Set("Pragma", "no-cache")
|
||||||
|
w.Header().Set("Expires", "0")
|
||||||
|
|
||||||
|
if err := json.NewEncoder(w).Encode(payload); err != nil {
|
||||||
|
log.Println(err)
|
||||||
|
http.Error(
|
||||||
|
w, "cannot encode payload, contact maintainers",
|
||||||
|
http.StatusInternalServerError,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
183
cmd/pkgserver/api_test.go
Normal file
183
cmd/pkgserver/api_test.go
Normal file
@@ -0,0 +1,183 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"slices"
|
||||||
|
"strconv"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"hakurei.app/internal/info"
|
||||||
|
"hakurei.app/internal/rosa"
|
||||||
|
)
|
||||||
|
|
||||||
|
// prefix is prepended to every API path.
|
||||||
|
const prefix = "/api/" + apiVersion + "/"
|
||||||
|
|
||||||
|
func TestAPIInfo(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
handleInfo(w, httptest.NewRequestWithContext(
|
||||||
|
t.Context(),
|
||||||
|
http.MethodGet,
|
||||||
|
prefix+"info",
|
||||||
|
nil,
|
||||||
|
))
|
||||||
|
|
||||||
|
resp := w.Result()
|
||||||
|
checkStatus(t, resp, http.StatusOK)
|
||||||
|
checkAPIHeader(t, w.Header())
|
||||||
|
|
||||||
|
checkPayload(t, resp, struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
HakureiVersion string `json:"hakurei_version"`
|
||||||
|
}{int(rosa.PresetUnexportedStart), info.Version()})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAPIGet(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
const target = prefix + "get"
|
||||||
|
|
||||||
|
index := newIndex(t)
|
||||||
|
newRequest := func(suffix string) *httptest.ResponseRecorder {
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
index.handleGet(w, httptest.NewRequestWithContext(
|
||||||
|
t.Context(),
|
||||||
|
http.MethodGet,
|
||||||
|
target+suffix,
|
||||||
|
nil,
|
||||||
|
))
|
||||||
|
return w
|
||||||
|
}
|
||||||
|
|
||||||
|
checkValidate := func(t *testing.T, suffix string, vmin, vmax int, wantErr string) {
|
||||||
|
t.Run("invalid", func(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
w := newRequest("?" + suffix + "=invalid")
|
||||||
|
resp := w.Result()
|
||||||
|
checkError(t, resp, wantErr, http.StatusBadRequest)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("min", func(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
w := newRequest("?" + suffix + "=" + strconv.Itoa(vmin-1))
|
||||||
|
resp := w.Result()
|
||||||
|
checkError(t, resp, wantErr, http.StatusBadRequest)
|
||||||
|
|
||||||
|
w = newRequest("?" + suffix + "=" + strconv.Itoa(vmin))
|
||||||
|
resp = w.Result()
|
||||||
|
checkStatus(t, resp, http.StatusOK)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("max", func(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
w := newRequest("?" + suffix + "=" + strconv.Itoa(vmax+1))
|
||||||
|
resp := w.Result()
|
||||||
|
checkError(t, resp, wantErr, http.StatusBadRequest)
|
||||||
|
|
||||||
|
w = newRequest("?" + suffix + "=" + strconv.Itoa(vmax))
|
||||||
|
resp = w.Result()
|
||||||
|
checkStatus(t, resp, http.StatusOK)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Run("limit", func(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
checkValidate(
|
||||||
|
t, "index=0&sort=0&limit", 1, 100,
|
||||||
|
"limit must be an integer between 1 and 100",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("index", func(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
checkValidate(
|
||||||
|
t, "limit=1&sort=0&index", 0, int(rosa.PresetUnexportedStart-1),
|
||||||
|
"index must be an integer between 0 and "+strconv.Itoa(int(rosa.PresetUnexportedStart-1)),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("sort", func(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
checkValidate(
|
||||||
|
t, "index=0&limit=1&sort", 0, int(sortOrderEnd),
|
||||||
|
"sort must be an integer between 0 and "+strconv.Itoa(int(sortOrderEnd)),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
checkWithSuffix := func(name, suffix string, want []*metadata) {
|
||||||
|
t.Run(name, func(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
w := newRequest(suffix)
|
||||||
|
resp := w.Result()
|
||||||
|
checkStatus(t, resp, http.StatusOK)
|
||||||
|
checkAPIHeader(t, w.Header())
|
||||||
|
checkPayloadFunc(t, resp, func(got *struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
Values []*metadata `json:"values"`
|
||||||
|
}) bool {
|
||||||
|
return got.Count == len(want) &&
|
||||||
|
slices.EqualFunc(got.Values, want, func(a, b *metadata) bool {
|
||||||
|
return (a.Version == b.Version ||
|
||||||
|
a.Version == rosa.Unversioned ||
|
||||||
|
b.Version == rosa.Unversioned) &&
|
||||||
|
a.HasReport == b.HasReport &&
|
||||||
|
a.Name == b.Name &&
|
||||||
|
a.Description == b.Description &&
|
||||||
|
a.Website == b.Website
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
checkWithSuffix("declarationAscending", "?limit=2&index=0&sort=0", []*metadata{
|
||||||
|
{
|
||||||
|
Metadata: rosa.GetMetadata(0),
|
||||||
|
Version: rosa.Std.Version(0),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Metadata: rosa.GetMetadata(1),
|
||||||
|
Version: rosa.Std.Version(1),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
checkWithSuffix("declarationAscending offset", "?limit=3&index=5&sort=0", []*metadata{
|
||||||
|
{
|
||||||
|
Metadata: rosa.GetMetadata(5),
|
||||||
|
Version: rosa.Std.Version(5),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Metadata: rosa.GetMetadata(6),
|
||||||
|
Version: rosa.Std.Version(6),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Metadata: rosa.GetMetadata(7),
|
||||||
|
Version: rosa.Std.Version(7),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
checkWithSuffix("declarationDescending", "?limit=3&index=0&sort=1", []*metadata{
|
||||||
|
{
|
||||||
|
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 1),
|
||||||
|
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 1),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 2),
|
||||||
|
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 2),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 3),
|
||||||
|
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 3),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
checkWithSuffix("declarationDescending offset", "?limit=1&index=37&sort=1", []*metadata{
|
||||||
|
{
|
||||||
|
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 38),
|
||||||
|
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 38),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
105
cmd/pkgserver/index.go
Normal file
105
cmd/pkgserver/index.go
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"cmp"
|
||||||
|
"errors"
|
||||||
|
"slices"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"hakurei.app/internal/pkg"
|
||||||
|
"hakurei.app/internal/rosa"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
declarationAscending = iota
|
||||||
|
declarationDescending
|
||||||
|
nameAscending
|
||||||
|
nameDescending
|
||||||
|
sizeAscending
|
||||||
|
sizeDescending
|
||||||
|
|
||||||
|
sortOrderEnd = iota - 1
|
||||||
|
)
|
||||||
|
|
||||||
|
// packageIndex refers to metadata by name and various sort orders.
|
||||||
|
type packageIndex struct {
|
||||||
|
sorts [sortOrderEnd + 1][rosa.PresetUnexportedStart]*metadata
|
||||||
|
names map[string]*metadata
|
||||||
|
search searchCache
|
||||||
|
// Taken from [rosa.Report] if available.
|
||||||
|
handleAccess func(*error) func()
|
||||||
|
}
|
||||||
|
|
||||||
|
// metadata holds [rosa.Metadata] extended with additional information.
|
||||||
|
type metadata struct {
|
||||||
|
p rosa.PArtifact
|
||||||
|
*rosa.Metadata
|
||||||
|
|
||||||
|
// Populated via [rosa.Toolchain.Version], [rosa.Unversioned] is equivalent
|
||||||
|
// to the zero value. Otherwise, the zero value is invalid.
|
||||||
|
Version string `json:"version,omitempty"`
|
||||||
|
// Output data size, available if present in report.
|
||||||
|
Size int64 `json:"size,omitempty"`
|
||||||
|
// Whether the underlying [pkg.Artifact] is present in the report.
|
||||||
|
HasReport bool `json:"report"`
|
||||||
|
|
||||||
|
// Ident string encoded ahead of time.
|
||||||
|
ids string
|
||||||
|
// Backed by [rosa.Report], access must be prepared by HandleAccess.
|
||||||
|
status []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
// populate deterministically populates packageIndex, optionally with a report.
|
||||||
|
func (index *packageIndex) populate(cache *pkg.Cache, report *rosa.Report) (err error) {
|
||||||
|
if report != nil {
|
||||||
|
defer report.HandleAccess(&err)()
|
||||||
|
index.handleAccess = report.HandleAccess
|
||||||
|
}
|
||||||
|
|
||||||
|
var work [rosa.PresetUnexportedStart]*metadata
|
||||||
|
index.names = make(map[string]*metadata)
|
||||||
|
for p := range rosa.PresetUnexportedStart {
|
||||||
|
m := metadata{
|
||||||
|
p: p,
|
||||||
|
|
||||||
|
Metadata: rosa.GetMetadata(p),
|
||||||
|
Version: rosa.Std.Version(p),
|
||||||
|
}
|
||||||
|
if m.Version == "" {
|
||||||
|
return errors.New("invalid version from " + m.Name)
|
||||||
|
}
|
||||||
|
if m.Version == rosa.Unversioned {
|
||||||
|
m.Version = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if cache != nil && report != nil {
|
||||||
|
id := cache.Ident(rosa.Std.Load(p))
|
||||||
|
m.ids = pkg.Encode(id.Value())
|
||||||
|
m.status, m.Size = report.ArtifactOf(id)
|
||||||
|
m.HasReport = m.Size >= 0
|
||||||
|
}
|
||||||
|
|
||||||
|
work[p] = &m
|
||||||
|
index.names[m.Name] = &m
|
||||||
|
}
|
||||||
|
|
||||||
|
index.sorts[declarationAscending] = work
|
||||||
|
index.sorts[declarationDescending] = work
|
||||||
|
slices.Reverse(index.sorts[declarationDescending][:])
|
||||||
|
|
||||||
|
index.sorts[nameAscending] = work
|
||||||
|
slices.SortFunc(index.sorts[nameAscending][:], func(a, b *metadata) int {
|
||||||
|
return strings.Compare(a.Name, b.Name)
|
||||||
|
})
|
||||||
|
index.sorts[nameDescending] = index.sorts[nameAscending]
|
||||||
|
slices.Reverse(index.sorts[nameDescending][:])
|
||||||
|
|
||||||
|
index.sorts[sizeAscending] = work
|
||||||
|
slices.SortFunc(index.sorts[sizeAscending][:], func(a, b *metadata) int {
|
||||||
|
return cmp.Compare(a.Size, b.Size)
|
||||||
|
})
|
||||||
|
index.sorts[sizeDescending] = index.sorts[sizeAscending]
|
||||||
|
slices.Reverse(index.sorts[sizeDescending][:])
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
114
cmd/pkgserver/main.go
Normal file
114
cmd/pkgserver/main.go
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"syscall"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"hakurei.app/command"
|
||||||
|
"hakurei.app/container/check"
|
||||||
|
"hakurei.app/internal/pkg"
|
||||||
|
"hakurei.app/internal/rosa"
|
||||||
|
"hakurei.app/message"
|
||||||
|
)
|
||||||
|
|
||||||
|
const shutdownTimeout = 15 * time.Second
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
log.SetFlags(0)
|
||||||
|
log.SetPrefix("pkgserver: ")
|
||||||
|
|
||||||
|
var (
|
||||||
|
flagBaseDir string
|
||||||
|
flagAddr string
|
||||||
|
)
|
||||||
|
|
||||||
|
ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM, syscall.SIGHUP)
|
||||||
|
defer stop()
|
||||||
|
msg := message.New(log.Default())
|
||||||
|
|
||||||
|
c := command.New(os.Stderr, log.Printf, "pkgserver", func(args []string) error {
|
||||||
|
var (
|
||||||
|
cache *pkg.Cache
|
||||||
|
report *rosa.Report
|
||||||
|
)
|
||||||
|
switch len(args) {
|
||||||
|
case 0:
|
||||||
|
break
|
||||||
|
|
||||||
|
case 1:
|
||||||
|
baseDir, err := check.NewAbs(flagBaseDir)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
cache, err = pkg.Open(ctx, msg, 0, baseDir)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer cache.Close()
|
||||||
|
|
||||||
|
report, err = rosa.OpenReport(args[0])
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
return errors.New("pkgserver requires 1 argument")
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
var index packageIndex
|
||||||
|
index.search = make(searchCache)
|
||||||
|
if err := index.populate(cache, report); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
ticker := time.NewTicker(1 * time.Minute)
|
||||||
|
go func() {
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
ticker.Stop()
|
||||||
|
return
|
||||||
|
case <-ticker.C:
|
||||||
|
index.search.clean()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
var mux http.ServeMux
|
||||||
|
uiRoutes(&mux)
|
||||||
|
index.registerAPI(&mux)
|
||||||
|
server := http.Server{
|
||||||
|
Addr: flagAddr,
|
||||||
|
Handler: &mux,
|
||||||
|
}
|
||||||
|
go func() {
|
||||||
|
<-ctx.Done()
|
||||||
|
c, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
|
||||||
|
defer cancel()
|
||||||
|
if err := server.Shutdown(c); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
return server.ListenAndServe()
|
||||||
|
}).Flag(
|
||||||
|
&flagBaseDir,
|
||||||
|
"b", command.StringFlag(""),
|
||||||
|
"base directory for cache",
|
||||||
|
).Flag(
|
||||||
|
&flagAddr,
|
||||||
|
"addr", command.StringFlag(":8067"),
|
||||||
|
"TCP network address to listen on",
|
||||||
|
)
|
||||||
|
c.MustParse(os.Args[1:], func(err error) {
|
||||||
|
if errors.Is(err, http.ErrServerClosed) {
|
||||||
|
os.Exit(0)
|
||||||
|
}
|
||||||
|
log.Fatal(err)
|
||||||
|
})
|
||||||
|
}
|
||||||
96
cmd/pkgserver/main_test.go
Normal file
96
cmd/pkgserver/main_test.go
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"reflect"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
// newIndex returns the address of a newly populated packageIndex.
|
||||||
|
func newIndex(t *testing.T) *packageIndex {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
var index packageIndex
|
||||||
|
if err := index.populate(nil, nil); err != nil {
|
||||||
|
t.Fatalf("populate: error = %v", err)
|
||||||
|
}
|
||||||
|
return &index
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkStatus checks response status code.
|
||||||
|
func checkStatus(t *testing.T, resp *http.Response, want int) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
if resp.StatusCode != want {
|
||||||
|
t.Errorf(
|
||||||
|
"StatusCode: %s, want %s",
|
||||||
|
http.StatusText(resp.StatusCode),
|
||||||
|
http.StatusText(want),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkHeader checks the value of a header entry.
|
||||||
|
func checkHeader(t *testing.T, h http.Header, key, want string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
if got := h.Get(key); got != want {
|
||||||
|
t.Errorf("%s: %q, want %q", key, got, want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkAPIHeader checks common entries set for API endpoints.
|
||||||
|
func checkAPIHeader(t *testing.T, h http.Header) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
checkHeader(t, h, "Content-Type", "application/json; charset=utf-8")
|
||||||
|
checkHeader(t, h, "Cache-Control", "no-cache, no-store, must-revalidate")
|
||||||
|
checkHeader(t, h, "Pragma", "no-cache")
|
||||||
|
checkHeader(t, h, "Expires", "0")
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkPayloadFunc checks the JSON response of an API endpoint by passing it to f.
|
||||||
|
func checkPayloadFunc[T any](
|
||||||
|
t *testing.T,
|
||||||
|
resp *http.Response,
|
||||||
|
f func(got *T) bool,
|
||||||
|
) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
var got T
|
||||||
|
r := io.Reader(resp.Body)
|
||||||
|
if testing.Verbose() {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
r = io.TeeReader(r, &buf)
|
||||||
|
defer func() { t.Helper(); t.Log(buf.String()) }()
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r).Decode(&got); err != nil {
|
||||||
|
t.Fatalf("Decode: error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !f(&got) {
|
||||||
|
t.Errorf("Body: %#v", got)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkPayload checks the JSON response of an API endpoint.
|
||||||
|
func checkPayload[T any](t *testing.T, resp *http.Response, want T) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
checkPayloadFunc(t, resp, func(got *T) bool {
|
||||||
|
return reflect.DeepEqual(got, &want)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func checkError(t *testing.T, resp *http.Response, error string, code int) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
checkStatus(t, resp, code)
|
||||||
|
if got, _ := io.ReadAll(resp.Body); string(got) != fmt.Sprintln(error) {
|
||||||
|
t.Errorf("Body: %q, want %q", string(got), error)
|
||||||
|
}
|
||||||
|
}
|
||||||
77
cmd/pkgserver/search.go
Normal file
77
cmd/pkgserver/search.go
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"cmp"
|
||||||
|
"maps"
|
||||||
|
"regexp"
|
||||||
|
"slices"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type searchCache map[string]searchCacheEntry
|
||||||
|
type searchResult struct {
|
||||||
|
NameIndices [][]int `json:"name_matches"`
|
||||||
|
DescIndices [][]int `json:"desc_matches,omitempty"`
|
||||||
|
Score float64 `json:"score"`
|
||||||
|
*metadata
|
||||||
|
}
|
||||||
|
type searchCacheEntry struct {
|
||||||
|
query string
|
||||||
|
results []searchResult
|
||||||
|
expiry time.Time
|
||||||
|
}
|
||||||
|
|
||||||
|
func (index *packageIndex) performSearchQuery(limit int, i int, search string, desc bool) (int, []searchResult, error) {
|
||||||
|
entry, ok := index.search[search]
|
||||||
|
if ok {
|
||||||
|
return len(entry.results), entry.results[i:min(i+limit, len(entry.results))], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
regex, err := regexp.Compile(search)
|
||||||
|
if err != nil {
|
||||||
|
return 0, make([]searchResult, 0), err
|
||||||
|
}
|
||||||
|
res := make([]searchResult, 0)
|
||||||
|
for p := range maps.Values(index.names) {
|
||||||
|
nameIndices := regex.FindAllIndex([]byte(p.Name), -1)
|
||||||
|
var descIndices [][]int = nil
|
||||||
|
if desc {
|
||||||
|
descIndices = regex.FindAllIndex([]byte(p.Description), -1)
|
||||||
|
}
|
||||||
|
if nameIndices == nil && descIndices == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
score := float64(indexsum(nameIndices)) / (float64(len(nameIndices)) + 1)
|
||||||
|
if desc {
|
||||||
|
score += float64(indexsum(descIndices)) / (float64(len(descIndices)) + 1) / 10.0
|
||||||
|
}
|
||||||
|
res = append(res, searchResult{
|
||||||
|
NameIndices: nameIndices,
|
||||||
|
DescIndices: descIndices,
|
||||||
|
Score: score,
|
||||||
|
metadata: p,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
slices.SortFunc(res[:], func(a, b searchResult) int { return -cmp.Compare(a.Score, b.Score) })
|
||||||
|
expiry := time.Now().Add(1 * time.Minute)
|
||||||
|
entry = searchCacheEntry{
|
||||||
|
query: search,
|
||||||
|
results: res,
|
||||||
|
expiry: expiry,
|
||||||
|
}
|
||||||
|
index.search[search] = entry
|
||||||
|
|
||||||
|
return len(res), res[i:min(i+limit, len(entry.results))], nil
|
||||||
|
}
|
||||||
|
func (s *searchCache) clean() {
|
||||||
|
maps.DeleteFunc(*s, func(_ string, v searchCacheEntry) bool {
|
||||||
|
return v.expiry.Before(time.Now())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
// indexsum returns the combined length of all match spans in in, where each
// element is a [start, end) pair as produced by regexp FindAllIndex.
func indexsum(in [][]int) int {
	var total int
	for _, span := range in {
		total += span[1] - span[0]
	}
	return total
}
|
||||||
48
cmd/pkgserver/ui.go
Normal file
48
cmd/pkgserver/ui.go
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import "net/http"
|
||||||
|
|
||||||
|
func serveWebUI(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||||
|
w.Header().Set("Pragma", "no-cache")
|
||||||
|
w.Header().Set("Expires", "0")
|
||||||
|
w.Header().Set("X-Content-Type-Options", "nosniff")
|
||||||
|
w.Header().Set("X-XSS-Protection", "1")
|
||||||
|
w.Header().Set("X-Frame-Options", "DENY")
|
||||||
|
|
||||||
|
http.ServeFileFS(w, r, content, "ui/index.html")
|
||||||
|
}
|
||||||
|
func serveStaticContent(w http.ResponseWriter, r *http.Request) {
|
||||||
|
switch r.URL.Path {
|
||||||
|
case "/static/style.css":
|
||||||
|
darkTheme := r.CookiesNamed("dark_theme")
|
||||||
|
if len(darkTheme) > 0 && darkTheme[0].Value == "true" {
|
||||||
|
http.ServeFileFS(w, r, content, "ui/static/dark.css")
|
||||||
|
} else {
|
||||||
|
http.ServeFileFS(w, r, content, "ui/static/light.css")
|
||||||
|
}
|
||||||
|
case "/favicon.ico":
|
||||||
|
http.ServeFileFS(w, r, content, "ui/static/favicon.ico")
|
||||||
|
case "/static/index.js":
|
||||||
|
http.ServeFileFS(w, r, content, "ui/static/index.js")
|
||||||
|
case "/static/test.js":
|
||||||
|
http.ServeFileFS(w, r, content, "ui/static/test.js")
|
||||||
|
case "/static/test.css":
|
||||||
|
http.ServeFileFS(w, r, content, "ui/static/test.css")
|
||||||
|
case "/static/test_tests.js":
|
||||||
|
http.ServeFileFS(w, r, content, "ui/static/test_tests.js")
|
||||||
|
default:
|
||||||
|
http.NotFound(w, r)
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// serveTester serves the in-browser test runner page.
func serveTester(w http.ResponseWriter, r *http.Request) {
	http.ServeFileFS(w, r, content, "ui/test.html")
}
|
||||||
|
|
||||||
|
// uiRoutes registers all web UI handlers on mux. Routes are GET-only;
// "GET /{$}" matches exactly "/" (Go 1.22 ServeMux pattern syntax).
func uiRoutes(mux *http.ServeMux) {
	mux.HandleFunc("GET /{$}", serveWebUI)
	mux.HandleFunc("GET /favicon.ico", serveStaticContent)
	mux.HandleFunc("GET /static/", serveStaticContent)
	mux.HandleFunc("GET /test.html", serveTester)
}
|
||||||
35
cmd/pkgserver/ui/index.html
Normal file
35
cmd/pkgserver/ui/index.html
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <link rel="stylesheet" href="static/style.css">
    <title>Hakurei PkgServer</title>
    <script src="static/index.js"></script>
</head>
<body>
<h1>Hakurei PkgServer</h1>

<table id="pkg-list">
    <tr><td>Loading...</td></tr>
</table>
<p>Showing entries <span id="entry-counter"></span>.</p>
<span class="bottom-nav"><a href="javascript:prevPage()">« Previous</a> <span id="page-number">1</span> <a href="javascript:nextPage()">Next »</a></span>
<span><label for="count">Entries per page: </label><select name="count" id="count">
    <option value="10">10</option>
    <option value="20">20</option>
    <option value="30">30</option>
    <option value="50">50</option>
</select></span>
<span><label for="sort">Sort by: </label><select name="sort" id="sort">
    <option value="0">Definition (ascending)</option>
    <option value="1">Definition (descending)</option>
    <option value="2">Name (ascending)</option>
    <option value="3">Name (descending)</option>
    <option value="4">Size (ascending)</option>
    <option value="5">Size (descending)</option>
</select></span>
<!-- The footer was previously placed after </body>; flow content is not
     permitted there, so it now lives at the end of the body. -->
<footer>
    <p>©<a href="https://hakurei.app/">Hakurei</a> (<span id="hakurei-version">unknown</span>). Licensed under the MIT license.</p>
</footer>
</body>
</html>
|
||||||
0
cmd/pkgserver/ui/static/_common.scss
Normal file
0
cmd/pkgserver/ui/static/_common.scss
Normal file
6
cmd/pkgserver/ui/static/dark.scss
Normal file
6
cmd/pkgserver/ui/static/dark.scss
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// Dark theme entry point; rules shared by both themes live in _common.scss.
@use 'common';

html {
    background-color: #2c2c2c;
    color: ghostwhite;
}
|
||||||
BIN
cmd/pkgserver/ui/static/favicon.ico
Normal file
BIN
cmd/pkgserver/ui/static/favicon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 17 KiB |
155
cmd/pkgserver/ui/static/index.ts
Normal file
155
cmd/pkgserver/ui/static/index.ts
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
// Shape of one package entry as returned by the /api/v1/get endpoint.
// Fields other than name and report may be null in the payload.
class PackageIndexEntry {
    name: string
    size: number | null
    description: string | null
    website: string | null
    version: string | null
    report: boolean
}
|
||||||
|
function toHTML(entry: PackageIndexEntry): HTMLTableRowElement {
|
||||||
|
let v = entry.version != null ? `<span>${escapeHtml(entry.version)}</span>` : ""
|
||||||
|
let s = entry.size != null ? `<p>Size: ${toByteSizeString(entry.size)} (${entry.size})</p>` : ""
|
||||||
|
let d = entry.description != null ? `<p>${escapeHtml(entry.description)}</p>` : ""
|
||||||
|
let w = entry.website != null ? `<a href="${encodeURI(entry.website)}">Website</a>` : ""
|
||||||
|
let r = entry.report ? `Log (<a href=\"${encodeURI('/api/v1/status/' + entry.name)}\">View</a> | <a href=\"${encodeURI('/status/' + entry.name)}\">Download</a>)` : ""
|
||||||
|
let row = <HTMLTableRowElement>(document.createElement('tr'))
|
||||||
|
row.innerHTML = `<td>
|
||||||
|
<h2>${escapeHtml(entry.name)} ${v}</h2>
|
||||||
|
${d}
|
||||||
|
${s}
|
||||||
|
${w}
|
||||||
|
${r}
|
||||||
|
</td>`
|
||||||
|
return row
|
||||||
|
}
|
||||||
|
|
||||||
|
function toByteSizeString(bytes: number): string {
|
||||||
|
if(bytes == null || bytes < 1024) return `${bytes}B`
|
||||||
|
if(bytes < Math.pow(1024, 2)) return `${(bytes/1024).toFixed(2)}kiB`
|
||||||
|
if(bytes < Math.pow(1024, 3)) return `${(bytes/Math.pow(1024, 2)).toFixed(2)}MiB`
|
||||||
|
if(bytes < Math.pow(1024, 4)) return `${(bytes/Math.pow(1024, 3)).toFixed(2)}GiB`
|
||||||
|
if(bytes < Math.pow(1024, 5)) return `${(bytes/Math.pow(1024, 4)).toFixed(2)}TiB`
|
||||||
|
return "not only is it big, it's large"
|
||||||
|
}
|
||||||
|
|
||||||
|
// REST API version and endpoint prefix shared by all requests below.
const API_VERSION = 1
const ENDPOINT = `/api/v${API_VERSION}`

// Response shape of GET /api/v1/info.
class InfoPayload {
    count: number
    hakurei_version: string
}

// infoRequest fetches server metadata: the total entry count and the
// hakurei version string shown in the footer.
async function infoRequest(): Promise<InfoPayload> {
    const res = await fetch(`${ENDPOINT}/info`)
    const payload = await res.json()
    return payload as InfoPayload
}
|
||||||
|
class GetPayload {
|
||||||
|
values: PackageIndexEntry[]
|
||||||
|
}
|
||||||
|
|
||||||
|
enum SortOrders {
|
||||||
|
DeclarationAscending,
|
||||||
|
DeclarationDescending,
|
||||||
|
NameAscending,
|
||||||
|
NameDescending
|
||||||
|
}
|
||||||
|
async function getRequest(limit: number, index: number, sort: SortOrders): Promise<GetPayload> {
|
||||||
|
const res = await fetch(`${ENDPOINT}/get?limit=${limit}&index=${index}&sort=${sort.valueOf()}`)
|
||||||
|
const payload = await res.json()
|
||||||
|
return payload as GetPayload
|
||||||
|
}
|
||||||
|
// State holds the pagination and sorting state of the package listing and
// keeps the DOM in sync. Every setter that changes the visible window goes
// through setEntryIndex, which re-renders page number, range, and listing.
class State {
    entriesPerPage: number = 10
    entryIndex: number = 0
    // maxEntries is the server-reported total; 0 until infoRequest resolves.
    maxEntries: number = 0
    sort: SortOrders = SortOrders.DeclarationAscending

    getEntriesPerPage(): number {
        return this.entriesPerPage
    }
    setEntriesPerPage(entriesPerPage: number) {
        this.entriesPerPage = entriesPerPage
        // Snap the current index to the start of its page under the new
        // page size, then re-render via setEntryIndex.
        this.setEntryIndex(Math.floor(this.getEntryIndex() / entriesPerPage) * entriesPerPage)
    }
    getEntryIndex(): number {
        return this.entryIndex
    }
    setEntryIndex(entryIndex: number) {
        this.entryIndex = entryIndex
        // Order matters only for display; updateListings fires the fetch.
        this.updatePage()
        this.updateRange()
        this.updateListings()
    }
    getMaxEntries(): number {
        return this.maxEntries
    }
    setMaxEntries(max: number) {
        this.maxEntries = max
    }
    getSortOrder(): SortOrders {
        return this.sort
    }
    setSortOrder(sortOrder: SortOrders) {
        this.sort = sortOrder
        // Changing the order restarts the listing at the first page.
        this.setEntryIndex(0)
    }
    // updatePage writes the 1-based page number derived from the entry index.
    updatePage() {
        let page = Math.ceil(((this.getEntryIndex() + this.getEntriesPerPage()) - 1) / this.getEntriesPerPage())
        document.getElementById("page-number").innerText = String(page)
    }
    // updateRange writes the "first-last of total" entry counter.
    updateRange() {
        let max = Math.min(this.getEntryIndex() + this.getEntriesPerPage(), this.getMaxEntries())
        document.getElementById("entry-counter").innerText = `${this.getEntryIndex() + 1}-${max} of ${this.getMaxEntries()}`
    }
    // updateListings fetches the current window and replaces the table rows.
    updateListings() {
        getRequest(this.getEntriesPerPage(), this.getEntryIndex(), this.getSortOrder())
            .then(res => {
                let table = document.getElementById("pkg-list")
                table.innerHTML = ''
                res.values.forEach((row) => {
                    table.appendChild(toHTML(row))
                })
            })
    }

}
|
||||||
|
|
||||||
|
let STATE: State
|
||||||
|
|
||||||
|
function prevPage() {
|
||||||
|
let index = STATE.getEntryIndex()
|
||||||
|
STATE.setEntryIndex(Math.max(0, index - STATE.getEntriesPerPage()))
|
||||||
|
}
|
||||||
|
function nextPage() {
|
||||||
|
let index = STATE.getEntryIndex()
|
||||||
|
STATE.setEntryIndex(Math.min((Math.ceil(STATE.getMaxEntries() / STATE.getEntriesPerPage()) * STATE.getEntriesPerPage()) - STATE.getEntriesPerPage(), index + STATE.getEntriesPerPage()))
|
||||||
|
}
|
||||||
|
|
||||||
|
function escapeHtml(str: string): string {
|
||||||
|
if(str === undefined) return ""
|
||||||
|
return str
|
||||||
|
.replace(/&/g, '&')
|
||||||
|
.replace(/</g, '<')
|
||||||
|
.replace(/>/g, '>')
|
||||||
|
.replace(/"/g, '"')
|
||||||
|
.replace(/'/g, ''')
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wire up the UI once the DOM is ready: fetch server info, render the first
// page, and react to changes of the per-page count and sort selectors.
document.addEventListener("DOMContentLoaded", () => {
    STATE = new State()
    infoRequest()
        .then(res => {
            STATE.setMaxEntries(res.count)
            document.getElementById("hakurei-version").innerText = res.hakurei_version
            // Range and listings depend on maxEntries, so render after it
            // is known rather than in the State constructor.
            STATE.updateRange()
            STATE.updateListings()
        })

    document.getElementById("count").addEventListener("change", (event) => {
        STATE.setEntriesPerPage(parseInt((event.target as HTMLSelectElement).value))
    })
    document.getElementById("sort").addEventListener("change", (event) => {
        STATE.setSortOrder(parseInt((event.target as HTMLSelectElement).value))
    })
})
|
||||||
6
cmd/pkgserver/ui/static/light.scss
Normal file
6
cmd/pkgserver/ui/static/light.scss
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// Light theme entry point; rules shared by both themes live in _common.scss.
@use 'common';

html {
    background-color: #d3d3d3;
    color: black;
}
|
||||||
4
cmd/pkgserver/ui/static/run_tests.ts
Normal file
4
cmd/pkgserver/ui/static/run_tests.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
#!/usr/bin/env node
// Headless (NodeJS) entry point for the test suite: importing test_tests.js
// registers the suites as a side effect, then run executes them with a
// reporter that writes to stdout.
import "./test_tests.js";
import { run, StreamReporter } from "./test.js";
run(new StreamReporter({ writeln: console.log }));
|
||||||
27
cmd/pkgserver/ui/static/test.scss
Normal file
27
cmd/pkgserver/ui/static/test.scss
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
// Styles for the in-browser test runner page (test.html).

// Container the DOMReporter renders the test tree into.
.root {
    margin: 1rem 0;
}

// One collapsible <details> node per suite/group/test.
details.test-node {
    margin-left: 1rem;
    padding: 0.2rem 0.5rem;
    border-left: 2px dashed black;
    > summary {
        cursor: pointer;
    }
    // Failed nodes get a red disclosure marker.
    &.failure > summary::marker {
        color: red;
    }
}

// Per-test output paragraph; <pre> carries the captured log verbatim.
p.test-desc {
    margin: 0 0 0 1rem;
    padding: 2px 0;
    > pre {
        margin: 0;
    }
}

// Used for the "No output." placeholder.
.italic {
    font-style: italic;
}
|
||||||
250
cmd/pkgserver/ui/static/test.ts
Normal file
250
cmd/pkgserver/ui/static/test.ts
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
// =============================================================================
|
||||||
|
// DSL
|
||||||
|
|
||||||
|
type TestTree = { name: string } & (TestGroup | Test);
|
||||||
|
type TestGroup = { children: TestTree[] };
|
||||||
|
type Test = { test: (TestController) => void };
|
||||||
|
|
||||||
|
let TESTS: ({ name: string } & TestGroup)[] = [];
|
||||||
|
|
||||||
|
export function suite(name: string, children: TestTree[]) {
|
||||||
|
checkDuplicates(name, children)
|
||||||
|
TESTS.push({ name, children });
|
||||||
|
}
|
||||||
|
|
||||||
|
export function context(name: string, children: TestTree[]): TestTree {
|
||||||
|
checkDuplicates(name, children)
|
||||||
|
return { name, children };
|
||||||
|
}
|
||||||
|
export const group = context;
|
||||||
|
|
||||||
|
export function test(name: string, test: (TestController) => void): TestTree {
|
||||||
|
return { name, test };
|
||||||
|
}
|
||||||
|
|
||||||
|
function checkDuplicates(parent: string, names: { name: string }[]) {
|
||||||
|
let seen = new Set<string>();
|
||||||
|
for (const { name } of names) {
|
||||||
|
if (seen.has(name)) {
|
||||||
|
throw new RangeError(`duplicate name '${name}' in '${parent}'`);
|
||||||
|
}
|
||||||
|
seen.add(name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class FailNowSentinel {}
|
||||||
|
|
||||||
|
class TestController {
|
||||||
|
#logBuf: string[];
|
||||||
|
#failed: boolean;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.#logBuf = [];
|
||||||
|
this.#failed = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
fail() {
|
||||||
|
this.#failed = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
failed(): boolean {
|
||||||
|
return this.#failed;
|
||||||
|
}
|
||||||
|
|
||||||
|
failNow(): never {
|
||||||
|
this.fail();
|
||||||
|
throw new FailNowSentinel();
|
||||||
|
}
|
||||||
|
|
||||||
|
log(message: string) {
|
||||||
|
this.#logBuf.push(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
error(message: string) {
|
||||||
|
this.log(message);
|
||||||
|
this.fail();
|
||||||
|
}
|
||||||
|
|
||||||
|
fatal(message: string): never {
|
||||||
|
this.log(message);
|
||||||
|
this.failNow();
|
||||||
|
}
|
||||||
|
|
||||||
|
getLog(): string {
|
||||||
|
return this.#logBuf.join("\n");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// =============================================================================
// Execution

// TestResult is the outcome of a single test: success is true when the test
// neither failed nor threw; output is the accumulated log plus any
// exception text.
export interface TestResult {
    success: boolean;
    output: string;
}

// runTests walks a test tree depth-first, executing leaf tests and reporting
// each result together with its full path from the suite root.
function runTests(reporter: Reporter, parents: string[], tree: TestTree) {
    const path = [...parents, tree.name];
    if ("children" in tree) {
        for (const c of tree.children) runTests(reporter, path, c);
        return;
    }
    let controller = new TestController();
    let excStr: string;
    try {
        tree.test(controller);
    } catch (e) {
        // FailNowSentinel is the controlled abort from failNow/fatal; any
        // other exception is recorded as part of the failure output.
        if (!(e instanceof FailNowSentinel)) {
            controller.fail();
            excStr = extractExceptionString(e);
        }
    }
    const log = controller.getLog();
    // Join log and exception text with a newline only when both are present.
    const output = (log && excStr) ? `${log}\n${excStr}` : `${log}${excStr ?? ''}`;
    reporter.update(path, { success: !controller.failed(), output });
}

// run executes every registered suite and then finalizes the reporter.
export function run(reporter: Reporter) {
    for (const suite of TESTS) {
        for (const c of suite.children) runTests(reporter, [suite.name], c);
    }
    reporter.finalize();
}

// extractExceptionString renders a thrown value, including the stack trace
// for Error instances, without duplicating the message.
function extractExceptionString(e: any): string {
    // String() instead of .toString() as null and undefined don't have
    // properties.
    const s = String(e);
    if (!(e instanceof Error && "stack" in e)) return s;
    // v8 (Chromium, NodeJS) include the error message, while
    // Firefox and WebKit do not.
    if (e.stack.includes(s)) return e.stack;
    return `${s}\n${e.stack}`;
}
|
||||||
|
|
||||||
|
// =============================================================================
|
||||||
|
// Reporting
|
||||||
|
|
||||||
|
export interface Reporter {
|
||||||
|
update(path: string[], result: TestResult): void;
|
||||||
|
finalize(): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Stream {
|
||||||
|
writeln(s: string): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class StreamReporter implements Reporter {
|
||||||
|
stream: Stream;
|
||||||
|
verbose: boolean;
|
||||||
|
#failures: ({ path: string[] } & TestResult)[];
|
||||||
|
counts: { successes: number, failures: number };
|
||||||
|
|
||||||
|
constructor(stream: Stream, verbose: boolean = false) {
|
||||||
|
this.stream = stream;
|
||||||
|
this.verbose = verbose;
|
||||||
|
this.#failures = [];
|
||||||
|
this.counts = { successes: 0, failures: 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
update(path: string[], result: TestResult) {
|
||||||
|
if (path.length === 0) throw new RangeError("path is empty");
|
||||||
|
const pathStr = path.join(" ❯ ");
|
||||||
|
if (result.success) {
|
||||||
|
this.counts.successes++;
|
||||||
|
if (this.verbose) this.stream.writeln(`✅️ ${pathStr}`);
|
||||||
|
} else {
|
||||||
|
this.counts.failures++;
|
||||||
|
this.stream.writeln(`⚠️ ${pathStr}`);
|
||||||
|
this.#failures.push({ path, ...result });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
finalize() {
|
||||||
|
// Transform [{ path: ["a", "b", "c"] }, { path: ["a", "b", "d"] }]
|
||||||
|
// into { "a ❯ b": ["c", "d"] }.
|
||||||
|
let pathMap = new Map<string, ({ name: string } & TestResult)[]>();
|
||||||
|
for (const f of this.#failures) {
|
||||||
|
const key = f.path.slice(0, -1).join(" ❯ ");
|
||||||
|
if (!pathMap.has(key)) pathMap.set(key, []);
|
||||||
|
pathMap.get(key).push({ name: f.path.at(-1), ...f });
|
||||||
|
}
|
||||||
|
|
||||||
|
this.stream.writeln("");
|
||||||
|
this.stream.writeln("FAILURES");
|
||||||
|
this.stream.writeln("========");
|
||||||
|
|
||||||
|
for (const [path, tests] of pathMap) {
|
||||||
|
if (tests.length === 1) {
|
||||||
|
this.#writeOutput(tests[0], path ? `${path} ❯ ` : "", false);
|
||||||
|
} else {
|
||||||
|
this.stream.writeln(path);
|
||||||
|
for (const t of tests) this.#writeOutput(t, " - ", true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.stream.writeln("");
|
||||||
|
const { successes, failures } = this.counts;
|
||||||
|
this.stream.writeln(`${successes} succeeded, ${failures} failed`);
|
||||||
|
}
|
||||||
|
|
||||||
|
#writeOutput(test: { name: string } & TestResult, prefix: string, nested: boolean) {
|
||||||
|
let output = "";
|
||||||
|
if (test.output) {
|
||||||
|
const lines = test.output.split("\n");
|
||||||
|
if (lines.length <= 1) {
|
||||||
|
output = `: ${test.output}`;
|
||||||
|
} else {
|
||||||
|
const padding = nested ? " " : " ";
|
||||||
|
output = ":\n" + lines.map((line) => padding + line).join("\n");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.stream.writeln(`${prefix}${test.name}${output}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// DOMReporter renders test results as a collapsible tree of <details>
// elements under the #root element, and keeps the #successes/#failures
// counters up to date. Intended for use from test.html.
export class DOMReporter implements Reporter {
    update(path: string[], result: TestResult) {
        if (path.length === 0) throw new RangeError("path is empty");
        // Increment the matching counter in place.
        const counter = document.getElementById(result.success ? "successes" : "failures");
        counter.innerText = (Number(counter.innerText) + 1).toString();
        let parent = document.getElementById("root");
        // Walk the path, reusing an existing <details> node (matched by its
        // <summary> text) or creating one when missing.
        for (const node of path) {
            let child = null;
            outer: for (const d of parent.children) {
                for (const s of d.children) {
                    if (!(s instanceof HTMLElement)) continue;
                    if (s.tagName !== "SUMMARY" || s.innerText !== node) continue;
                    child = d;
                    break outer;
                }
            }
            if (child === null) {
                child = document.createElement("details");
                child.className = "test-node";
                const summary = document.createElement("summary");
                // createTextNode avoids HTML injection via test names.
                summary.appendChild(document.createTextNode(node));
                child.appendChild(summary);
                parent.appendChild(child);
            }
            // A failure expands and marks every ancestor on its path.
            if (!result.success) {
                child.open = true;
                child.classList.add("failure");
            }
            parent = child;
        }
        // Append the test output (or an italic placeholder) to the leaf.
        const p = document.createElement("p");
        p.classList.add("test-desc");
        if (result.output) {
            const pre = document.createElement("pre");
            pre.appendChild(document.createTextNode(result.output));
            p.appendChild(pre);
        } else {
            p.classList.add("italic");
            p.appendChild(document.createTextNode("No output."));
        }
        parent.appendChild(p);
    }

    // finalize is a no-op: the DOM is updated incrementally.
    finalize() {}
}
|
||||||
40
cmd/pkgserver/ui/static/test_tests.ts
Normal file
40
cmd/pkgserver/ui/static/test_tests.ts
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
// Example/self-test suites for the test framework. Several of these tests
// fail on purpose so that failure reporting (log buffering, failNow,
// uncaught exceptions) can be observed in both reporters.
import { context, group, suite, test } from "./test.js";

suite("dog", [
    group("tail", [
        test("wags when happy", (t) => {
            // NaN !== NaN, so this branch is never taken: the test passes.
            if (0 / 0 !== Infinity / Infinity) {
                t.fatal("undefined must not be defined");
            }
        }),
        test("idle when down", (t) => {
            // Always fails, with a buffered log line before the error.
            t.log("test test");
            t.error("dog whining noises go here");
        }),
    ]),
    test("likes headpats", (t) => {
        if (2 !== 2) {
            t.error("IEEE 754 violated: 2 is NaN");
        }
    }),
    context("near cat", [
        test("is ecstatic", (t) => {
            // "b" + "a" + +"a" + "a" === "baNaNa"; lowercased it is
            // "banana", so this always fails after three logged errors.
            if (("b" + "a" + + "a" + "a").toLowerCase() == "banana") {
                t.error("🍌🍌🍌");
                t.error("🍌🍌🍌");
                t.error("🍌🍌🍌");
                t.failNow();
            }
        }),
        test("playfully bites cats' tails", (t) => {
            // Exercises the uncaught-exception path of the runner.
            t.log("arf!");
            throw new Error("nom");
        }),
    ]),
]);

suite("cat", [
    test("likes headpats", (t) => {
        t.log("meow");
    }),
]);
|
||||||
5
cmd/pkgserver/ui/static/tsconfig.json
Normal file
5
cmd/pkgserver/ui/static/tsconfig.json
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "ES2024"
|
||||||
|
}
|
||||||
|
}
|
||||||
28
cmd/pkgserver/ui/test.html
Normal file
28
cmd/pkgserver/ui/test.html
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <link rel="stylesheet" href="static/style.css">
    <link rel="stylesheet" href="static/test.css">
    <title>PkgServer Tests</title>
</head>
<body>
<h1>PkgServer Tests</h1>

<main>
    <div id="counters">
        <span id="successes">0</span> succeeded, <span id="failures">0</span> failed.
    </div>

    <!-- DOMReporter renders the collapsible test tree into #root. -->
    <div id="root">
    </div>

    <!-- Importing test_tests.js registers the suites as a side effect;
         run() then executes them against the DOM reporter. -->
    <script type="module" src="./static/test_tests.js"></script>
    <script type="module">
        import { DOMReporter, run } from "./static/test.js";
        run(new DOMReporter());
    </script>
</main>
</body>
</html>
|
||||||
9
cmd/pkgserver/ui_full.go
Normal file
9
cmd/pkgserver/ui_full.go
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
//go:build frontend

package main

import "embed"

// content holds the embedded web UI assets. The generate step compiles the
// SCSS themes and the TypeScript sources into ui/static before embedding.
//
//go:generate sh -c "sass ui/static/dark.scss ui/static/dark.css && sass ui/static/light.scss ui/static/light.css && sass ui/static/test.scss ui/static/test.css && tsc -p ui/static"
//go:embed ui/*
var content embed.FS
|
||||||
7
cmd/pkgserver/ui_stub.go
Normal file
7
cmd/pkgserver/ui_stub.go
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
//go:build !frontend

package main

import "testing/fstest"

// content is an empty stand-in for the embedded UI assets when building
// without the frontend tag; every file lookup on the nil map fails, so the
// UI handlers respond with errors (presumably 404 via ServeFileFS — confirm).
var content fstest.MapFS
|
||||||
@@ -101,6 +101,10 @@ func init() {
|
|||||||
Description: "Commands for Manipulating POSIX Access Control Lists",
|
Description: "Commands for Manipulating POSIX Access Control Lists",
|
||||||
Website: "https://savannah.nongnu.org/projects/acl/",
|
Website: "https://savannah.nongnu.org/projects/acl/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Attr,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 16,
|
ID: 16,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
"errors"
|
||||||
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
"strconv"
|
||||||
"sync"
|
"sync"
|
||||||
@@ -167,6 +168,36 @@ const (
|
|||||||
PresetEnd
|
PresetEnd
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// P represents multiple [PArtifact] and is stable through JSON.
|
||||||
|
type P []PArtifact
|
||||||
|
|
||||||
|
// MarshalJSON represents [PArtifact] by their [Metadata.Name].
|
||||||
|
func (s P) MarshalJSON() ([]byte, error) {
|
||||||
|
names := make([]string, len(s))
|
||||||
|
for i, p := range s {
|
||||||
|
names[i] = GetMetadata(p).Name
|
||||||
|
}
|
||||||
|
return json.Marshal(names)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalJSON resolves the value created by MarshalJSON back to [P].
|
||||||
|
func (s *P) UnmarshalJSON(data []byte) error {
|
||||||
|
var names []string
|
||||||
|
if err := json.Unmarshal(data, &names); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
*s = make(P, len(names))
|
||||||
|
for i, name := range names {
|
||||||
|
if p, ok := ResolveName(name); !ok {
|
||||||
|
return fmt.Errorf("unknown artifact %q", name)
|
||||||
|
} else {
|
||||||
|
(*s)[i] = p
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
// Metadata is stage-agnostic information of a [PArtifact] not directly
|
// Metadata is stage-agnostic information of a [PArtifact] not directly
|
||||||
// representable in the resulting [pkg.Artifact].
|
// representable in the resulting [pkg.Artifact].
|
||||||
type Metadata struct {
|
type Metadata struct {
|
||||||
@@ -179,6 +210,9 @@ type Metadata struct {
|
|||||||
// Project home page.
|
// Project home page.
|
||||||
Website string `json:"website,omitempty"`
|
Website string `json:"website,omitempty"`
|
||||||
|
|
||||||
|
// Runtime dependencies.
|
||||||
|
Dependencies P `json:"dependencies"`
|
||||||
|
|
||||||
// Project identifier on [Anitya].
|
// Project identifier on [Anitya].
|
||||||
//
|
//
|
||||||
// [Anitya]: https://release-monitoring.org/
|
// [Anitya]: https://release-monitoring.org/
|
||||||
@@ -256,9 +290,10 @@ var (
|
|||||||
artifactsM [PresetEnd]Metadata
|
artifactsM [PresetEnd]Metadata
|
||||||
|
|
||||||
// artifacts stores the result of Metadata.f.
|
// artifacts stores the result of Metadata.f.
|
||||||
artifacts [_toolchainEnd][len(artifactsM)]pkg.Artifact
|
artifacts [_toolchainEnd][len(artifactsM)]struct {
|
||||||
// versions stores the version of [PArtifact].
|
a pkg.Artifact
|
||||||
versions [_toolchainEnd][len(artifactsM)]string
|
v string
|
||||||
|
}
|
||||||
// artifactsOnce is for lazy initialisation of artifacts.
|
// artifactsOnce is for lazy initialisation of artifacts.
|
||||||
artifactsOnce [_toolchainEnd][len(artifactsM)]sync.Once
|
artifactsOnce [_toolchainEnd][len(artifactsM)]sync.Once
|
||||||
)
|
)
|
||||||
@@ -266,20 +301,23 @@ var (
|
|||||||
// GetMetadata returns [Metadata] of a [PArtifact].
|
// GetMetadata returns [Metadata] of a [PArtifact].
|
||||||
func GetMetadata(p PArtifact) *Metadata { return &artifactsM[p] }
|
func GetMetadata(p PArtifact) *Metadata { return &artifactsM[p] }
|
||||||
|
|
||||||
|
// construct constructs a [pkg.Artifact] corresponding to a [PArtifact] once.
|
||||||
|
func (t Toolchain) construct(p PArtifact) {
|
||||||
|
artifactsOnce[t][p].Do(func() {
|
||||||
|
artifacts[t][p].a, artifacts[t][p].v = artifactsM[p].f(t)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
// Load returns the resulting [pkg.Artifact] of [PArtifact].
|
// Load returns the resulting [pkg.Artifact] of [PArtifact].
|
||||||
func (t Toolchain) Load(p PArtifact) pkg.Artifact {
|
func (t Toolchain) Load(p PArtifact) pkg.Artifact {
|
||||||
artifactsOnce[t][p].Do(func() {
|
t.construct(p)
|
||||||
artifacts[t][p], versions[t][p] = artifactsM[p].f(t)
|
return artifacts[t][p].a
|
||||||
})
|
|
||||||
return artifacts[t][p]
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Version returns the version string of [PArtifact].
|
// Version returns the version string of [PArtifact].
|
||||||
func (t Toolchain) Version(p PArtifact) string {
|
func (t Toolchain) Version(p PArtifact) string {
|
||||||
artifactsOnce[t][p].Do(func() {
|
t.construct(p)
|
||||||
artifacts[t][p], versions[t][p] = artifactsM[p].f(t)
|
return artifacts[t][p].v
|
||||||
})
|
|
||||||
return versions[t][p]
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// ResolveName returns a [PArtifact] by name.
|
// ResolveName returns a [PArtifact] by name.
|
||||||
|
|||||||
@@ -4,24 +4,48 @@ import "hakurei.app/internal/pkg"
|
|||||||
|
|
||||||
func (t Toolchain) newCurl() (pkg.Artifact, string) {
|
func (t Toolchain) newCurl() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "8.18.0"
|
version = "8.19.0"
|
||||||
checksum = "YpOolP_sx1DIrCEJ3elgVAu0wTLDS-EZMZFvOP0eha7FaLueZUlEpuMwDzJNyi7i"
|
checksum = "YHuVLVVp8q_Y7-JWpID5ReNjq2Zk6t7ArHB6ngQXilp_R5l3cubdxu3UKo-xDByv"
|
||||||
)
|
)
|
||||||
return t.NewPackage("curl", version, pkg.NewHTTPGetTar(
|
return t.NewPackage("curl", version, pkg.NewHTTPGetTar(
|
||||||
nil, "https://curl.se/download/curl-"+version+".tar.bz2",
|
nil, "https://curl.se/download/curl-"+version+".tar.bz2",
|
||||||
mustDecode(checksum),
|
mustDecode(checksum),
|
||||||
pkg.TarBzip2,
|
pkg.TarBzip2,
|
||||||
), nil, &MakeHelper{
|
), &PackageAttr{
|
||||||
|
Patches: [][2]string{
|
||||||
|
{"test459-misplaced-line-break", `diff --git a/tests/data/test459 b/tests/data/test459
|
||||||
|
index 7a2e1db7b3..cc716aa65a 100644
|
||||||
|
--- a/tests/data/test459
|
||||||
|
+++ b/tests/data/test459
|
||||||
|
@@ -54,8 +54,8 @@ Content-Type: application/x-www-form-urlencoded
|
||||||
|
arg
|
||||||
|
</protocol>
|
||||||
|
<stderr mode="text">
|
||||||
|
-Warning: %LOGDIR/config:1 Option 'data' uses argument with unquoted whitespace.%SP
|
||||||
|
-Warning: This may cause side-effects. Consider double quotes.
|
||||||
|
+Warning: %LOGDIR/config:1 Option 'data' uses argument with unquoted%SP
|
||||||
|
+Warning: whitespace. This may cause side-effects. Consider double quotes.
|
||||||
|
</stderr>
|
||||||
|
</verify>
|
||||||
|
</testcase>
|
||||||
|
`},
|
||||||
|
},
|
||||||
|
}, &MakeHelper{
|
||||||
Configure: [][2]string{
|
Configure: [][2]string{
|
||||||
{"with-openssl"},
|
{"with-openssl"},
|
||||||
{"with-ca-bundle", "/system/etc/ssl/certs/ca-bundle.crt"},
|
{"with-ca-bundle", "/system/etc/ssl/certs/ca-bundle.crt"},
|
||||||
|
|
||||||
|
{"disable-smb"},
|
||||||
},
|
},
|
||||||
Check: []string{
|
Check: []string{
|
||||||
"TFLAGS=-j256",
|
`TFLAGS="-j$(expr "$(nproc)" '*' 2)"`,
|
||||||
"check",
|
"test-nonflaky",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Perl,
|
Perl,
|
||||||
|
Python,
|
||||||
|
PkgConfig,
|
||||||
|
Diffutils,
|
||||||
|
|
||||||
Libpsl,
|
Libpsl,
|
||||||
OpenSSL,
|
OpenSSL,
|
||||||
@@ -35,6 +59,11 @@ func init() {
|
|||||||
Description: "command line tool and library for transferring data with URLs",
|
Description: "command line tool and library for transferring data with URLs",
|
||||||
Website: "https://curl.se/",
|
Website: "https://curl.se/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Libpsl,
|
||||||
|
OpenSSL,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 381,
|
ID: 381,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -46,6 +46,14 @@ func init() {
|
|||||||
Description: "utilities and libraries to handle ELF files and DWARF data",
|
Description: "utilities and libraries to handle ELF files and DWARF data",
|
||||||
Website: "https://sourceware.org/elfutils/",
|
Website: "https://sourceware.org/elfutils/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Zlib,
|
||||||
|
Bzip2,
|
||||||
|
Zstd,
|
||||||
|
MuslFts,
|
||||||
|
MuslObstack,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 5679,
|
ID: 5679,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -36,9 +36,6 @@ index f135ad9..85c784c 100644
|
|||||||
// makes assumptions about /etc/passwd
|
// makes assumptions about /etc/passwd
|
||||||
SkipCheck: true,
|
SkipCheck: true,
|
||||||
},
|
},
|
||||||
M4,
|
|
||||||
Perl,
|
|
||||||
Autoconf,
|
|
||||||
Automake,
|
Automake,
|
||||||
Libtool,
|
Libtool,
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
|
|||||||
@@ -24,10 +24,6 @@ func (t Toolchain) newFuse() (pkg.Artifact, string) {
|
|||||||
// this project uses pytest
|
// this project uses pytest
|
||||||
SkipTest: true,
|
SkipTest: true,
|
||||||
},
|
},
|
||||||
PythonIniConfig,
|
|
||||||
PythonPackaging,
|
|
||||||
PythonPluggy,
|
|
||||||
PythonPygments,
|
|
||||||
PythonPyTest,
|
PythonPyTest,
|
||||||
|
|
||||||
KernelHeaders,
|
KernelHeaders,
|
||||||
|
|||||||
@@ -52,16 +52,18 @@ disable_test t2200-add-update
|
|||||||
`GIT_PROVE_OPTS="--jobs 32 --failures"`,
|
`GIT_PROVE_OPTS="--jobs 32 --failures"`,
|
||||||
"prove",
|
"prove",
|
||||||
},
|
},
|
||||||
|
Install: `make \
|
||||||
|
"-j$(nproc)" \
|
||||||
|
DESTDIR=/work \
|
||||||
|
NO_INSTALL_HARDLINKS=1 \
|
||||||
|
install`,
|
||||||
},
|
},
|
||||||
Perl,
|
|
||||||
Diffutils,
|
Diffutils,
|
||||||
M4,
|
|
||||||
Autoconf,
|
Autoconf,
|
||||||
Gettext,
|
Gettext,
|
||||||
|
|
||||||
Zlib,
|
Zlib,
|
||||||
Curl,
|
Curl,
|
||||||
OpenSSL,
|
|
||||||
Libexpat,
|
Libexpat,
|
||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
@@ -73,6 +75,12 @@ func init() {
|
|||||||
Description: "distributed version control system",
|
Description: "distributed version control system",
|
||||||
Website: "https://www.git-scm.com/",
|
Website: "https://www.git-scm.com/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Zlib,
|
||||||
|
Curl,
|
||||||
|
Libexpat,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 5350,
|
ID: 5350,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -82,14 +90,10 @@ func (t Toolchain) NewViaGit(
|
|||||||
name, url, rev string,
|
name, url, rev string,
|
||||||
checksum pkg.Checksum,
|
checksum pkg.Checksum,
|
||||||
) pkg.Artifact {
|
) pkg.Artifact {
|
||||||
return t.New(name+"-"+rev, 0, []pkg.Artifact{
|
return t.New(name+"-"+rev, 0, t.AppendPresets(nil,
|
||||||
t.Load(NSSCACert),
|
NSSCACert,
|
||||||
t.Load(OpenSSL),
|
Git,
|
||||||
t.Load(Libpsl),
|
), &checksum, nil, `
|
||||||
t.Load(Curl),
|
|
||||||
t.Load(Libexpat),
|
|
||||||
t.Load(Git),
|
|
||||||
}, &checksum, nil, `
|
|
||||||
git \
|
git \
|
||||||
-c advice.detachedHead=false \
|
-c advice.detachedHead=false \
|
||||||
clone \
|
clone \
|
||||||
|
|||||||
@@ -117,6 +117,11 @@ func init() {
|
|||||||
Description: "M4 macros to produce self-contained configure script",
|
Description: "M4 macros to produce self-contained configure script",
|
||||||
Website: "https://www.gnu.org/software/autoconf/",
|
Website: "https://www.gnu.org/software/autoconf/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
M4,
|
||||||
|
Perl,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 141,
|
ID: 141,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -143,8 +148,6 @@ test_disable '#!/bin/sh' t/distname.sh
|
|||||||
test_disable '#!/bin/sh' t/pr9.sh
|
test_disable '#!/bin/sh' t/pr9.sh
|
||||||
`,
|
`,
|
||||||
}, (*MakeHelper)(nil),
|
}, (*MakeHelper)(nil),
|
||||||
M4,
|
|
||||||
Perl,
|
|
||||||
Grep,
|
Grep,
|
||||||
Gzip,
|
Gzip,
|
||||||
Autoconf,
|
Autoconf,
|
||||||
@@ -159,6 +162,10 @@ func init() {
|
|||||||
Description: "a tool for automatically generating Makefile.in files",
|
Description: "a tool for automatically generating Makefile.in files",
|
||||||
Website: "https://www.gnu.org/software/automake/",
|
Website: "https://www.gnu.org/software/automake/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Autoconf,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 144,
|
ID: 144,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -524,6 +531,11 @@ func init() {
|
|||||||
Description: "the GNU square-wheel-reinvension of man pages",
|
Description: "the GNU square-wheel-reinvension of man pages",
|
||||||
Website: "https://www.gnu.org/software/texinfo/",
|
Website: "https://www.gnu.org/software/texinfo/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Perl,
|
||||||
|
Gawk,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 4958,
|
ID: 4958,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -660,7 +672,6 @@ func (t Toolchain) newBC() (pkg.Artifact, string) {
|
|||||||
Writable: true,
|
Writable: true,
|
||||||
Chmod: true,
|
Chmod: true,
|
||||||
}, (*MakeHelper)(nil),
|
}, (*MakeHelper)(nil),
|
||||||
Perl,
|
|
||||||
Texinfo,
|
Texinfo,
|
||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
@@ -762,6 +773,10 @@ func init() {
|
|||||||
Description: "a shell tool for executing jobs in parallel using one or more computers",
|
Description: "a shell tool for executing jobs in parallel using one or more computers",
|
||||||
Website: "https://www.gnu.org/software/parallel/",
|
Website: "https://www.gnu.org/software/parallel/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Perl,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 5448,
|
ID: 5448,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -839,6 +854,10 @@ func init() {
|
|||||||
Description: "a C library for multiple-precision floating-point computations",
|
Description: "a C library for multiple-precision floating-point computations",
|
||||||
Website: "https://www.mpfr.org/",
|
Website: "https://www.mpfr.org/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
GMP,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 2019,
|
ID: 2019,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -854,7 +873,6 @@ func (t Toolchain) newMPC() (pkg.Artifact, string) {
|
|||||||
mustDecode(checksum),
|
mustDecode(checksum),
|
||||||
pkg.TarGzip,
|
pkg.TarGzip,
|
||||||
), nil, (*MakeHelper)(nil),
|
), nil, (*MakeHelper)(nil),
|
||||||
GMP,
|
|
||||||
MPFR,
|
MPFR,
|
||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
@@ -866,6 +884,10 @@ func init() {
|
|||||||
Description: "a C library for the arithmetic of complex numbers",
|
Description: "a C library for the arithmetic of complex numbers",
|
||||||
Website: "https://www.multiprecision.org/",
|
Website: "https://www.multiprecision.org/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
MPFR,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 1667,
|
ID: 1667,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1063,10 +1085,7 @@ ln -s system/lib /work/
|
|||||||
},
|
},
|
||||||
Binutils,
|
Binutils,
|
||||||
|
|
||||||
GMP,
|
|
||||||
MPFR,
|
|
||||||
MPC,
|
MPC,
|
||||||
|
|
||||||
Zlib,
|
Zlib,
|
||||||
Libucontext,
|
Libucontext,
|
||||||
KernelHeaders,
|
KernelHeaders,
|
||||||
@@ -1080,6 +1099,14 @@ func init() {
|
|||||||
Description: "The GNU Compiler Collection",
|
Description: "The GNU Compiler Collection",
|
||||||
Website: "https://www.gnu.org/software/gcc/",
|
Website: "https://www.gnu.org/software/gcc/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Binutils,
|
||||||
|
|
||||||
|
MPC,
|
||||||
|
Zlib,
|
||||||
|
Libucontext,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 6502,
|
ID: 6502,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -74,22 +74,8 @@ func (t Toolchain) newGoLatest() (pkg.Artifact, string) {
|
|||||||
bootstrapExtra = append(bootstrapExtra, t.newGoBootstrap())
|
bootstrapExtra = append(bootstrapExtra, t.newGoBootstrap())
|
||||||
|
|
||||||
case "arm64":
|
case "arm64":
|
||||||
bootstrapEnv = append(bootstrapEnv,
|
bootstrapEnv = append(bootstrapEnv, "GOROOT_BOOTSTRAP=/system")
|
||||||
"GOROOT_BOOTSTRAP=/system",
|
bootstrapExtra = t.AppendPresets(bootstrapExtra, gcc)
|
||||||
)
|
|
||||||
bootstrapExtra = append(bootstrapExtra,
|
|
||||||
t.Load(Binutils),
|
|
||||||
|
|
||||||
t.Load(GMP),
|
|
||||||
t.Load(MPFR),
|
|
||||||
t.Load(MPC),
|
|
||||||
|
|
||||||
t.Load(Zlib),
|
|
||||||
t.Load(Libucontext),
|
|
||||||
|
|
||||||
t.Load(gcc),
|
|
||||||
)
|
|
||||||
|
|
||||||
finalEnv = append(finalEnv, "CGO_ENABLED=0")
|
finalEnv = append(finalEnv, "CGO_ENABLED=0")
|
||||||
|
|
||||||
default:
|
default:
|
||||||
|
|||||||
@@ -9,8 +9,8 @@ import (
|
|||||||
|
|
||||||
func (t Toolchain) newGLib() (pkg.Artifact, string) {
|
func (t Toolchain) newGLib() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "2.87.3"
|
version = "2.87.5"
|
||||||
checksum = "iKSLpzZZVfmAZZmqfO1y6uHdlIks4hzPWrqeUCp4ZeQjrPFA3aAa4OmrBYMNS-Si"
|
checksum = "L5jurSfyCTlcSTfx-1RBHbNZPL0HnNQakmFXidgAV1JFu0lbytowCCBAALTp-WGc"
|
||||||
)
|
)
|
||||||
return t.NewPackage("glib", version, pkg.NewHTTPGet(
|
return t.NewPackage("glib", version, pkg.NewHTTPGet(
|
||||||
nil, "https://download.gnome.org/sources/glib/"+
|
nil, "https://download.gnome.org/sources/glib/"+
|
||||||
@@ -56,6 +56,12 @@ func init() {
|
|||||||
Description: "the GNU library of miscellaneous stuff",
|
Description: "the GNU library of miscellaneous stuff",
|
||||||
Website: "https://developer.gnome.org/glib/",
|
Website: "https://developer.gnome.org/glib/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
PCRE2,
|
||||||
|
Libffi,
|
||||||
|
Zlib,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 10024,
|
ID: 10024,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,29 +15,23 @@ echo
|
|||||||
hostname = ""
|
hostname = ""
|
||||||
}
|
}
|
||||||
|
|
||||||
return t.New("hakurei"+suffix+"-"+hakureiVersion, 0, []pkg.Artifact{
|
return t.New("hakurei"+suffix+"-"+hakureiVersion, 0, t.AppendPresets(nil,
|
||||||
t.Load(Go),
|
Go,
|
||||||
|
PkgConfig,
|
||||||
|
|
||||||
t.Load(Gzip),
|
// dist tarball
|
||||||
t.Load(PkgConfig),
|
Gzip,
|
||||||
|
|
||||||
t.Load(KernelHeaders),
|
// statically linked
|
||||||
t.Load(Libseccomp),
|
Libseccomp,
|
||||||
t.Load(ACL),
|
ACL,
|
||||||
t.Load(Attr),
|
Fuse,
|
||||||
t.Load(Fuse),
|
XCB,
|
||||||
|
Wayland,
|
||||||
|
WaylandProtocols,
|
||||||
|
|
||||||
t.Load(Xproto),
|
KernelHeaders,
|
||||||
t.Load(LibXau),
|
), nil, []string{
|
||||||
t.Load(XCBProto),
|
|
||||||
t.Load(XCB),
|
|
||||||
|
|
||||||
t.Load(Libffi),
|
|
||||||
t.Load(Libexpat),
|
|
||||||
t.Load(Libxml2),
|
|
||||||
t.Load(Wayland),
|
|
||||||
t.Load(WaylandProtocols),
|
|
||||||
}, nil, []string{
|
|
||||||
"CGO_ENABLED=1",
|
"CGO_ENABLED=1",
|
||||||
"GOCACHE=/tmp/gocache",
|
"GOCACHE=/tmp/gocache",
|
||||||
"CC=clang -O3 -Werror",
|
"CC=clang -O3 -Werror",
|
||||||
|
|||||||
@@ -1246,13 +1246,9 @@ rm -v /work/system/lib/modules/` + kernelVersion + `/build
|
|||||||
Python,
|
Python,
|
||||||
|
|
||||||
XZ,
|
XZ,
|
||||||
Zlib,
|
|
||||||
Gzip,
|
Gzip,
|
||||||
Bzip2,
|
|
||||||
Zstd,
|
|
||||||
Kmod,
|
Kmod,
|
||||||
Elfutils,
|
Elfutils,
|
||||||
OpenSSL,
|
|
||||||
UtilLinux,
|
UtilLinux,
|
||||||
KernelHeaders,
|
KernelHeaders,
|
||||||
), kernelVersion
|
), kernelVersion
|
||||||
@@ -1286,8 +1282,8 @@ func init() {
|
|||||||
|
|
||||||
func (t Toolchain) newFirmware() (pkg.Artifact, string) {
|
func (t Toolchain) newFirmware() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "20260221"
|
version = "20260309"
|
||||||
checksum = "vTENPW5rZ6yLVq7YKDLHkCVgKXvwUWigEx7T4LcxoKeBVYIyf1_sEExeV4mo-e46"
|
checksum = "M1az8BxSiOEH3LA11Trc5VAlakwAHhP7-_LKWg6k-SVIzU3xclMDO4Tiujw1gQrC"
|
||||||
)
|
)
|
||||||
return t.NewPackage("firmware", version, pkg.NewHTTPGetTar(
|
return t.NewPackage("firmware", version, pkg.NewHTTPGetTar(
|
||||||
nil, "https://gitlab.com/kernel-firmware/linux-firmware/-/"+
|
nil, "https://gitlab.com/kernel-firmware/linux-firmware/-/"+
|
||||||
@@ -1315,9 +1311,7 @@ func (t Toolchain) newFirmware() (pkg.Artifact, string) {
|
|||||||
SkipCheck: true, // requires pre-commit
|
SkipCheck: true, // requires pre-commit
|
||||||
Install: `make "-j$(nproc)" DESTDIR=/work/system dedup`,
|
Install: `make "-j$(nproc)" DESTDIR=/work/system dedup`,
|
||||||
},
|
},
|
||||||
Perl,
|
|
||||||
Parallel,
|
Parallel,
|
||||||
Nettle,
|
|
||||||
Rdfind,
|
Rdfind,
|
||||||
Zstd,
|
Zstd,
|
||||||
Findutils,
|
Findutils,
|
||||||
|
|||||||
@@ -39,6 +39,12 @@ func init() {
|
|||||||
Description: "a set of tools to handle common tasks with Linux kernel modules",
|
Description: "a set of tools to handle common tasks with Linux kernel modules",
|
||||||
Website: "https://git.kernel.org/pub/scm/utils/kernel/kmod/kmod.git",
|
Website: "https://git.kernel.org/pub/scm/utils/kernel/kmod/kmod.git",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Zlib,
|
||||||
|
Zstd,
|
||||||
|
OpenSSL,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 1517,
|
ID: 1517,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -31,6 +31,10 @@ func init() {
|
|||||||
Description: "an open source code library for the dynamic creation of images",
|
Description: "an open source code library for the dynamic creation of images",
|
||||||
Website: "https://libgd.github.io/",
|
Website: "https://libgd.github.io/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Zlib,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 880,
|
ID: 880,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -23,7 +23,6 @@ func (t Toolchain) newLibxslt() (pkg.Artifact, string) {
|
|||||||
SkipCheck: true,
|
SkipCheck: true,
|
||||||
},
|
},
|
||||||
XZ,
|
XZ,
|
||||||
Zlib,
|
|
||||||
Python,
|
Python,
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
|
|
||||||
@@ -38,6 +37,10 @@ func init() {
|
|||||||
Description: "an XSLT processor based on libxml2",
|
Description: "an XSLT processor based on libxml2",
|
||||||
Website: "https://gitlab.gnome.org/GNOME/libxslt/",
|
Website: "https://gitlab.gnome.org/GNOME/libxslt/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Libxml2,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 13301,
|
ID: 13301,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -75,12 +75,12 @@ func llvmFlagName(flag int) string {
|
|||||||
|
|
||||||
const (
|
const (
|
||||||
llvmVersionMajor = "22"
|
llvmVersionMajor = "22"
|
||||||
llvmVersion = llvmVersionMajor + ".1.0"
|
llvmVersion = llvmVersionMajor + ".1.1"
|
||||||
)
|
)
|
||||||
|
|
||||||
// newLLVMVariant returns a [pkg.Artifact] containing a LLVM variant.
|
// newLLVMVariant returns a [pkg.Artifact] containing a LLVM variant.
|
||||||
func (t Toolchain) newLLVMVariant(variant string, attr *llvmAttr) pkg.Artifact {
|
func (t Toolchain) newLLVMVariant(variant string, attr *llvmAttr) pkg.Artifact {
|
||||||
const checksum = "-_Tu5Lt8xkWoxm2VDVV7crh0WqZQbbblN3fYamMdPTDSy_54FAkD2ii7afSymPVV"
|
const checksum = "bQvV6D8AZvQykg7-uQb_saTbVavnSo1ykNJ3g57F5iE-evU3HuOYtcRnVIXTK76e"
|
||||||
|
|
||||||
if attr == nil {
|
if attr == nil {
|
||||||
panic("LLVM attr must be non-nil")
|
panic("LLVM attr must be non-nil")
|
||||||
@@ -189,8 +189,6 @@ ln -s ld.lld /work/system/bin/ld
|
|||||||
Append: cmakeAppend,
|
Append: cmakeAppend,
|
||||||
Script: script + attr.script,
|
Script: script + attr.script,
|
||||||
},
|
},
|
||||||
Zlib,
|
|
||||||
Libffi,
|
|
||||||
Python,
|
Python,
|
||||||
Perl,
|
Perl,
|
||||||
Diffutils,
|
Diffutils,
|
||||||
|
|||||||
@@ -38,6 +38,13 @@ func init() {
|
|||||||
Description: "an open source build system",
|
Description: "an open source build system",
|
||||||
Website: "https://mesonbuild.com/",
|
Website: "https://mesonbuild.com/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Python,
|
||||||
|
PkgConfig,
|
||||||
|
CMake,
|
||||||
|
Ninja,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 6472,
|
ID: 6472,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -66,15 +73,7 @@ func (*MesonHelper) name(name, version string) string {
|
|||||||
|
|
||||||
// extra returns hardcoded meson runtime dependencies.
|
// extra returns hardcoded meson runtime dependencies.
|
||||||
func (*MesonHelper) extra(int) []PArtifact {
|
func (*MesonHelper) extra(int) []PArtifact {
|
||||||
return []PArtifact{
|
return []PArtifact{Meson}
|
||||||
Zlib,
|
|
||||||
Python,
|
|
||||||
Meson,
|
|
||||||
Ninja,
|
|
||||||
|
|
||||||
PkgConfig,
|
|
||||||
CMake,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// wantsChmod returns false.
|
// wantsChmod returns false.
|
||||||
|
|||||||
@@ -19,9 +19,6 @@ func (t Toolchain) newMuslFts() (pkg.Artifact, string) {
|
|||||||
}, &MakeHelper{
|
}, &MakeHelper{
|
||||||
Generate: "./bootstrap.sh",
|
Generate: "./bootstrap.sh",
|
||||||
},
|
},
|
||||||
M4,
|
|
||||||
Perl,
|
|
||||||
Autoconf,
|
|
||||||
Automake,
|
Automake,
|
||||||
Libtool,
|
Libtool,
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
|
|||||||
@@ -19,9 +19,6 @@ func (t Toolchain) newMuslObstack() (pkg.Artifact, string) {
|
|||||||
}, &MakeHelper{
|
}, &MakeHelper{
|
||||||
Generate: "./bootstrap.sh",
|
Generate: "./bootstrap.sh",
|
||||||
},
|
},
|
||||||
M4,
|
|
||||||
Perl,
|
|
||||||
Autoconf,
|
|
||||||
Automake,
|
Automake,
|
||||||
Libtool,
|
Libtool,
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
|
|||||||
@@ -26,6 +26,10 @@ func init() {
|
|||||||
Description: "a low-level cryptographic library",
|
Description: "a low-level cryptographic library",
|
||||||
Website: "https://www.lysator.liu.se/~nisse/nettle/",
|
Website: "https://www.lysator.liu.se/~nisse/nettle/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
GMP,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 2073,
|
ID: 2073,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -75,6 +75,10 @@ func init() {
|
|||||||
Description: "Network Security Services",
|
Description: "Network Security Services",
|
||||||
Website: "https://firefox-source-docs.mozilla.org/security/nss/index.html",
|
Website: "https://firefox-source-docs.mozilla.org/security/nss/index.html",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Zlib,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 2503,
|
ID: 2503,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -92,14 +96,12 @@ func init() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (t Toolchain) newNSSCACert() (pkg.Artifact, string) {
|
func (t Toolchain) newNSSCACert() (pkg.Artifact, string) {
|
||||||
return t.New("nss-cacert", 0, []pkg.Artifact{
|
return t.New("nss-cacert", 0, t.AppendPresets(nil,
|
||||||
t.Load(Zlib),
|
Bash,
|
||||||
t.Load(Bash),
|
|
||||||
t.Load(Python),
|
|
||||||
|
|
||||||
t.Load(NSS),
|
NSS,
|
||||||
t.Load(buildcatrust),
|
buildcatrust,
|
||||||
}, nil, nil, `
|
), nil, nil, `
|
||||||
mkdir -p /work/system/etc/ssl/{certs/unbundled,certs/hashed,trust-source}
|
mkdir -p /work/system/etc/ssl/{certs/unbundled,certs/hashed,trust-source}
|
||||||
buildcatrust \
|
buildcatrust \
|
||||||
--certdata_input /system/nss/certdata.txt \
|
--certdata_input /system/nss/certdata.txt \
|
||||||
|
|||||||
@@ -8,8 +8,8 @@ import (
|
|||||||
|
|
||||||
func (t Toolchain) newPerl() (pkg.Artifact, string) {
|
func (t Toolchain) newPerl() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "5.42.0"
|
version = "5.42.1"
|
||||||
checksum = "2KR7Jbpk-ZVn1a30LQRwbgUvg2AXlPQZfzrqCr31qD5-yEsTwVQ_W76eZH-EdxM9"
|
checksum = "FsJVq5CZFA7nZklfUl1eC6z2ECEu02XaB1pqfHSKtRLZWpnaBjlB55QOhjKpjkQ2"
|
||||||
)
|
)
|
||||||
return t.NewPackage("perl", version, pkg.NewHTTPGetTar(
|
return t.NewPackage("perl", version, pkg.NewHTTPGetTar(
|
||||||
nil, "https://www.cpan.org/src/5.0/perl-"+version+".tar.gz",
|
nil, "https://www.cpan.org/src/5.0/perl-"+version+".tar.gz",
|
||||||
@@ -68,14 +68,14 @@ func (t Toolchain) newViaPerlModuleBuild(
|
|||||||
name, version string,
|
name, version string,
|
||||||
source pkg.Artifact,
|
source pkg.Artifact,
|
||||||
patches [][2]string,
|
patches [][2]string,
|
||||||
extra ...pkg.Artifact,
|
extra ...PArtifact,
|
||||||
) pkg.Artifact {
|
) pkg.Artifact {
|
||||||
if name == "" || version == "" {
|
if name == "" || version == "" {
|
||||||
panic("names must be non-empty")
|
panic("names must be non-empty")
|
||||||
}
|
}
|
||||||
return t.New("perl-"+name, 0, slices.Concat(extra, []pkg.Artifact{
|
return t.New("perl-"+name, 0, t.AppendPresets(nil,
|
||||||
t.Load(Perl),
|
slices.Concat(P{Perl}, extra)...,
|
||||||
}), nil, nil, `
|
), nil, nil, `
|
||||||
cd /usr/src/`+name+`
|
cd /usr/src/`+name+`
|
||||||
perl Build.PL --prefix=/system
|
perl Build.PL --prefix=/system
|
||||||
./Build build
|
./Build build
|
||||||
@@ -105,6 +105,10 @@ func init() {
|
|||||||
Name: "perl-Module::Build",
|
Name: "perl-Module::Build",
|
||||||
Description: "build and install Perl modules",
|
Description: "build and install Perl modules",
|
||||||
Website: "https://metacpan.org/release/Module-Build",
|
Website: "https://metacpan.org/release/Module-Build",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Perl,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -267,6 +271,10 @@ func init() {
|
|||||||
Name: "perl-Text::WrapI18N",
|
Name: "perl-Text::WrapI18N",
|
||||||
Description: "line wrapping module",
|
Description: "line wrapping module",
|
||||||
Website: "https://metacpan.org/release/Text-WrapI18N",
|
Website: "https://metacpan.org/release/Text-WrapI18N",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
PerlTextCharWidth,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -313,6 +321,10 @@ func init() {
|
|||||||
Name: "perl-Unicode::GCString",
|
Name: "perl-Unicode::GCString",
|
||||||
Description: "String as Sequence of UAX #29 Grapheme Clusters",
|
Description: "String as Sequence of UAX #29 Grapheme Clusters",
|
||||||
Website: "https://metacpan.org/release/Unicode-LineBreak",
|
Website: "https://metacpan.org/release/Unicode-LineBreak",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
PerlMIMECharset,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -18,9 +18,6 @@ func (t Toolchain) newProcps() (pkg.Artifact, string) {
|
|||||||
{"without-ncurses"},
|
{"without-ncurses"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
M4,
|
|
||||||
Perl,
|
|
||||||
Autoconf,
|
|
||||||
Automake,
|
Automake,
|
||||||
Gettext,
|
Gettext,
|
||||||
Libtool,
|
Libtool,
|
||||||
|
|||||||
@@ -53,11 +53,11 @@ func (t Toolchain) newPython() (pkg.Artifact, string) {
|
|||||||
Check: []string{"test"},
|
Check: []string{"test"},
|
||||||
},
|
},
|
||||||
Zlib,
|
Zlib,
|
||||||
|
Bzip2,
|
||||||
Libffi,
|
Libffi,
|
||||||
|
OpenSSL,
|
||||||
|
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
OpenSSL,
|
|
||||||
Bzip2,
|
|
||||||
XZ,
|
XZ,
|
||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
@@ -69,6 +69,13 @@ func init() {
|
|||||||
Description: "the Python programming language interpreter",
|
Description: "the Python programming language interpreter",
|
||||||
Website: "https://www.python.org/",
|
Website: "https://www.python.org/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Zlib,
|
||||||
|
Bzip2,
|
||||||
|
Libffi,
|
||||||
|
OpenSSL,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 13254,
|
ID: 13254,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -81,15 +88,9 @@ func newViaPip(
|
|||||||
wname := name + "-" + version + "-" + interpreter + "-" + abi + "-" + platform + ".whl"
|
wname := name + "-" + version + "-" + interpreter + "-" + abi + "-" + platform + ".whl"
|
||||||
return Metadata{
|
return Metadata{
|
||||||
f: func(t Toolchain) (pkg.Artifact, string) {
|
f: func(t Toolchain) (pkg.Artifact, string) {
|
||||||
extraRes := make([]pkg.Artifact, len(extra))
|
return t.New(name+"-"+version, 0, t.AppendPresets(nil,
|
||||||
for i, p := range extra {
|
slices.Concat(P{Python}, extra)...,
|
||||||
extraRes[i] = t.Load(p)
|
), nil, nil, `
|
||||||
}
|
|
||||||
|
|
||||||
return t.New(name+"-"+version, 0, slices.Concat([]pkg.Artifact{
|
|
||||||
t.Load(Zlib),
|
|
||||||
t.Load(Python),
|
|
||||||
}, extraRes), nil, nil, `
|
|
||||||
pip3 install \
|
pip3 install \
|
||||||
--no-index \
|
--no-index \
|
||||||
--prefix=/system \
|
--prefix=/system \
|
||||||
@@ -104,18 +105,19 @@ pip3 install \
|
|||||||
Name: "python-" + name,
|
Name: "python-" + name,
|
||||||
Description: description,
|
Description: description,
|
||||||
Website: "https://pypi.org/project/" + name + "/",
|
Website: "https://pypi.org/project/" + name + "/",
|
||||||
|
|
||||||
|
Dependencies: slices.Concat(P{Python}, extra),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t Toolchain) newSetuptools() (pkg.Artifact, string) {
|
func (t Toolchain) newSetuptools() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "82.0.0"
|
version = "82.0.1"
|
||||||
checksum = "K9f8Yi7Gg95zjmQsE1LLw9UBb8NglI6EY6pQpdD6DM0Pmc_Td5w2qs1SMngTI6Jp"
|
checksum = "nznP46Tj539yqswtOrIM4nQgwLA1h-ApKX7z7ghazROCpyF5swtQGwsZoI93wkhc"
|
||||||
)
|
)
|
||||||
return t.New("setuptools-"+version, 0, []pkg.Artifact{
|
return t.New("setuptools-"+version, 0, t.AppendPresets(nil,
|
||||||
t.Load(Zlib),
|
Python,
|
||||||
t.Load(Python),
|
), nil, nil, `
|
||||||
}, nil, nil, `
|
|
||||||
pip3 install \
|
pip3 install \
|
||||||
--no-index \
|
--no-index \
|
||||||
--prefix=/system \
|
--prefix=/system \
|
||||||
@@ -132,10 +134,14 @@ func init() {
|
|||||||
artifactsM[Setuptools] = Metadata{
|
artifactsM[Setuptools] = Metadata{
|
||||||
f: Toolchain.newSetuptools,
|
f: Toolchain.newSetuptools,
|
||||||
|
|
||||||
Name: "setuptools",
|
Name: "python-setuptools",
|
||||||
Description: "the autotools of the Python ecosystem",
|
Description: "the autotools of the Python ecosystem",
|
||||||
Website: "https://pypi.org/project/setuptools/",
|
Website: "https://pypi.org/project/setuptools/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Python,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 4021,
|
ID: 4021,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -272,8 +278,6 @@ func init() {
|
|||||||
"https://files.pythonhosted.org/packages/"+
|
"https://files.pythonhosted.org/packages/"+
|
||||||
"78/55/896b06bf93a49bec0f4ae2a6f1ed12bd05c8860744ac3a70eda041064e4d/",
|
"78/55/896b06bf93a49bec0f4ae2a6f1ed12bd05c8860744ac3a70eda041064e4d/",
|
||||||
PythonDistlib,
|
PythonDistlib,
|
||||||
PythonFilelock,
|
|
||||||
PythonPlatformdirs,
|
|
||||||
PythonDiscovery,
|
PythonDiscovery,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -288,10 +292,6 @@ func init() {
|
|||||||
PythonIdentify,
|
PythonIdentify,
|
||||||
PythonNodeenv,
|
PythonNodeenv,
|
||||||
PythonPyYAML,
|
PythonPyYAML,
|
||||||
PythonDistlib,
|
|
||||||
PythonFilelock,
|
|
||||||
PythonPlatformdirs,
|
|
||||||
PythonDiscovery,
|
|
||||||
PythonVirtualenv,
|
PythonVirtualenv,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -74,21 +74,16 @@ EOF
|
|||||||
Bash,
|
Bash,
|
||||||
Python,
|
Python,
|
||||||
Ninja,
|
Ninja,
|
||||||
Bzip2,
|
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
Diffutils,
|
Diffutils,
|
||||||
|
|
||||||
OpenSSL,
|
OpenSSL,
|
||||||
Bzip2,
|
|
||||||
XZ,
|
XZ,
|
||||||
|
|
||||||
Flex,
|
Flex,
|
||||||
Bison,
|
Bison,
|
||||||
M4,
|
M4,
|
||||||
|
|
||||||
PCRE2,
|
|
||||||
Libffi,
|
|
||||||
Zlib,
|
|
||||||
GLib,
|
GLib,
|
||||||
Zstd,
|
Zstd,
|
||||||
DTC,
|
DTC,
|
||||||
@@ -103,6 +98,11 @@ func init() {
|
|||||||
Description: "a generic and open source machine emulator and virtualizer",
|
Description: "a generic and open source machine emulator and virtualizer",
|
||||||
Website: "https://www.qemu.org/",
|
Website: "https://www.qemu.org/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
GLib,
|
||||||
|
Zstd,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 13607,
|
ID: 13607,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -28,6 +28,10 @@ func init() {
|
|||||||
Description: "a program that finds duplicate files",
|
Description: "a program that finds duplicate files",
|
||||||
Website: "https://rdfind.pauldreik.se/",
|
Website: "https://rdfind.pauldreik.se/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Nettle,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 231641,
|
ID: 231641,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import (
|
|||||||
"slices"
|
"slices"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
"hakurei.app/container/fhs"
|
"hakurei.app/container/fhs"
|
||||||
"hakurei.app/internal/pkg"
|
"hakurei.app/internal/pkg"
|
||||||
@@ -19,6 +20,9 @@ const (
|
|||||||
|
|
||||||
// kindBusyboxBin is the kind of [pkg.Artifact] of busyboxBin.
|
// kindBusyboxBin is the kind of [pkg.Artifact] of busyboxBin.
|
||||||
kindBusyboxBin
|
kindBusyboxBin
|
||||||
|
|
||||||
|
// kindCollection is the kind of [Collect]. It never cures successfully.
|
||||||
|
kindCollection
|
||||||
)
|
)
|
||||||
|
|
||||||
// mustDecode is like [pkg.MustDecode], but replaces the zero value and prints
|
// mustDecode is like [pkg.MustDecode], but replaces the zero value and prints
|
||||||
@@ -454,6 +458,48 @@ type PackageAttr struct {
|
|||||||
Flag int
|
Flag int
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// pa holds whether a [PArtifact] is present.
|
||||||
|
type pa = [PresetEnd]bool
|
||||||
|
|
||||||
|
// paPool holds addresses of pa.
|
||||||
|
var paPool = sync.Pool{New: func() any { return new(pa) }}
|
||||||
|
|
||||||
|
// paGet returns the address of a new pa.
|
||||||
|
func paGet() *pa { return paPool.Get().(*pa) }
|
||||||
|
|
||||||
|
// paPut returns a pa to paPool.
|
||||||
|
func paPut(pv *pa) { *pv = pa{}; paPool.Put(pv) }
|
||||||
|
|
||||||
|
// appendPreset recursively appends a [PArtifact] and its runtime dependencies.
|
||||||
|
func (t Toolchain) appendPreset(
|
||||||
|
a []pkg.Artifact,
|
||||||
|
pv *pa, p PArtifact,
|
||||||
|
) []pkg.Artifact {
|
||||||
|
if pv[p] {
|
||||||
|
return a
|
||||||
|
}
|
||||||
|
pv[p] = true
|
||||||
|
|
||||||
|
for _, d := range GetMetadata(p).Dependencies {
|
||||||
|
a = t.appendPreset(a, pv, d)
|
||||||
|
}
|
||||||
|
return append(a, t.Load(p))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AppendPresets recursively appends multiple [PArtifact] and their runtime
|
||||||
|
// dependencies.
|
||||||
|
func (t Toolchain) AppendPresets(
|
||||||
|
a []pkg.Artifact,
|
||||||
|
presets ...PArtifact,
|
||||||
|
) []pkg.Artifact {
|
||||||
|
pv := paGet()
|
||||||
|
for _, p := range presets {
|
||||||
|
a = t.appendPreset(a, pv, p)
|
||||||
|
}
|
||||||
|
paPut(pv)
|
||||||
|
return a
|
||||||
|
}
|
||||||
|
|
||||||
// NewPackage constructs a [pkg.Artifact] via a build system helper.
|
// NewPackage constructs a [pkg.Artifact] via a build system helper.
|
||||||
func (t Toolchain) NewPackage(
|
func (t Toolchain) NewPackage(
|
||||||
name, version string,
|
name, version string,
|
||||||
@@ -486,12 +532,14 @@ func (t Toolchain) NewPackage(
|
|||||||
extraRes := make([]pkg.Artifact, 0, dc)
|
extraRes := make([]pkg.Artifact, 0, dc)
|
||||||
extraRes = append(extraRes, attr.NonStage0...)
|
extraRes = append(extraRes, attr.NonStage0...)
|
||||||
if !t.isStage0() {
|
if !t.isStage0() {
|
||||||
|
pv := paGet()
|
||||||
for _, p := range helper.extra(attr.Flag) {
|
for _, p := range helper.extra(attr.Flag) {
|
||||||
extraRes = append(extraRes, t.Load(p))
|
extraRes = t.appendPreset(extraRes, pv, p)
|
||||||
}
|
}
|
||||||
for _, p := range extra {
|
for _, p := range extra {
|
||||||
extraRes = append(extraRes, t.Load(p))
|
extraRes = t.appendPreset(extraRes, pv, p)
|
||||||
}
|
}
|
||||||
|
paPut(pv)
|
||||||
}
|
}
|
||||||
|
|
||||||
var scriptEarly string
|
var scriptEarly string
|
||||||
@@ -543,3 +591,29 @@ cd '/usr/src/` + name + `/'
|
|||||||
})...,
|
})...,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Collected is returned by [Collect.Cure] to indicate a successful collection.
|
||||||
|
type Collected struct{}
|
||||||
|
|
||||||
|
// Error returns a constant string to satisfy error, but should never be seen
|
||||||
|
// by the user.
|
||||||
|
func (Collected) Error() string { return "artifacts successfully collected" }
|
||||||
|
|
||||||
|
// Collect implements [pkg.FloodArtifact] to concurrently cure multiple
|
||||||
|
// [pkg.Artifact]. It returns [Collected].
|
||||||
|
type Collect []pkg.Artifact
|
||||||
|
|
||||||
|
// Cure returns [Collected].
|
||||||
|
func (*Collect) Cure(*pkg.FContext) error { return Collected{} }
|
||||||
|
|
||||||
|
// Kind returns the hardcoded [pkg.Kind] value.
|
||||||
|
func (*Collect) Kind() pkg.Kind { return kindCollection }
|
||||||
|
|
||||||
|
// Params does not write anything, dependencies are already represented in the header.
|
||||||
|
func (*Collect) Params(*pkg.IContext) {}
|
||||||
|
|
||||||
|
// Dependencies returns [Collect] as is.
|
||||||
|
func (c *Collect) Dependencies() []pkg.Artifact { return *c }
|
||||||
|
|
||||||
|
// IsExclusive returns false: Cure is a noop.
|
||||||
|
func (*Collect) IsExclusive() bool { return false }
|
||||||
|
|||||||
@@ -48,6 +48,12 @@ func init() {
|
|||||||
Description: "tools to create and extract Squashfs filesystems",
|
Description: "tools to create and extract Squashfs filesystems",
|
||||||
Website: "https://github.com/plougher/squashfs-tools",
|
Website: "https://github.com/plougher/squashfs-tools",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Zstd,
|
||||||
|
Gzip,
|
||||||
|
Zlib,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 4879,
|
ID: 4879,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,13 +8,13 @@ import (
|
|||||||
|
|
||||||
func (t Toolchain) newTamaGo() (pkg.Artifact, string) {
|
func (t Toolchain) newTamaGo() (pkg.Artifact, string) {
|
||||||
const (
|
const (
|
||||||
version = "1.26.0"
|
version = "1.26.1"
|
||||||
checksum = "5XkfbpTpSdPJfwtTfUegfdu4LUy8nuZ7sCondiRIxTJI9eQONi8z_O_dq9yDkjw8"
|
checksum = "fimZnklQcYWGsTQU8KepLn-yCYaTfNdMI9DCg6NJVQv-3gOJnUEO9mqRCMAHnEXZ"
|
||||||
)
|
)
|
||||||
return t.New("tamago-go"+version, 0, []pkg.Artifact{
|
return t.New("tamago-go"+version, 0, t.AppendPresets(nil,
|
||||||
t.Load(Bash),
|
Bash,
|
||||||
t.Load(Go),
|
Go,
|
||||||
}, nil, []string{
|
), nil, []string{
|
||||||
"CC=cc",
|
"CC=cc",
|
||||||
"GOCACHE=/tmp/gocache",
|
"GOCACHE=/tmp/gocache",
|
||||||
}, `
|
}, `
|
||||||
|
|||||||
@@ -11,10 +11,10 @@ func (t Toolchain) newUnzip() (pkg.Artifact, string) {
|
|||||||
version = "6.0"
|
version = "6.0"
|
||||||
checksum = "fcqjB1IOVRNJ16K5gTGEDt3zCJDVBc7EDSra9w3H93stqkNwH1vaPQs_QGOpQZu1"
|
checksum = "fcqjB1IOVRNJ16K5gTGEDt3zCJDVBc7EDSra9w3H93stqkNwH1vaPQs_QGOpQZu1"
|
||||||
)
|
)
|
||||||
return t.New("unzip-"+version, 0, []pkg.Artifact{
|
return t.New("unzip-"+version, 0, t.AppendPresets(nil,
|
||||||
t.Load(Make),
|
Make,
|
||||||
t.Load(Coreutils),
|
Coreutils,
|
||||||
}, nil, nil, `
|
), nil, nil, `
|
||||||
cd /usr/src/unzip/
|
cd /usr/src/unzip/
|
||||||
unix/configure
|
unix/configure
|
||||||
make -f unix/Makefile generic1
|
make -f unix/Makefile generic1
|
||||||
|
|||||||
@@ -42,6 +42,12 @@ func init() {
|
|||||||
Description: "core Wayland window system code and protocol",
|
Description: "core Wayland window system code and protocol",
|
||||||
Website: "https://wayland.freedesktop.org/",
|
Website: "https://wayland.freedesktop.org/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Libffi,
|
||||||
|
Libexpat,
|
||||||
|
Libxml2,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 10061,
|
ID: 10061,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -112,9 +118,6 @@ GitLab
|
|||||||
},
|
},
|
||||||
}, (*MesonHelper)(nil),
|
}, (*MesonHelper)(nil),
|
||||||
Wayland,
|
Wayland,
|
||||||
Libffi,
|
|
||||||
Libexpat,
|
|
||||||
Libxml2,
|
|
||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
func init() {
|
func init() {
|
||||||
|
|||||||
@@ -40,9 +40,6 @@ func (t Toolchain) newXproto() (pkg.Artifact, string) {
|
|||||||
// ancient configure script
|
// ancient configure script
|
||||||
Generate: "autoreconf -if",
|
Generate: "autoreconf -if",
|
||||||
},
|
},
|
||||||
M4,
|
|
||||||
Perl,
|
|
||||||
Autoconf,
|
|
||||||
Automake,
|
Automake,
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
|
|
||||||
@@ -75,9 +72,6 @@ func (t Toolchain) newLibXau() (pkg.Artifact, string) {
|
|||||||
// ancient configure script
|
// ancient configure script
|
||||||
Generate: "autoreconf -if",
|
Generate: "autoreconf -if",
|
||||||
},
|
},
|
||||||
M4,
|
|
||||||
Perl,
|
|
||||||
Autoconf,
|
|
||||||
Automake,
|
Automake,
|
||||||
Libtool,
|
Libtool,
|
||||||
PkgConfig,
|
PkgConfig,
|
||||||
@@ -94,6 +88,10 @@ func init() {
|
|||||||
Description: "functions for handling Xauthority files and entries",
|
Description: "functions for handling Xauthority files and entries",
|
||||||
Website: "https://gitlab.freedesktop.org/xorg/lib/libxau",
|
Website: "https://gitlab.freedesktop.org/xorg/lib/libxau",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
Xproto,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 1765,
|
ID: 1765,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -41,7 +41,6 @@ func (t Toolchain) newXCB() (pkg.Artifact, string) {
|
|||||||
PkgConfig,
|
PkgConfig,
|
||||||
|
|
||||||
XCBProto,
|
XCBProto,
|
||||||
Xproto,
|
|
||||||
LibXau,
|
LibXau,
|
||||||
), version
|
), version
|
||||||
}
|
}
|
||||||
@@ -53,6 +52,11 @@ func init() {
|
|||||||
Description: "The X protocol C-language Binding",
|
Description: "The X protocol C-language Binding",
|
||||||
Website: "https://xcb.freedesktop.org/",
|
Website: "https://xcb.freedesktop.org/",
|
||||||
|
|
||||||
|
Dependencies: P{
|
||||||
|
XCBProto,
|
||||||
|
LibXau,
|
||||||
|
},
|
||||||
|
|
||||||
ID: 1767,
|
ID: 1767,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user