Compare commits
56 Commits
v0.3.6
...
aec0fcc3c7
| Author | SHA1 | Date | |
|---|---|---|---|
| aec0fcc3c7 | |||
| 671124fa3b | |||
| 06a74e69d7 | |||
|
0c0013c440
|
|||
| c1c9ca7c57 | |||
|
5e114f3932
|
|||
|
fb36c54025
|
|||
|
7011f8a580
|
|||
|
dac33d7720
|
|||
|
50649fdbf4
|
|||
|
91aa21d92d
|
|||
|
a1b515074e
|
|||
|
e130443cf4
|
|||
|
112c32fee2
|
|||
|
6d925b3d43
|
|||
|
2ec49a525f
|
|||
|
ce914abb57
|
|||
|
b03ad185de
|
|||
|
534cac83fb
|
|||
|
887edcbe48
|
|||
|
fa9bc70b39
|
|||
|
4a63fbbc2a
|
|||
| b104ad6e2d | |||
| 469bd1ee99 | |||
| 52a4e5b87d | |||
|
35d76c5d2b
|
|||
|
dfd3301a33
|
|||
|
a4ce41ea9a
|
|||
|
773e43a215
|
|||
|
f150e1fdd6
|
|||
|
dec7010c35
|
|||
|
69bd88282c
|
|||
|
ca2053d3ba
|
|||
|
8d0aa1127c
|
|||
|
48cdf8bf85
|
|||
|
7fb42ba49d
|
|||
|
19a2737148
|
|||
|
baf2def9cc
|
|||
|
242e042cb9
|
|||
|
6988c9c4db
|
|||
|
d6e0ed8c76
|
|||
|
53be3309c5
|
|||
|
644dd18a52
|
|||
|
27c6f976df
|
|||
|
279a973633
|
|||
|
9c1b522689
|
|||
|
5c8cd46c02
|
|||
|
2dba550a2b
|
|||
|
8c64812b34
|
|||
|
d1423d980d
|
|||
|
104da0f66a
|
|||
|
d996d9fbb7
|
|||
|
469f97ccc1
|
|||
|
af7a6180a1
|
|||
|
03b5c0e20a
|
|||
|
6a31fb4fa3
|
4
.gitignore
vendored
4
.gitignore
vendored
@@ -27,6 +27,10 @@ go.work.sum
|
||||
|
||||
# go generate
|
||||
/cmd/hakurei/LICENSE
|
||||
/cmd/pkgserver/.sass-cache
|
||||
/cmd/pkgserver/ui/static/*.js
|
||||
/cmd/pkgserver/ui/static/*.css*
|
||||
/cmd/pkgserver/ui/static/*.css.map
|
||||
/internal/pkg/testdata/testtool
|
||||
/internal/rosa/hakurei_current.tar.gz
|
||||
|
||||
|
||||
176
cmd/pkgserver/api.go
Normal file
176
cmd/pkgserver/api.go
Normal file
@@ -0,0 +1,176 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"strconv"
|
||||
"sync"
|
||||
|
||||
"hakurei.app/internal/info"
|
||||
"hakurei.app/internal/rosa"
|
||||
)
|
||||
|
||||
// for lazy initialisation of serveInfo
|
||||
var (
|
||||
infoPayload struct {
|
||||
// Current package count.
|
||||
Count int `json:"count"`
|
||||
// Hakurei version, set at link time.
|
||||
HakureiVersion string `json:"hakurei_version"`
|
||||
}
|
||||
infoPayloadOnce sync.Once
|
||||
)
|
||||
|
||||
// handleInfo writes constant system information.
|
||||
func handleInfo(w http.ResponseWriter, _ *http.Request) {
|
||||
infoPayloadOnce.Do(func() {
|
||||
infoPayload.Count = int(rosa.PresetUnexportedStart)
|
||||
infoPayload.HakureiVersion = info.Version()
|
||||
})
|
||||
// TODO(mae): cache entire response if no additional fields are planned
|
||||
writeAPIPayload(w, infoPayload)
|
||||
}
|
||||
|
||||
// newStatusHandler returns a [http.HandlerFunc] that offers status files for
|
||||
// viewing or download, if available.
|
||||
func (index *packageIndex) newStatusHandler(disposition bool) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
m, ok := index.names[path.Base(r.URL.Path)]
|
||||
if !ok || !m.HasReport {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
contentType := "text/plain; charset=utf-8"
|
||||
if disposition {
|
||||
contentType = "application/octet-stream"
|
||||
|
||||
// quoting like this is unsound, but okay, because metadata is hardcoded
|
||||
contentDisposition := `attachment; filename="`
|
||||
contentDisposition += m.Name + "-"
|
||||
if m.Version != "" {
|
||||
contentDisposition += m.Version + "-"
|
||||
}
|
||||
contentDisposition += m.ids + `.log"`
|
||||
w.Header().Set("Content-Disposition", contentDisposition)
|
||||
}
|
||||
w.Header().Set("Content-Type", contentType)
|
||||
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||
if err := func() (err error) {
|
||||
defer index.handleAccess(&err)()
|
||||
_, err = w.Write(m.status)
|
||||
return
|
||||
}(); err != nil {
|
||||
log.Println(err)
|
||||
http.Error(
|
||||
w, "cannot deliver status, contact maintainers",
|
||||
http.StatusInternalServerError,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// handleGet writes a slice of metadata with specified order.
|
||||
func (index *packageIndex) handleGet(w http.ResponseWriter, r *http.Request) {
|
||||
q := r.URL.Query()
|
||||
limit, err := strconv.Atoi(q.Get("limit"))
|
||||
if err != nil || limit > 100 || limit < 1 {
|
||||
http.Error(
|
||||
w, "limit must be an integer between 1 and 100",
|
||||
http.StatusBadRequest,
|
||||
)
|
||||
return
|
||||
}
|
||||
i, err := strconv.Atoi(q.Get("index"))
|
||||
if err != nil || i >= len(index.sorts[0]) || i < 0 {
|
||||
http.Error(
|
||||
w, "index must be an integer between 0 and "+
|
||||
strconv.Itoa(int(rosa.PresetUnexportedStart-1)),
|
||||
http.StatusBadRequest,
|
||||
)
|
||||
return
|
||||
}
|
||||
sort, err := strconv.Atoi(q.Get("sort"))
|
||||
if err != nil || sort >= len(index.sorts) || sort < 0 {
|
||||
http.Error(
|
||||
w, "sort must be an integer between 0 and "+
|
||||
strconv.Itoa(sortOrderEnd),
|
||||
http.StatusBadRequest,
|
||||
)
|
||||
return
|
||||
}
|
||||
values := index.sorts[sort][i:min(i+limit, len(index.sorts[sort]))]
|
||||
writeAPIPayload(w, &struct {
|
||||
Values []*metadata `json:"values"`
|
||||
}{values})
|
||||
}
|
||||
|
||||
func (index *packageIndex) handleSearch(w http.ResponseWriter, r *http.Request) {
|
||||
q := r.URL.Query()
|
||||
limit, err := strconv.Atoi(q.Get("limit"))
|
||||
if err != nil || limit > 100 || limit < 1 {
|
||||
http.Error(
|
||||
w, "limit must be an integer between 1 and 100",
|
||||
http.StatusBadRequest,
|
||||
)
|
||||
return
|
||||
}
|
||||
i, err := strconv.Atoi(q.Get("index"))
|
||||
if err != nil || i >= len(index.sorts[0]) || i < 0 {
|
||||
http.Error(
|
||||
w, "index must be an integer between 0 and "+
|
||||
strconv.Itoa(int(rosa.PresetUnexportedStart-1)),
|
||||
http.StatusBadRequest,
|
||||
)
|
||||
return
|
||||
}
|
||||
search, err := url.PathUnescape(q.Get("search"))
|
||||
if len(search) > 100 || err != nil {
|
||||
http.Error(
|
||||
w, "search must be a string between 0 and 100 characters long",
|
||||
http.StatusBadRequest,
|
||||
)
|
||||
return
|
||||
}
|
||||
desc := q.Get("desc") == "true"
|
||||
n, res, err := index.performSearchQuery(limit, i, search, desc)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
}
|
||||
writeAPIPayload(w, &struct {
|
||||
Count int `json:"count"`
|
||||
Results []searchResult `json:"results"`
|
||||
}{n, res})
|
||||
}
|
||||
|
||||
// apiVersion is the name of the current API revision, as part of the pattern.
|
||||
const apiVersion = "v1"
|
||||
|
||||
// registerAPI registers API handler functions.
|
||||
func (index *packageIndex) registerAPI(mux *http.ServeMux) {
|
||||
mux.HandleFunc("GET /api/"+apiVersion+"/info", handleInfo)
|
||||
mux.HandleFunc("GET /api/"+apiVersion+"/get", index.handleGet)
|
||||
mux.HandleFunc("GET /api/"+apiVersion+"/search", index.handleSearch)
|
||||
mux.HandleFunc("GET /api/"+apiVersion+"/status/", index.newStatusHandler(false))
|
||||
mux.HandleFunc("GET /status/", index.newStatusHandler(true))
|
||||
}
|
||||
|
||||
// writeAPIPayload sets headers common to API responses and encodes payload as
|
||||
// JSON for the response body.
|
||||
func writeAPIPayload(w http.ResponseWriter, payload any) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||
w.Header().Set("Pragma", "no-cache")
|
||||
w.Header().Set("Expires", "0")
|
||||
|
||||
if err := json.NewEncoder(w).Encode(payload); err != nil {
|
||||
log.Println(err)
|
||||
http.Error(
|
||||
w, "cannot encode payload, contact maintainers",
|
||||
http.StatusInternalServerError,
|
||||
)
|
||||
}
|
||||
}
|
||||
183
cmd/pkgserver/api_test.go
Normal file
183
cmd/pkgserver/api_test.go
Normal file
@@ -0,0 +1,183 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"slices"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"hakurei.app/internal/info"
|
||||
"hakurei.app/internal/rosa"
|
||||
)
|
||||
|
||||
// prefix is prepended to every API path.
|
||||
const prefix = "/api/" + apiVersion + "/"
|
||||
|
||||
func TestAPIInfo(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
handleInfo(w, httptest.NewRequestWithContext(
|
||||
t.Context(),
|
||||
http.MethodGet,
|
||||
prefix+"info",
|
||||
nil,
|
||||
))
|
||||
|
||||
resp := w.Result()
|
||||
checkStatus(t, resp, http.StatusOK)
|
||||
checkAPIHeader(t, w.Header())
|
||||
|
||||
checkPayload(t, resp, struct {
|
||||
Count int `json:"count"`
|
||||
HakureiVersion string `json:"hakurei_version"`
|
||||
}{int(rosa.PresetUnexportedStart), info.Version()})
|
||||
}
|
||||
|
||||
func TestAPIGet(t *testing.T) {
|
||||
t.Parallel()
|
||||
const target = prefix + "get"
|
||||
|
||||
index := newIndex(t)
|
||||
newRequest := func(suffix string) *httptest.ResponseRecorder {
|
||||
w := httptest.NewRecorder()
|
||||
index.handleGet(w, httptest.NewRequestWithContext(
|
||||
t.Context(),
|
||||
http.MethodGet,
|
||||
target+suffix,
|
||||
nil,
|
||||
))
|
||||
return w
|
||||
}
|
||||
|
||||
checkValidate := func(t *testing.T, suffix string, vmin, vmax int, wantErr string) {
|
||||
t.Run("invalid", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
w := newRequest("?" + suffix + "=invalid")
|
||||
resp := w.Result()
|
||||
checkError(t, resp, wantErr, http.StatusBadRequest)
|
||||
})
|
||||
|
||||
t.Run("min", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
w := newRequest("?" + suffix + "=" + strconv.Itoa(vmin-1))
|
||||
resp := w.Result()
|
||||
checkError(t, resp, wantErr, http.StatusBadRequest)
|
||||
|
||||
w = newRequest("?" + suffix + "=" + strconv.Itoa(vmin))
|
||||
resp = w.Result()
|
||||
checkStatus(t, resp, http.StatusOK)
|
||||
})
|
||||
|
||||
t.Run("max", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
w := newRequest("?" + suffix + "=" + strconv.Itoa(vmax+1))
|
||||
resp := w.Result()
|
||||
checkError(t, resp, wantErr, http.StatusBadRequest)
|
||||
|
||||
w = newRequest("?" + suffix + "=" + strconv.Itoa(vmax))
|
||||
resp = w.Result()
|
||||
checkStatus(t, resp, http.StatusOK)
|
||||
})
|
||||
}
|
||||
|
||||
t.Run("limit", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
checkValidate(
|
||||
t, "index=0&sort=0&limit", 1, 100,
|
||||
"limit must be an integer between 1 and 100",
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("index", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
checkValidate(
|
||||
t, "limit=1&sort=0&index", 0, int(rosa.PresetUnexportedStart-1),
|
||||
"index must be an integer between 0 and "+strconv.Itoa(int(rosa.PresetUnexportedStart-1)),
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("sort", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
checkValidate(
|
||||
t, "index=0&limit=1&sort", 0, int(sortOrderEnd),
|
||||
"sort must be an integer between 0 and "+strconv.Itoa(int(sortOrderEnd)),
|
||||
)
|
||||
})
|
||||
|
||||
checkWithSuffix := func(name, suffix string, want []*metadata) {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
w := newRequest(suffix)
|
||||
resp := w.Result()
|
||||
checkStatus(t, resp, http.StatusOK)
|
||||
checkAPIHeader(t, w.Header())
|
||||
checkPayloadFunc(t, resp, func(got *struct {
|
||||
Count int `json:"count"`
|
||||
Values []*metadata `json:"values"`
|
||||
}) bool {
|
||||
return got.Count == len(want) &&
|
||||
slices.EqualFunc(got.Values, want, func(a, b *metadata) bool {
|
||||
return (a.Version == b.Version ||
|
||||
a.Version == rosa.Unversioned ||
|
||||
b.Version == rosa.Unversioned) &&
|
||||
a.HasReport == b.HasReport &&
|
||||
a.Name == b.Name &&
|
||||
a.Description == b.Description &&
|
||||
a.Website == b.Website
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
}
|
||||
|
||||
checkWithSuffix("declarationAscending", "?limit=2&index=0&sort=0", []*metadata{
|
||||
{
|
||||
Metadata: rosa.GetMetadata(0),
|
||||
Version: rosa.Std.Version(0),
|
||||
},
|
||||
{
|
||||
Metadata: rosa.GetMetadata(1),
|
||||
Version: rosa.Std.Version(1),
|
||||
},
|
||||
})
|
||||
checkWithSuffix("declarationAscending offset", "?limit=3&index=5&sort=0", []*metadata{
|
||||
{
|
||||
Metadata: rosa.GetMetadata(5),
|
||||
Version: rosa.Std.Version(5),
|
||||
},
|
||||
{
|
||||
Metadata: rosa.GetMetadata(6),
|
||||
Version: rosa.Std.Version(6),
|
||||
},
|
||||
{
|
||||
Metadata: rosa.GetMetadata(7),
|
||||
Version: rosa.Std.Version(7),
|
||||
},
|
||||
})
|
||||
checkWithSuffix("declarationDescending", "?limit=3&index=0&sort=1", []*metadata{
|
||||
{
|
||||
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 1),
|
||||
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 1),
|
||||
},
|
||||
{
|
||||
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 2),
|
||||
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 2),
|
||||
},
|
||||
{
|
||||
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 3),
|
||||
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 3),
|
||||
},
|
||||
})
|
||||
checkWithSuffix("declarationDescending offset", "?limit=1&index=37&sort=1", []*metadata{
|
||||
{
|
||||
Metadata: rosa.GetMetadata(rosa.PresetUnexportedStart - 38),
|
||||
Version: rosa.Std.Version(rosa.PresetUnexportedStart - 38),
|
||||
},
|
||||
})
|
||||
}
|
||||
105
cmd/pkgserver/index.go
Normal file
105
cmd/pkgserver/index.go
Normal file
@@ -0,0 +1,105 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"errors"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"hakurei.app/internal/pkg"
|
||||
"hakurei.app/internal/rosa"
|
||||
)
|
||||
|
||||
const (
|
||||
declarationAscending = iota
|
||||
declarationDescending
|
||||
nameAscending
|
||||
nameDescending
|
||||
sizeAscending
|
||||
sizeDescending
|
||||
|
||||
sortOrderEnd = iota - 1
|
||||
)
|
||||
|
||||
// packageIndex refers to metadata by name and various sort orders.
|
||||
type packageIndex struct {
|
||||
sorts [sortOrderEnd + 1][rosa.PresetUnexportedStart]*metadata
|
||||
names map[string]*metadata
|
||||
search searchCache
|
||||
// Taken from [rosa.Report] if available.
|
||||
handleAccess func(*error) func()
|
||||
}
|
||||
|
||||
// metadata holds [rosa.Metadata] extended with additional information.
|
||||
type metadata struct {
|
||||
p rosa.PArtifact
|
||||
*rosa.Metadata
|
||||
|
||||
// Populated via [rosa.Toolchain.Version], [rosa.Unversioned] is equivalent
|
||||
// to the zero value. Otherwise, the zero value is invalid.
|
||||
Version string `json:"version,omitempty"`
|
||||
// Output data size, available if present in report.
|
||||
Size int64 `json:"size,omitempty"`
|
||||
// Whether the underlying [pkg.Artifact] is present in the report.
|
||||
HasReport bool `json:"report"`
|
||||
|
||||
// Ident string encoded ahead of time.
|
||||
ids string
|
||||
// Backed by [rosa.Report], access must be prepared by HandleAccess.
|
||||
status []byte
|
||||
}
|
||||
|
||||
// populate deterministically populates packageIndex, optionally with a report.
|
||||
func (index *packageIndex) populate(cache *pkg.Cache, report *rosa.Report) (err error) {
|
||||
if report != nil {
|
||||
defer report.HandleAccess(&err)()
|
||||
index.handleAccess = report.HandleAccess
|
||||
}
|
||||
|
||||
var work [rosa.PresetUnexportedStart]*metadata
|
||||
index.names = make(map[string]*metadata)
|
||||
for p := range rosa.PresetUnexportedStart {
|
||||
m := metadata{
|
||||
p: p,
|
||||
|
||||
Metadata: rosa.GetMetadata(p),
|
||||
Version: rosa.Std.Version(p),
|
||||
}
|
||||
if m.Version == "" {
|
||||
return errors.New("invalid version from " + m.Name)
|
||||
}
|
||||
if m.Version == rosa.Unversioned {
|
||||
m.Version = ""
|
||||
}
|
||||
|
||||
if cache != nil && report != nil {
|
||||
id := cache.Ident(rosa.Std.Load(p))
|
||||
m.ids = pkg.Encode(id.Value())
|
||||
m.status, m.Size = report.ArtifactOf(id)
|
||||
m.HasReport = m.Size >= 0
|
||||
}
|
||||
|
||||
work[p] = &m
|
||||
index.names[m.Name] = &m
|
||||
}
|
||||
|
||||
index.sorts[declarationAscending] = work
|
||||
index.sorts[declarationDescending] = work
|
||||
slices.Reverse(index.sorts[declarationDescending][:])
|
||||
|
||||
index.sorts[nameAscending] = work
|
||||
slices.SortFunc(index.sorts[nameAscending][:], func(a, b *metadata) int {
|
||||
return strings.Compare(a.Name, b.Name)
|
||||
})
|
||||
index.sorts[nameDescending] = index.sorts[nameAscending]
|
||||
slices.Reverse(index.sorts[nameDescending][:])
|
||||
|
||||
index.sorts[sizeAscending] = work
|
||||
slices.SortFunc(index.sorts[sizeAscending][:], func(a, b *metadata) int {
|
||||
return cmp.Compare(a.Size, b.Size)
|
||||
})
|
||||
index.sorts[sizeDescending] = index.sorts[sizeAscending]
|
||||
slices.Reverse(index.sorts[sizeDescending][:])
|
||||
|
||||
return
|
||||
}
|
||||
114
cmd/pkgserver/main.go
Normal file
114
cmd/pkgserver/main.go
Normal file
@@ -0,0 +1,114 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"hakurei.app/command"
|
||||
"hakurei.app/container/check"
|
||||
"hakurei.app/internal/pkg"
|
||||
"hakurei.app/internal/rosa"
|
||||
"hakurei.app/message"
|
||||
)
|
||||
|
||||
const shutdownTimeout = 15 * time.Second
|
||||
|
||||
func main() {
|
||||
log.SetFlags(0)
|
||||
log.SetPrefix("pkgserver: ")
|
||||
|
||||
var (
|
||||
flagBaseDir string
|
||||
flagAddr string
|
||||
)
|
||||
|
||||
ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM, syscall.SIGHUP)
|
||||
defer stop()
|
||||
msg := message.New(log.Default())
|
||||
|
||||
c := command.New(os.Stderr, log.Printf, "pkgserver", func(args []string) error {
|
||||
var (
|
||||
cache *pkg.Cache
|
||||
report *rosa.Report
|
||||
)
|
||||
switch len(args) {
|
||||
case 0:
|
||||
break
|
||||
|
||||
case 1:
|
||||
baseDir, err := check.NewAbs(flagBaseDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
cache, err = pkg.Open(ctx, msg, 0, baseDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer cache.Close()
|
||||
|
||||
report, err = rosa.OpenReport(args[0])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
default:
|
||||
return errors.New("pkgserver requires 1 argument")
|
||||
|
||||
}
|
||||
|
||||
var index packageIndex
|
||||
index.search = make(searchCache)
|
||||
if err := index.populate(cache, report); err != nil {
|
||||
return err
|
||||
}
|
||||
ticker := time.NewTicker(1 * time.Minute)
|
||||
go func() {
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
ticker.Stop()
|
||||
return
|
||||
case <-ticker.C:
|
||||
index.search.clean()
|
||||
}
|
||||
}
|
||||
}()
|
||||
var mux http.ServeMux
|
||||
uiRoutes(&mux)
|
||||
index.registerAPI(&mux)
|
||||
server := http.Server{
|
||||
Addr: flagAddr,
|
||||
Handler: &mux,
|
||||
}
|
||||
go func() {
|
||||
<-ctx.Done()
|
||||
c, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
|
||||
defer cancel()
|
||||
if err := server.Shutdown(c); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}()
|
||||
return server.ListenAndServe()
|
||||
}).Flag(
|
||||
&flagBaseDir,
|
||||
"b", command.StringFlag(""),
|
||||
"base directory for cache",
|
||||
).Flag(
|
||||
&flagAddr,
|
||||
"addr", command.StringFlag(":8067"),
|
||||
"TCP network address to listen on",
|
||||
)
|
||||
c.MustParse(os.Args[1:], func(err error) {
|
||||
if errors.Is(err, http.ErrServerClosed) {
|
||||
os.Exit(0)
|
||||
}
|
||||
log.Fatal(err)
|
||||
})
|
||||
}
|
||||
96
cmd/pkgserver/main_test.go
Normal file
96
cmd/pkgserver/main_test.go
Normal file
@@ -0,0 +1,96 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// newIndex returns the address of a newly populated packageIndex.
|
||||
func newIndex(t *testing.T) *packageIndex {
|
||||
t.Helper()
|
||||
|
||||
var index packageIndex
|
||||
if err := index.populate(nil, nil); err != nil {
|
||||
t.Fatalf("populate: error = %v", err)
|
||||
}
|
||||
return &index
|
||||
}
|
||||
|
||||
// checkStatus checks response status code.
|
||||
func checkStatus(t *testing.T, resp *http.Response, want int) {
|
||||
t.Helper()
|
||||
|
||||
if resp.StatusCode != want {
|
||||
t.Errorf(
|
||||
"StatusCode: %s, want %s",
|
||||
http.StatusText(resp.StatusCode),
|
||||
http.StatusText(want),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// checkHeader checks the value of a header entry.
|
||||
func checkHeader(t *testing.T, h http.Header, key, want string) {
|
||||
t.Helper()
|
||||
|
||||
if got := h.Get(key); got != want {
|
||||
t.Errorf("%s: %q, want %q", key, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
// checkAPIHeader checks common entries set for API endpoints.
|
||||
func checkAPIHeader(t *testing.T, h http.Header) {
|
||||
t.Helper()
|
||||
|
||||
checkHeader(t, h, "Content-Type", "application/json; charset=utf-8")
|
||||
checkHeader(t, h, "Cache-Control", "no-cache, no-store, must-revalidate")
|
||||
checkHeader(t, h, "Pragma", "no-cache")
|
||||
checkHeader(t, h, "Expires", "0")
|
||||
}
|
||||
|
||||
// checkPayloadFunc checks the JSON response of an API endpoint by passing it to f.
|
||||
func checkPayloadFunc[T any](
|
||||
t *testing.T,
|
||||
resp *http.Response,
|
||||
f func(got *T) bool,
|
||||
) {
|
||||
t.Helper()
|
||||
|
||||
var got T
|
||||
r := io.Reader(resp.Body)
|
||||
if testing.Verbose() {
|
||||
var buf bytes.Buffer
|
||||
r = io.TeeReader(r, &buf)
|
||||
defer func() { t.Helper(); t.Log(buf.String()) }()
|
||||
}
|
||||
if err := json.NewDecoder(r).Decode(&got); err != nil {
|
||||
t.Fatalf("Decode: error = %v", err)
|
||||
}
|
||||
|
||||
if !f(&got) {
|
||||
t.Errorf("Body: %#v", got)
|
||||
}
|
||||
}
|
||||
|
||||
// checkPayload checks the JSON response of an API endpoint.
|
||||
func checkPayload[T any](t *testing.T, resp *http.Response, want T) {
|
||||
t.Helper()
|
||||
|
||||
checkPayloadFunc(t, resp, func(got *T) bool {
|
||||
return reflect.DeepEqual(got, &want)
|
||||
})
|
||||
}
|
||||
|
||||
func checkError(t *testing.T, resp *http.Response, error string, code int) {
|
||||
t.Helper()
|
||||
|
||||
checkStatus(t, resp, code)
|
||||
if got, _ := io.ReadAll(resp.Body); string(got) != fmt.Sprintln(error) {
|
||||
t.Errorf("Body: %q, want %q", string(got), error)
|
||||
}
|
||||
}
|
||||
77
cmd/pkgserver/search.go
Normal file
77
cmd/pkgserver/search.go
Normal file
@@ -0,0 +1,77 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"maps"
|
||||
"regexp"
|
||||
"slices"
|
||||
"time"
|
||||
)
|
||||
|
||||
type searchCache map[string]searchCacheEntry
|
||||
type searchResult struct {
|
||||
NameIndices [][]int `json:"name_matches"`
|
||||
DescIndices [][]int `json:"desc_matches,omitempty"`
|
||||
Score float64 `json:"score"`
|
||||
*metadata
|
||||
}
|
||||
type searchCacheEntry struct {
|
||||
query string
|
||||
results []searchResult
|
||||
expiry time.Time
|
||||
}
|
||||
|
||||
func (index *packageIndex) performSearchQuery(limit int, i int, search string, desc bool) (int, []searchResult, error) {
|
||||
entry, ok := index.search[search]
|
||||
if ok {
|
||||
return len(entry.results), entry.results[i:min(i+limit, len(entry.results))], nil
|
||||
}
|
||||
|
||||
regex, err := regexp.Compile(search)
|
||||
if err != nil {
|
||||
return 0, make([]searchResult, 0), err
|
||||
}
|
||||
res := make([]searchResult, 0)
|
||||
for p := range maps.Values(index.names) {
|
||||
nameIndices := regex.FindAllIndex([]byte(p.Name), -1)
|
||||
var descIndices [][]int = nil
|
||||
if desc {
|
||||
descIndices = regex.FindAllIndex([]byte(p.Description), -1)
|
||||
}
|
||||
if nameIndices == nil && descIndices == nil {
|
||||
continue
|
||||
}
|
||||
score := float64(indexsum(nameIndices)) / (float64(len(nameIndices)) + 1)
|
||||
if desc {
|
||||
score += float64(indexsum(descIndices)) / (float64(len(descIndices)) + 1) / 10.0
|
||||
}
|
||||
res = append(res, searchResult{
|
||||
NameIndices: nameIndices,
|
||||
DescIndices: descIndices,
|
||||
Score: score,
|
||||
metadata: p,
|
||||
})
|
||||
}
|
||||
slices.SortFunc(res[:], func(a, b searchResult) int { return -cmp.Compare(a.Score, b.Score) })
|
||||
expiry := time.Now().Add(1 * time.Minute)
|
||||
entry = searchCacheEntry{
|
||||
query: search,
|
||||
results: res,
|
||||
expiry: expiry,
|
||||
}
|
||||
index.search[search] = entry
|
||||
|
||||
return len(res), res[i:min(i+limit, len(entry.results))], nil
|
||||
}
|
||||
func (s *searchCache) clean() {
|
||||
maps.DeleteFunc(*s, func(_ string, v searchCacheEntry) bool {
|
||||
return v.expiry.Before(time.Now())
|
||||
})
|
||||
}
|
||||
func indexsum(in [][]int) int {
|
||||
sum := 0
|
||||
for i := 0; i < len(in); i++ {
|
||||
sum += in[i][1] - in[i][0]
|
||||
}
|
||||
return sum
|
||||
}
|
||||
38
cmd/pkgserver/ui.go
Normal file
38
cmd/pkgserver/ui.go
Normal file
@@ -0,0 +1,38 @@
|
||||
package main
|
||||
|
||||
import "net/http"
|
||||
|
||||
func serveWebUI(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||
w.Header().Set("Pragma", "no-cache")
|
||||
w.Header().Set("Expires", "0")
|
||||
w.Header().Set("X-Content-Type-Options", "nosniff")
|
||||
w.Header().Set("X-XSS-Protection", "1")
|
||||
w.Header().Set("X-Frame-Options", "DENY")
|
||||
|
||||
http.ServeFileFS(w, r, content, "ui/index.html")
|
||||
}
|
||||
func serveStaticContent(w http.ResponseWriter, r *http.Request) {
|
||||
switch r.URL.Path {
|
||||
case "/static/style.css":
|
||||
darkTheme := r.CookiesNamed("dark_theme")
|
||||
if len(darkTheme) > 0 && darkTheme[0].Value == "true" {
|
||||
http.ServeFileFS(w, r, content, "ui/static/dark.css")
|
||||
} else {
|
||||
http.ServeFileFS(w, r, content, "ui/static/light.css")
|
||||
}
|
||||
case "/favicon.ico":
|
||||
http.ServeFileFS(w, r, content, "ui/static/favicon.ico")
|
||||
case "/static/index.js":
|
||||
http.ServeFileFS(w, r, content, "ui/static/index.js")
|
||||
default:
|
||||
http.NotFound(w, r)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func uiRoutes(mux *http.ServeMux) {
|
||||
mux.HandleFunc("GET /{$}", serveWebUI)
|
||||
mux.HandleFunc("GET /favicon.ico", serveStaticContent)
|
||||
mux.HandleFunc("GET /static/", serveStaticContent)
|
||||
}
|
||||
35
cmd/pkgserver/ui/index.html
Normal file
35
cmd/pkgserver/ui/index.html
Normal file
@@ -0,0 +1,35 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<link rel="stylesheet" href="static/style.css">
|
||||
<title>Hakurei PkgServer</title>
|
||||
<script src="static/index.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Hakurei PkgServer</h1>
|
||||
|
||||
<table id="pkg-list">
|
||||
<tr><td>Loading...</td></tr>
|
||||
</table>
|
||||
<p>Showing entries <span id="entry-counter"></span>.</p>
|
||||
<span class="bottom-nav"><a href="javascript:prevPage()">« Previous</a> <span id="page-number">1</span> <a href="javascript:nextPage()">Next »</a></span>
|
||||
<span><label for="count">Entries per page: </label><select name="count" id="count">
|
||||
<option value="10">10</option>
|
||||
<option value="20">20</option>
|
||||
<option value="30">30</option>
|
||||
<option value="50">50</option>
|
||||
</select></span>
|
||||
<span><label for="sort">Sort by: </label><select name="sort" id="sort">
|
||||
<option value="0">Definition (ascending)</option>
|
||||
<option value="1">Definition (descending)</option>
|
||||
<option value="2">Name (ascending)</option>
|
||||
<option value="3">Name (descending)</option>
|
||||
<option value="4">Size (ascending)</option>
|
||||
<option value="5">Size (descending)</option>
|
||||
</select></span>
|
||||
</body>
|
||||
<footer>
|
||||
<p>©<a href="https://hakurei.app/">Hakurei</a> (<span id="hakurei-version">unknown</span>). Licensed under the MIT license.</p>
|
||||
</footer>
|
||||
</html>
|
||||
0
cmd/pkgserver/ui/static/_common.scss
Normal file
0
cmd/pkgserver/ui/static/_common.scss
Normal file
6
cmd/pkgserver/ui/static/dark.scss
Normal file
6
cmd/pkgserver/ui/static/dark.scss
Normal file
@@ -0,0 +1,6 @@
|
||||
@use 'common';
|
||||
|
||||
html {
|
||||
background-color: #2c2c2c;
|
||||
color: ghostwhite;
|
||||
}
|
||||
BIN
cmd/pkgserver/ui/static/favicon.ico
Normal file
BIN
cmd/pkgserver/ui/static/favicon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 17 KiB |
155
cmd/pkgserver/ui/static/index.ts
Normal file
155
cmd/pkgserver/ui/static/index.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
class PackageIndexEntry {
|
||||
name: string
|
||||
size: number | null
|
||||
description: string | null
|
||||
website: string | null
|
||||
version: string | null
|
||||
report: boolean
|
||||
}
|
||||
function toHTML(entry: PackageIndexEntry): HTMLTableRowElement {
|
||||
let v = entry.version != null ? `<span>${escapeHtml(entry.version)}</span>` : ""
|
||||
let s = entry.size != null ? `<p>Size: ${toByteSizeString(entry.size)} (${entry.size})</p>` : ""
|
||||
let d = entry.description != null ? `<p>${escapeHtml(entry.description)}</p>` : ""
|
||||
let w = entry.website != null ? `<a href="${encodeURI(entry.website)}">Website</a>` : ""
|
||||
let r = entry.report ? `Log (<a href=\"${encodeURI('/api/v1/status/' + entry.name)}\">View</a> | <a href=\"${encodeURI('/status/' + entry.name)}\">Download</a>)` : ""
|
||||
let row = <HTMLTableRowElement>(document.createElement('tr'))
|
||||
row.innerHTML = `<td>
|
||||
<h2>${escapeHtml(entry.name)} ${v}</h2>
|
||||
${d}
|
||||
${s}
|
||||
${w}
|
||||
${r}
|
||||
</td>`
|
||||
return row
|
||||
}
|
||||
|
||||
function toByteSizeString(bytes: number): string {
|
||||
if(bytes == null || bytes < 1024) return `${bytes}B`
|
||||
if(bytes < Math.pow(1024, 2)) return `${(bytes/1024).toFixed(2)}kiB`
|
||||
if(bytes < Math.pow(1024, 3)) return `${(bytes/Math.pow(1024, 2)).toFixed(2)}MiB`
|
||||
if(bytes < Math.pow(1024, 4)) return `${(bytes/Math.pow(1024, 3)).toFixed(2)}GiB`
|
||||
if(bytes < Math.pow(1024, 5)) return `${(bytes/Math.pow(1024, 4)).toFixed(2)}TiB`
|
||||
return "not only is it big, it's large"
|
||||
}
|
||||
|
||||
const API_VERSION = 1
|
||||
const ENDPOINT = `/api/v${API_VERSION}`
|
||||
class InfoPayload {
|
||||
count: number
|
||||
hakurei_version: string
|
||||
}
|
||||
|
||||
async function infoRequest(): Promise<InfoPayload> {
|
||||
const res = await fetch(`${ENDPOINT}/info`)
|
||||
const payload = await res.json()
|
||||
return payload as InfoPayload
|
||||
}
|
||||
class GetPayload {
|
||||
values: PackageIndexEntry[]
|
||||
}
|
||||
|
||||
enum SortOrders {
|
||||
DeclarationAscending,
|
||||
DeclarationDescending,
|
||||
NameAscending,
|
||||
NameDescending
|
||||
}
|
||||
async function getRequest(limit: number, index: number, sort: SortOrders): Promise<GetPayload> {
|
||||
const res = await fetch(`${ENDPOINT}/get?limit=${limit}&index=${index}&sort=${sort.valueOf()}`)
|
||||
const payload = await res.json()
|
||||
return payload as GetPayload
|
||||
}
|
||||
class State {
|
||||
entriesPerPage: number = 10
|
||||
entryIndex: number = 0
|
||||
maxEntries: number = 0
|
||||
sort: SortOrders = SortOrders.DeclarationAscending
|
||||
|
||||
getEntriesPerPage(): number {
|
||||
return this.entriesPerPage
|
||||
}
|
||||
setEntriesPerPage(entriesPerPage: number) {
|
||||
this.entriesPerPage = entriesPerPage
|
||||
this.setEntryIndex(Math.floor(this.getEntryIndex() / entriesPerPage) * entriesPerPage)
|
||||
}
|
||||
getEntryIndex(): number {
|
||||
return this.entryIndex
|
||||
}
|
||||
setEntryIndex(entryIndex: number) {
|
||||
this.entryIndex = entryIndex
|
||||
this.updatePage()
|
||||
this.updateRange()
|
||||
this.updateListings()
|
||||
}
|
||||
getMaxEntries(): number {
|
||||
return this.maxEntries
|
||||
}
|
||||
setMaxEntries(max: number) {
|
||||
this.maxEntries = max
|
||||
}
|
||||
getSortOrder(): SortOrders {
|
||||
return this.sort
|
||||
}
|
||||
setSortOrder(sortOrder: SortOrders) {
|
||||
this.sort = sortOrder
|
||||
this.setEntryIndex(0)
|
||||
}
|
||||
updatePage() {
|
||||
let page = Math.ceil(((this.getEntryIndex() + this.getEntriesPerPage()) - 1) / this.getEntriesPerPage())
|
||||
document.getElementById("page-number").innerText = String(page)
|
||||
}
|
||||
updateRange() {
|
||||
let max = Math.min(this.getEntryIndex() + this.getEntriesPerPage(), this.getMaxEntries())
|
||||
document.getElementById("entry-counter").innerText = `${this.getEntryIndex() + 1}-${max} of ${this.getMaxEntries()}`
|
||||
}
|
||||
updateListings() {
|
||||
getRequest(this.getEntriesPerPage(), this.getEntryIndex(), this.getSortOrder())
|
||||
.then(res => {
|
||||
let table = document.getElementById("pkg-list")
|
||||
table.innerHTML = ''
|
||||
for(let i = 0; i < res.values.length; i++) {
|
||||
table.appendChild(toHTML(res.values[i]))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
let STATE: State
|
||||
|
||||
function prevPage() {
|
||||
let index = STATE.getEntryIndex()
|
||||
STATE.setEntryIndex(Math.max(0, index - STATE.getEntriesPerPage()))
|
||||
}
|
||||
function nextPage() {
|
||||
let index = STATE.getEntryIndex()
|
||||
STATE.setEntryIndex(Math.min((Math.ceil(STATE.getMaxEntries() / STATE.getEntriesPerPage()) * STATE.getEntriesPerPage()) - STATE.getEntriesPerPage(), index + STATE.getEntriesPerPage()))
|
||||
}
|
||||
|
||||
function escapeHtml(str: string): string {
|
||||
if(str === undefined) return ""
|
||||
return str
|
||||
.replace(/&/g, '&')
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.replace(/"/g, '"')
|
||||
.replace(/'/g, ''')
|
||||
}
|
||||
|
||||
document.addEventListener("DOMContentLoaded", () => {
|
||||
STATE = new State()
|
||||
infoRequest()
|
||||
.then(res => {
|
||||
STATE.setMaxEntries(res.count)
|
||||
document.getElementById("hakurei-version").innerText = res.hakurei_version
|
||||
STATE.updateRange()
|
||||
STATE.updateListings()
|
||||
})
|
||||
|
||||
document.getElementById("count").addEventListener("change", (event) => {
|
||||
STATE.setEntriesPerPage(parseInt((event.target as HTMLSelectElement).value))
|
||||
})
|
||||
document.getElementById("sort").addEventListener("change", (event) => {
|
||||
STATE.setSortOrder(parseInt((event.target as HTMLSelectElement).value))
|
||||
})
|
||||
})
|
||||
6
cmd/pkgserver/ui/static/light.scss
Normal file
6
cmd/pkgserver/ui/static/light.scss
Normal file
@@ -0,0 +1,6 @@
|
||||
// Light theme: shared rules from common plus a light palette.
@use 'common';

html {
    background-color: #d3d3d3;
    color: black;
}
5
cmd/pkgserver/ui/static/tsconfig.json
Normal file
5
cmd/pkgserver/ui/static/tsconfig.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2024"
|
||||
}
|
||||
}
|
||||
9
cmd/pkgserver/ui_full.go
Normal file
9
cmd/pkgserver/ui_full.go
Normal file
@@ -0,0 +1,9 @@
|
||||
//go:build frontend

package main

import "embed"

//go:generate sh -c "sass ui/static/dark.scss ui/static/dark.css && sass ui/static/light.scss ui/static/light.css && tsc -p ui/static"

// content embeds the prebuilt web UI assets under ui/ into the binary.
// Run go generate first so the compiled CSS and JavaScript exist to be
// embedded.
//
//go:embed ui/*
var content embed.FS
7
cmd/pkgserver/ui_stub.go
Normal file
7
cmd/pkgserver/ui_stub.go
Normal file
@@ -0,0 +1,7 @@
|
||||
//go:build !frontend

package main

import "testing/fstest"

// content is an empty in-memory filesystem standing in for the
// embedded web UI when building without the frontend tag.
var content fstest.MapFS
@@ -40,7 +40,7 @@ type (
|
||||
AllowOrphan bool
|
||||
// Scheduling policy to set via sched_setscheduler(2). The zero value
|
||||
// skips this call. Supported policies are [SCHED_BATCH], [SCHED_IDLE].
|
||||
SchedPolicy int
|
||||
SchedPolicy SchedPolicy
|
||||
// Cgroup fd, nil to disable.
|
||||
Cgroup *int
|
||||
// ExtraFiles passed through to initial process in the container, with
|
||||
@@ -373,12 +373,23 @@ func (p *Container) Start() error {
|
||||
|
||||
// sched_setscheduler: thread-directed but acts on all processes
|
||||
// created from the calling thread
|
||||
if p.SchedPolicy > 0 {
|
||||
if p.SchedPolicy > 0 && p.SchedPolicy <= _SCHED_LAST {
|
||||
var param schedParam
|
||||
if priority, err := p.SchedPolicy.GetPriorityMin(); err != nil {
|
||||
return &StartError{
|
||||
Fatal: true,
|
||||
Step: "get minimum priority",
|
||||
Err: err,
|
||||
}
|
||||
} else {
|
||||
param.priority = priority
|
||||
}
|
||||
|
||||
p.msg.Verbosef("setting scheduling policy %d", p.SchedPolicy)
|
||||
if err := schedSetscheduler(
|
||||
0, // calling thread
|
||||
p.SchedPolicy,
|
||||
&schedParam{0},
|
||||
¶m,
|
||||
); err != nil {
|
||||
return &StartError{
|
||||
Fatal: true,
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
package container
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"strconv"
|
||||
"sync"
|
||||
. "syscall"
|
||||
"unsafe"
|
||||
|
||||
@@ -43,18 +46,132 @@ func Isatty(fd int) bool {
|
||||
return r == 0
|
||||
}
|
||||
|
||||
// SchedPolicy denotes a scheduling policy defined in include/uapi/linux/sched.h.
|
||||
type SchedPolicy int
|
||||
|
||||
// include/uapi/linux/sched.h
|
||||
const (
|
||||
SCHED_NORMAL = iota
|
||||
SCHED_NORMAL SchedPolicy = iota
|
||||
SCHED_FIFO
|
||||
SCHED_RR
|
||||
SCHED_BATCH
|
||||
_ // SCHED_ISO: reserved but not implemented yet
|
||||
_SCHED_ISO // SCHED_ISO: reserved but not implemented yet
|
||||
SCHED_IDLE
|
||||
SCHED_DEADLINE
|
||||
SCHED_EXT
|
||||
|
||||
_SCHED_LAST SchedPolicy = iota - 1
|
||||
)
|
||||
|
||||
var _ encoding.TextMarshaler = _SCHED_LAST
|
||||
var _ encoding.TextUnmarshaler = new(_SCHED_LAST)
|
||||
|
||||
// String returns a unique representation of policy, also used in encoding.
|
||||
func (policy SchedPolicy) String() string {
|
||||
switch policy {
|
||||
case SCHED_NORMAL:
|
||||
return ""
|
||||
case SCHED_FIFO:
|
||||
return "fifo"
|
||||
case SCHED_RR:
|
||||
return "rr"
|
||||
case SCHED_BATCH:
|
||||
return "batch"
|
||||
case SCHED_IDLE:
|
||||
return "idle"
|
||||
case SCHED_DEADLINE:
|
||||
return "deadline"
|
||||
case SCHED_EXT:
|
||||
return "ext"
|
||||
|
||||
default:
|
||||
return "invalid policy " + strconv.Itoa(int(policy))
|
||||
}
|
||||
}
|
||||
|
||||
// MarshalText performs bounds checking and returns the result of String.
|
||||
func (policy SchedPolicy) MarshalText() ([]byte, error) {
|
||||
if policy == _SCHED_ISO || policy < 0 || policy > _SCHED_LAST {
|
||||
return nil, EINVAL
|
||||
}
|
||||
return []byte(policy.String()), nil
|
||||
}
|
||||
|
||||
// InvalidSchedPolicyError is an invalid string representation of a [SchedPolicy].
|
||||
type InvalidSchedPolicyError string
|
||||
|
||||
func (InvalidSchedPolicyError) Unwrap() error { return EINVAL }
|
||||
func (e InvalidSchedPolicyError) Error() string {
|
||||
return "invalid scheduling policy " + strconv.Quote(string(e))
|
||||
}
|
||||
|
||||
// UnmarshalText is the inverse of MarshalText.
|
||||
func (policy *SchedPolicy) UnmarshalText(text []byte) error {
|
||||
switch string(text) {
|
||||
case "fifo":
|
||||
*policy = SCHED_FIFO
|
||||
case "rr":
|
||||
*policy = SCHED_RR
|
||||
case "batch":
|
||||
*policy = SCHED_BATCH
|
||||
case "idle":
|
||||
*policy = SCHED_IDLE
|
||||
case "deadline":
|
||||
*policy = SCHED_DEADLINE
|
||||
case "ext":
|
||||
*policy = SCHED_EXT
|
||||
|
||||
case "":
|
||||
*policy = 0
|
||||
return nil
|
||||
default:
|
||||
return InvalidSchedPolicyError(text)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// for sched_get_priority_max and sched_get_priority_min
|
||||
var (
|
||||
schedPriority [_SCHED_LAST + 1][2]std.Int
|
||||
schedPriorityErr [_SCHED_LAST + 1][2]error
|
||||
schedPriorityOnce [_SCHED_LAST + 1][2]sync.Once
|
||||
)
|
||||
|
||||
// GetPriorityMax returns the maximum priority value that can be used with the
|
||||
// scheduling algorithm identified by policy.
|
||||
func (policy SchedPolicy) GetPriorityMax() (std.Int, error) {
|
||||
schedPriorityOnce[policy][0].Do(func() {
|
||||
priority, _, errno := Syscall(
|
||||
SYS_SCHED_GET_PRIORITY_MAX,
|
||||
uintptr(policy),
|
||||
0, 0,
|
||||
)
|
||||
schedPriority[policy][0] = std.Int(priority)
|
||||
if schedPriority[policy][0] < 0 {
|
||||
schedPriorityErr[policy][0] = errno
|
||||
}
|
||||
})
|
||||
return schedPriority[policy][0], schedPriorityErr[policy][0]
|
||||
}
|
||||
|
||||
// GetPriorityMin returns the minimum priority value that can be used with the
|
||||
// scheduling algorithm identified by policy.
|
||||
func (policy SchedPolicy) GetPriorityMin() (std.Int, error) {
|
||||
schedPriorityOnce[policy][1].Do(func() {
|
||||
priority, _, errno := Syscall(
|
||||
SYS_SCHED_GET_PRIORITY_MIN,
|
||||
uintptr(policy),
|
||||
0, 0,
|
||||
)
|
||||
schedPriority[policy][1] = std.Int(priority)
|
||||
if schedPriority[policy][1] < 0 {
|
||||
schedPriorityErr[policy][1] = errno
|
||||
}
|
||||
})
|
||||
return schedPriority[policy][1], schedPriorityErr[policy][1]
|
||||
|
||||
}
|
||||
|
||||
// schedParam is equivalent to struct sched_param from include/linux/sched.h.
|
||||
type schedParam struct {
|
||||
// sched_priority
|
||||
@@ -74,7 +191,7 @@ type schedParam struct {
|
||||
// this if you do not have something similar in place!
|
||||
//
|
||||
// [very subtle to use correctly]: https://www.openwall.com/lists/musl/2016/03/01/4
|
||||
func schedSetscheduler(tid, policy int, param *schedParam) error {
|
||||
func schedSetscheduler(tid int, policy SchedPolicy, param *schedParam) error {
|
||||
if r, _, errno := Syscall(
|
||||
SYS_SCHED_SETSCHEDULER,
|
||||
uintptr(tid),
|
||||
|
||||
100
container/syscall_test.go
Normal file
100
container/syscall_test.go
Normal file
@@ -0,0 +1,100 @@
|
||||
package container_test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"math"
|
||||
"reflect"
|
||||
"syscall"
|
||||
"testing"
|
||||
|
||||
"hakurei.app/container"
|
||||
"hakurei.app/container/std"
|
||||
)
|
||||
|
||||
// TestSchedPolicyJSON exercises SchedPolicy's round trip through
// encoding/json, including rejection of the reserved (value 4,
// SCHED_ISO) and out-of-range policy values on both the encode and
// decode paths.
func TestSchedPolicyJSON(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		policy    container.SchedPolicy
		want      string
		encodeErr error
		decodeErr error
	}{
		{container.SCHED_NORMAL, `""`, nil, nil},
		{container.SCHED_FIFO, `"fifo"`, nil, nil},
		{container.SCHED_RR, `"rr"`, nil, nil},
		{container.SCHED_BATCH, `"batch"`, nil, nil},
		// value 4 is the reserved SCHED_ISO slot; MarshalText rejects it
		// and its String form does not decode back.
		{4, `"invalid policy 4"`, syscall.EINVAL, container.InvalidSchedPolicyError("invalid policy 4")},
		{container.SCHED_IDLE, `"idle"`, nil, nil},
		{container.SCHED_DEADLINE, `"deadline"`, nil, nil},
		{container.SCHED_EXT, `"ext"`, nil, nil},
		// out of range entirely; neither direction succeeds.
		{math.MaxInt, `"iso"`, syscall.EINVAL, container.InvalidSchedPolicyError("iso")},
	}
	for _, tc := range testCases {
		// SCHED_NORMAL stringifies to the empty string, so give that
		// subtest a readable name.
		name := tc.policy.String()
		if tc.policy == container.SCHED_NORMAL {
			name = "normal"
		}

		t.Run(name, func(t *testing.T) {
			t.Parallel()

			got, err := json.Marshal(tc.policy)
			if !errors.Is(err, tc.encodeErr) {
				t.Fatalf("Marshal: error = %v, want %v", err, tc.encodeErr)
			}
			if err == nil && string(got) != tc.want {
				t.Fatalf("Marshal: %s, want %s", string(got), tc.want)
			}

			var v container.SchedPolicy
			if err = json.Unmarshal([]byte(tc.want), &v); !reflect.DeepEqual(err, tc.decodeErr) {
				t.Fatalf("Unmarshal: error = %v, want %v", err, tc.decodeErr)
			}
			if err == nil && v != tc.policy {
				t.Fatalf("Unmarshal: %d, want %d", v, tc.policy)
			}
		})
	}
}
// TestSchedPolicyMinMax checks the priority ranges reported by
// GetPriorityMin and GetPriorityMax for every policy, and that the
// reserved value 4 (SCHED_ISO) yields EINVAL with a -1 priority.
//
// NOTE(review): expected ranges (1-99 for fifo/rr, 0 otherwise) match
// Linux sched_get_priority_min(2)/max(2) behavior — the test assumes a
// Linux host.
func TestSchedPolicyMinMax(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		policy   container.SchedPolicy
		min, max std.Int
		err      error
	}{
		{container.SCHED_NORMAL, 0, 0, nil},
		{container.SCHED_FIFO, 1, 99, nil},
		{container.SCHED_RR, 1, 99, nil},
		{container.SCHED_BATCH, 0, 0, nil},
		{4, -1, -1, syscall.EINVAL},
		{container.SCHED_IDLE, 0, 0, nil},
		{container.SCHED_DEADLINE, 0, 0, nil},
		{container.SCHED_EXT, 0, 0, nil},
	}
	for _, tc := range testCases {
		// SCHED_NORMAL stringifies to the empty string, so give that
		// subtest a readable name.
		name := tc.policy.String()
		if tc.policy == container.SCHED_NORMAL {
			name = "normal"
		}

		t.Run(name, func(t *testing.T) {
			t.Parallel()

			if priority, err := tc.policy.GetPriorityMax(); !reflect.DeepEqual(err, tc.err) {
				t.Fatalf("GetPriorityMax: error = %v, want %v", err, tc.err)
			} else if priority != tc.max {
				t.Fatalf("GetPriorityMax: %d, want %d", priority, tc.max)
			}
			if priority, err := tc.policy.GetPriorityMin(); !reflect.DeepEqual(err, tc.err) {
				t.Fatalf("GetPriorityMin: error = %v, want %v", err, tc.err)
			} else if priority != tc.min {
				t.Fatalf("GetPriorityMin: %d, want %d", priority, tc.min)
			}
		})
	}
}
12
flake.lock
generated
12
flake.lock
generated
@@ -7,11 +7,11 @@
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1765384171,
|
||||
"narHash": "sha256-FuFtkJrW1Z7u+3lhzPRau69E0CNjADku1mLQQflUORo=",
|
||||
"lastModified": 1772985280,
|
||||
"narHash": "sha256-FdrNykOoY9VStevU4zjSUdvsL9SzJTcXt4omdEDZDLk=",
|
||||
"owner": "nix-community",
|
||||
"repo": "home-manager",
|
||||
"rev": "44777152652bc9eacf8876976fa72cc77ca8b9d8",
|
||||
"rev": "8f736f007139d7f70752657dff6a401a585d6cbc",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -23,11 +23,11 @@
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1765311797,
|
||||
"narHash": "sha256-mSD5Ob7a+T2RNjvPvOA1dkJHGVrNVl8ZOrAwBjKBDQo=",
|
||||
"lastModified": 1772822230,
|
||||
"narHash": "sha256-yf3iYLGbGVlIthlQIk5/4/EQDZNNEmuqKZkQssMljuw=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "09eb77e94fa25202af8f3e81ddc7353d9970ac1b",
|
||||
"rev": "71caefce12ba78d84fe618cf61644dce01cf3a96",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
||||
@@ -99,7 +99,7 @@
|
||||
hakurei = pkgs.pkgsStatic.callPackage ./package.nix {
|
||||
inherit (pkgs)
|
||||
# passthru.buildInputs
|
||||
go
|
||||
go_1_26
|
||||
clang
|
||||
|
||||
# nativeBuildInputs
|
||||
@@ -182,7 +182,7 @@
|
||||
let
|
||||
# this is used for interactive vm testing during development, where tests might be broken
|
||||
package = self.packages.${pkgs.stdenv.hostPlatform.system}.hakurei.override {
|
||||
buildGoModule = previousArgs: pkgs.pkgsStatic.buildGoModule (previousArgs // { doCheck = false; });
|
||||
buildGo126Module = previousArgs: pkgs.pkgsStatic.buildGo126Module (previousArgs // { doCheck = false; });
|
||||
};
|
||||
in
|
||||
{
|
||||
|
||||
@@ -40,7 +40,7 @@ type ExecPath struct {
|
||||
}
|
||||
|
||||
// SchedPolicy is the [container] scheduling policy.
|
||||
var SchedPolicy int
|
||||
var SchedPolicy container.SchedPolicy
|
||||
|
||||
// PromoteLayers returns artifacts with identical-by-content layers promoted to
|
||||
// the highest priority instance, as if mounted via [ExecPath].
|
||||
|
||||
@@ -19,6 +19,8 @@ const (
|
||||
LLVMRuntimes
|
||||
LLVMClang
|
||||
|
||||
// EarlyInit is the Rosa OS initramfs init program.
|
||||
EarlyInit
|
||||
// ImageInitramfs is the Rosa OS initramfs archive.
|
||||
ImageInitramfs
|
||||
|
||||
@@ -28,6 +30,8 @@ const (
|
||||
KernelHeaders
|
||||
// KernelSource is a writable kernel source tree installed to [AbsUsrSrc].
|
||||
KernelSource
|
||||
// Firmware is firmware blobs for use with the Linux kernel.
|
||||
Firmware
|
||||
|
||||
ACL
|
||||
ArgpStandalone
|
||||
@@ -85,9 +89,11 @@ const (
|
||||
NSS
|
||||
NSSCACert
|
||||
Ncurses
|
||||
Nettle
|
||||
Ninja
|
||||
OpenSSL
|
||||
PCRE2
|
||||
Parallel
|
||||
Patch
|
||||
Perl
|
||||
PerlLocaleGettext
|
||||
@@ -103,12 +109,23 @@ const (
|
||||
PkgConfig
|
||||
Procps
|
||||
Python
|
||||
PythonCfgv
|
||||
PythonDiscovery
|
||||
PythonDistlib
|
||||
PythonFilelock
|
||||
PythonIdentify
|
||||
PythonIniConfig
|
||||
PythonNodeenv
|
||||
PythonPackaging
|
||||
PythonPlatformdirs
|
||||
PythonPluggy
|
||||
PythonPreCommit
|
||||
PythonPyTest
|
||||
PythonPyYAML
|
||||
PythonPygments
|
||||
PythonVirtualenv
|
||||
QEMU
|
||||
Rdfind
|
||||
Rsync
|
||||
Sed
|
||||
Setuptools
|
||||
|
||||
@@ -128,6 +128,9 @@ type CMakeHelper struct {
|
||||
Cache [][2]string
|
||||
// Runs after install.
|
||||
Script string
|
||||
|
||||
// Whether to generate Makefile instead.
|
||||
Make bool
|
||||
}
|
||||
|
||||
var _ Helper = new(CMakeHelper)
|
||||
@@ -141,7 +144,10 @@ func (attr *CMakeHelper) name(name, version string) string {
|
||||
}
|
||||
|
||||
// extra returns a hardcoded slice of [CMake] and [Ninja].
|
||||
func (*CMakeHelper) extra(int) []PArtifact {
|
||||
func (attr *CMakeHelper) extra(int) []PArtifact {
|
||||
if attr != nil && attr.Make {
|
||||
return []PArtifact{CMake, Make}
|
||||
}
|
||||
return []PArtifact{CMake, Ninja}
|
||||
}
|
||||
|
||||
@@ -173,11 +179,19 @@ func (attr *CMakeHelper) script(name string) string {
|
||||
panic("CACHE must be non-empty")
|
||||
}
|
||||
|
||||
generate := "Ninja"
|
||||
jobs := ""
|
||||
if attr.Make {
|
||||
generate = "'Unix Makefiles'"
|
||||
jobs += ` "--parallel=$(nproc)"`
|
||||
}
|
||||
|
||||
return `
|
||||
cmake -G Ninja \
|
||||
cmake -G ` + generate + ` \
|
||||
-DCMAKE_C_COMPILER_TARGET="${ROSA_TRIPLE}" \
|
||||
-DCMAKE_CXX_COMPILER_TARGET="${ROSA_TRIPLE}" \
|
||||
-DCMAKE_ASM_COMPILER_TARGET="${ROSA_TRIPLE}" \
|
||||
-DCMAKE_INSTALL_LIBDIR=lib \
|
||||
` + strings.Join(slices.Collect(func(yield func(string) bool) {
|
||||
for _, v := range attr.Cache {
|
||||
if !yield("-D" + v[0] + "=" + v[1]) {
|
||||
@@ -185,9 +199,9 @@ cmake -G Ninja \
|
||||
}
|
||||
}
|
||||
}), " \\\n\t") + ` \
|
||||
-DCMAKE_INSTALL_PREFIX=/work/system \
|
||||
-DCMAKE_INSTALL_PREFIX=/system \
|
||||
'/usr/src/` + name + `/` + path.Join(attr.Append...) + `'
|
||||
cmake --build .
|
||||
cmake --install .
|
||||
cmake --build .` + jobs + `
|
||||
cmake --install . --prefix=/work/system
|
||||
` + attr.Script
|
||||
}
|
||||
|
||||
@@ -678,8 +678,8 @@ func init() {
|
||||
|
||||
func (t Toolchain) newLibiconv() (pkg.Artifact, string) {
|
||||
const (
|
||||
version = "1.18"
|
||||
checksum = "iV5q3VxP5VPdJ-X7O5OQI4fGm8VjeYb5viLd1L3eAHg26bbHb2_Qn63XPF3ucVZr"
|
||||
version = "1.19"
|
||||
checksum = "UibB6E23y4MksNqYmCCrA3zTFO6vJugD1DEDqqWYFZNuBsUWMVMcncb_5pPAr88x"
|
||||
)
|
||||
return t.NewPackage("libiconv", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/libiconv/libiconv-"+version+".tar.gz",
|
||||
@@ -741,6 +741,31 @@ func init() {
|
||||
}
|
||||
}
|
||||
|
||||
func (t Toolchain) newParallel() (pkg.Artifact, string) {
|
||||
const (
|
||||
version = "20260222"
|
||||
checksum = "4wxjMi3G2zMxr9hvLcIn6D7_12A3e5UNObeTPhzn7mDAYwsZApmmkxfGPyllQQ7E"
|
||||
)
|
||||
return t.NewPackage("parallel", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/parallel/parallel-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), nil, (*MakeHelper)(nil),
|
||||
Perl,
|
||||
), version
|
||||
}
|
||||
func init() {
|
||||
artifactsM[Parallel] = Metadata{
|
||||
f: Toolchain.newParallel,
|
||||
|
||||
Name: "parallel",
|
||||
Description: "a shell tool for executing jobs in parallel using one or more computers",
|
||||
Website: "https://www.gnu.org/software/parallel/",
|
||||
|
||||
ID: 5448,
|
||||
}
|
||||
}
|
||||
|
||||
func (t Toolchain) newBinutils() (pkg.Artifact, string) {
|
||||
const (
|
||||
version = "2.46.0"
|
||||
|
||||
@@ -2,7 +2,19 @@ package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newHakurei(suffix, script string) pkg.Artifact {
|
||||
func (t Toolchain) newHakurei(
|
||||
suffix, script string,
|
||||
withHostname bool,
|
||||
) pkg.Artifact {
|
||||
hostname := `
|
||||
echo '# Building test helper (hostname).'
|
||||
go build -v -o /bin/hostname /usr/src/hostname/main.go
|
||||
echo
|
||||
`
|
||||
if !withHostname {
|
||||
hostname = ""
|
||||
}
|
||||
|
||||
return t.New("hakurei"+suffix+"-"+hakureiVersion, 0, []pkg.Artifact{
|
||||
t.Load(Go),
|
||||
|
||||
@@ -29,17 +41,12 @@ func (t Toolchain) newHakurei(suffix, script string) pkg.Artifact {
|
||||
"CGO_ENABLED=1",
|
||||
"GOCACHE=/tmp/gocache",
|
||||
"CC=clang -O3 -Werror",
|
||||
}, `
|
||||
echo '# Building test helper (hostname).'
|
||||
go build -v -o /bin/hostname /usr/src/hostname/main.go
|
||||
echo
|
||||
|
||||
chmod -R +w /usr/src/hakurei
|
||||
}, hostname+`
|
||||
cd /usr/src/hakurei
|
||||
|
||||
HAKUREI_VERSION='v`+hakureiVersion+`'
|
||||
`+script, pkg.Path(AbsUsrSrc.Append("hakurei"), true, t.NewPatchedSource(
|
||||
"hakurei", hakureiVersion, hakureiSource, true, hakureiPatches...,
|
||||
"hakurei", hakureiVersion, hakureiSource, false, hakureiPatches...,
|
||||
)), pkg.Path(AbsUsrSrc.Append("hostname", "main.go"), false, pkg.NewFile(
|
||||
"hostname.go",
|
||||
[]byte(`
|
||||
@@ -69,10 +76,11 @@ go build -trimpath -v -o /work/system/libexec/hakurei -ldflags="-s -w
|
||||
-buildid=
|
||||
-linkmode external
|
||||
-extldflags=-static
|
||||
-X hakurei.app/internal/info.buildVersion="$HAKUREI_VERSION"
|
||||
-X hakurei.app/internal/info.buildVersion=${HAKUREI_VERSION}
|
||||
-X hakurei.app/internal/info.hakureiPath=/system/bin/hakurei
|
||||
-X hakurei.app/internal/info.hsuPath=/system/bin/hsu
|
||||
-X main.hakureiPath=/system/bin/hakurei" ./...
|
||||
-X main.hakureiPath=/system/bin/hakurei
|
||||
" ./...
|
||||
echo
|
||||
|
||||
echo '# Testing hakurei.'
|
||||
@@ -84,7 +92,7 @@ mkdir -p /work/system/bin/
|
||||
hakurei \
|
||||
sharefs \
|
||||
../../bin/)
|
||||
`), hakureiVersion
|
||||
`, true), hakureiVersion
|
||||
},
|
||||
|
||||
Name: "hakurei",
|
||||
@@ -98,7 +106,7 @@ mkdir -p /work/system/bin/
|
||||
return t.newHakurei("-dist", `
|
||||
export HAKUREI_VERSION
|
||||
DESTDIR=/work /usr/src/hakurei/dist/release.sh
|
||||
`), hakureiVersion
|
||||
`, true), hakureiVersion
|
||||
},
|
||||
|
||||
Name: "hakurei-dist",
|
||||
|
||||
@@ -4,48 +4,15 @@ package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
const hakureiVersion = "0.3.5"
|
||||
const hakureiVersion = "0.3.6"
|
||||
|
||||
// hakureiSource is the source code of a hakurei release.
|
||||
var hakureiSource = pkg.NewHTTPGetTar(
|
||||
nil, "https://git.gensokyo.uk/security/hakurei/archive/"+
|
||||
"v"+hakureiVersion+".tar.gz",
|
||||
mustDecode("6Tn38NLezRD2d3aGdFg5qFfqn8_KvC6HwMKwJMPvaHmVw8xRgxn8B0PObswl2mOk"),
|
||||
mustDecode("Yul9J2yV0x453lQP9KUnG_wEJo_DbKMNM7xHJGt4rITCSeX9VRK2J4kzAxcv_0-b"),
|
||||
pkg.TarGzip,
|
||||
)
|
||||
|
||||
// hakureiPatches are patches applied against a hakurei release.
|
||||
var hakureiPatches = [][2]string{
|
||||
{"createTemp-error-injection", `diff --git a/container/dispatcher_test.go b/container/dispatcher_test.go
|
||||
index 5de37fc..fe0c4db 100644
|
||||
--- a/container/dispatcher_test.go
|
||||
+++ b/container/dispatcher_test.go
|
||||
@@ -238,8 +238,11 @@ func sliceAddr[S any](s []S) *[]S { return &s }
|
||||
|
||||
func newCheckedFile(t *testing.T, name, wantData string, closeErr error) osFile {
|
||||
f := &checkedOsFile{t: t, name: name, want: wantData, closeErr: closeErr}
|
||||
- // check happens in Close, and cleanup is not guaranteed to run, so relying on it for sloppy implementations will cause sporadic test results
|
||||
- f.cleanup = runtime.AddCleanup(f, func(name string) { f.t.Fatalf("checkedOsFile %s became unreachable without a call to Close", name) }, f.name)
|
||||
+ // check happens in Close, and cleanup is not guaranteed to run, so relying
|
||||
+ // on it for sloppy implementations will cause sporadic test results
|
||||
+ f.cleanup = runtime.AddCleanup(f, func(name string) {
|
||||
+ panic("checkedOsFile " + name + " became unreachable without a call to Close")
|
||||
+ }, name)
|
||||
return f
|
||||
}
|
||||
|
||||
diff --git a/container/initplace_test.go b/container/initplace_test.go
|
||||
index afeddbe..1c2f20b 100644
|
||||
--- a/container/initplace_test.go
|
||||
+++ b/container/initplace_test.go
|
||||
@@ -21,7 +21,7 @@ func TestTmpfileOp(t *testing.T) {
|
||||
Path: samplePath,
|
||||
Data: sampleData,
|
||||
}, nil, nil, []stub.Call{
|
||||
- call("createTemp", stub.ExpectArgs{"/", "tmp.*"}, newCheckedFile(t, "tmp.32768", sampleDataString, nil), stub.UniqueError(5)),
|
||||
+ call("createTemp", stub.ExpectArgs{"/", "tmp.*"}, (*checkedOsFile)(nil), stub.UniqueError(5)),
|
||||
}, stub.UniqueError(5)},
|
||||
|
||||
{"Write", &Params{ParentPerm: 0700}, &TmpfileOp{
|
||||
`},
|
||||
}
|
||||
var hakureiPatches [][2]string
|
||||
|
||||
@@ -2,10 +2,32 @@ package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func init() {
|
||||
artifactsM[EarlyInit] = Metadata{
|
||||
Name: "earlyinit",
|
||||
Description: "Rosa OS initramfs init program",
|
||||
|
||||
f: func(t Toolchain) (pkg.Artifact, string) {
|
||||
return t.newHakurei("-early-init", `
|
||||
mkdir -p /work/system/libexec/hakurei/
|
||||
|
||||
echo '# Building earlyinit.'
|
||||
go build -trimpath -v -o /work/system/libexec/hakurei -ldflags="-s -w
|
||||
-buildid=
|
||||
-linkmode external
|
||||
-extldflags=-static
|
||||
-X hakurei.app/internal/info.buildVersion=${HAKUREI_VERSION}
|
||||
" ./cmd/earlyinit
|
||||
echo
|
||||
`, false), Unversioned
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (t Toolchain) newImageInitramfs() (pkg.Artifact, string) {
|
||||
return t.New("initramfs", TNoToolchain, []pkg.Artifact{
|
||||
t.Load(Zstd),
|
||||
t.Load(Hakurei),
|
||||
t.Load(EarlyInit),
|
||||
t.Load(GenInitCPIO),
|
||||
}, nil, nil, `
|
||||
gen_init_cpio -t 4294967295 -c /usr/src/initramfs | zstd > /work/initramfs.zst
|
||||
|
||||
@@ -82,6 +82,11 @@ install -Dm0500 \
|
||||
echo "Installing linux $1..."
|
||||
cp -av "$2" "$4"
|
||||
cp -av "$3" "$4"
|
||||
`))),
|
||||
pkg.Path(AbsUsrSrc.Append(
|
||||
".depmod",
|
||||
), false, pkg.NewFile("depmod", []byte(`#!/bin/sh
|
||||
exec /system/sbin/depmod -m /lib/modules "$@"
|
||||
`))),
|
||||
},
|
||||
|
||||
@@ -1210,6 +1215,11 @@ cgit 1.2.3-korg
|
||||
"all",
|
||||
},
|
||||
Install: `
|
||||
# kernel is not aware of kmod moduledir
|
||||
install -Dm0500 \
|
||||
/usr/src/.depmod \
|
||||
/sbin/depmod
|
||||
|
||||
make \
|
||||
"-j$(nproc)" \
|
||||
-f /usr/src/kernel/Makefile \
|
||||
@@ -1217,9 +1227,10 @@ make \
|
||||
LLVM=1 \
|
||||
INSTALL_PATH=/work \
|
||||
install \
|
||||
INSTALL_MOD_PATH=/work \
|
||||
INSTALL_MOD_PATH=/work/system \
|
||||
DEPMOD=/sbin/depmod \
|
||||
modules_install
|
||||
rm -v /work/lib/modules/` + kernelVersion + `/build
|
||||
rm -v /work/system/lib/modules/` + kernelVersion + `/build
|
||||
`,
|
||||
},
|
||||
Flex,
|
||||
@@ -1272,3 +1283,55 @@ func init() {
|
||||
Description: "a program in the kernel source tree for creating initramfs archive",
|
||||
}
|
||||
}
|
||||
|
||||
func (t Toolchain) newFirmware() (pkg.Artifact, string) {
|
||||
const (
|
||||
version = "20260221"
|
||||
checksum = "vTENPW5rZ6yLVq7YKDLHkCVgKXvwUWigEx7T4LcxoKeBVYIyf1_sEExeV4mo-e46"
|
||||
)
|
||||
return t.NewPackage("firmware", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://gitlab.com/kernel-firmware/linux-firmware/-/"+
|
||||
"archive/"+version+"/linux-firmware-"+version+".tar.bz2",
|
||||
mustDecode(checksum),
|
||||
pkg.TarBzip2,
|
||||
), &PackageAttr{
|
||||
// dedup creates temporary file
|
||||
Writable: true,
|
||||
// does not use configure
|
||||
EnterSource: true,
|
||||
|
||||
Env: []string{
|
||||
"HOME=/proc/nonexistent",
|
||||
},
|
||||
}, &MakeHelper{
|
||||
OmitDefaults: true,
|
||||
SkipConfigure: true,
|
||||
InPlace: true,
|
||||
|
||||
Make: []string{
|
||||
"DESTDIR=/work/system",
|
||||
"install-zst",
|
||||
},
|
||||
SkipCheck: true, // requires pre-commit
|
||||
Install: `make "-j$(nproc)" DESTDIR=/work/system dedup`,
|
||||
},
|
||||
Perl,
|
||||
Parallel,
|
||||
Nettle,
|
||||
Rdfind,
|
||||
Zstd,
|
||||
Findutils,
|
||||
Coreutils,
|
||||
), version
|
||||
}
|
||||
func init() {
|
||||
artifactsM[Firmware] = Metadata{
|
||||
f: Toolchain.newFirmware,
|
||||
|
||||
Name: "firmware",
|
||||
Description: "firmware blobs for use with the Linux kernel",
|
||||
Website: "https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git/",
|
||||
|
||||
ID: 141464,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
#
|
||||
# Automatically generated file; DO NOT EDIT.
|
||||
# Linux/arm64 6.12.73 Kernel Configuration
|
||||
# Linux/arm64 6.12.76 Kernel Configuration
|
||||
#
|
||||
CONFIG_CC_VERSION_TEXT="clang version 21.1.8"
|
||||
CONFIG_CC_VERSION_TEXT="clang version 22.1.0"
|
||||
CONFIG_GCC_VERSION=0
|
||||
CONFIG_CC_IS_CLANG=y
|
||||
CONFIG_CLANG_VERSION=210108
|
||||
CONFIG_CLANG_VERSION=220100
|
||||
CONFIG_AS_IS_LLVM=y
|
||||
CONFIG_AS_VERSION=210108
|
||||
CONFIG_AS_VERSION=220100
|
||||
CONFIG_LD_VERSION=0
|
||||
CONFIG_LD_IS_LLD=y
|
||||
CONFIG_LLD_VERSION=210108
|
||||
CONFIG_LLD_VERSION=220100
|
||||
CONFIG_RUSTC_VERSION=0
|
||||
CONFIG_RUSTC_LLVM_VERSION=0
|
||||
CONFIG_CC_HAS_ASM_GOTO_OUTPUT=y
|
||||
@@ -4984,7 +4984,7 @@ CONFIG_SERIAL_TEGRA_TCU=m
|
||||
CONFIG_SERIAL_MAX3100=m
|
||||
CONFIG_SERIAL_MAX310X=m
|
||||
CONFIG_SERIAL_IMX=m
|
||||
CONFIG_SERIAL_IMX_CONSOLE=m
|
||||
# CONFIG_SERIAL_IMX_CONSOLE is not set
|
||||
CONFIG_SERIAL_IMX_EARLYCON=y
|
||||
CONFIG_SERIAL_UARTLITE=m
|
||||
CONFIG_SERIAL_UARTLITE_NR_UARTS=1
|
||||
@@ -5772,6 +5772,7 @@ CONFIG_GPIO_MADERA=m
|
||||
CONFIG_GPIO_MAX77650=m
|
||||
CONFIG_GPIO_PMIC_EIC_SPRD=m
|
||||
CONFIG_GPIO_SL28CPLD=m
|
||||
CONFIG_GPIO_TN48M_CPLD=m
|
||||
CONFIG_GPIO_TPS65086=m
|
||||
CONFIG_GPIO_TPS65218=m
|
||||
CONFIG_GPIO_TPS65219=m
|
||||
@@ -6471,6 +6472,7 @@ CONFIG_MFD_MAX5970=m
|
||||
# CONFIG_MFD_CS47L85 is not set
|
||||
# CONFIG_MFD_CS47L90 is not set
|
||||
# CONFIG_MFD_CS47L92 is not set
|
||||
CONFIG_MFD_TN48M_CPLD=m
|
||||
# CONFIG_MFD_DA9052_SPI is not set
|
||||
CONFIG_MFD_DA9062=m
|
||||
CONFIG_MFD_DA9063=m
|
||||
@@ -12532,6 +12534,7 @@ CONFIG_RESET_SUNXI=y
|
||||
CONFIG_RESET_TI_SCI=m
|
||||
CONFIG_RESET_TI_SYSCON=m
|
||||
CONFIG_RESET_TI_TPS380X=m
|
||||
CONFIG_RESET_TN48M_CPLD=m
|
||||
CONFIG_RESET_UNIPHIER=m
|
||||
CONFIG_RESET_UNIPHIER_GLUE=m
|
||||
CONFIG_RESET_ZYNQMP=y
|
||||
@@ -14022,7 +14025,6 @@ CONFIG_LOCK_DEBUGGING_SUPPORT=y
|
||||
|
||||
# CONFIG_DEBUG_IRQFLAGS is not set
|
||||
CONFIG_STACKTRACE=y
|
||||
# CONFIG_WARN_ALL_UNSEEDED_RANDOM is not set
|
||||
# CONFIG_DEBUG_KOBJECT is not set
|
||||
|
||||
#
|
||||
@@ -14057,7 +14059,7 @@ CONFIG_USER_STACKTRACE_SUPPORT=y
|
||||
CONFIG_NOP_TRACER=y
|
||||
CONFIG_HAVE_FUNCTION_TRACER=y
|
||||
CONFIG_HAVE_FUNCTION_GRAPH_TRACER=y
|
||||
CONFIG_HAVE_FUNCTION_GRAPH_RETVAL=y
|
||||
CONFIG_HAVE_FUNCTION_GRAPH_FREGS=y
|
||||
CONFIG_HAVE_DYNAMIC_FTRACE=y
|
||||
CONFIG_HAVE_DYNAMIC_FTRACE_WITH_ARGS=y
|
||||
CONFIG_HAVE_FTRACE_MCOUNT_RECORD=y
|
||||
|
||||
@@ -14,6 +14,7 @@ func (t Toolchain) newKmod() (pkg.Artifact, string) {
|
||||
pkg.TarGzip,
|
||||
), nil, &MesonHelper{
|
||||
Setup: [][2]string{
|
||||
{"Dmoduledir", "/system/lib/modules"},
|
||||
{"Dsysconfdir", "/system/etc"},
|
||||
{"Dbashcompletiondir", "no"},
|
||||
{"Dfishcompletiondir", "no"},
|
||||
|
||||
@@ -23,6 +23,7 @@ func (t Toolchain) newLibxslt() (pkg.Artifact, string) {
|
||||
SkipCheck: true,
|
||||
},
|
||||
XZ,
|
||||
Zlib,
|
||||
Python,
|
||||
PkgConfig,
|
||||
|
||||
|
||||
@@ -125,6 +125,8 @@ func (t Toolchain) newLLVMVariant(variant string, attr *llvmAttr) pkg.Artifact {
|
||||
|
||||
[2]string{"LLVM_INSTALL_BINUTILS_SYMLINKS", "ON"},
|
||||
[2]string{"LLVM_INSTALL_CCTOOLS_SYMLINKS", "ON"},
|
||||
|
||||
[2]string{"LLVM_LIT_ARGS", "'--verbose'"},
|
||||
)
|
||||
}
|
||||
|
||||
@@ -187,6 +189,7 @@ ln -s ld.lld /work/system/bin/ld
|
||||
Append: cmakeAppend,
|
||||
Script: script + attr.script,
|
||||
},
|
||||
Zlib,
|
||||
Libffi,
|
||||
Python,
|
||||
Perl,
|
||||
|
||||
@@ -13,6 +13,7 @@ func (t Toolchain) newMeson() (pkg.Artifact, string) {
|
||||
checksum = "w895BXF_icncnXatT_OLCFe2PYEtg4KrKooMgUYdN-nQVvbFX3PvYWHGEpogsHtd"
|
||||
)
|
||||
return t.New("meson-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Zlib),
|
||||
t.Load(Python),
|
||||
t.Load(Setuptools),
|
||||
}, nil, nil, `
|
||||
@@ -66,6 +67,7 @@ func (*MesonHelper) name(name, version string) string {
|
||||
// extra returns hardcoded meson runtime dependencies.
|
||||
func (*MesonHelper) extra(int) []PArtifact {
|
||||
return []PArtifact{
|
||||
Zlib,
|
||||
Python,
|
||||
Meson,
|
||||
Ninja,
|
||||
|
||||
31
internal/rosa/nettle.go
Normal file
31
internal/rosa/nettle.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newNettle() (pkg.Artifact, string) {
|
||||
const (
|
||||
version = "4.0"
|
||||
checksum = "6agC-vHzzoqAlaX3K9tX8yHgrm03HLqPZzVzq8jh_ePbuPMIvpxereu_uRJFmQK7"
|
||||
)
|
||||
return t.NewPackage("nettle", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://ftpmirror.gnu.org/gnu/nettle/nettle-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), nil, (*MakeHelper)(nil),
|
||||
M4,
|
||||
Diffutils,
|
||||
|
||||
GMP,
|
||||
), version
|
||||
}
|
||||
func init() {
|
||||
artifactsM[Nettle] = Metadata{
|
||||
f: Toolchain.newNettle,
|
||||
|
||||
Name: "nettle",
|
||||
Description: "a low-level cryptographic library",
|
||||
Website: "https://www.lysator.liu.se/~nisse/nettle/",
|
||||
|
||||
ID: 2073,
|
||||
}
|
||||
}
|
||||
@@ -84,7 +84,7 @@ func init() {
|
||||
artifactsM[buildcatrust] = newViaPip(
|
||||
"buildcatrust",
|
||||
"transform certificate stores between formats",
|
||||
version, "none", "any",
|
||||
version, "py3", "none", "any",
|
||||
"k_FGzkRCLjbTWBkuBLzQJ1S8FPAz19neJZlMHm0t10F2Y0hElmvVwdSBRc03Rjo1",
|
||||
"https://github.com/nix-community/buildcatrust/"+
|
||||
"releases/download/v"+version+"/",
|
||||
@@ -93,6 +93,7 @@ func init() {
|
||||
|
||||
func (t Toolchain) newNSSCACert() (pkg.Artifact, string) {
|
||||
return t.New("nss-cacert", 0, []pkg.Artifact{
|
||||
t.Load(Zlib),
|
||||
t.Load(Bash),
|
||||
t.Load(Python),
|
||||
|
||||
@@ -75,10 +75,10 @@ func init() {
|
||||
|
||||
// newViaPip is a helper for installing python dependencies via pip.
|
||||
func newViaPip(
|
||||
name, description, version, abi, platform, checksum, prefix string,
|
||||
name, description, version, interpreter, abi, platform, checksum, prefix string,
|
||||
extra ...PArtifact,
|
||||
) Metadata {
|
||||
wname := name + "-" + version + "-py3-" + abi + "-" + platform + ".whl"
|
||||
wname := name + "-" + version + "-" + interpreter + "-" + abi + "-" + platform + ".whl"
|
||||
return Metadata{
|
||||
f: func(t Toolchain) (pkg.Artifact, string) {
|
||||
extraRes := make([]pkg.Artifact, len(extra))
|
||||
@@ -87,6 +87,7 @@ func newViaPip(
|
||||
}
|
||||
|
||||
return t.New(name+"-"+version, 0, slices.Concat([]pkg.Artifact{
|
||||
t.Load(Zlib),
|
||||
t.Load(Python),
|
||||
}, extraRes), nil, nil, `
|
||||
pip3 install \
|
||||
@@ -112,6 +113,7 @@ func (t Toolchain) newSetuptools() (pkg.Artifact, string) {
|
||||
checksum = "K9f8Yi7Gg95zjmQsE1LLw9UBb8NglI6EY6pQpdD6DM0Pmc_Td5w2qs1SMngTI6Jp"
|
||||
)
|
||||
return t.New("setuptools-"+version, 0, []pkg.Artifact{
|
||||
t.Load(Zlib),
|
||||
t.Load(Python),
|
||||
}, nil, nil, `
|
||||
pip3 install \
|
||||
@@ -142,7 +144,7 @@ func init() {
|
||||
artifactsM[PythonPygments] = newViaPip(
|
||||
"pygments",
|
||||
" a syntax highlighting package written in Python",
|
||||
"2.19.2", "none", "any",
|
||||
"2.19.2", "py3", "none", "any",
|
||||
"ak_lwTalmSr7W4Mjy2XBZPG9I6a0gwSy2pS87N8x4QEuZYif0ie9z0OcfRfi9msd",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/",
|
||||
@@ -151,7 +153,7 @@ func init() {
|
||||
artifactsM[PythonPluggy] = newViaPip(
|
||||
"pluggy",
|
||||
"the core framework used by the pytest, tox, and devpi projects",
|
||||
"1.6.0", "none", "any",
|
||||
"1.6.0", "py3", "none", "any",
|
||||
"2HWYBaEwM66-y1hSUcWI1MyE7dVVuNNRW24XD6iJBey4YaUdAK8WeXdtFMQGC-4J",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/",
|
||||
@@ -160,7 +162,7 @@ func init() {
|
||||
artifactsM[PythonPackaging] = newViaPip(
|
||||
"packaging",
|
||||
"reusable core utilities for various Python Packaging interoperability specifications",
|
||||
"26.0", "none", "any",
|
||||
"26.0", "py3", "none", "any",
|
||||
"iVVXcqdwHDskPKoCFUlh2x8J0Gyq-bhO4ns9DvUJ7oJjeOegRYtSIvLV33Bki-pP",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/",
|
||||
@@ -169,15 +171,16 @@ func init() {
|
||||
artifactsM[PythonIniConfig] = newViaPip(
|
||||
"iniconfig",
|
||||
"a small and simple INI-file parser module",
|
||||
"2.3.0", "none", "any",
|
||||
"2.3.0", "py3", "none", "any",
|
||||
"SDgs4S5bXi77aVOeKTPv2TUrS3M9rduiK4DpU0hCmDsSBWqnZcWInq9lsx6INxut",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/",
|
||||
)
|
||||
|
||||
artifactsM[PythonPyTest] = newViaPip(
|
||||
"pytest",
|
||||
"the pytest framework",
|
||||
"9.0.2", "none", "any",
|
||||
"9.0.2", "py3", "none", "any",
|
||||
"IM2wDbLke1EtZhF92zvAjUl_Hms1uKDtM7U8Dt4acOaChMnDg1pW7ib8U0wYGDLH",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/",
|
||||
@@ -186,4 +189,109 @@ func init() {
|
||||
PythonPluggy,
|
||||
PythonPygments,
|
||||
)
|
||||
|
||||
artifactsM[PythonCfgv] = newViaPip(
|
||||
"cfgv",
|
||||
"validate configuration and produce human readable error messages",
|
||||
"3.5.0", "py2.py3", "none", "any",
|
||||
"yFKTyVRlmnLKAxvvge15kAd_GOP1Xh3fZ0NFImO5pBdD5e0zj3GRmA6Q1HdtLTYO",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/",
|
||||
)
|
||||
|
||||
artifactsM[PythonIdentify] = newViaPip(
|
||||
"identify",
|
||||
"file identification library for Python",
|
||||
"2.6.17", "py2.py3", "none", "any",
|
||||
"9RxK3igO-Pxxof5AuCAGiF_L1SWi4SpuSF1fWNXCzE2D4oTRSob-9VpFMLlybrSv",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"40/66/71c1227dff78aaeb942fed29dd5651f2aec166cc7c9aeea3e8b26a539b7d/",
|
||||
)
|
||||
|
||||
artifactsM[PythonNodeenv] = newViaPip(
|
||||
"nodeenv",
|
||||
"a tool to create isolated node.js environments",
|
||||
"1.10.0", "py2.py3", "none", "any",
|
||||
"ihUb4-WQXYIhYOOKSsXlKIzjzQieOYl6ojro9H-0DFzGheaRTtuyZgsCmriq58sq",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/",
|
||||
)
|
||||
|
||||
artifactsM[PythonPyYAML] = newViaPip(
|
||||
"pyyaml",
|
||||
"a complete YAML 1.1 parser",
|
||||
"6.0.3", "cp314", "cp314", "musllinux_1_2_x86_64",
|
||||
"4_jhCFpUNtyrFp2HOMqUisR005u90MHId53eS7rkUbcGXkoaJ7JRsY21dREHEfGN",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/",
|
||||
)
|
||||
|
||||
artifactsM[PythonDistlib] = newViaPip(
|
||||
"distlib",
|
||||
"used as the basis for third-party packaging tools",
|
||||
"0.4.0", "py2.py3", "none", "any",
|
||||
"lGLLfYVhUhXOTw_84zULaH2K8n6pk1OOVXmJfGavev7N42msbtHoq-XY5D_xULI_",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/",
|
||||
)
|
||||
|
||||
artifactsM[PythonFilelock] = newViaPip(
|
||||
"filelock",
|
||||
"a platform-independent file locking library for Python",
|
||||
"3.25.0", "py3", "none", "any",
|
||||
"0gSQIYNUEjOs1JBxXjGwfLnwFPFINwqyU_Zqgj7fT_EGafv_HaD5h3Xv2Rq_qQ44",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/",
|
||||
)
|
||||
|
||||
artifactsM[PythonPlatformdirs] = newViaPip(
|
||||
"platformdirs",
|
||||
"a Python package for determining platform-specific directories",
|
||||
"4.9.4", "py3", "none", "any",
|
||||
"JGNpMCX2JMn-7c9bk3QzOSNDgJRR_5lH-jIqfy0zXMZppRCdLsTNbdp4V7QFwxOI",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/",
|
||||
)
|
||||
|
||||
artifactsM[PythonDiscovery] = newViaPip(
|
||||
"python_discovery",
|
||||
"looks for a python installation",
|
||||
"1.1.1", "py3", "none", "any",
|
||||
"Jk_qGMfZYm0fdNOSvMdVQZuQbJlqu3NWRm7T2fRtiBXmHLQyOdJE3ypI_it1OJR0",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"75/0f/2bf7e3b5a4a65f623cb820feb5793e243fad58ae561015ee15a6152f67a2/",
|
||||
PythonFilelock,
|
||||
PythonPlatformdirs,
|
||||
)
|
||||
|
||||
artifactsM[PythonVirtualenv] = newViaPip(
|
||||
"virtualenv",
|
||||
"a tool for creating isolated virtual python environments",
|
||||
"21.1.0", "py3", "none", "any",
|
||||
"SLvdr3gJZ7GTS-kiRyq2RvJdrQ8SZYC1pglbViWCMLCuAIcbLNjVEUJZ4hDtKUxm",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"78/55/896b06bf93a49bec0f4ae2a6f1ed12bd05c8860744ac3a70eda041064e4d/",
|
||||
PythonDistlib,
|
||||
PythonFilelock,
|
||||
PythonPlatformdirs,
|
||||
PythonDiscovery,
|
||||
)
|
||||
|
||||
artifactsM[PythonPreCommit] = newViaPip(
|
||||
"pre_commit",
|
||||
"a framework for managing and maintaining multi-language pre-commit hooks",
|
||||
"4.5.1", "py2.py3", "none", "any",
|
||||
"9G2Hv5JpvXFZVfw4pv_KAsmHD6bvot9Z0YBDmW6JeJizqTA4xEQCKel-pCERqQFK",
|
||||
"https://files.pythonhosted.org/packages/"+
|
||||
"5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/",
|
||||
PythonCfgv,
|
||||
PythonIdentify,
|
||||
PythonNodeenv,
|
||||
PythonPyYAML,
|
||||
PythonDistlib,
|
||||
PythonFilelock,
|
||||
PythonPlatformdirs,
|
||||
PythonDiscovery,
|
||||
PythonVirtualenv,
|
||||
)
|
||||
}
|
||||
|
||||
33
internal/rosa/rdfind.go
Normal file
33
internal/rosa/rdfind.go
Normal file
@@ -0,0 +1,33 @@
|
||||
package rosa
|
||||
|
||||
import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newRdfind() (pkg.Artifact, string) {
|
||||
const (
|
||||
version = "1.8.0"
|
||||
checksum = "PoaeJ2WIG6yyfe5VAYZlOdAQiR3mb3WhAUMj2ziTCx_IIEal4640HMJUb4SzU9U3"
|
||||
)
|
||||
return t.NewPackage("rdfind", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://rdfind.pauldreik.se/rdfind-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), nil, &MakeHelper{
|
||||
// test suite hard codes /bin/echo
|
||||
ScriptCheckEarly: `
|
||||
ln -s ../system/bin/toybox /bin/echo
|
||||
`,
|
||||
},
|
||||
Nettle,
|
||||
), version
|
||||
}
|
||||
func init() {
|
||||
artifactsM[Rdfind] = Metadata{
|
||||
f: Toolchain.newRdfind,
|
||||
|
||||
Name: "rdfind",
|
||||
Description: "a program that finds duplicate files",
|
||||
Website: "https://rdfind.pauldreik.se/",
|
||||
|
||||
ID: 231641,
|
||||
}
|
||||
}
|
||||
@@ -15,6 +15,7 @@ func (t Toolchain) newStage0() (pkg.Artifact, string) {
|
||||
runtimes,
|
||||
clang,
|
||||
|
||||
t.Load(Zlib),
|
||||
t.Load(Bzip2),
|
||||
|
||||
t.Load(Patch),
|
||||
|
||||
@@ -44,5 +44,7 @@ func init() {
|
||||
Name: "tamago",
|
||||
Description: "a Go toolchain extended with support for bare metal execution",
|
||||
Website: "https://github.com/usbarmory/tamago-go",
|
||||
|
||||
ID: 388872,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,22 +4,28 @@ import "hakurei.app/internal/pkg"
|
||||
|
||||
func (t Toolchain) newZlib() (pkg.Artifact, string) {
|
||||
const (
|
||||
version = "1.3.1"
|
||||
checksum = "E-eIpNzE8oJ5DsqH4UuA_0GDKuQF5csqI8ooDx2w7Vx-woJ2mb-YtSbEyIMN44mH"
|
||||
version = "1.3.2"
|
||||
checksum = "KHZrePe42vL2XvOUE3KlJkp1UgWhWkl0jjT_BOvFhuM4GzieEH9S7CioepOFVGYB"
|
||||
)
|
||||
return t.NewPackage("zlib", version, pkg.NewHTTPGetTar(
|
||||
nil, "https://www.zlib.net/fossils/zlib-"+version+".tar.gz",
|
||||
mustDecode(checksum),
|
||||
pkg.TarGzip,
|
||||
), &PackageAttr{
|
||||
Env: []string{
|
||||
"CC=clang -fPIC",
|
||||
},
|
||||
}, &MakeHelper{
|
||||
OmitDefaults: true,
|
||||
), nil, &CMakeHelper{
|
||||
Cache: [][2]string{
|
||||
{"CMAKE_BUILD_TYPE", "Release"},
|
||||
|
||||
Host: `""`,
|
||||
Build: `""`,
|
||||
{"ZLIB_BUILD_TESTING", "OFF"},
|
||||
{"ZLIB_BUILD_SHARED", "ON"},
|
||||
{"ZLIB_BUILD_STATIC", "ON"},
|
||||
{"ZLIB_BUILD_MINIZIP", "OFF"},
|
||||
|
||||
{"ZLIB_INSTALL", "ON"},
|
||||
{"ZLIB_PREFIX", "OFF"},
|
||||
},
|
||||
|
||||
// ninja dependency loop
|
||||
Make: true,
|
||||
}), version
|
||||
}
|
||||
func init() {
|
||||
|
||||
@@ -16,7 +16,6 @@ func (t Toolchain) newZstd() (pkg.Artifact, string) {
|
||||
Append: []string{"build", "cmake"},
|
||||
Cache: [][2]string{
|
||||
{"CMAKE_BUILD_TYPE", "Release"},
|
||||
{"CMAKE_INSTALL_LIBDIR", "lib"},
|
||||
},
|
||||
}), version
|
||||
}
|
||||
|
||||
34
package.nix
34
package.nix
@@ -1,7 +1,7 @@
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
buildGoModule,
|
||||
buildGo126Module,
|
||||
makeBinaryWrapper,
|
||||
xdg-dbus-proxy,
|
||||
pkg-config,
|
||||
@@ -17,7 +17,7 @@
|
||||
fuse3,
|
||||
|
||||
# for passthru.buildInputs
|
||||
go,
|
||||
go_1_26,
|
||||
clang,
|
||||
|
||||
# for check
|
||||
@@ -28,7 +28,7 @@
|
||||
withStatic ? stdenv.hostPlatform.isStatic,
|
||||
}:
|
||||
|
||||
buildGoModule rec {
|
||||
buildGo126Module rec {
|
||||
pname = "hakurei";
|
||||
version = "0.3.6";
|
||||
|
||||
@@ -51,7 +51,7 @@ buildGoModule rec {
|
||||
];
|
||||
|
||||
nativeBuildInputs = [
|
||||
go
|
||||
go_1_26
|
||||
pkg-config
|
||||
wayland-scanner
|
||||
];
|
||||
@@ -125,16 +125,20 @@ buildGoModule rec {
|
||||
--inherit-argv0 --prefix PATH : ${lib.makeBinPath appPackages}
|
||||
'';
|
||||
|
||||
passthru.targetPkgs = [
|
||||
go
|
||||
clang
|
||||
xorg.xorgproto
|
||||
util-linux
|
||||
passthru = {
|
||||
go = go_1_26;
|
||||
|
||||
# for go generate
|
||||
wayland-protocols
|
||||
wayland-scanner
|
||||
]
|
||||
++ buildInputs
|
||||
++ nativeBuildInputs;
|
||||
targetPkgs = [
|
||||
go_1_26
|
||||
clang
|
||||
xorg.xorgproto
|
||||
util-linux
|
||||
|
||||
# for go generate
|
||||
wayland-protocols
|
||||
wayland-scanner
|
||||
]
|
||||
++ buildInputs
|
||||
++ nativeBuildInputs;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -34,7 +34,7 @@ testers.nixosTest {
|
||||
(writeShellScriptBin "hakurei-test" ''
|
||||
# Assert hst CGO_ENABLED=0: ${
|
||||
with pkgs;
|
||||
runCommand "hakurei-hst-cgo" { nativeBuildInputs = [ go ]; } ''
|
||||
runCommand "hakurei-hst-cgo" { nativeBuildInputs = [ self.packages.${system}.hakurei.go ]; } ''
|
||||
cp -r ${options.environment.hakurei.package.default.src} "$out"
|
||||
chmod -R +w "$out"
|
||||
cp ${writeText "hst_cgo_test.go" ''package hakurei_test;import("testing";"hakurei.app/hst");func TestTemplate(t *testing.T){hst.Template()}''} "$out/hst_cgo_test.go"
|
||||
|
||||
Reference in New Issue
Block a user