forked from rosa/hakurei
Compare commits
10 Commits
c43a0c41b6
...
cfdda6dd7c
| Author | SHA1 | Date | |
|---|---|---|---|
|
cfdda6dd7c
|
|||
|
33a0ca6722
|
|||
|
b039fd4203
|
|||
| c7e195fe64 | |||
| d5db9add98 | |||
| ab8abdc82b | |||
| 770fd46510 | |||
| 99f1c6aab4 | |||
| 9ee629d402 | |||
| f475dde8b9 |
3
.gitignore
vendored
3
.gitignore
vendored
@@ -28,6 +28,9 @@ go.work.sum
|
|||||||
# go generate
|
# go generate
|
||||||
/cmd/hakurei/LICENSE
|
/cmd/hakurei/LICENSE
|
||||||
/cmd/pkgserver/.sass-cache
|
/cmd/pkgserver/.sass-cache
|
||||||
|
/cmd/pkgserver/ui/static/*.js
|
||||||
|
/cmd/pkgserver/ui/static/*.css*
|
||||||
|
/cmd/pkgserver/ui/static/*.css.map
|
||||||
/internal/pkg/testdata/testtool
|
/internal/pkg/testdata/testtool
|
||||||
/internal/rosa/hakurei_current.tar.gz
|
/internal/rosa/hakurei_current.tar.gz
|
||||||
|
|
||||||
|
|||||||
@@ -1,18 +1,15 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"log"
|
"log"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"net/url"
|
||||||
"path"
|
"path"
|
||||||
"strconv"
|
"strconv"
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
"hakurei.app/internal/info"
|
"hakurei.app/internal/info"
|
||||||
"hakurei.app/internal/pkg"
|
|
||||||
"hakurei.app/internal/rosa"
|
"hakurei.app/internal/rosa"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -37,43 +34,41 @@ func handleInfo(w http.ResponseWriter, _ *http.Request) {
|
|||||||
writeAPIPayload(w, infoPayload)
|
writeAPIPayload(w, infoPayload)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// newStatusHandler returns a [http.HandlerFunc] that offers status files for
|
||||||
|
// viewing or download, if available.
|
||||||
func (index *packageIndex) newStatusHandler(disposition bool) http.HandlerFunc {
|
func (index *packageIndex) newStatusHandler(disposition bool) http.HandlerFunc {
|
||||||
return func(w http.ResponseWriter, r *http.Request) {
|
return func(w http.ResponseWriter, r *http.Request) {
|
||||||
name := path.Base(r.URL.Path)
|
m, ok := index.names[path.Base(r.URL.Path)]
|
||||||
p, ok := rosa.ResolveName(name)
|
if !ok || !m.HasReport {
|
||||||
if !ok {
|
|
||||||
http.NotFound(w, r)
|
http.NotFound(w, r)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
m := rosa.GetMetadata(p)
|
|
||||||
pk, ok := index.names[m.Name]
|
|
||||||
if !ok {
|
|
||||||
http.NotFound(w, r)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if len(pk.status) > 0 {
|
|
||||||
if disposition {
|
|
||||||
w.Header().Set("Content-Type", "application/octet-stream")
|
|
||||||
} else {
|
|
||||||
w.Header().Set("Content-Type", "text/plain; charset=utf-8")
|
|
||||||
}
|
|
||||||
if disposition {
|
|
||||||
var version string
|
|
||||||
if pk.Version != "\u0000" {
|
|
||||||
version = pk.Version
|
|
||||||
} else {
|
|
||||||
version = "unknown"
|
|
||||||
}
|
|
||||||
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s-%s-%s.log\"", pk.Name, version, pkg.Encode(pk.ident.Value())))
|
|
||||||
}
|
|
||||||
|
|
||||||
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
contentType := "text/plain; charset=utf-8"
|
||||||
_, err := io.Copy(w, bytes.NewReader(pk.status))
|
if disposition {
|
||||||
if err != nil {
|
contentType = "application/octet-stream"
|
||||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
||||||
|
// quoting like this is unsound, but okay, because metadata is hardcoded
|
||||||
|
contentDisposition := `attachment; filename="`
|
||||||
|
contentDisposition += m.Name + "-"
|
||||||
|
if m.Version != "" {
|
||||||
|
contentDisposition += m.Version + "-"
|
||||||
}
|
}
|
||||||
} else {
|
contentDisposition += m.ids + `.log"`
|
||||||
http.NotFound(w, r)
|
w.Header().Set("Content-Disposition", contentDisposition)
|
||||||
|
}
|
||||||
|
w.Header().Set("Content-Type", contentType)
|
||||||
|
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||||
|
if err := func() (err error) {
|
||||||
|
defer index.handleAccess(&err)()
|
||||||
|
_, err = w.Write(m.status)
|
||||||
|
return
|
||||||
|
}(); err != nil {
|
||||||
|
log.Println(err)
|
||||||
|
http.Error(
|
||||||
|
w, "cannot deliver status, contact maintainers",
|
||||||
|
http.StatusInternalServerError,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -108,11 +103,47 @@ func (index *packageIndex) handleGet(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
values := index.sorts[sort][i:min(i+limit, len(index.sorts[sort]))]
|
values := index.sorts[sort][i:min(i+limit, len(index.sorts[sort]))]
|
||||||
// TODO(mae): remove count field
|
|
||||||
writeAPIPayload(w, &struct {
|
writeAPIPayload(w, &struct {
|
||||||
Count int `json:"count"`
|
|
||||||
Values []*metadata `json:"values"`
|
Values []*metadata `json:"values"`
|
||||||
}{len(values), values})
|
}{values})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (index *packageIndex) handleSearch(w http.ResponseWriter, r *http.Request) {
|
||||||
|
q := r.URL.Query()
|
||||||
|
limit, err := strconv.Atoi(q.Get("limit"))
|
||||||
|
if err != nil || limit > 100 || limit < 1 {
|
||||||
|
http.Error(
|
||||||
|
w, "limit must be an integer between 1 and 100",
|
||||||
|
http.StatusBadRequest,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
i, err := strconv.Atoi(q.Get("index"))
|
||||||
|
if err != nil || i >= len(index.sorts[0]) || i < 0 {
|
||||||
|
http.Error(
|
||||||
|
w, "index must be an integer between 0 and "+
|
||||||
|
strconv.Itoa(int(rosa.PresetUnexportedStart-1)),
|
||||||
|
http.StatusBadRequest,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
search, err := url.PathUnescape(q.Get("search"))
|
||||||
|
if len(search) > 100 || err != nil {
|
||||||
|
http.Error(
|
||||||
|
w, "search must be a string between 0 and 100 characters long",
|
||||||
|
http.StatusBadRequest,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
desc := q.Get("desc") == "true"
|
||||||
|
n, res, err := index.performSearchQuery(limit, i, search, desc)
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
writeAPIPayload(w, &struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
Results []searchResult `json:"results"`
|
||||||
|
}{n, res})
|
||||||
}
|
}
|
||||||
|
|
||||||
// apiVersion is the name of the current API revision, as part of the pattern.
|
// apiVersion is the name of the current API revision, as part of the pattern.
|
||||||
@@ -122,6 +153,7 @@ const apiVersion = "v1"
|
|||||||
func (index *packageIndex) registerAPI(mux *http.ServeMux) {
|
func (index *packageIndex) registerAPI(mux *http.ServeMux) {
|
||||||
mux.HandleFunc("GET /api/"+apiVersion+"/info", handleInfo)
|
mux.HandleFunc("GET /api/"+apiVersion+"/info", handleInfo)
|
||||||
mux.HandleFunc("GET /api/"+apiVersion+"/get", index.handleGet)
|
mux.HandleFunc("GET /api/"+apiVersion+"/get", index.handleGet)
|
||||||
|
mux.HandleFunc("GET /api/"+apiVersion+"/search", index.handleSearch)
|
||||||
mux.HandleFunc("GET /api/"+apiVersion+"/status/", index.newStatusHandler(false))
|
mux.HandleFunc("GET /api/"+apiVersion+"/status/", index.newStatusHandler(false))
|
||||||
mux.HandleFunc("GET /status/", index.newStatusHandler(true))
|
mux.HandleFunc("GET /status/", index.newStatusHandler(true))
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"cmp"
|
||||||
"errors"
|
"errors"
|
||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
"unique"
|
|
||||||
|
|
||||||
"hakurei.app/internal/pkg"
|
"hakurei.app/internal/pkg"
|
||||||
"hakurei.app/internal/rosa"
|
"hakurei.app/internal/rosa"
|
||||||
@@ -15,14 +15,19 @@ const (
|
|||||||
declarationDescending
|
declarationDescending
|
||||||
nameAscending
|
nameAscending
|
||||||
nameDescending
|
nameDescending
|
||||||
|
sizeAscending
|
||||||
|
sizeDescending
|
||||||
|
|
||||||
sortOrderEnd = iota - 1
|
sortOrderEnd = iota - 1
|
||||||
)
|
)
|
||||||
|
|
||||||
// packageIndex refers to metadata by name and various sort orders.
|
// packageIndex refers to metadata by name and various sort orders.
|
||||||
type packageIndex struct {
|
type packageIndex struct {
|
||||||
sorts [sortOrderEnd + 1][rosa.PresetUnexportedStart]*metadata
|
sorts [sortOrderEnd + 1][rosa.PresetUnexportedStart]*metadata
|
||||||
names map[string]*metadata
|
names map[string]*metadata
|
||||||
|
search searchCache
|
||||||
|
// Taken from [rosa.Report] if available.
|
||||||
|
handleAccess func(*error) func()
|
||||||
}
|
}
|
||||||
|
|
||||||
// metadata holds [rosa.Metadata] extended with additional information.
|
// metadata holds [rosa.Metadata] extended with additional information.
|
||||||
@@ -33,11 +38,13 @@ type metadata struct {
|
|||||||
// Populated via [rosa.Toolchain.Version], [rosa.Unversioned] is equivalent
|
// Populated via [rosa.Toolchain.Version], [rosa.Unversioned] is equivalent
|
||||||
// to the zero value. Otherwise, the zero value is invalid.
|
// to the zero value. Otherwise, the zero value is invalid.
|
||||||
Version string `json:"version,omitempty"`
|
Version string `json:"version,omitempty"`
|
||||||
|
// Output data size, available if present in report.
|
||||||
|
Size int64 `json:"size,omitempty"`
|
||||||
// Whether the underlying [pkg.Artifact] is present in the report.
|
// Whether the underlying [pkg.Artifact] is present in the report.
|
||||||
HasReport bool `json:"report"`
|
HasReport bool `json:"report"`
|
||||||
|
|
||||||
// Ident resolved from underlying [pkg.Artifact].
|
// Ident string encoded ahead of time.
|
||||||
ident unique.Handle[pkg.ID]
|
ids string
|
||||||
// Backed by [rosa.Report], access must be prepared by HandleAccess.
|
// Backed by [rosa.Report], access must be prepared by HandleAccess.
|
||||||
status []byte
|
status []byte
|
||||||
}
|
}
|
||||||
@@ -46,6 +53,7 @@ type metadata struct {
|
|||||||
func (index *packageIndex) populate(cache *pkg.Cache, report *rosa.Report) (err error) {
|
func (index *packageIndex) populate(cache *pkg.Cache, report *rosa.Report) (err error) {
|
||||||
if report != nil {
|
if report != nil {
|
||||||
defer report.HandleAccess(&err)()
|
defer report.HandleAccess(&err)()
|
||||||
|
index.handleAccess = report.HandleAccess
|
||||||
}
|
}
|
||||||
|
|
||||||
var work [rosa.PresetUnexportedStart]*metadata
|
var work [rosa.PresetUnexportedStart]*metadata
|
||||||
@@ -65,12 +73,10 @@ func (index *packageIndex) populate(cache *pkg.Cache, report *rosa.Report) (err
|
|||||||
}
|
}
|
||||||
|
|
||||||
if cache != nil && report != nil {
|
if cache != nil && report != nil {
|
||||||
m.ident = cache.Ident(rosa.Std.Load(p))
|
id := cache.Ident(rosa.Std.Load(p))
|
||||||
status, n := report.ArtifactOf(m.ident)
|
m.ids = pkg.Encode(id.Value())
|
||||||
if n >= 0 {
|
m.status, m.Size = report.ArtifactOf(id)
|
||||||
m.HasReport = true
|
m.HasReport = m.Size >= 0
|
||||||
m.status = status
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
work[p] = &m
|
work[p] = &m
|
||||||
@@ -88,5 +94,12 @@ func (index *packageIndex) populate(cache *pkg.Cache, report *rosa.Report) (err
|
|||||||
index.sorts[nameDescending] = index.sorts[nameAscending]
|
index.sorts[nameDescending] = index.sorts[nameAscending]
|
||||||
slices.Reverse(index.sorts[nameDescending][:])
|
slices.Reverse(index.sorts[nameDescending][:])
|
||||||
|
|
||||||
|
index.sorts[sizeAscending] = work
|
||||||
|
slices.SortFunc(index.sorts[sizeAscending][:], func(a, b *metadata) int {
|
||||||
|
return cmp.Compare(a.Size, b.Size)
|
||||||
|
})
|
||||||
|
index.sorts[sizeDescending] = index.sorts[sizeAscending]
|
||||||
|
slices.Reverse(index.sorts[sizeDescending][:])
|
||||||
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -64,10 +64,22 @@ func main() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var index packageIndex
|
var index packageIndex
|
||||||
|
index.search = make(searchCache)
|
||||||
if err := index.populate(cache, report); err != nil {
|
if err := index.populate(cache, report); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
ticker := time.NewTicker(1 * time.Minute)
|
||||||
|
go func() {
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
ticker.Stop()
|
||||||
|
return
|
||||||
|
case <-ticker.C:
|
||||||
|
index.search.clean()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
var mux http.ServeMux
|
var mux http.ServeMux
|
||||||
uiRoutes(&mux)
|
uiRoutes(&mux)
|
||||||
index.registerAPI(&mux)
|
index.registerAPI(&mux)
|
||||||
|
|||||||
77
cmd/pkgserver/search.go
Normal file
77
cmd/pkgserver/search.go
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"cmp"
|
||||||
|
"maps"
|
||||||
|
"regexp"
|
||||||
|
"slices"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type searchCache map[string]searchCacheEntry
|
||||||
|
type searchResult struct {
|
||||||
|
NameIndices [][]int `json:"name_matches"`
|
||||||
|
DescIndices [][]int `json:"desc_matches,omitempty"`
|
||||||
|
Score float64 `json:"score"`
|
||||||
|
*metadata
|
||||||
|
}
|
||||||
|
type searchCacheEntry struct {
|
||||||
|
query string
|
||||||
|
results []searchResult
|
||||||
|
expiry time.Time
|
||||||
|
}
|
||||||
|
|
||||||
|
func (index *packageIndex) performSearchQuery(limit int, i int, search string, desc bool) (int, []searchResult, error) {
|
||||||
|
entry, ok := index.search[search]
|
||||||
|
if ok {
|
||||||
|
return len(entry.results), entry.results[i:min(i+limit, len(entry.results))], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
regex, err := regexp.Compile(search)
|
||||||
|
if err != nil {
|
||||||
|
return 0, make([]searchResult, 0), err
|
||||||
|
}
|
||||||
|
res := make([]searchResult, 0)
|
||||||
|
for p := range maps.Values(index.names) {
|
||||||
|
nameIndices := regex.FindAllIndex([]byte(p.Name), -1)
|
||||||
|
var descIndices [][]int = nil
|
||||||
|
if desc {
|
||||||
|
descIndices = regex.FindAllIndex([]byte(p.Description), -1)
|
||||||
|
}
|
||||||
|
if nameIndices == nil && descIndices == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
score := float64(indexsum(nameIndices)) / (float64(len(nameIndices)) + 1)
|
||||||
|
if desc {
|
||||||
|
score += float64(indexsum(descIndices)) / (float64(len(descIndices)) + 1) / 10.0
|
||||||
|
}
|
||||||
|
res = append(res, searchResult{
|
||||||
|
NameIndices: nameIndices,
|
||||||
|
DescIndices: descIndices,
|
||||||
|
Score: score,
|
||||||
|
metadata: p,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
slices.SortFunc(res[:], func(a, b searchResult) int { return -cmp.Compare(a.Score, b.Score) })
|
||||||
|
expiry := time.Now().Add(1 * time.Minute)
|
||||||
|
entry = searchCacheEntry{
|
||||||
|
query: search,
|
||||||
|
results: res,
|
||||||
|
expiry: expiry,
|
||||||
|
}
|
||||||
|
index.search[search] = entry
|
||||||
|
|
||||||
|
return len(res), res[i:min(i+limit, len(entry.results))], nil
|
||||||
|
}
|
||||||
|
func (s *searchCache) clean() {
|
||||||
|
maps.DeleteFunc(*s, func(_ string, v searchCacheEntry) bool {
|
||||||
|
return v.expiry.Before(time.Now())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
func indexsum(in [][]int) int {
|
||||||
|
sum := 0
|
||||||
|
for i := 0; i < len(in); i++ {
|
||||||
|
sum += in[i][1] - in[i][0]
|
||||||
|
}
|
||||||
|
return sum
|
||||||
|
}
|
||||||
@@ -1,13 +1,6 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import "net/http"
|
||||||
"embed"
|
|
||||||
"net/http"
|
|
||||||
)
|
|
||||||
|
|
||||||
//go:generate sh -c "sass ui/static/dark.scss ui/static/dark.css && sass ui/static/light.scss ui/static/light.css && tsc -p ui/static"
|
|
||||||
//go:embed ui/*
|
|
||||||
var content embed.FS
|
|
||||||
|
|
||||||
func serveWebUI(w http.ResponseWriter, r *http.Request) {
|
func serveWebUI(w http.ResponseWriter, r *http.Request) {
|
||||||
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||||
@@ -28,21 +21,26 @@ func serveStaticContent(w http.ResponseWriter, r *http.Request) {
|
|||||||
} else {
|
} else {
|
||||||
http.ServeFileFS(w, r, content, "ui/static/light.css")
|
http.ServeFileFS(w, r, content, "ui/static/light.css")
|
||||||
}
|
}
|
||||||
break
|
|
||||||
case "/favicon.ico":
|
case "/favicon.ico":
|
||||||
http.ServeFileFS(w, r, content, "ui/static/favicon.ico")
|
http.ServeFileFS(w, r, content, "ui/static/favicon.ico")
|
||||||
break
|
|
||||||
case "/static/index.js":
|
case "/static/index.js":
|
||||||
http.ServeFileFS(w, r, content, "ui/static/index.js")
|
http.ServeFileFS(w, r, content, "ui/static/index.js")
|
||||||
break
|
case "/static/test.js":
|
||||||
|
http.ServeFileFS(w, r, content, "ui/static/test.js")
|
||||||
|
case "/static/test.css":
|
||||||
|
http.ServeFileFS(w, r, content, "ui/static/test.css")
|
||||||
default:
|
default:
|
||||||
http.NotFound(w, r)
|
http.NotFound(w, r)
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
func serveTester(w http.ResponseWriter, r *http.Request) {
|
||||||
|
http.ServeFileFS(w, r, content, "ui/test.html")
|
||||||
|
}
|
||||||
|
|
||||||
func uiRoutes(mux *http.ServeMux) {
|
func uiRoutes(mux *http.ServeMux) {
|
||||||
mux.HandleFunc("GET /{$}", serveWebUI)
|
mux.HandleFunc("GET /{$}", serveWebUI)
|
||||||
mux.HandleFunc("GET /favicon.ico", serveStaticContent)
|
mux.HandleFunc("GET /favicon.ico", serveStaticContent)
|
||||||
mux.HandleFunc("GET /static/", serveStaticContent)
|
mux.HandleFunc("GET /static/", serveStaticContent)
|
||||||
|
mux.HandleFunc("GET /test.html", serveTester)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -16,15 +16,17 @@
|
|||||||
<span class="bottom-nav"><a href="javascript:prevPage()">« Previous</a> <span id="page-number">1</span> <a href="javascript:nextPage()">Next »</a></span>
|
<span class="bottom-nav"><a href="javascript:prevPage()">« Previous</a> <span id="page-number">1</span> <a href="javascript:nextPage()">Next »</a></span>
|
||||||
<span><label for="count">Entries per page: </label><select name="count" id="count">
|
<span><label for="count">Entries per page: </label><select name="count" id="count">
|
||||||
<option value="10">10</option>
|
<option value="10">10</option>
|
||||||
<option value="25">25</option>
|
<option value="20">20</option>
|
||||||
|
<option value="30">30</option>
|
||||||
<option value="50">50</option>
|
<option value="50">50</option>
|
||||||
<option value="100">100</option>
|
|
||||||
</select></span>
|
</select></span>
|
||||||
<span><label for="sort">Sort by: </label><select name="sort" id="sort">
|
<span><label for="sort">Sort by: </label><select name="sort" id="sort">
|
||||||
<option value="0">Definition (ascending)</option>
|
<option value="0">Definition (ascending)</option>
|
||||||
<option value="1">Definition (descending)</option>
|
<option value="1">Definition (descending)</option>
|
||||||
<option value="2">Name (ascending)</option>
|
<option value="2">Name (ascending)</option>
|
||||||
<option value="3">Name (descending)</option>
|
<option value="3">Name (descending)</option>
|
||||||
|
<option value="4">Size (ascending)</option>
|
||||||
|
<option value="5">Size (descending)</option>
|
||||||
</select></span>
|
</select></span>
|
||||||
</body>
|
</body>
|
||||||
<footer>
|
<footer>
|
||||||
|
|||||||
@@ -1,6 +0,0 @@
|
|||||||
@use 'common';
|
|
||||||
html {
|
|
||||||
background-color: #2c2c2c;
|
|
||||||
color: ghostwhite; }
|
|
||||||
|
|
||||||
/*# sourceMappingURL=dark.css.map */
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
{
|
|
||||||
"version": 3,
|
|
||||||
"mappings": "AAAA,aAAa;AAEb,IAAK;EACH,gBAAgB,EAAE,OAAO;EACzB,KAAK,EAAE,UAAU",
|
|
||||||
"sources": ["dark.scss"],
|
|
||||||
"names": [],
|
|
||||||
"file": "dark.css"
|
|
||||||
}
|
|
||||||
@@ -1,134 +0,0 @@
|
|||||||
class PackageIndexEntry {
|
|
||||||
name;
|
|
||||||
description;
|
|
||||||
website;
|
|
||||||
version;
|
|
||||||
report;
|
|
||||||
}
|
|
||||||
function toHTML(entry) {
|
|
||||||
let v = entry.version != null ? `<span>${escapeHtml(entry.version)}</span>` : "";
|
|
||||||
let d = entry.description != null ? `<p>${escapeHtml(entry.description)}</p>` : "";
|
|
||||||
let w = entry.website != null ? `<a href="${encodeURI(entry.website)}">Website</a>` : "";
|
|
||||||
let r = entry.report ? `Log (<a href=\"${encodeURI('/api/v1/status/' + entry.name)}\">View</a> | <a href=\"${encodeURI('/status/' + entry.name)}\">Download</a>)` : "";
|
|
||||||
let row = (document.createElement('tr'));
|
|
||||||
row.innerHTML = `<td>
|
|
||||||
<h2>${escapeHtml(entry.name)} ${v}</h2>
|
|
||||||
${d}
|
|
||||||
${w}
|
|
||||||
${r}
|
|
||||||
</td>`;
|
|
||||||
return row;
|
|
||||||
}
|
|
||||||
const API_VERSION = 1;
|
|
||||||
const ENDPOINT = `/api/v${API_VERSION}`;
|
|
||||||
class InfoPayload {
|
|
||||||
count;
|
|
||||||
hakurei_version;
|
|
||||||
}
|
|
||||||
async function infoRequest() {
|
|
||||||
const res = await fetch(`${ENDPOINT}/info`);
|
|
||||||
const payload = await res.json();
|
|
||||||
return payload;
|
|
||||||
}
|
|
||||||
class GetPayload {
|
|
||||||
count;
|
|
||||||
values;
|
|
||||||
}
|
|
||||||
var SortOrders;
|
|
||||||
(function (SortOrders) {
|
|
||||||
SortOrders[SortOrders["DeclarationAscending"] = 0] = "DeclarationAscending";
|
|
||||||
SortOrders[SortOrders["DeclarationDescending"] = 1] = "DeclarationDescending";
|
|
||||||
SortOrders[SortOrders["NameAscending"] = 2] = "NameAscending";
|
|
||||||
SortOrders[SortOrders["NameDescending"] = 3] = "NameDescending";
|
|
||||||
})(SortOrders || (SortOrders = {}));
|
|
||||||
async function getRequest(limit, index, sort) {
|
|
||||||
const res = await fetch(`${ENDPOINT}/get?limit=${limit}&index=${index}&sort=${sort.valueOf()}`);
|
|
||||||
const payload = await res.json();
|
|
||||||
return payload;
|
|
||||||
}
|
|
||||||
class State {
|
|
||||||
entriesPerPage = 10;
|
|
||||||
entryIndex = 0;
|
|
||||||
maxEntries = 0;
|
|
||||||
sort = SortOrders.DeclarationAscending;
|
|
||||||
getEntriesPerPage() {
|
|
||||||
return this.entriesPerPage;
|
|
||||||
}
|
|
||||||
setEntriesPerPage(entriesPerPage) {
|
|
||||||
this.entriesPerPage = entriesPerPage;
|
|
||||||
this.setEntryIndex(Math.floor(this.getEntryIndex() / entriesPerPage) * entriesPerPage);
|
|
||||||
}
|
|
||||||
getEntryIndex() {
|
|
||||||
return this.entryIndex;
|
|
||||||
}
|
|
||||||
setEntryIndex(entryIndex) {
|
|
||||||
this.entryIndex = entryIndex;
|
|
||||||
this.updatePage();
|
|
||||||
this.updateRange();
|
|
||||||
this.updateListings();
|
|
||||||
}
|
|
||||||
getMaxEntries() {
|
|
||||||
return this.maxEntries;
|
|
||||||
}
|
|
||||||
setMaxEntries(max) {
|
|
||||||
this.maxEntries = max;
|
|
||||||
}
|
|
||||||
getSortOrder() {
|
|
||||||
return this.sort;
|
|
||||||
}
|
|
||||||
setSortOrder(sortOrder) {
|
|
||||||
this.sort = sortOrder;
|
|
||||||
this.setEntryIndex(0);
|
|
||||||
}
|
|
||||||
updatePage() {
|
|
||||||
let page = Math.ceil(((this.getEntryIndex() + this.getEntriesPerPage()) - 1) / this.getEntriesPerPage());
|
|
||||||
document.getElementById("page-number").innerText = String(page);
|
|
||||||
}
|
|
||||||
updateRange() {
|
|
||||||
let max = Math.min(this.getEntryIndex() + this.getEntriesPerPage(), this.getMaxEntries());
|
|
||||||
document.getElementById("entry-counter").innerText = `${this.getEntryIndex() + 1}-${max} of ${this.getMaxEntries()}`;
|
|
||||||
}
|
|
||||||
updateListings() {
|
|
||||||
getRequest(this.getEntriesPerPage(), this.getEntryIndex(), this.getSortOrder())
|
|
||||||
.then(res => {
|
|
||||||
let table = document.getElementById("pkg-list");
|
|
||||||
table.innerHTML = '';
|
|
||||||
for (let i = 0; i < res.count; i++) {
|
|
||||||
table.appendChild(toHTML(res.values[i]));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let STATE;
|
|
||||||
function prevPage() {
|
|
||||||
let index = STATE.getEntryIndex();
|
|
||||||
STATE.setEntryIndex(Math.max(0, index - STATE.getEntriesPerPage()));
|
|
||||||
}
|
|
||||||
function nextPage() {
|
|
||||||
let index = STATE.getEntryIndex();
|
|
||||||
STATE.setEntryIndex(Math.min((Math.ceil(STATE.getMaxEntries() / STATE.getEntriesPerPage()) * STATE.getEntriesPerPage()) - STATE.getEntriesPerPage(), index + STATE.getEntriesPerPage()));
|
|
||||||
}
|
|
||||||
function escapeHtml(str) {
|
|
||||||
return str
|
|
||||||
.replace(/&/g, '&')
|
|
||||||
.replace(/</g, '<')
|
|
||||||
.replace(/>/g, '>')
|
|
||||||
.replace(/"/g, '"')
|
|
||||||
.replace(/'/g, ''');
|
|
||||||
}
|
|
||||||
document.addEventListener("DOMContentLoaded", () => {
|
|
||||||
STATE = new State();
|
|
||||||
infoRequest()
|
|
||||||
.then(res => {
|
|
||||||
STATE.setMaxEntries(res.count);
|
|
||||||
document.getElementById("hakurei-version").innerText = res.hakurei_version;
|
|
||||||
STATE.updateRange();
|
|
||||||
STATE.updateListings();
|
|
||||||
});
|
|
||||||
document.getElementById("count").addEventListener("change", (event) => {
|
|
||||||
STATE.setEntriesPerPage(parseInt(event.target.value));
|
|
||||||
});
|
|
||||||
document.getElementById("sort").addEventListener("change", (event) => {
|
|
||||||
STATE.setSortOrder(parseInt(event.target.value));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
class PackageIndexEntry {
|
class PackageIndexEntry {
|
||||||
name: string
|
name: string
|
||||||
|
size: number | null
|
||||||
description: string | null
|
description: string | null
|
||||||
website: string | null
|
website: string | null
|
||||||
version: string | null
|
version: string | null
|
||||||
@@ -7,6 +8,7 @@ class PackageIndexEntry {
|
|||||||
}
|
}
|
||||||
function toHTML(entry: PackageIndexEntry): HTMLTableRowElement {
|
function toHTML(entry: PackageIndexEntry): HTMLTableRowElement {
|
||||||
let v = entry.version != null ? `<span>${escapeHtml(entry.version)}</span>` : ""
|
let v = entry.version != null ? `<span>${escapeHtml(entry.version)}</span>` : ""
|
||||||
|
let s = entry.size != null ? `<p>Size: ${toByteSizeString(entry.size)} (${entry.size})</p>` : ""
|
||||||
let d = entry.description != null ? `<p>${escapeHtml(entry.description)}</p>` : ""
|
let d = entry.description != null ? `<p>${escapeHtml(entry.description)}</p>` : ""
|
||||||
let w = entry.website != null ? `<a href="${encodeURI(entry.website)}">Website</a>` : ""
|
let w = entry.website != null ? `<a href="${encodeURI(entry.website)}">Website</a>` : ""
|
||||||
let r = entry.report ? `Log (<a href=\"${encodeURI('/api/v1/status/' + entry.name)}\">View</a> | <a href=\"${encodeURI('/status/' + entry.name)}\">Download</a>)` : ""
|
let r = entry.report ? `Log (<a href=\"${encodeURI('/api/v1/status/' + entry.name)}\">View</a> | <a href=\"${encodeURI('/status/' + entry.name)}\">Download</a>)` : ""
|
||||||
@@ -14,12 +16,22 @@ function toHTML(entry: PackageIndexEntry): HTMLTableRowElement {
|
|||||||
row.innerHTML = `<td>
|
row.innerHTML = `<td>
|
||||||
<h2>${escapeHtml(entry.name)} ${v}</h2>
|
<h2>${escapeHtml(entry.name)} ${v}</h2>
|
||||||
${d}
|
${d}
|
||||||
|
${s}
|
||||||
${w}
|
${w}
|
||||||
${r}
|
${r}
|
||||||
</td>`
|
</td>`
|
||||||
return row
|
return row
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function toByteSizeString(bytes: number): string {
|
||||||
|
if(bytes == null || bytes < 1024) return `${bytes}B`
|
||||||
|
if(bytes < Math.pow(1024, 2)) return `${(bytes/1024).toFixed(2)}kiB`
|
||||||
|
if(bytes < Math.pow(1024, 3)) return `${(bytes/Math.pow(1024, 2)).toFixed(2)}MiB`
|
||||||
|
if(bytes < Math.pow(1024, 4)) return `${(bytes/Math.pow(1024, 3)).toFixed(2)}GiB`
|
||||||
|
if(bytes < Math.pow(1024, 5)) return `${(bytes/Math.pow(1024, 4)).toFixed(2)}TiB`
|
||||||
|
return "not only is it big, it's large"
|
||||||
|
}
|
||||||
|
|
||||||
const API_VERSION = 1
|
const API_VERSION = 1
|
||||||
const ENDPOINT = `/api/v${API_VERSION}`
|
const ENDPOINT = `/api/v${API_VERSION}`
|
||||||
class InfoPayload {
|
class InfoPayload {
|
||||||
@@ -33,12 +45,11 @@ async function infoRequest(): Promise<InfoPayload> {
|
|||||||
return payload as InfoPayload
|
return payload as InfoPayload
|
||||||
}
|
}
|
||||||
class GetPayload {
|
class GetPayload {
|
||||||
count: number
|
|
||||||
values: PackageIndexEntry[]
|
values: PackageIndexEntry[]
|
||||||
}
|
}
|
||||||
|
|
||||||
enum SortOrders {
|
enum SortOrders {
|
||||||
DeclarationAscending = 0,
|
DeclarationAscending,
|
||||||
DeclarationDescending,
|
DeclarationDescending,
|
||||||
NameAscending,
|
NameAscending,
|
||||||
NameDescending
|
NameDescending
|
||||||
@@ -96,9 +107,9 @@ class State {
|
|||||||
.then(res => {
|
.then(res => {
|
||||||
let table = document.getElementById("pkg-list")
|
let table = document.getElementById("pkg-list")
|
||||||
table.innerHTML = ''
|
table.innerHTML = ''
|
||||||
for(let i = 0; i < res.count; i++) {
|
res.values.forEach((row) => {
|
||||||
table.appendChild(toHTML(res.values[i]))
|
table.appendChild(toHTML(row))
|
||||||
}
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -116,6 +127,7 @@ function nextPage() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function escapeHtml(str: string): string {
|
function escapeHtml(str: string): string {
|
||||||
|
if(str === undefined) return ""
|
||||||
return str
|
return str
|
||||||
.replace(/&/g, '&')
|
.replace(/&/g, '&')
|
||||||
.replace(/</g, '<')
|
.replace(/</g, '<')
|
||||||
|
|||||||
@@ -1,6 +0,0 @@
|
|||||||
@use 'common';
|
|
||||||
html {
|
|
||||||
background-color: #d3d3d3;
|
|
||||||
color: black; }
|
|
||||||
|
|
||||||
/*# sourceMappingURL=light.css.map */
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
{
|
|
||||||
"version": 3,
|
|
||||||
"mappings": "AAAA,aAAa;AAEb,IAAK;EACH,gBAAgB,EAAE,OAAO;EACzB,KAAK,EAAE,KAAK",
|
|
||||||
"sources": ["light.scss"],
|
|
||||||
"names": [],
|
|
||||||
"file": "light.css"
|
|
||||||
}
|
|
||||||
24
cmd/pkgserver/ui/static/test.scss
Normal file
24
cmd/pkgserver/ui/static/test.scss
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
.root {
|
||||||
|
margin: 1rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
details.test-node {
|
||||||
|
margin-left: 1rem;
|
||||||
|
padding: 0.2rem 0.5rem;
|
||||||
|
border-left: 2px dashed black;
|
||||||
|
> summary {
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
&.failure > summary::marker {
|
||||||
|
color: red;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
p.test-desc {
|
||||||
|
margin: 0 0 0 1rem;
|
||||||
|
padding: 2px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.italic {
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
131
cmd/pkgserver/ui/static/test.ts
Normal file
131
cmd/pkgserver/ui/static/test.ts
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
export interface TestResult {
|
||||||
|
success: boolean;
|
||||||
|
output: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// =============================================================================
|
||||||
|
// Reporting
|
||||||
|
|
||||||
|
export interface Reporter {
|
||||||
|
update(path: string[], result: TestResult);
|
||||||
|
finalize();
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Stream {
|
||||||
|
writeln(s: string);
|
||||||
|
}
|
||||||
|
|
||||||
|
export class StreamReporter implements Reporter {
|
||||||
|
stream: Stream;
|
||||||
|
verbose: boolean;
|
||||||
|
failures: ({ path: string[] } & TestResult)[];
|
||||||
|
counts: { successes: number, failures: number };
|
||||||
|
|
||||||
|
constructor(stream: Stream, verbose: boolean = false) {
|
||||||
|
this.stream = stream;
|
||||||
|
this.verbose = verbose;
|
||||||
|
this.failures = [];
|
||||||
|
this.counts = { successes: 0, failures: 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
update(path: string[], result: TestResult) {
|
||||||
|
const pathStr = path.join(' ❯ ');
|
||||||
|
if (result.success) {
|
||||||
|
this.counts.successes++;
|
||||||
|
if (this.verbose) this.stream.writeln(`✅️ ${pathStr}`);
|
||||||
|
} else {
|
||||||
|
this.counts.failures++;
|
||||||
|
this.stream.writeln(`⚠️ ${pathStr}`);
|
||||||
|
this.failures.push({ path, ...result });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
finalize() {
|
||||||
|
// Transform [{ path: ['a', 'b', 'c'] }, { path: ['a', 'b', 'd'] }]
|
||||||
|
// into { 'a ❯ b': ['c', 'd'] }.
|
||||||
|
let pathMap = new Map<string, ({ name: string } & TestResult)[]>();
|
||||||
|
for (const f of this.failures) {
|
||||||
|
const key = f.path.slice(0, -1).join(' ❯ ');
|
||||||
|
if (!pathMap.has(key)) pathMap.set(key, []);
|
||||||
|
pathMap.get(key).push({ name: f.path.at(-1), ...f });
|
||||||
|
}
|
||||||
|
|
||||||
|
this.stream.writeln('');
|
||||||
|
this.stream.writeln('FAILURES');
|
||||||
|
this.stream.writeln('========');
|
||||||
|
|
||||||
|
for (const [path, tests] of pathMap) {
|
||||||
|
if (tests.length === 1) {
|
||||||
|
const t = tests[0];
|
||||||
|
const pathStr = path ? `${path} ❯ ` : '';
|
||||||
|
const output = t.output ? `: ${t.output}` : '';
|
||||||
|
this.stream.writeln(`${pathStr}${t.name}${output}`);
|
||||||
|
} else {
|
||||||
|
this.stream.writeln(path);
|
||||||
|
for (const t of tests) {
|
||||||
|
const output = t.output ? `: ${t.output}` : '';
|
||||||
|
this.stream.writeln(` - ${t.name}${output}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.stream.writeln('');
|
||||||
|
const { successes, failures } = this.counts;
|
||||||
|
this.stream.writeln(`${successes} succeeded, ${failures} failed`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class DOMReporter implements Reporter {
|
||||||
|
update(path: string[], result: TestResult) {
|
||||||
|
const counter = document.getElementById(result.success ? 'successes' : 'failures');
|
||||||
|
counter.innerText = (Number(counter.innerText) + 1).toString();
|
||||||
|
let parent = document.getElementById('root');
|
||||||
|
for (const node of path) {
|
||||||
|
let child = null;
|
||||||
|
outer: for (const d of parent.children) {
|
||||||
|
for (const s of d.children) {
|
||||||
|
if (!(s instanceof HTMLElement)) continue;
|
||||||
|
if (s.tagName !== 'SUMMARY' || s.innerText !== node) continue;
|
||||||
|
child = d;
|
||||||
|
break outer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (child === null) {
|
||||||
|
child = document.createElement('details');
|
||||||
|
child.className = 'test-node';
|
||||||
|
const summary = document.createElement('summary');
|
||||||
|
summary.appendChild(document.createTextNode(node));
|
||||||
|
child.appendChild(summary);
|
||||||
|
parent.appendChild(child);
|
||||||
|
}
|
||||||
|
if (!result.success) {
|
||||||
|
child.open = true;
|
||||||
|
child.classList.add('failure');
|
||||||
|
}
|
||||||
|
parent = child;
|
||||||
|
}
|
||||||
|
const p = document.createElement('p');
|
||||||
|
p.classList.add('test-desc');
|
||||||
|
if (result.output) {
|
||||||
|
const code = document.createElement('code');
|
||||||
|
code.appendChild(document.createTextNode(result.output));
|
||||||
|
p.appendChild(code);
|
||||||
|
} else {
|
||||||
|
p.classList.add('italic');
|
||||||
|
p.appendChild(document.createTextNode('No output.'));
|
||||||
|
}
|
||||||
|
parent.appendChild(p);
|
||||||
|
}
|
||||||
|
|
||||||
|
finalize() {}
|
||||||
|
}
|
||||||
|
|
||||||
|
let r = typeof document !== 'undefined' ? new DOMReporter() : new StreamReporter({ writeln: console.log });
|
||||||
|
r.update(['alien', 'can walk'], { success: false, output: 'assertion failed' });
|
||||||
|
r.update(['alien', 'can speak'], { success: false, output: 'Uncaught ReferenceError: larynx is not defined' });
|
||||||
|
r.update(['alien', 'sleep'], { success: true, output: '' });
|
||||||
|
r.update(['Tetromino', 'generate', 'tessellates'], { success: false, output: 'assertion failed: 1 != 2' });
|
||||||
|
r.update(['Tetromino', 'solve', 'works'], { success: true, output: '' });
|
||||||
|
r.update(['discombobulate'], { success: false, output: 'hippopotamonstrosesquippedaliophobia' });
|
||||||
|
r.update(['recombobulate'], { success: true, output: '' });
|
||||||
|
r.finalize();
|
||||||
24
cmd/pkgserver/ui/test.html
Normal file
24
cmd/pkgserver/ui/test.html
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <link rel="stylesheet" href="static/style.css">
    <link rel="stylesheet" href="static/test.css">
    <title>PkgServer Tests</title>
</head>
<body>
    <h1>PkgServer Tests</h1>

    <main>
        <!-- Live pass/fail tallies, updated per result by static/test.js. -->
        <div id="counters">
            <span id="successes">0</span> successes, <span id="failures">0</span> failures.
        </div>

        <!-- Root of the collapsible result tree built by static/test.js. -->
        <div id="root">
        </div>

        <!-- Compiled from test.ts; runs the test suite on load. -->
        <script type="module" src="./static/test.js"></script>
    </main>
</body>
</html>
|
||||||
9
cmd/pkgserver/ui_full.go
Normal file
9
cmd/pkgserver/ui_full.go
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
//go:build frontend

package main

import "embed"

// The generate step compiles the SCSS stylesheets and the TypeScript
// sources into the static assets embedded below; it requires the sass
// and tsc toolchains on PATH.
//
//go:generate sh -c "sass ui/static/dark.scss ui/static/dark.css && sass ui/static/light.scss ui/static/light.css && sass ui/static/test.scss ui/static/test.css && tsc -p ui/static"

// content holds the embedded frontend assets under ui/. It is only
// populated when building with the "frontend" tag; ui_stub.go provides
// an empty stand-in otherwise.
//
//go:embed ui/*
var content embed.FS
|
||||||
7
cmd/pkgserver/ui_stub.go
Normal file
7
cmd/pkgserver/ui_stub.go
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
//go:build !frontend

package main

import "testing/fstest"

// content is an empty stand-in for the embedded frontend assets, so the
// binary still builds when the "frontend" tag (and its sass/tsc
// toolchain requirement) is absent. The zero-value MapFS serves no
// files.
var content fstest.MapFS
|
||||||
Reference in New Issue
Block a user