modularise backend a bit
parent ed18f87251
commit 7ae7166ac1

backend.go | 156
@@ -1,154 +1,78 @@
 package debanator
 
 import (
-	"crypto/md5"
-	"crypto/sha1"
-	"crypto/sha256"
-	"fmt"
 	"io"
 	"io/fs"
+	"net/http"
 	"os"
 	"path"
-	"strconv"
+	"path/filepath"
 	"strings"
 
 	log "github.com/sirupsen/logrus"
-	"pault.ag/go/debian/control"
-	"pault.ag/go/debian/deb"
-	"pault.ag/go/debian/dependency"
-	"pault.ag/go/debian/version"
-
-	"golang.org/x/exp/maps"
 )
 
 // A backend to search for packages in
 type Backend interface {
-	GetPackages()
+	GetFiles() ([]DebFile, error)
+	ServeFiles(string) http.Handler
 }
 
+type ReaderAtCloser interface {
+	io.ReaderAt
+	io.ReadCloser
+}
+
+// An abstract interface for reading a debfile. This could be coming from the local fs,
+// a remote webdav share, etc...
+type DebFile interface {
+	GetReader() (ReaderAtCloser, error)
+	GetName() string
+}
+
 type FileBackend struct {
 	path string
 }
 
+// A deb file existing on the local filesystem
+type fsDebFile struct {
+	path string
+}
+
+func (f fsDebFile) GetReader() (ReaderAtCloser, error) {
+	return os.Open(f.path)
+}
+
+func (f fsDebFile) GetName() string {
+	_, name := filepath.Split(f.path)
+	return name
+}
+
 func NewFileBackend(path string) FileBackend {
 	return FileBackend{path}
 }
 
-func BinaryIndexFromDeb(p string, basePath string) (*control.BinaryIndex, error) {
-	f, err := os.Open(p)
-	if err != nil {
-		return nil, fmt.Errorf("open file: %w", err)
-	}
-	defer f.Close()
-	debFile, err := deb.Load(f, p)
-	if err != nil {
-		return nil, fmt.Errorf("read deb: %w", err)
-	}
-	md5sum := md5.New()
-	sha1sum := sha1.New()
-	sha256sum := sha256.New()
-	hashWriter := io.MultiWriter(md5sum, sha1sum, sha256sum)
-	size, err := io.Copy(hashWriter, f)
-	if err != nil {
-		return nil, fmt.Errorf("hash file: %w", err)
-	}
-	bi := control.BinaryIndex{
-		Paragraph: control.Paragraph{
-			Values: make(map[string]string),
-		},
-		Package:       debFile.Control.Package,
-		Source:        debFile.Control.Source,
-		Version:       debFile.Control.Version,
-		InstalledSize: fmt.Sprintf("%d", debFile.Control.InstalledSize),
-		Size:          strconv.Itoa(int(size)),
-		Maintainer:    debFile.Control.Maintainer,
-		Architecture:  debFile.Control.Architecture,
-		MultiArch:     debFile.Control.MultiArch,
-		Description:   debFile.Control.Description,
-		Homepage:      debFile.Control.Homepage,
-		Section:       debFile.Control.Section,
-		// FIXME: gross, make this more centrally managed somehow
-		Filename: path.Join("pool/main", strings.TrimPrefix(p, basePath)),
-		Priority: debFile.Control.Priority,
-		MD5sum:   fmt.Sprintf("%x", md5sum.Sum(nil)),
-		SHA1:     fmt.Sprintf("%x", sha1sum.Sum(nil)),
-		SHA256:   fmt.Sprintf("%x", sha256sum.Sum(nil)),
-	}
-	if debFile.Control.Depends.String() != "" {
-		bi.Paragraph.Set("Depends", debFile.Control.Depends.String())
-	}
-	if debFile.Control.Recommends.String() != "" {
-		bi.Paragraph.Set("Recommends", debFile.Control.Recommends.String())
-	}
-	if debFile.Control.Suggests.String() != "" {
-		bi.Paragraph.Set("Suggests", debFile.Control.Suggests.String())
-	}
-	if debFile.Control.Breaks.String() != "" {
-		bi.Paragraph.Set("Breaks", debFile.Control.Breaks.String())
-	}
-	if debFile.Control.Replaces.String() != "" {
-		bi.Paragraph.Set("Replaces", debFile.Control.Replaces.String())
-	}
-	if debFile.Control.BuiltUsing.String() != "" {
-		bi.Paragraph.Set("BuiltUsing", debFile.Control.BuiltUsing.String())
-	}
-	return &bi, nil
+func (fb FileBackend) ServeFiles(prefix string) http.Handler {
+	return http.StripPrefix(path.Join(prefix, "pool"), http.FileServer(http.Dir(fb.path)))
 }
 
-func ScanDebs(debpath string) Repo {
-	var debs []string
-	fs.WalkDir(os.DirFS(debpath), ".", func(path string, dir fs.DirEntry, err error) error {
+func (fb FileBackend) GetFiles() ([]DebFile, error) {
+	var debs []DebFile
+	fs.WalkDir(os.DirFS(fb.path), ".", func(dirpath string, dir fs.DirEntry, err error) error {
 		if err != nil {
 			log.WithFields(log.Fields{
-				"path":  path,
+				"path":  dirpath,
 				"error": err,
 			}).Warn("Error scanning for debs")
 			return nil
 		}
-		if !dir.IsDir() && strings.HasSuffix(dir.Name(), ".deb"){
-			debs = append(debs, path)
+		if !dir.IsDir() && strings.HasSuffix(dir.Name(), ".deb") {
+			debs = append(debs, DebFile(fsDebFile{
+				filepath.Join(fb.path, dirpath),
+			}))
 		}
 		return nil
 	})
-	packs := make(map[string]LogicalPackage)
-	for _, d := range debs {
-		p := path.Join(debpath, d)
-		bi, err := BinaryIndexFromDeb(p, debpath)
-		if err != nil {
-			log.WithFields(log.Fields{
-				"path":  p,
-				"error": err,
-			}).Error("Error processing deb file")
-			continue
-		}
-
-		packageName := bi.Package
-		if _, ok := packs[packageName]; !ok {
-			packs[packageName] = LogicalPackage{
-				Name:   packageName,
-				Arches: make(map[dependency.Arch]map[version.Version]control.BinaryIndex),
-			}
-		}
-		pack := packs[packageName]
-		if _, ok := pack.Arches[bi.Architecture]; !ok {
-			pack.Arches[bi.Architecture] = make(map[version.Version]control.BinaryIndex)
-		}
-		arch := pack.Arches[bi.Architecture]
-		if _, ok := arch[bi.Version]; !ok {
-			arch[bi.Version] = *bi
-		} else {
-			log.WithFields(log.Fields{
-				"package": packageName,
-				"arch":    arch,
-				"version": bi.Version.String(),
-			}).Warn("Duplicate package/arch/version found, ignoring...")
-		}
-	}
-
-	return Repo{
-		packages: maps.Values(packs),
-		cache:    make(map[string]hashedFile),
-	}
+	log.Infof("got files: %v", debs)
+	return debs, nil
 }
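The new Backend, DebFile and ReaderAtCloser interfaces decouple package discovery from the local filesystem. As a rough illustration only (not part of this commit; names are hypothetical and the sketch assumes it lives in the same debanator package), an in-memory backend could satisfy the same interfaces:

package debanator

import (
	"bytes"
	"net/http"
	"path"
	"testing/fstest"
)

// memBackend is a hypothetical backend that keeps raw .deb contents
// in memory, keyed by file name. It is not part of this commit.
type memBackend struct {
	debs map[string][]byte
}

// memDebFile satisfies DebFile for a byte slice held in memory.
type memDebFile struct {
	name string
	data []byte
}

// nopCloserReaderAt adds a no-op Close to *bytes.Reader so it satisfies ReaderAtCloser.
type nopCloserReaderAt struct{ *bytes.Reader }

func (nopCloserReaderAt) Close() error { return nil }

func (f memDebFile) GetReader() (ReaderAtCloser, error) {
	return nopCloserReaderAt{bytes.NewReader(f.data)}, nil
}

func (f memDebFile) GetName() string { return f.name }

func (b memBackend) GetFiles() ([]DebFile, error) {
	out := make([]DebFile, 0, len(b.debs))
	for name, data := range b.debs {
		out = append(out, memDebFile{name: name, data: data})
	}
	return out, nil
}

// ServeFiles mirrors FileBackend.ServeFiles: the debs are exposed under <prefix>/pool/.
func (b memBackend) ServeFiles(prefix string) http.Handler {
	m := make(fstest.MapFS, len(b.debs))
	for name, data := range b.debs {
		m[name] = &fstest.MapFile{Data: data}
	}
	return http.StripPrefix(path.Join(prefix, "pool"), http.FileServer(http.FS(m)))
}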
cmd/main.go | 25
@@ -37,17 +37,6 @@ func logMiddleware(h http.HandlerFunc) http.HandlerFunc {
 	}
 }
 
-func md(err error) {
-	if err != nil {
-		log.Fatal(err)
-	}
-}
-
-func unwrap[T any](val T, err error) T {
-	md(err)
-	return val
-}
-
 func main() {
 	listenAddr := *flag.String("listen", ":1612", "HTTP listen address")
 	debPath := *flag.String("debpath", "debs", "Path to directory containing deb files.")
@@ -57,22 +46,23 @@ func main() {
 	kb, err := os.ReadFile("privkey.gpg")
 	if err != nil {
 		log.Infof("Generating new key...")
-		ecKey = unwrap(crypto.GenerateKey("Debanator", "packager@example.com", "x25519", 0))
-		f := unwrap(os.Create("privkey.gpg"))
+		ecKey = debanator.Unwrap(crypto.GenerateKey("Debanator", "packager@example.com", "x25519", 0))
+		f := debanator.Unwrap(os.Create("privkey.gpg"))
 		defer f.Close()
-		armored := unwrap(ecKey.Armor())
+		armored := debanator.Unwrap(ecKey.Armor())
 		f.WriteString(armored)
 	} else {
 		log.Infof("Using existing key...")
-		ecKey = unwrap(crypto.NewKeyFromArmored(string(kb)))
+		ecKey = debanator.Unwrap(crypto.NewKeyFromArmored(string(kb)))
 	}
 
 	signingKeyRing, err := crypto.NewKeyRing(ecKey)
 	if err != nil {
 		log.Fatal(err)
 	}
-	repo := debanator.ScanDebs(debPath)
+	be := debanator.NewFileBackend(debPath)
+	repo := debanator.NewRepoFromBackend(be, "/dists/stable")
+	debanator.Md(repo.Populate())
 	if err := repo.GenerateFiles(); err != nil {
 		log.Fatal(err)
 	}
@@ -94,6 +84,5 @@ func main() {
 		io.WriteString(w, pub)
 	})
 	r.Mount("/dists/stable", repo.GetHandler(signingKeyRing))
-	r.Get("/pool/main/*", http.StripPrefix("/pool/main/", http.FileServer(http.Dir(debPath))).ServeHTTP)
 	http.ListenAndServe(listenAddr, r)
 }
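cmd/main.go now calls debanator.Unwrap and debanator.Md in place of its removed local unwrap/md helpers. Their definitions are not shown in this diff; based on the removed local versions, they presumably look roughly like this sketch:

package debanator

import log "github.com/sirupsen/logrus"

// Md ("must do") aborts on a non-nil error, mirroring the md helper removed from cmd/main.go.
func Md(err error) {
	if err != nil {
		log.Fatal(err)
	}
}

// Unwrap returns val, aborting the program if err is non-nil,
// mirroring the removed unwrap helper.
func Unwrap[T any](val T, err error) T {
	Md(err)
	return val
}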
package.go | 189
@@ -1,19 +1,17 @@
 package debanator
 
 import (
-	"bytes"
+	"crypto/md5"
+	"crypto/sha1"
+	"crypto/sha256"
 	"fmt"
 	"io"
-	"net/http"
-	"time"
+	"strconv"
+	"strings"
 
-	"github.com/ProtonMail/gopenpgp/v2/crypto"
-	"github.com/go-chi/chi/v5"
-	log "github.com/sirupsen/logrus"
-	"golang.org/x/exp/maps"
 	"pault.ag/go/debian/control"
+	"pault.ag/go/debian/deb"
 	"pault.ag/go/debian/dependency"
-	"pault.ag/go/debian/hashio"
 	"pault.ag/go/debian/version"
 )
@@ -24,137 +22,58 @@ type LogicalPackage struct {
 	Arches map[dependency.Arch]map[version.Version]control.BinaryIndex
 }
 
-type hashedFile struct {
-	buf        []byte
-	md5Hash    control.MD5FileHash
-	sha1Hash   control.SHA1FileHash
-	sha256Hash control.SHA256FileHash
-}
-
-type Repo struct {
-	packages []LogicalPackage
-	cache    map[string]hashedFile
-	release  []byte
-}
-
-func (r *Repo) GetArches() []dependency.Arch {
-	arches := make(map[dependency.Arch]struct{})
-	for _, lp := range r.packages {
-		for arch := range lp.Arches {
-			arches[arch] = struct{}{}
-		}
-	}
-	return maps.Keys(arches)
-}
-
-// Find the latest versions of all packages for the given arch
-func (r *Repo) GetPackagesForArch(a dependency.Arch) []control.BinaryIndex {
-	out := []control.BinaryIndex{}
-	for _, p := range r.packages {
-		if versions, ok := p.Arches[a]; ok {
-			var latest version.Version
-			for v := range versions {
-				if version.Compare(v, latest) > 0 {
-					latest = v
-				}
-			}
-			out = append(out, p.Arches[a][latest])
-		}
-	}
-	return out
-}
-
-func (r *Repo) makePackagesFileForArch(arch dependency.Arch) error {
-	var b bytes.Buffer
-	w, hashers, err := hashio.NewHasherWriters([]string{"md5", "sha256", "sha1"}, &b)
-	enc, _ := control.NewEncoder(w)
-	for _, d := range r.GetPackagesForArch(arch) {
-		if err = enc.Encode(d); err != nil {
-			return fmt.Errorf("encoding package %s: %w", d.Package, err)
-		}
-	}
-	fname := fmt.Sprintf("main/binary-%s/Packages", arch)
-	hashes := make(map[string]control.FileHash)
-	for _, h := range hashers {
-		hashes[h.Name()] = control.FileHashFromHasher(fname, *h)
-	}
-	r.cache[fname] = hashedFile{
-		buf:        b.Bytes(),
-		sha256Hash: control.SHA256FileHash{hashes["sha256"]},
-		sha1Hash:   control.SHA1FileHash{hashes["sha1"]},
-		md5Hash:    control.MD5FileHash{hashes["md5"]},
-	}
-	return nil
-}
-
-// Generate and cache all the Package/Repo files
-func (r *Repo) GenerateFiles() error {
-	for _, arch := range r.GetArches() {
-		if err := r.makePackagesFileForArch(arch); err != nil {
-			return fmt.Errorf("generating files for arch %s: %w", arch, err)
-		}
-	}
-	r.makeRelease()
-	return nil
-}
-
-func (r *Repo) makeRelease() {
-	var rel bytes.Buffer
-	enc, _ := control.NewEncoder(&rel)
-	const dateFmt = "Mon, 02 Jan 2006 15:04:05 MST"
-	var md5s []control.MD5FileHash
-	var sha1s []control.SHA1FileHash
-	var sha256s []control.SHA256FileHash
-	for _, f := range r.cache {
-		md5s = append(md5s, f.md5Hash)
-		sha1s = append(sha1s, f.sha1Hash)
-		sha256s = append(sha256s, f.sha256Hash)
-	}
-	if err := enc.Encode(Release{
-		Suite:         "stable",
-		Architectures: r.GetArches(),
-		Components:    "main",
-		Date:          time.Now().UTC().Format(dateFmt),
-		MD5Sum:        md5s,
-		SHA1:          sha1s,
-		SHA256:        sha256s,
-	}); err != nil {
-		log.Fatal(err)
-	}
-	r.release = rel.Bytes()
-	return
-}
-
-
-// Handle a deb/apt repository http request
-func (r *Repo) GetHandler(keyring *crypto.KeyRing) http.Handler {
-	router := chi.NewRouter()
-	router.Get("/Release", func(w http.ResponseWriter, req *http.Request) {
-		if _, err := w.Write(r.release); err != nil {
-			log.Fatal(err)
-		}
-	})
-	router.Get("/Release.gpg", func(w http.ResponseWriter, req *http.Request) {
-		msg := crypto.NewPlainMessage(r.release)
-		sig, err := keyring.SignDetached(msg)
+func BinaryIndexFromDeb(r ReaderAtCloser, filePath string) (*control.BinaryIndex, error) {
+	debFile, err := deb.Load(r, "fakepath")
 	if err != nil {
-		log.Fatal(err)
+		return nil, fmt.Errorf("read deb: %w", err)
 	}
-	sigStr, err := sig.GetArmored()
+	md5sum := md5.New()
+	sha1sum := sha1.New()
+	sha256sum := sha256.New()
+	hashWriter := io.MultiWriter(md5sum, sha1sum, sha256sum)
+	size, err := io.Copy(hashWriter, r)
 	if err != nil {
-		log.Fatal(err)
+		return nil, fmt.Errorf("hash file: %w", err)
 	}
-	io.WriteString(w, sigStr)
-	})
-	router.Get("/main/{arch}/Packages", func(w http.ResponseWriter, req *http.Request) {
-		h, ok := r.cache[fmt.Sprintf("main/%s/Packages", chi.URLParam(req, "arch"))]
-		if !ok {
-			w.WriteHeader(http.StatusBadRequest)
-			return
+	bi := control.BinaryIndex{
+		Paragraph: control.Paragraph{
+			Values: make(map[string]string),
+		},
+		Package:       debFile.Control.Package,
+		Source:        debFile.Control.Source,
+		Version:       debFile.Control.Version,
+		InstalledSize: fmt.Sprintf("%d", debFile.Control.InstalledSize),
+		Size:          strconv.Itoa(int(size)),
+		Maintainer:    debFile.Control.Maintainer,
+		Architecture:  debFile.Control.Architecture,
+		MultiArch:     debFile.Control.MultiArch,
+		Description:   debFile.Control.Description,
+		Homepage:      debFile.Control.Homepage,
+		Section:       debFile.Control.Section,
+		// FIXME: gross, make this more centrally managed somehow
+		Filename: strings.TrimPrefix(filePath, "/"),
+		Priority: debFile.Control.Priority,
+		MD5sum:   fmt.Sprintf("%x", md5sum.Sum(nil)),
+		SHA1:     fmt.Sprintf("%x", sha1sum.Sum(nil)),
+		SHA256:   fmt.Sprintf("%x", sha256sum.Sum(nil)),
 	}
-		_, err := w.Write(h.buf); if err != nil {
-			log.Error(err)
+	if debFile.Control.Depends.String() != "" {
+		bi.Paragraph.Set("Depends", debFile.Control.Depends.String())
 	}
-	})
-	return router
+	if debFile.Control.Recommends.String() != "" {
+		bi.Paragraph.Set("Recommends", debFile.Control.Recommends.String())
+	}
+	if debFile.Control.Suggests.String() != "" {
+		bi.Paragraph.Set("Suggests", debFile.Control.Suggests.String())
+	}
+	if debFile.Control.Breaks.String() != "" {
+		bi.Paragraph.Set("Breaks", debFile.Control.Breaks.String())
+	}
+	if debFile.Control.Replaces.String() != "" {
+		bi.Paragraph.Set("Replaces", debFile.Control.Replaces.String())
+	}
+	if debFile.Control.BuiltUsing.String() != "" {
+		bi.Paragraph.Set("BuiltUsing", debFile.Control.BuiltUsing.String())
+	}
+	return &bi, nil
 }
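BinaryIndexFromDeb now takes any ReaderAtCloser plus the repository path to advertise in the generated index, so any backend can drive it. A minimal sketch of calling it directly, with a hypothetical helper and file names (not part of this commit), assuming it sits in the same debanator package:

package debanator

import (
	"fmt"
	"os"
)

// describeDeb is a hypothetical helper showing a direct call to BinaryIndexFromDeb.
func describeDeb(debPath, repoPath string) error {
	// *os.File satisfies ReaderAtCloser (io.ReaderAt + io.ReadCloser).
	f, err := os.Open(debPath)
	if err != nil {
		return err
	}
	defer f.Close()

	// repoPath becomes the Filename field of the Packages entry,
	// e.g. "/dists/stable/pool/example_1.0_amd64.deb".
	bi, err := BinaryIndexFromDeb(f, repoPath)
	if err != nil {
		return err
	}
	fmt.Println(bi.Package, bi.Architecture, bi.SHA256)
	return nil
}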
@@ -11,6 +11,6 @@ type Release struct {
 	Components    string
 	Date          string
 	SHA1          []control.SHA1FileHash   `delim:"\n" strip:"\n\r\t "`
-	SHA256        []control.SHA256FileHash`delim:"\n" strip:"\n\r\t "`
-	MD5Sum        []control.MD5FileHash`delim:"\n" strip:"\n\r\t "`
+	SHA256        []control.SHA256FileHash `delim:"\n" strip:"\n\r\t "`
+	MD5Sum        []control.MD5FileHash    `delim:"\n" strip:"\n\r\t "`
 }
@@ -0,0 +1,208 @@
+package debanator
+
+import (
+	"bytes"
+	"fmt"
+	"io"
+	"net/http"
+	"path"
+	"time"
+
+	"github.com/ProtonMail/gopenpgp/v2/crypto"
+	"github.com/go-chi/chi/v5"
+	log "github.com/sirupsen/logrus"
+	"golang.org/x/exp/maps"
+	"pault.ag/go/debian/control"
+	"pault.ag/go/debian/dependency"
+	"pault.ag/go/debian/hashio"
+	"pault.ag/go/debian/version"
+)
+
+const FILESPREFIX = "pool"
+
+type hashedFile struct {
+	buf        []byte
+	md5Hash    control.MD5FileHash
+	sha1Hash   control.SHA1FileHash
+	sha256Hash control.SHA256FileHash
+}
+
+type Repo struct {
+	// The prefix to serving http paths to the files provided by this package.
+	// This is needed so that we can give absolute paths in Package files.
+	filePrefix string
+	be         Backend
+	packages   []LogicalPackage
+	cache      map[string]hashedFile
+	release    []byte
+}
+
+func NewRepoFromBackend(backend Backend, filePrefix string) Repo {
+	return Repo{
+		be:         backend,
+		cache:      make(map[string]hashedFile),
+		filePrefix: filePrefix,
+	}
+}
+
+func (r *Repo) GetArches() []dependency.Arch {
+	arches := make(map[dependency.Arch]struct{})
+	for _, lp := range r.packages {
+		for arch := range lp.Arches {
+			arches[arch] = struct{}{}
+		}
+	}
+	return maps.Keys(arches)
+}
+
+// Find the latest versions of all packages for the given arch
+func (r *Repo) GetPackagesForArch(a dependency.Arch) []control.BinaryIndex {
+	out := []control.BinaryIndex{}
+	for _, p := range r.packages {
+		if versions, ok := p.Arches[a]; ok {
+			var latest version.Version
+			for v := range versions {
+				if version.Compare(v, latest) > 0 {
+					latest = v
+				}
+			}
+			out = append(out, p.Arches[a][latest])
+		}
+	}
+	return out
+}
+
+func (r *Repo) makePackagesFileForArch(arch dependency.Arch) error {
+	var b bytes.Buffer
+	w, hashers, err := hashio.NewHasherWriters([]string{"md5", "sha256", "sha1"}, &b)
+	enc, _ := control.NewEncoder(w)
+	for _, d := range r.GetPackagesForArch(arch) {
+		if err = enc.Encode(d); err != nil {
+			return fmt.Errorf("encoding package %s: %w", d.Package, err)
+		}
+	}
+	fname := fmt.Sprintf("main/binary-%s/Packages", arch)
+	hashes := make(map[string]control.FileHash)
+	for _, h := range hashers {
+		hashes[h.Name()] = control.FileHashFromHasher(fname, *h)
+	}
+	r.cache[fname] = hashedFile{
+		buf: b.Bytes(),
+		sha256Hash: control.SHA256FileHash{
+			FileHash: hashes["sha256"],
+		},
+		sha1Hash: control.SHA1FileHash{
+			FileHash: hashes["sha1"]},
+		md5Hash: control.MD5FileHash{FileHash: hashes["md5"]},
+	}
+	return nil
+}
+
+// Generate and cache all the Package/Repo files
+func (r *Repo) GenerateFiles() error {
+	for _, arch := range r.GetArches() {
+		if err := r.makePackagesFileForArch(arch); err != nil {
+			return fmt.Errorf("generating files for arch %s: %w", arch, err)
+		}
+	}
+	r.makeRelease()
+	return nil
+}
+
+func (r *Repo) makeRelease() {
+	var rel bytes.Buffer
+	enc, _ := control.NewEncoder(&rel)
+	const dateFmt = "Mon, 02 Jan 2006 15:04:05 MST"
+	var md5s []control.MD5FileHash
+	var sha1s []control.SHA1FileHash
+	var sha256s []control.SHA256FileHash
+	for _, f := range r.cache {
+		md5s = append(md5s, f.md5Hash)
+		sha1s = append(sha1s, f.sha1Hash)
+		sha256s = append(sha256s, f.sha256Hash)
+	}
+	if err := enc.Encode(Release{
+		Suite:         "stable",
+		Architectures: r.GetArches(),
+		Components:    "main",
+		Date:          time.Now().UTC().Format(dateFmt),
+		MD5Sum:        md5s,
+		SHA1:          sha1s,
+		SHA256:        sha256s,
+	}); err != nil {
+		log.Fatal(err)
+	}
+	r.release = rel.Bytes()
+	return
+}
+
+// Handle a deb/apt repository http request
+func (r *Repo) GetHandler(keyring *crypto.KeyRing) http.Handler {
+	router := chi.NewRouter()
+	router.Get("/Release", func(w http.ResponseWriter, req *http.Request) {
+		if _, err := w.Write(r.release); err != nil {
+			log.Fatal(err)
+		}
+	})
+	router.Get("/Release.gpg", func(w http.ResponseWriter, req *http.Request) {
+		msg := crypto.NewPlainMessage(r.release)
+		sig, err := keyring.SignDetached(msg)
+		if err != nil {
+			log.Fatal(err)
+		}
+		sigStr, err := sig.GetArmored()
+		if err != nil {
+			log.Fatal(err)
+		}
+		io.WriteString(w, sigStr)
+	})
+	router.Get("/main/{arch}/Packages", func(w http.ResponseWriter, req *http.Request) {
+		h, ok := r.cache[fmt.Sprintf("main/%s/Packages", chi.URLParam(req, "arch"))]
+		if !ok {
+			w.WriteHeader(http.StatusBadRequest)
+			return
+		}
+		_, err := w.Write(h.buf)
+		if err != nil {
+			log.Error(err)
+		}
+	})
+	router.Get(fmt.Sprintf("/%s/*", FILESPREFIX), r.be.ServeFiles(r.filePrefix).ServeHTTP)
+	return router
+}
+
+func (r *Repo) Populate() error {
+	packs := make(map[string]LogicalPackage)
+	files := Unwrap(r.be.GetFiles())
+	for _, f := range files {
+		rd := Unwrap(f.GetReader())
+		bi, err := BinaryIndexFromDeb(rd, path.Join(r.filePrefix, FILESPREFIX, f.GetName()))
+		if err != nil {
+			return fmt.Errorf("processing deb file: %w", err)
+		}
+
+		packageName := bi.Package
+		if _, ok := packs[packageName]; !ok {
+			packs[packageName] = LogicalPackage{
+				Name:   packageName,
+				Arches: make(map[dependency.Arch]map[version.Version]control.BinaryIndex),
+			}
+		}
+		pack := packs[packageName]
+		if _, ok := pack.Arches[bi.Architecture]; !ok {
+			pack.Arches[bi.Architecture] = make(map[version.Version]control.BinaryIndex)
+		}
+		arch := pack.Arches[bi.Architecture]
+		if _, ok := arch[bi.Version]; !ok {
+			arch[bi.Version] = *bi
+		} else {
+			log.WithFields(log.Fields{
+				"package": packageName,
+				"arch":    arch,
+				"version": bi.Version.String(),
+			}).Warn("Duplicate package/arch/version found, ignoring...")
+		}
+	}
+	r.packages = maps.Values(packs)
+	return nil
+}