modularise backend a bit

Sam W 2023-05-01 21:28:28 +01:00
parent ed18f87251
commit 7ae7166ac1
6 changed files with 334 additions and 278 deletions


@@ -1,154 +1,78 @@
package debanator
import (
"crypto/md5"
"crypto/sha1"
"crypto/sha256"
"fmt"
"io"
"io/fs"
"net/http"
"os"
"path"
"strconv"
"path/filepath"
"strings"
log "github.com/sirupsen/logrus"
"pault.ag/go/debian/control"
"pault.ag/go/debian/deb"
"pault.ag/go/debian/dependency"
"pault.ag/go/debian/version"
"golang.org/x/exp/maps"
)
// A backend to search for packages in
type Backend interface {
GetPackages()
GetFiles() ([]DebFile, error)
ServeFiles(string) http.Handler
}
type ReaderAtCloser interface {
io.ReaderAt
io.ReadCloser
}
// An abstract interface for reading a debfile. This could be coming from the local fs,
// a remote webdav share, etc...
type DebFile interface {
GetReader() (ReaderAtCloser, error)
GetName() string
}
type FileBackend struct {
path string
}
// A deb file existing on the local filesystem
type fsDebFile struct {
path string
}
func (f fsDebFile) GetReader() (ReaderAtCloser, error) {
return os.Open(f.path)
}
func (f fsDebFile) GetName() string {
_, name := filepath.Split(f.path)
return name
}
func NewFileBackend(path string) FileBackend {
return FileBackend{path}
}
func BinaryIndexFromDeb(p string, basePath string) (*control.BinaryIndex, error) {
f, err := os.Open(p)
if err != nil {
return nil, fmt.Errorf("open file: %w", err)
}
defer f.Close()
debFile, err := deb.Load(f, p)
if err != nil {
return nil, fmt.Errorf("read deb: %w", err)
}
md5sum := md5.New()
sha1sum := sha1.New()
sha256sum := sha256.New()
hashWriter := io.MultiWriter(md5sum, sha1sum, sha256sum)
size, err := io.Copy(hashWriter, f)
if err != nil {
return nil, fmt.Errorf("hash file: %w", err)
}
bi := control.BinaryIndex{
Paragraph: control.Paragraph{
Values: make(map[string]string),
},
Package: debFile.Control.Package,
Source: debFile.Control.Source,
Version: debFile.Control.Version,
InstalledSize: fmt.Sprintf("%d", debFile.Control.InstalledSize),
Size: strconv.Itoa(int(size)),
Maintainer: debFile.Control.Maintainer,
Architecture: debFile.Control.Architecture,
MultiArch: debFile.Control.MultiArch,
Description: debFile.Control.Description,
Homepage: debFile.Control.Homepage,
Section: debFile.Control.Section,
// FIXME: gross, make this more centrally managed somehow
Filename: path.Join("pool/main", strings.TrimPrefix(p, basePath)),
Priority: debFile.Control.Priority,
MD5sum: fmt.Sprintf("%x", md5sum.Sum(nil)),
SHA1: fmt.Sprintf("%x", sha1sum.Sum(nil)),
SHA256: fmt.Sprintf("%x", sha256sum.Sum(nil)),
}
if debFile.Control.Depends.String() != "" {
bi.Paragraph.Set("Depends", debFile.Control.Depends.String())
}
if debFile.Control.Recommends.String() != "" {
bi.Paragraph.Set("Recommends", debFile.Control.Recommends.String())
}
if debFile.Control.Suggests.String() != "" {
bi.Paragraph.Set("Suggests", debFile.Control.Suggests.String())
}
if debFile.Control.Breaks.String() != "" {
bi.Paragraph.Set("Breaks", debFile.Control.Breaks.String())
}
if debFile.Control.Replaces.String() != "" {
bi.Paragraph.Set("Replaces", debFile.Control.Replaces.String())
}
if debFile.Control.BuiltUsing.String() != "" {
bi.Paragraph.Set("BuiltUsing", debFile.Control.BuiltUsing.String())
}
return &bi, nil
func (fb FileBackend) ServeFiles(prefix string) http.Handler {
return http.StripPrefix(path.Join(prefix, "pool"), http.FileServer(http.Dir(fb.path)))
}
func ScanDebs(debpath string) Repo {
var debs []string
fs.WalkDir(os.DirFS(debpath), ".", func(path string, dir fs.DirEntry, err error) error {
func (fb FileBackend) GetFiles() ([]DebFile, error) {
var debs []DebFile
fs.WalkDir(os.DirFS(fb.path), ".", func(dirpath string, dir fs.DirEntry, err error) error {
if err != nil {
log.WithFields(log.Fields{
"path": path,
"path": dirpath,
"error": err,
}).Warn("Error scanning for debs")
return nil
}
if !dir.IsDir() && strings.HasSuffix(dir.Name(), ".deb") {
debs = append(debs, path)
debs = append(debs, DebFile(fsDebFile{
filepath.Join(fb.path, dirpath),
}))
}
return nil
})
packs := make(map[string]LogicalPackage)
for _, d := range debs {
p := path.Join(debpath, d)
bi, err := BinaryIndexFromDeb(p, debpath)
if err != nil {
log.WithFields(log.Fields{
"path": p,
"error": err,
}).Error("Error processing deb file")
continue
}
packageName := bi.Package
if _, ok := packs[packageName]; !ok {
packs[packageName] = LogicalPackage{
Name: packageName,
Arches: make(map[dependency.Arch]map[version.Version]control.BinaryIndex),
}
}
pack := packs[packageName]
if _, ok := pack.Arches[bi.Architecture]; !ok {
pack.Arches[bi.Architecture] = make(map[version.Version]control.BinaryIndex)
}
arch := pack.Arches[bi.Architecture]
if _, ok := arch[bi.Version]; !ok {
arch[bi.Version] = *bi
} else {
log.WithFields(log.Fields{
"package": packageName,
"arch": arch,
"version": bi.Version.String(),
}).Warn("Duplicate package/arch/version found, ignoring...")
}
}
return Repo{
packages: maps.Values(packs),
cache: make(map[string]hashedFile),
}
log.Infof("got files: %v", debs)
return debs, nil
}
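For illustration only, not part of this commit: the Backend and DebFile interfaces introduced above could also be satisfied by a purely in-memory implementation. The names MemBackend, memDebFile and memReader below are hypothetical; a minimal sketch, assuming it lives alongside the interfaces in the debanator package, might look like this:

package debanator

import (
	"bytes"
	"net/http"
	"path"
)

// memReader adapts *bytes.Reader to ReaderAtCloser with a no-op Close.
type memReader struct{ *bytes.Reader }

func (memReader) Close() error { return nil }

// memDebFile is a .deb held entirely in memory.
type memDebFile struct {
	name string
	data []byte
}

func (f memDebFile) GetReader() (ReaderAtCloser, error) {
	return memReader{bytes.NewReader(f.data)}, nil
}

func (f memDebFile) GetName() string {
	return f.name
}

// MemBackend exposes an in-memory map of file name -> deb contents.
type MemBackend struct {
	debs map[string][]byte
}

func (b MemBackend) GetPackages() {}

func (b MemBackend) GetFiles() ([]DebFile, error) {
	out := make([]DebFile, 0, len(b.debs))
	for name, data := range b.debs {
		out = append(out, memDebFile{name: name, data: data})
	}
	return out, nil
}

func (b MemBackend) ServeFiles(prefix string) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		// Serve the requested deb straight from the map, keyed by base name.
		_, name := path.Split(req.URL.Path)
		if data, ok := b.debs[name]; ok {
			w.Write(data)
			return
		}
		http.NotFound(w, req)
	})
}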


@@ -37,17 +37,6 @@ func logMiddleware(h http.HandlerFunc) http.HandlerFunc {
}
}
func md(err error) {
if err != nil {
log.Fatal(err)
}
}
func unwrap[T any](val T, err error) T {
md(err)
return val
}
func main() {
listenAddr := *flag.String("listen", ":1612", "HTTP listen address")
debPath := *flag.String("debpath", "debs", "Path to directory containing deb files.")
@@ -57,22 +46,23 @@ func main() {
kb, err := os.ReadFile("privkey.gpg")
if err != nil {
log.Infof("Generating new key...")
ecKey = unwrap(crypto.GenerateKey("Debanator", "packager@example.com", "x25519", 0))
f := unwrap(os.Create("privkey.gpg"))
ecKey = debanator.Unwrap(crypto.GenerateKey("Debanator", "packager@example.com", "x25519", 0))
f := debanator.Unwrap(os.Create("privkey.gpg"))
defer f.Close()
armored := unwrap(ecKey.Armor())
armored := debanator.Unwrap(ecKey.Armor())
f.WriteString(armored)
} else {
log.Infof("Using existing key...")
ecKey = unwrap(crypto.NewKeyFromArmored(string(kb)))
ecKey = debanator.Unwrap(crypto.NewKeyFromArmored(string(kb)))
}
signingKeyRing, err := crypto.NewKeyRing(ecKey)
if err != nil {
log.Fatal(err)
}
repo := debanator.ScanDebs(debPath)
be := debanator.NewFileBackend(debPath)
repo := debanator.NewRepoFromBackend(be, "/dists/stable")
debanator.Md(repo.Populate())
if err := repo.GenerateFiles(); err != nil {
log.Fatal(err)
}
@@ -94,6 +84,5 @@ func main() {
io.WriteString(w, pub)
})
r.Mount("/dists/stable", repo.GetHandler(signingKeyRing))
r.Get("/pool/main/*", http.StripPrefix("/pool/main/", http.FileServer(http.Dir(debPath))).ServeHTTP)
http.ListenAndServe(listenAddr, r)
}


@@ -1,19 +1,17 @@
package debanator
import (
"bytes"
"crypto/md5"
"crypto/sha1"
"crypto/sha256"
"fmt"
"io"
"net/http"
"time"
"strconv"
"strings"
"github.com/ProtonMail/gopenpgp/v2/crypto"
"github.com/go-chi/chi/v5"
log "github.com/sirupsen/logrus"
"golang.org/x/exp/maps"
"pault.ag/go/debian/control"
"pault.ag/go/debian/deb"
"pault.ag/go/debian/dependency"
"pault.ag/go/debian/hashio"
"pault.ag/go/debian/version"
)
@@ -24,137 +22,58 @@ type LogicalPackage struct {
Arches map[dependency.Arch]map[version.Version]control.BinaryIndex
}
type hashedFile struct {
buf []byte
md5Hash control.MD5FileHash
sha1Hash control.SHA1FileHash
sha256Hash control.SHA256FileHash
}
type Repo struct {
packages []LogicalPackage
cache map[string]hashedFile
release []byte
}
func (r *Repo) GetArches() []dependency.Arch {
arches := make(map[dependency.Arch]struct{})
for _, lp := range r.packages {
for arch := range lp.Arches {
arches[arch] = struct{}{}
}
}
return maps.Keys(arches)
}
// Find the latest versions of all packages for the given arch
func (r *Repo) GetPackagesForArch(a dependency.Arch) []control.BinaryIndex {
out := []control.BinaryIndex{}
for _, p := range r.packages {
if versions, ok := p.Arches[a]; ok {
var latest version.Version
for v := range versions {
if version.Compare(v, latest) > 0 {
latest = v
}
}
out = append(out, p.Arches[a][latest])
}
}
return out
}
func (r *Repo) makePackagesFileForArch(arch dependency.Arch) error {
var b bytes.Buffer
w, hashers, err := hashio.NewHasherWriters([]string{"md5", "sha256", "sha1"}, &b)
enc, _ := control.NewEncoder(w)
for _, d := range r.GetPackagesForArch(arch) {
if err = enc.Encode(d); err != nil {
return fmt.Errorf("encoding package %s: %w", d.Package, err)
}
}
fname := fmt.Sprintf("main/binary-%s/Packages", arch)
hashes := make(map[string]control.FileHash)
for _, h := range hashers {
hashes[h.Name()] = control.FileHashFromHasher(fname, *h)
}
r.cache[fname] = hashedFile{
buf: b.Bytes(),
sha256Hash: control.SHA256FileHash{hashes["sha256"]},
sha1Hash: control.SHA1FileHash{hashes["sha1"]},
md5Hash: control.MD5FileHash{hashes["md5"]},
}
return nil
}
// Generate and cache all the Package/Repo files
func (r *Repo) GenerateFiles() error {
for _, arch := range r.GetArches() {
if err := r.makePackagesFileForArch(arch); err != nil {
return fmt.Errorf("generating files for arch %s: %w", arch, err)
}
}
r.makeRelease()
return nil
}
func (r *Repo) makeRelease() {
var rel bytes.Buffer
enc, _ := control.NewEncoder(&rel)
const dateFmt = "Mon, 02 Jan 2006 15:04:05 MST"
var md5s []control.MD5FileHash
var sha1s []control.SHA1FileHash
var sha256s []control.SHA256FileHash
for _, f := range r.cache {
md5s = append(md5s, f.md5Hash)
sha1s = append(sha1s, f.sha1Hash)
sha256s = append(sha256s, f.sha256Hash)
}
if err := enc.Encode(Release{
Suite: "stable",
Architectures: r.GetArches(),
Components: "main",
Date: time.Now().UTC().Format(dateFmt),
MD5Sum: md5s,
SHA1: sha1s,
SHA256: sha256s,
}); err != nil {
log.Fatal(err)
}
r.release = rel.Bytes()
return
}
// Handle a deb/apt repository http request
func (r *Repo) GetHandler(keyring *crypto.KeyRing) http.Handler {
router := chi.NewRouter()
router.Get("/Release", func(w http.ResponseWriter, req *http.Request) {
if _, err := w.Write(r.release); err != nil {
log.Fatal(err)
}
})
router.Get("/Release.gpg", func(w http.ResponseWriter, req *http.Request) {
msg := crypto.NewPlainMessage(r.release)
sig, err := keyring.SignDetached(msg)
func BinaryIndexFromDeb(r ReaderAtCloser, filePath string) (*control.BinaryIndex, error) {
debFile, err := deb.Load(r, "fakepath")
if err != nil {
log.Fatal(err)
return nil, fmt.Errorf("read deb: %w", err)
}
sigStr, err := sig.GetArmored()
md5sum := md5.New()
sha1sum := sha1.New()
sha256sum := sha256.New()
hashWriter := io.MultiWriter(md5sum, sha1sum, sha256sum)
size, err := io.Copy(hashWriter, r)
if err != nil {
log.Fatal(err)
return nil, fmt.Errorf("hash file: %w", err)
}
io.WriteString(w, sigStr)
})
router.Get("/main/{arch}/Packages", func(w http.ResponseWriter, req *http.Request) {
h, ok := r.cache[fmt.Sprintf("main/%s/Packages", chi.URLParam(req, "arch"))]
if !ok {
w.WriteHeader(http.StatusBadRequest)
return
bi := control.BinaryIndex{
Paragraph: control.Paragraph{
Values: make(map[string]string),
},
Package: debFile.Control.Package,
Source: debFile.Control.Source,
Version: debFile.Control.Version,
InstalledSize: fmt.Sprintf("%d", debFile.Control.InstalledSize),
Size: strconv.Itoa(int(size)),
Maintainer: debFile.Control.Maintainer,
Architecture: debFile.Control.Architecture,
MultiArch: debFile.Control.MultiArch,
Description: debFile.Control.Description,
Homepage: debFile.Control.Homepage,
Section: debFile.Control.Section,
// FIXME: gross, make this more centrally managed somehow
Filename: strings.TrimPrefix(filePath, "/"),
Priority: debFile.Control.Priority,
MD5sum: fmt.Sprintf("%x", md5sum.Sum(nil)),
SHA1: fmt.Sprintf("%x", sha1sum.Sum(nil)),
SHA256: fmt.Sprintf("%x", sha256sum.Sum(nil)),
}
_, err := w.Write(h.buf); if err != nil {
log.Error(err)
if debFile.Control.Depends.String() != "" {
bi.Paragraph.Set("Depends", debFile.Control.Depends.String())
}
})
return router
if debFile.Control.Recommends.String() != "" {
bi.Paragraph.Set("Recommends", debFile.Control.Recommends.String())
}
if debFile.Control.Suggests.String() != "" {
bi.Paragraph.Set("Suggests", debFile.Control.Suggests.String())
}
if debFile.Control.Breaks.String() != "" {
bi.Paragraph.Set("Breaks", debFile.Control.Breaks.String())
}
if debFile.Control.Replaces.String() != "" {
bi.Paragraph.Set("Replaces", debFile.Control.Replaces.String())
}
if debFile.Control.BuiltUsing.String() != "" {
bi.Paragraph.Set("BuiltUsing", debFile.Control.BuiltUsing.String())
}
return &bi, nil
}

repo.go (new file)

@@ -0,0 +1,208 @@
package debanator
import (
"bytes"
"fmt"
"io"
"net/http"
"path"
"time"
"github.com/ProtonMail/gopenpgp/v2/crypto"
"github.com/go-chi/chi/v5"
log "github.com/sirupsen/logrus"
"golang.org/x/exp/maps"
"pault.ag/go/debian/control"
"pault.ag/go/debian/dependency"
"pault.ag/go/debian/hashio"
"pault.ag/go/debian/version"
)
const FILESPREFIX = "pool"
type hashedFile struct {
buf []byte
md5Hash control.MD5FileHash
sha1Hash control.SHA1FileHash
sha256Hash control.SHA256FileHash
}
type Repo struct {
// The prefix for serving HTTP paths to the files provided by this package.
// This is needed so that we can give absolute paths in Package files.
filePrefix string
be Backend
packages []LogicalPackage
cache map[string]hashedFile
release []byte
}
func NewRepoFromBackend(backend Backend, filePrefix string) Repo {
return Repo{
be: backend,
cache: make(map[string]hashedFile),
filePrefix: filePrefix,
}
}
func (r *Repo) GetArches() []dependency.Arch {
arches := make(map[dependency.Arch]struct{})
for _, lp := range r.packages {
for arch := range lp.Arches {
arches[arch] = struct{}{}
}
}
return maps.Keys(arches)
}
// Find the latest versions of all packages for the given arch
func (r *Repo) GetPackagesForArch(a dependency.Arch) []control.BinaryIndex {
out := []control.BinaryIndex{}
for _, p := range r.packages {
if versions, ok := p.Arches[a]; ok {
var latest version.Version
for v := range versions {
if version.Compare(v, latest) > 0 {
latest = v
}
}
out = append(out, p.Arches[a][latest])
}
}
return out
}
func (r *Repo) makePackagesFileForArch(arch dependency.Arch) error {
var b bytes.Buffer
w, hashers, err := hashio.NewHasherWriters([]string{"md5", "sha256", "sha1"}, &b)
enc, _ := control.NewEncoder(w)
for _, d := range r.GetPackagesForArch(arch) {
if err = enc.Encode(d); err != nil {
return fmt.Errorf("encoding package %s: %w", d.Package, err)
}
}
fname := fmt.Sprintf("main/binary-%s/Packages", arch)
hashes := make(map[string]control.FileHash)
for _, h := range hashers {
hashes[h.Name()] = control.FileHashFromHasher(fname, *h)
}
r.cache[fname] = hashedFile{
buf: b.Bytes(),
sha256Hash: control.SHA256FileHash{
FileHash: hashes["sha256"],
},
sha1Hash: control.SHA1FileHash{
FileHash: hashes["sha1"]},
md5Hash: control.MD5FileHash{FileHash: hashes["md5"]},
}
return nil
}
// Generate and cache all the Package/Repo files
func (r *Repo) GenerateFiles() error {
for _, arch := range r.GetArches() {
if err := r.makePackagesFileForArch(arch); err != nil {
return fmt.Errorf("generating files for arch %s: %w", arch, err)
}
}
r.makeRelease()
return nil
}
func (r *Repo) makeRelease() {
var rel bytes.Buffer
enc, _ := control.NewEncoder(&rel)
const dateFmt = "Mon, 02 Jan 2006 15:04:05 MST"
var md5s []control.MD5FileHash
var sha1s []control.SHA1FileHash
var sha256s []control.SHA256FileHash
for _, f := range r.cache {
md5s = append(md5s, f.md5Hash)
sha1s = append(sha1s, f.sha1Hash)
sha256s = append(sha256s, f.sha256Hash)
}
if err := enc.Encode(Release{
Suite: "stable",
Architectures: r.GetArches(),
Components: "main",
Date: time.Now().UTC().Format(dateFmt),
MD5Sum: md5s,
SHA1: sha1s,
SHA256: sha256s,
}); err != nil {
log.Fatal(err)
}
r.release = rel.Bytes()
return
}
// Handle a deb/apt repository http request
func (r *Repo) GetHandler(keyring *crypto.KeyRing) http.Handler {
router := chi.NewRouter()
router.Get("/Release", func(w http.ResponseWriter, req *http.Request) {
if _, err := w.Write(r.release); err != nil {
log.Fatal(err)
}
})
router.Get("/Release.gpg", func(w http.ResponseWriter, req *http.Request) {
msg := crypto.NewPlainMessage(r.release)
sig, err := keyring.SignDetached(msg)
if err != nil {
log.Fatal(err)
}
sigStr, err := sig.GetArmored()
if err != nil {
log.Fatal(err)
}
io.WriteString(w, sigStr)
})
router.Get("/main/{arch}/Packages", func(w http.ResponseWriter, req *http.Request) {
h, ok := r.cache[fmt.Sprintf("main/%s/Packages", chi.URLParam(req, "arch"))]
if !ok {
w.WriteHeader(http.StatusBadRequest)
return
}
_, err := w.Write(h.buf)
if err != nil {
log.Error(err)
}
})
router.Get(fmt.Sprintf("/%s/*", FILESPREFIX), r.be.ServeFiles(r.filePrefix).ServeHTTP)
return router
}
func (r *Repo) Populate() error {
packs := make(map[string]LogicalPackage)
files := Unwrap(r.be.GetFiles())
for _, f := range files {
rd := Unwrap(f.GetReader())
bi, err := BinaryIndexFromDeb(rd, path.Join(r.filePrefix, FILESPREFIX, f.GetName()))
if err != nil {
return fmt.Errorf("processing deb file: %w", err)
}
packageName := bi.Package
if _, ok := packs[packageName]; !ok {
packs[packageName] = LogicalPackage{
Name: packageName,
Arches: make(map[dependency.Arch]map[version.Version]control.BinaryIndex),
}
}
pack := packs[packageName]
if _, ok := pack.Arches[bi.Architecture]; !ok {
pack.Arches[bi.Architecture] = make(map[version.Version]control.BinaryIndex)
}
arch := pack.Arches[bi.Architecture]
if _, ok := arch[bi.Version]; !ok {
arch[bi.Version] = *bi
} else {
log.WithFields(log.Fields{
"package": packageName,
"arch": arch,
"version": bi.Version.String(),
}).Warn("Duplicate package/arch/version found, ignoring...")
}
}
r.packages = maps.Values(packs)
return nil
}

util.go (new file)

@@ -0,0 +1,16 @@
package debanator
import (
log "github.com/sirupsen/logrus"
)
func Md(err error) {
if err != nil {
log.Fatal(err)
}
}
func Unwrap[T any](val T, err error) T {
Md(err)
return val
}