Start on api

This commit is contained in:
a 2022-11-27 14:12:15 -06:00
commit 2045db7c05
32 changed files with 4422 additions and 0 deletions

4
Dockerfile Normal file
View File

@ -0,0 +1,4 @@
# Minimal base image for the apt-mirror service.
FROM debian:11.5
# apt-get is the script-stable CLI (plain `apt` warns its interface is
# unstable), and -y keeps the upgrade non-interactive: without it the
# image build hangs waiting for confirmation.
RUN apt-get update && apt-get upgrade -y

7
go.mod Normal file
View File

@ -0,0 +1,7 @@
module tuxpa.in/k/apt-mirror
go 1.19
require github.com/ulikunitz/xz v0.5.10
require github.com/go-chi/chi/v5 v5.0.7 // indirect

4
go.sum Normal file
View File

@ -0,0 +1,4 @@
github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8=
github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
github.com/ulikunitz/xz v0.5.10 h1:t92gobL9l3HE202wg3rlk19F6X+JOxl9BBrCCMYEYd8=
github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=

56
src/api/api.go Normal file
View File

@ -0,0 +1,56 @@
package api
import (
"encoding/json"
"net/http"
"github.com/go-chi/chi/v5"
)
// ApiHandler aggregates the three backend implementations and exposes
// their HTTP endpoints (see Route for the URL prefixes).
type ApiHandler struct {
sources SourcesImpl
copyright CopyrightImpl
patch PatchImpl
}
// NewApi builds an ApiHandler from the three backend implementations.
func NewApi(
sources SourcesImpl,
copyright CopyrightImpl,
patch PatchImpl,
) *ApiHandler {
h := &ApiHandler{}
h.sources = sources
h.copyright = copyright
h.patch = patch
return h
}
// returnJson runs fn(req) and writes the result to w as JSON.
//
// Any failure — from the handler itself or from JSON encoding — is
// reported as an HTTP 500 carrying the error text; otherwise the
// marshaled response is written with a JSON content type.
func returnJson[REQ any, RESP any](w http.ResponseWriter, req REQ, fn func(REQ) (RESP, error)) {
resp, err := fn(req)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Marshal before touching the ResponseWriter so an encoding failure
// still produces a clean 500 instead of a truncated 200 body.
ans, err := json.Marshal(resp)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Without an explicit Content-Type, net/http sniffs the body and
// clients would see text/plain for JSON payloads.
w.Header().Set("Content-Type", "application/json")
w.Write(ans)
}
// PingResp is the payload returned by the /ping status endpoints.
type PingResp struct {
HttpCode int `json:"http_status_code"`
LastUpdate string `json:"last_update"`
Status string `json:"status"`
}
// NamedItem is a generic name-only entry used in list/search results.
type NamedItem struct {
Name string `json:"name"`
}
// Route mounts every API group onto r: sources under /api, copyright
// under /copyright/api, and patches under /patches/api.
func (a *ApiHandler) Route(r chi.Router) {
r.Route("/api", a.RouteSource)
r.Route("/copyright/api", a.RouteCopyright)
r.Route("/patches/api", a.RoutePatch)
}

64
src/api/copyright.go Normal file
View File

@ -0,0 +1,64 @@
package api
import (
"encoding/json"
"net/http"
"github.com/go-chi/chi/v5"
)
// CopyrightImpl is the backend contract for the copyright API
// endpoints; RouteCopyright maps URLs onto these methods.
type CopyrightImpl interface {
Search(*CopyrightSearchReq) (*CopyrightSearchResp, error)
SearchPath(*CopyrightSearchPathReq) (*CopyrightSearchPathResp, error)
SearchBatch(*CopyrightSearchBatchReq) (*CopyrightSearchBatchResp, error)
List(*CopyrightListReq) (*CopyrightListResp, error)
ListPrefix(*CopyrightListPrefixReq) (*CopyrightListPrefixResp, error)
Status() *PingResp
}
// Request/response payloads for CopyrightImpl.
// NOTE(review): all are empty placeholders at this stage of the API.
type CopyrightSearchReq struct{}
type CopyrightSearchResp struct{}
type CopyrightSearchPathReq struct{}
type CopyrightSearchPathResp struct{}
type CopyrightSearchBatchReq struct{}
type CopyrightSearchBatchResp struct{}
type CopyrightListReq struct{}
type CopyrightListResp struct{}
type CopyrightListPrefixReq struct{}
type CopyrightListPrefixResp struct{}
// RouteCopyright registers the copyright API endpoints on r.
func (a *ApiHandler) RouteCopyright(r chi.Router) {
// search endpoints
r.Get("/sha256", func(w http.ResponseWriter, _ *http.Request) {
returnJson(w, &CopyrightSearchReq{}, a.copyright.Search)
})
r.Post("/sha256", func(w http.ResponseWriter, _ *http.Request) {
returnJson(w, &CopyrightSearchBatchReq{}, a.copyright.SearchBatch)
})
r.Get("/file/{packagename}/{version}/*", func(w http.ResponseWriter, _ *http.Request) {
returnJson(w, &CopyrightSearchPathReq{}, a.copyright.SearchPath)
})
// listing endpoints
r.Get("/list", func(w http.ResponseWriter, _ *http.Request) {
returnJson(w, &CopyrightListReq{}, a.copyright.List)
})
r.Get("/prefix/{prefix}", func(w http.ResponseWriter, _ *http.Request) {
returnJson(w, &CopyrightListPrefixReq{}, a.copyright.ListPrefix)
})
// health endpoint
r.Get("/ping", func(w http.ResponseWriter, _ *http.Request) {
_ = json.NewEncoder(w).Encode(a.copyright.Status())
})
}

59
src/api/patches.go Normal file
View File

@ -0,0 +1,59 @@
package api
import (
"encoding/json"
"net/http"
"github.com/go-chi/chi/v5"
)
// PatchImpl is the backend contract for the patches API endpoints;
// RoutePatch maps URLs onto these methods.
type PatchImpl interface {
Summary(*PatchSummaryReq) (*PatchSummaryResp, error)
View(*PatchViewReq) (*PatchViewResp, error)
List(*PatchListReq) (*PatchListResp, error)
ListPrefix(*PatchListPrefixReq) (*PatchListPrefixResp, error)
Status() *PingResp
}
// Request/response payloads for PatchImpl.
// NOTE(review): all are empty placeholders at this stage of the API;
// PatchSummaryBatchReq/Resp are not referenced by any interface yet.
type PatchSummaryReq struct{}
type PatchSummaryResp struct{}
type PatchViewReq struct{}
type PatchViewResp struct{}
type PatchSummaryBatchReq struct{}
type PatchSummaryBatchResp struct{}
type PatchListReq struct{}
type PatchListResp struct{}
type PatchListPrefixReq struct{}
type PatchListPrefixResp struct{}
// RoutePatch registers the patches API endpoints on r.
func (a *ApiHandler) RoutePatch(r chi.Router) {
// search endpoints
r.Get("/file/{packagename}/{version}", func(w http.ResponseWriter, _ *http.Request) {
returnJson(w, &PatchSummaryReq{}, a.patch.Summary)
})
r.Get("/file/{packagename}/{version}/*", func(w http.ResponseWriter, _ *http.Request) {
returnJson(w, &PatchViewReq{}, a.patch.View)
})
// listing endpoints
r.Get("/list", func(w http.ResponseWriter, _ *http.Request) {
returnJson(w, &PatchListReq{}, a.patch.List)
})
r.Get("/prefix/{prefix}", func(w http.ResponseWriter, _ *http.Request) {
returnJson(w, &PatchListPrefixReq{}, a.patch.ListPrefix)
})
// health endpoint
r.Get("/ping", func(w http.ResponseWriter, _ *http.Request) {
_ = json.NewEncoder(w).Encode(a.patch.Status())
})
}

97
src/api/sources.go Normal file
View File

@ -0,0 +1,97 @@
package api
import (
"encoding/json"
"net/http"
"github.com/go-chi/chi/v5"
)
// SourcesImpl is the backend contract for the sources API endpoints;
// RouteSource maps URLs onto these methods.
type SourcesImpl interface {
SearchQuery(*SourcesSearchQueryReq) (*SourcesSearchQueryResp, error)
SearchSha(*SourcesSearchShaReq) (*SourcesSearchShaResp, error)
SearchTags(*SourcesSearchTagsReq) (*SourcesSearchTagsResp, error)
List(*SourcesListReq) (*SourcesListResp, error)
ListPrefix(*SourcesListPrefixReq) (*SourcesListPrefixResp, error)
Versions(*SourcesVersionsReq) (*SourcesVersionsResp, error)
Navigate(*SourcesNavigateReq) (*SourcesNavigateResp, error)
Status() *PingResp
}
// SourcesSearchQueryReq is the request for SourcesImpl.SearchQuery.
type SourcesSearchQueryReq struct {
Query string `json:"query"`
}
// SourcesSearchQueryResp carries an exact match plus other candidate
// packages for a search query.
type SourcesSearchQueryResp struct {
Results struct {
Exact NamedItem `json:"exact"`
Other []NamedItem `json:"other"`
} `json:"results"`
Suite string `json:"suite"`
}
// Remaining request/response payloads for SourcesImpl.
// NOTE(review): empty placeholders at this stage of the API.
type SourcesSearchShaReq struct{}
type SourcesSearchShaResp struct{}
type SourcesSearchTagsReq struct{}
type SourcesSearchTagsResp struct{}
type SourcesListReq struct{}
type SourcesListResp struct{}
type SourcesListPrefixReq struct{}
type SourcesListPrefixResp struct{}
type SourcesVersionsReq struct{}
type SourcesVersionsResp struct{}
type SourcesNavigateReq struct{}
type SourcesNavigateResp struct{}
// RouteSource registers the sources API endpoints on r.
func (a *ApiHandler) RouteSource(r chi.Router) {
// searching
r.Get("/search/{query}", func(w http.ResponseWriter, r *http.Request) {
query := chi.URLParam(r, "query")
req := &SourcesSearchQueryReq{
Query: query,
}
returnJson(w, req, a.sources.SearchQuery)
})
r.Get("/sha256", func(w http.ResponseWriter, r *http.Request) {
req := &SourcesSearchShaReq{}
returnJson(w, req, a.sources.SearchSha)
})
r.Get("/ctag", func(w http.ResponseWriter, r *http.Request) {
req := &SourcesSearchTagsReq{}
returnJson(w, req, a.sources.SearchTags)
})
// package lists
r.Get("/list", func(w http.ResponseWriter, r *http.Request) {
req := &SourcesListReq{}
returnJson(w, req, a.sources.List)
})
r.Get("/prefix/{prefix}", func(w http.ResponseWriter, r *http.Request) {
req := &SourcesListPrefixReq{}
returnJson(w, req, a.sources.ListPrefix)
})
// package info
r.Get("/src/{packagename}", func(w http.ResponseWriter, r *http.Request) {
req := &SourcesVersionsReq{}
returnJson(w, req, a.sources.Versions)
})
r.Get("/info/package/{packagename}/{version}", func(w http.ResponseWriter, r *http.Request) {
req := &SourcesNavigateReq{}
returnJson(w, req, a.sources.Navigate)
})
// navigation
r.Get("/src/{packagename}/{version}/*", func(w http.ResponseWriter, r *http.Request) {
req := &SourcesNavigateReq{}
returnJson(w, req, a.sources.Navigate)
})
// status
r.Get("/ping", func(w http.ResponseWriter, r *http.Request) {
// Stream-encode like RouteCopyright/RoutePatch do; the previous
// `d, _ := json.Marshal(...)` silently discarded the error.
json.NewEncoder(w).Encode(a.sources.Status())
})
}

3
src/apt/README.md Normal file
View File

@ -0,0 +1,3 @@
# apt
based off https://github.com/cybozu-go/aptutil

195
src/apt/fileinfo.go Normal file
View File

@ -0,0 +1,195 @@
package apt
import (
"bytes"
"crypto/md5"
"crypto/sha1"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"fmt"
"io"
"path"
)
// FileInfo is a set of meta data of a file.
// FileInfo is a set of meta data of a file.
type FileInfo struct {
path string // repository-relative path identifying the file
size uint64 // body length in bytes
md5sum []byte // nil means no MD5 checksum to be checked.
sha1sum []byte // nil means no SHA1 ...
sha256sum []byte // nil means no SHA256 ...
}
// Same returns true if t has the same checksum values.
func (fi *FileInfo) Same(t *FileInfo) bool {
if fi == t {
return true
}
if fi.path != t.path {
return false
}
if fi.size != t.size {
return false
}
if fi.md5sum != nil && bytes.Compare(fi.md5sum, t.md5sum) != 0 {
return false
}
if fi.sha1sum != nil && bytes.Compare(fi.sha1sum, t.sha1sum) != 0 {
return false
}
if fi.sha256sum != nil && bytes.Compare(fi.sha256sum, t.sha256sum) != 0 {
return false
}
return true
}
// Path returns the indentifying path string of the file.
func (fi *FileInfo) Path() string {
return fi.path
}
// Size returns the number of bytes of the file body.
func (fi *FileInfo) Size() uint64 {
return fi.size
}
// HasChecksum returns true if fi has checksums.
func (fi *FileInfo) HasChecksum() bool {
return fi.md5sum != nil
}
// CalcChecksums calculates checksums and stores them in fi.
func (fi *FileInfo) CalcChecksums(data []byte) {
md5sum := md5.Sum(data)
sha1sum := sha1.Sum(data)
sha256sum := sha256.Sum256(data)
fi.size = uint64(len(data))
fi.md5sum = md5sum[:]
fi.sha1sum = sha1sum[:]
fi.sha256sum = sha256sum[:]
}
// AddPrefix creates a new FileInfo by prepending prefix to the path.
func (fi *FileInfo) AddPrefix(prefix string) *FileInfo {
newFI := *fi
newFI.path = path.Join(path.Clean(prefix), fi.path)
return &newFI
}
// MD5SumPath returns the filepath for "by-hash" with md5 checksum.
// If fi has no checksum, an empty string will be returned.
func (fi *FileInfo) MD5SumPath() string {
if fi.md5sum == nil {
return ""
}
return path.Join(path.Dir(fi.path),
"by-hash",
"MD5Sum",
hex.EncodeToString(fi.md5sum))
}
// SHA1Path returns the filepath for "by-hash" with sha1 checksum.
// If fi has no checksum, an empty string will be returned.
func (fi *FileInfo) SHA1Path() string {
if fi.sha1sum == nil {
return ""
}
return path.Join(path.Dir(fi.path),
"by-hash",
"SHA1",
hex.EncodeToString(fi.sha1sum))
}
// SHA256Path returns the filepath for "by-hash" with sha256 checksum.
// If fi has no checksum, an empty string will be returned.
func (fi *FileInfo) SHA256Path() string {
if fi.sha256sum == nil {
return ""
}
return path.Join(path.Dir(fi.path),
"by-hash",
"SHA256",
hex.EncodeToString(fi.sha256sum))
}
// fileInfoJSON is the serialized form of FileInfo: checksums are hex
// strings, and an absent checksum is an empty string.
type fileInfoJSON struct {
Path string
Size int64
MD5Sum string
SHA1Sum string
SHA256Sum string
}
// MarshalJSON implements json.Marshaler
func (fi *FileInfo) MarshalJSON() ([]byte, error) {
var fij fileInfoJSON
fij.Path = fi.path
fij.Size = int64(fi.size)
if fi.md5sum != nil {
fij.MD5Sum = hex.EncodeToString(fi.md5sum)
}
if fi.sha1sum != nil {
fij.SHA1Sum = hex.EncodeToString(fi.sha1sum)
}
if fi.sha256sum != nil {
fij.SHA256Sum = hex.EncodeToString(fi.sha256sum)
}
return json.Marshal(&fij)
}
// UnmarshalJSON implements json.Unmarshaler
func (fi *FileInfo) UnmarshalJSON(data []byte) error {
var fij fileInfoJSON
if err := json.Unmarshal(data, &fij); err != nil {
return err
}
fi.path = fij.Path
fi.size = uint64(fij.Size)
md5sum, err := hex.DecodeString(fij.MD5Sum)
if err != nil {
return fmt.Errorf("UnmarshalJson For %s: %w", fij.Path, err)
}
sha1sum, err := hex.DecodeString(fij.SHA1Sum)
if err != nil {
return fmt.Errorf("UnmarshalJson For %s: %w", fij.Path, err)
}
sha256sum, err := hex.DecodeString(fij.SHA256Sum)
if err != nil {
return fmt.Errorf("UnmarshalJson For %s: %w", fij.Path, err)
}
fi.md5sum = md5sum
fi.sha1sum = sha1sum
fi.sha256sum = sha256sum
return nil
}
// CopyWithFileInfo copies from src to dst until either EOF is reached
// on src or an error occurs, and returns FileInfo calculated while copying.
func CopyWithFileInfo(dst io.Writer, src io.Reader, p string) (*FileInfo, error) {
md5hash := md5.New()
sha1hash := sha1.New()
sha256hash := sha256.New()
w := io.MultiWriter(md5hash, sha1hash, sha256hash, dst)
n, err := io.Copy(w, src)
if err != nil {
return nil, err
}
return &FileInfo{
path: p,
size: uint64(n),
md5sum: md5hash.Sum(nil),
sha1sum: sha1hash.Sum(nil),
sha256sum: sha256hash.Sum(nil),
}, nil
}
// MakeFileInfoNoChecksum constructs a FileInfo without calculating checksums.
func MakeFileInfoNoChecksum(path string, size uint64) *FileInfo {
return &FileInfo{
path: path,
size: size,
}
}

244
src/apt/fileinfo_test.go Normal file
View File

@ -0,0 +1,244 @@
package apt
import (
"bytes"
"crypto/md5"
"crypto/sha1"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"fmt"
"strings"
"testing"
)
// testFileInfoSame exercises FileInfo.Same: two FileInfos are "the
// same" when path and size match and every checksum present on the
// receiver matches the argument's; nil checksums on the receiver are
// not compared.
func testFileInfoSame(t *testing.T) {
t.Parallel()
data := []byte{'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'}
md5sum := md5.Sum(data)
sha1sum := sha1.Sum(data)
sha256sum := sha256.Sum256(data)
// a second payload to produce mismatching digests
data2 := []byte{'1', '2', '3'}
md5sum2 := md5.Sum(data2)
sha1sum2 := sha1.Sum(data2)
fi := &FileInfo{
path: "/data",
size: uint64(len(data)),
md5sum: md5sum[:],
sha1sum: sha1sum[:],
sha256sum: sha256sum[:],
}
if fi.Path() != "/data" {
t.Error(`fi.Path() != "/data"`)
}
badpath := &FileInfo{
path: "bad",
size: uint64(len(data)),
}
if badpath.Same(fi) {
t.Error(`badpath.Same(fi)`)
}
// no checksums on the receiver: only path and size are compared
pathonly := &FileInfo{
path: "/data",
size: uint64(len(data)),
}
if !pathonly.Same(fi) {
t.Error(`!pathonly.Same(fi)`)
}
sizemismatch := &FileInfo{
path: "/data",
size: 0,
}
if sizemismatch.Same(fi) {
t.Error(`sizemismatch.Same(fi)`)
}
md5mismatch := &FileInfo{
path: "/data",
size: uint64(len(data)),
md5sum: md5sum2[:],
}
if md5mismatch.Same(fi) {
t.Error(`md5mismatch.Same(fi)`)
}
md5match := &FileInfo{
path: "/data",
size: uint64(len(data)),
md5sum: md5sum[:],
}
if !md5match.Same(fi) {
t.Error(`!md5match.Same(fi)`)
}
sha1mismatch := &FileInfo{
path: "/data",
size: uint64(len(data)),
md5sum: md5sum[:],
sha1sum: sha1sum2[:],
}
if sha1mismatch.Same(fi) {
t.Error(`sha1mismatch.Same(fi)`)
}
sha1match := &FileInfo{
path: "/data",
size: uint64(len(data)),
md5sum: md5sum[:],
sha1sum: sha1sum[:],
}
if !sha1match.Same(fi) {
t.Error(`!sha1match.Same(fi)`)
}
sha1matchmd5mismatch := &FileInfo{
path: "/data",
size: uint64(len(data)),
md5sum: md5sum2[:],
sha1sum: sha1sum[:],
}
if sha1matchmd5mismatch.Same(fi) {
t.Error(`sha1matchmd5mismatch.Same(fi)`)
}
allmatch := &FileInfo{
path: "/data",
size: uint64(len(data)),
md5sum: md5sum[:],
sha1sum: sha1sum[:],
sha256sum: sha256sum[:],
}
if !allmatch.Same(fi) {
t.Error(`!allmatch.Same(fi)`)
}
}
// testFileInfoJSON checks that a FileInfo survives a JSON
// marshal/unmarshal round-trip according to Same.
func testFileInfoJSON(t *testing.T) {
t.Parallel()
r := strings.NewReader("hello world")
w := new(bytes.Buffer)
p := "/abc/def"
fi, err := CopyWithFileInfo(w, r, p)
if err != nil {
t.Fatal(err)
}
j, err := json.Marshal(fi)
if err != nil {
t.Fatal(err)
}
fi2 := new(FileInfo)
err = json.Unmarshal(j, fi2)
if err != nil {
t.Fatal(err)
}
if !fi.Same(fi2) {
t.Error(`!fi.Same(fi2)`)
t.Log(fmt.Sprintf("%#v", fi2))
}
}
// testFileInfoAddPrefix checks that AddPrefix prepends the prefix to
// the stored path.
func testFileInfoAddPrefix(t *testing.T) {
t.Parallel()
r := strings.NewReader("hello world")
w := new(bytes.Buffer)
p := "/abc/def"
fi, err := CopyWithFileInfo(w, r, p)
if err != nil {
t.Fatal(err)
}
if fi.Path() != "/abc/def" {
t.Error(`fi.Path() != "/abc/def"`)
}
fi = fi.AddPrefix("/prefix")
if fi.Path() != "/prefix/abc/def" {
t.Error(`fi.Path() != "/prefix/abc/def"`)
}
}
// testFileInfoChecksum checks the by-hash path helpers against
// independently computed digests of the copied data.
func testFileInfoChecksum(t *testing.T) {
t.Parallel()
text := "hello world"
r := strings.NewReader(text)
w := new(bytes.Buffer)
p := "/abc/def"
md5sum := md5.Sum([]byte(text))
sha1sum := sha1.Sum([]byte(text))
sha256sum := sha256.Sum256([]byte(text))
m5 := hex.EncodeToString(md5sum[:])
s1 := hex.EncodeToString(sha1sum[:])
s256 := hex.EncodeToString(sha256sum[:])
fi, err := CopyWithFileInfo(w, r, p)
if err != nil {
t.Fatal(err)
}
if fi.MD5SumPath() != "/abc/by-hash/MD5Sum/"+m5 {
t.Error(`fi.MD5SumPath() != "/abc/by-hash/MD5Sum/" + md5`)
}
if fi.SHA1Path() != "/abc/by-hash/SHA1/"+s1 {
t.Error(`fi.SHA1Path() != "/abc/by-hash/SHA1/" + s1`)
}
if fi.SHA256Path() != "/abc/by-hash/SHA256/"+s256 {
t.Error(`fi.SHA256Path() != "/abc/by-hash/SHA256/" + s256`)
}
}
// testFileInfoCopy checks that CopyWithFileInfo both forwards the data
// to dst and produces a FileInfo matching independently computed
// checksums.
func testFileInfoCopy(t *testing.T) {
t.Parallel()
text := "hello world"
r := strings.NewReader(text)
w := new(bytes.Buffer)
p := "/abc/def"
md5sum := md5.Sum([]byte(text))
sha1sum := sha1.Sum([]byte(text))
sha256sum := sha256.Sum256([]byte(text))
fi := &FileInfo{
path: p,
size: uint64(r.Size()),
md5sum: md5sum[:],
sha1sum: sha1sum[:],
sha256sum: sha256sum[:],
}
fi2, err := CopyWithFileInfo(w, r, p)
if err != nil {
t.Fatal(err)
}
if w.String() != text {
t.Errorf(
"Copy did not work properly, expected: %s, actual: %s",
text, w.String(),
)
}
if !fi.Same(fi2) {
t.Error("Generated FileInfo is invalid")
}
}
// TestFileInfo runs the FileInfo unit tests as parallel subtests.
func TestFileInfo(t *testing.T) {
t.Run("Same", testFileInfoSame)
t.Run("JSON", testFileInfoJSON)
t.Run("AddPrefix", testFileInfoAddPrefix)
t.Run("Checksum", testFileInfoChecksum)
t.Run("Copy", testFileInfoCopy)
}

370
src/apt/meta.go Normal file
View File

@ -0,0 +1,370 @@
package apt
// This file provides utilities for debian repository indices.
import (
"compress/bzip2"
"compress/gzip"
"encoding/hex"
"fmt"
"io"
"path"
"strconv"
"strings"
"github.com/ulikunitz/xz"
)
// IsMeta returns true if p points a debian repository index file
// containing checksums for other files.
// IsMeta returns true if p points a debian repository index file
// (Release, Packages, Sources, ...) containing checksums for other
// files. A single well-known compression suffix is stripped from the
// base name before the check.
func IsMeta(p string) bool {
base := path.Base(p)
// https://wiki.debian.org/RepositoryFormat#Compression_of_indices
for _, ext := range []string{".gz", ".bz2", ".xz", ".lzma", ".lz"} {
if strings.HasSuffix(base, ext) {
base = strings.TrimSuffix(base, ext)
break
}
}
switch base {
case "Release", "Release.gpg", "InRelease", "Packages", "Sources", "Index":
return true
}
return false
}
// IsSupported returns true if the meta data is compressed that can be
// decompressed by ExtractFileInfo.
// IsSupported returns true if the meta data file named by p is stored
// in a form ExtractFileInfo can read: plain, gzip, bzip2, gpg, or xz.
func IsSupported(p string) bool {
ext := path.Ext(p)
return ext == "" || ext == ".gz" || ext == ".bz2" || ext == ".gpg" || ext == ".xz"
}
// SupportByHash returns true if paragraph from Release indicates
// support for indices acquisition via hash values (by-hash).
// See https://wiki.debian.org/DebianRepository/Format#indices_acquisition_via_hashsums_.28by-hash.29
// SupportByHash returns true if paragraph d from a Release file
// advertises indices acquisition via hash values: exactly one
// "Acquire-By-Hash" value equal to "yes".
// See https://wiki.debian.org/DebianRepository/Format#indices_acquisition_via_hashsums_.28by-hash.29
func SupportByHash(d Paragraph) bool {
v := d["Acquire-By-Hash"]
return len(v) == 1 && v[0] == "yes"
}
// parseChecksum parses one checksum line of the form
// "<hex-digest> <size> <path>" as found in Release/Sources files.
func parseChecksum(l string) (p string, size uint64, csum []byte, err error) {
fields := strings.Fields(l)
if len(fields) != 3 {
return p, size, csum, fmt.Errorf("invalid checksum line: %s", l)
}
if size, err = strconv.ParseUint(fields[1], 10, 64); err != nil {
return p, size, csum, err
}
if csum, err = hex.DecodeString(fields[0]); err != nil {
return p, size, csum, err
}
p = fields[2]
return p, size, csum, nil
}
// getFilesFromRelease parses Release or InRelease file and
// returns a list of *FileInfo pointed in the file.
func getFilesFromRelease(p string, r io.Reader) ([]*FileInfo, Paragraph, error) {
dir := path.Dir(p)
d, err := NewParser(r).Read()
if err != nil {
return nil, nil, fmt.Errorf("NewParser(r).Read(): %w", err)
}
md5sums := d["MD5Sum"]
sha1sums := d["SHA1"]
sha256sums := d["SHA256"]
if len(md5sums) == 0 && len(sha1sums) == 0 && len(sha256sums) == 0 {
return nil, d, nil
}
m := make(map[string]*FileInfo)
for _, l := range md5sums {
p, size, csum, err := parseChecksum(l)
p = path.Join(dir, path.Clean(p))
if err != nil {
return nil, nil, fmt.Errorf("parseChecksum for md5sums: %w", err)
}
fi := &FileInfo{
path: p,
size: size,
md5sum: csum,
}
m[p] = fi
}
for _, l := range sha1sums {
p, size, csum, err := parseChecksum(l)
p = path.Join(dir, path.Clean(p))
if err != nil {
return nil, nil, fmt.Errorf("parseChecksum for sha1sums: %w", err)
}
fi, ok := m[p]
if ok {
fi.sha1sum = csum
} else {
fi := &FileInfo{
path: p,
size: size,
sha1sum: csum,
}
m[p] = fi
}
}
for _, l := range sha256sums {
p, size, csum, err := parseChecksum(l)
p = path.Join(dir, path.Clean(p))
if err != nil {
return nil, nil, fmt.Errorf("parseChecksum for sha256sums: %w", err)
}
fi, ok := m[p]
if ok {
fi.sha256sum = csum
} else {
fi := &FileInfo{
path: p,
size: size,
sha256sum: csum,
}
m[p] = fi
}
}
// WORKAROUND: some (e.g. dell) repositories have invalid Release
// that contains wrong checksum for Release itself. Ignore them.
delete(m, path.Join(dir, "Release"))
delete(m, path.Join(dir, "Release.gpg"))
delete(m, path.Join(dir, "InRelease"))
l := make([]*FileInfo, 0, len(m))
for _, fi := range m {
l = append(l, fi)
}
return l, d, nil
}
// getFilesFromPackages parses Packages file and returns
// a list of *FileInfo pointed in the file.
func getFilesFromPackages(p string, r io.Reader) ([]*FileInfo, Paragraph, error) {
var l []*FileInfo
parser := NewParser(r)
for {
d, err := parser.Read()
if err == io.EOF {
break
}
if err != nil {
return nil, nil, fmt.Errorf("parser.Read: %w", err)
}
filename, ok := d["Filename"]
if !ok {
return nil, nil, fmt.Errorf("no Filename in " + p)
}
fpath := path.Clean(filename[0])
strsize, ok := d["Size"]
if !ok {
return nil, nil, fmt.Errorf("no Size in " + p)
}
size, err := strconv.ParseUint(strsize[0], 10, 64)
if err != nil {
return nil, nil, err
}
fi := &FileInfo{
path: fpath,
size: size,
}
if csum, ok := d["MD5sum"]; ok {
b, err := hex.DecodeString(csum[0])
if err != nil {
return nil, nil, err
}
fi.md5sum = b
}
if csum, ok := d["SHA1"]; ok {
b, err := hex.DecodeString(csum[0])
if err != nil {
return nil, nil, err
}
fi.sha1sum = b
}
if csum, ok := d["SHA256"]; ok {
b, err := hex.DecodeString(csum[0])
if err != nil {
return nil, nil, err
}
fi.sha256sum = b
}
l = append(l, fi)
}
return l, nil, nil
}
// getFilesFromSources parses Sources file and returns
// a list of *FileInfo pointed in the file.
func getFilesFromSources(p string, r io.Reader) ([]*FileInfo, Paragraph, error) {
var l []*FileInfo
parser := NewParser(r)
for {
d, err := parser.Read()
if err == io.EOF {
break
}
if err != nil {
return nil, nil, fmt.Errorf("parser.Read: %w", err)
}
dir, ok := d["Directory"]
if !ok {
return nil, nil, fmt.Errorf("no Directory in " + p)
}
files, ok := d["Files"]
if !ok {
return nil, nil, fmt.Errorf("no Files in " + p)
}
m := make(map[string]*FileInfo)
for _, l := range files {
fname, size, csum, err := parseChecksum(l)
if err != nil {
return nil, nil, fmt.Errorf("parseChecksum for Files: %w", err)
}
fpath := path.Clean(path.Join(dir[0], fname))
fi := &FileInfo{
path: fpath,
size: size,
md5sum: csum,
}
m[fpath] = fi
}
for _, l := range d["Checksums-Sha1"] {
fname, _, csum, err := parseChecksum(l)
if err != nil {
return nil, nil, fmt.Errorf("parseChecksum for Checksums-Sha1: %w", err)
}
fpath := path.Clean(path.Join(dir[0], fname))
fi, ok := m[fpath]
if !ok {
return nil, nil, fmt.Errorf("mismatch between Files and Checksums-Sha1 in " + p)
}
fi.sha1sum = csum
}
for _, l := range d["Checksums-Sha256"] {
fname, _, csum, err := parseChecksum(l)
if err != nil {
return nil, nil, fmt.Errorf("parseChecksum for Checksums-Sha256: %w", err)
}
fpath := path.Clean(path.Join(dir[0], fname))
fi, ok := m[fpath]
if !ok {
return nil, nil, fmt.Errorf("mismatch between Files and Checksums-Sha256 in " + p)
}
fi.sha256sum = csum
}
for _, fi := range m {
l = append(l, fi)
}
}
return l, nil, nil
}
// getFilesFromIndex parses i18n/Index file and returns
// a list of *FileInfo pointed in the file.
func getFilesFromIndex(p string, r io.Reader) ([]*FileInfo, Paragraph, error) {
return getFilesFromRelease(p, r)
}
// ExtractFileInfo parses debian repository index files such as
// Release, Packages, or Sources and return a list of *FileInfo
// listed in the file.
//
// If the index is Release, InRelease, or Index, this function
// also returns non-nil Paragraph data of the index.
//
// p is the relative path of the file.
// ExtractFileInfo parses debian repository index files such as
// Release, Packages, or Sources and return a list of *FileInfo
// listed in the file.
//
// If the index is Release, InRelease, or Index, this function
// also returns non-nil Paragraph data of the index.
//
// p is the relative path of the file.
func ExtractFileInfo(p string, r io.Reader) ([]*FileInfo, Paragraph, error) {
if !IsMeta(p) {
// %s avoids treating "%" characters in p as format verbs
return nil, nil, fmt.Errorf("not a meta data file: %s", p)
}
base := path.Base(p)
ext := path.Ext(base)
switch ext {
case "", ".gpg":
// do nothing
case ".gz":
gz, err := gzip.NewReader(r)
if err != nil {
return nil, nil, err
}
defer gz.Close()
r = gz
base = base[:len(base)-3]
case ".bz2":
r = bzip2.NewReader(r)
base = base[:len(base)-4]
case ".xz":
// xz.Reader has no Close method, so nothing to defer here
xzr, err := xz.NewReader(r)
if err != nil {
return nil, nil, err
}
r = xzr
base = base[:len(base)-3]
default:
return nil, nil, fmt.Errorf("unsupported file extension: %s", ext)
}
switch base {
case "Release", "InRelease":
return getFilesFromRelease(p, r)
case "Packages":
return getFilesFromPackages(p, r)
case "Sources":
return getFilesFromSources(p, r)
case "Index":
return getFilesFromIndex(p, r)
}
return nil, nil, nil
}

320
src/apt/meta_test.go Normal file
View File

@ -0,0 +1,320 @@
package apt
import (
"encoding/hex"
"os"
"testing"
)
// TestIsMeta checks index-name recognition, including compressed
// variants and non-index paths.
func TestIsMeta(t *testing.T) {
if IsMeta("hoge.deb") {
t.Error(`IsMeta("hoge.deb")`)
}
if IsMeta("Release/hoge") {
t.Error(`IsMeta("Release/hoge")`)
}
if !IsMeta("Release") {
t.Error(`!IsMeta("Release")`)
}
if !IsMeta("Release.gpg") {
t.Error(`!IsMeta("Release.gpg")`)
}
if !IsMeta("InRelease") {
t.Error(`!IsMeta("InRelease")`)
}
if !IsMeta("Packages") {
t.Error(`!IsMeta("Packages")`)
}
if !IsMeta("Packages.gz") {
t.Error(`!IsMeta("Packages.gz")`)
}
if !IsMeta("Packages.bz2") {
t.Error(`!IsMeta("Packages.bz2")`)
}
if !IsMeta("Packages.xz") {
t.Error(`!IsMeta("Packages.xz")`)
}
// only one compression suffix is stripped
if IsMeta("Packages.gz.xz") {
t.Error(`IsMeta("Packages.gz.xz")`)
}
if !IsMeta("a/b/c/Sources.gz") {
t.Error(`!IsMeta("a/b/c/Sources.gz")`)
}
if !IsMeta("Index") {
t.Error(`!IsMeta("Index")`)
}
}
// containsFileInfo reports whether l contains an entry that fi.Same
// considers equal to fi.
func containsFileInfo(fi *FileInfo, l []*FileInfo) bool {
for _, fi2 := range l {
if fi.Same(fi2) {
return true
}
}
return false
}
// TestAcquireByHash checks that a Release fixture advertising
// Acquire-By-Hash is detected by SupportByHash.
func TestAcquireByHash(t *testing.T) {
t.Parallel()
f, err := os.Open("testdata/hash/Release")
if err != nil {
t.Fatal(err)
}
defer f.Close()
_, d, err := ExtractFileInfo("ubuntu/dists/trusty/Release", f)
if err != nil {
t.Fatal(err)
}
if !SupportByHash(d) {
t.Error(`!SupportByHash(d)`)
}
}
// TestGetFilesFromRelease checks file extraction from a Release
// fixture: entry count, by-hash support flag, and two sample entries
// with all three checksums.
func TestGetFilesFromRelease(t *testing.T) {
t.Parallel()
f, err := os.Open("testdata/af/Release")
if err != nil {
t.Fatal(err)
}
defer f.Close()
fil, d, err := ExtractFileInfo("ubuntu/dists/trusty/Release", f)
if err != nil {
t.Fatal(err)
}
if len(fil) != 9 {
t.Error(`len(fil) != 9`)
}
if SupportByHash(d) {
t.Error(`SupportByHash(d)`)
}
md5sum, _ := hex.DecodeString("5c30f072d01cde094a5c07fccd217cf3")
sha1sum, _ := hex.DecodeString("e3c9a2028a6938e49fc240cdd55c2f4b0b75dfde")
sha256sum, _ := hex.DecodeString("e3b1e5a6951881bca3ee230e5f3215534eb07f602a2f0415af3b182468468104")
fi := &FileInfo{
path: "ubuntu/dists/trusty/main/binary-all/Packages",
size: 3098,
md5sum: md5sum,
sha1sum: sha1sum,
sha256sum: sha256sum,
}
if !containsFileInfo(fi, fil) {
t.Error(`ubuntu/dists/trusty/main/binary-all/Packages`)
}
md5sum, _ = hex.DecodeString("3f71c3b19ec6f926c71504cf147f3574")
sha1sum, _ = hex.DecodeString("64a566a5b6a92c1fefde9630d1b8ecb6e9352523")
sha256sum, _ = hex.DecodeString("78fa82404a432d7b56761ccdbf275f4a338c8779a9cec17480b91672c28682aa")
fi = &FileInfo{
path: "ubuntu/dists/trusty/main/binary-amd64/Packages.gz",
size: 4418,
md5sum: md5sum,
sha1sum: sha1sum,
sha256sum: sha256sum,
}
if !containsFileInfo(fi, fil) {
t.Error(`ubuntu/dists/trusty/main/binary-amd64/Packages.gz`)
}
}
// TestGetFilesFromPackages checks file extraction from a Packages
// fixture: entry count and two sample .deb entries (SHA1/SHA256 only).
func TestGetFilesFromPackages(t *testing.T) {
t.Parallel()
f, err := os.Open("testdata/af/Packages")
if err != nil {
t.Fatal(err)
}
defer f.Close()
fil, _, err := ExtractFileInfo("ubuntu/dists/testing/main/binary-amd64/Packages", f)
if err != nil {
t.Fatal(err)
}
if len(fil) != 3 {
t.Error(`len(fil) != 3`)
}
sha1sum, _ := hex.DecodeString("903b3305c86e872db25985f2b686ef8d1c3760cf")
sha256sum, _ := hex.DecodeString("cebb641f03510c2c350ea2e94406c4c09708364fa296730e64ecdb1107b380b7")
fi := &FileInfo{
path: "pool/c/cybozu-abc_0.2.2-1_amd64.deb",
size: 102369852,
sha1sum: sha1sum,
sha256sum: sha256sum,
}
if !fi.Same(fil[0]) {
t.Error(`!fi.Same(fil[0])`)
}
sha1sum, _ = hex.DecodeString("b89e2f1a9f5efb8b7c1e2e2d8abbab05d7981187")
sha256sum, _ = hex.DecodeString("814cec015067fb083e14d95d77c5ec41c11de99180ea518813b7abc88805fa24")
fi = &FileInfo{
path: "pool/c/cybozu-fuga_2.0.0.2-1_all.deb",
size: 1018650,
sha1sum: sha1sum,
sha256sum: sha256sum,
}
if !fi.Same(fil[1]) {
t.Error(`!fi.Same(fil[1])`)
}
}
// TestGetFilesFromSources checks file extraction from a gzip-compressed
// Sources fixture: several source artifacts with full checksum sets.
func TestGetFilesFromSources(t *testing.T) {
t.Parallel()
f, err := os.Open("testdata/af/Sources.gz")
if err != nil {
t.Fatal(err)
}
defer f.Close()
fil, _, err := ExtractFileInfo("ubuntu/dists/testing/main/source/Sources.gz", f)
if err != nil {
t.Fatal(err)
}
if len(fil) < 2 {
t.Error(`len(fil) < 2`)
}
md5sum, _ := hex.DecodeString("6cfe5a56e3b0fc25edf653084c24c238")
sha1sum, _ := hex.DecodeString("d89f409cae51a5d424a769560fc1688d2a636d73")
sha256sum, _ := hex.DecodeString("3a126eec194457778a477d95a9dd4b8c03d6a95b9c064cddcae63eba2e674797")
fi := &FileInfo{
path: "pool/main/a/aalib/aalib_1.4p5-41.dsc",
size: 2078,
md5sum: md5sum,
sha1sum: sha1sum,
sha256sum: sha256sum,
}
if !containsFileInfo(fi, fil) {
t.Error(`pool/main/a/aalib/aalib_1.4p5-41.dsc`)
}
md5sum, _ = hex.DecodeString("9801095c42bba12edebd1902bcf0a990")
sha1sum, _ = hex.DecodeString("a23269e950a249d2ef93625837cace45ddbce03b")
sha256sum, _ = hex.DecodeString("fbddda9230cf6ee2a4f5706b4b11e2190ae45f5eda1f0409dc4f99b35e0a70ee")
fi = &FileInfo{
path: "pool/main/a/aalib/aalib_1.4p5.orig.tar.gz",
size: 391028,
md5sum: md5sum,
sha1sum: sha1sum,
sha256sum: sha256sum,
}
if !containsFileInfo(fi, fil) {
t.Error(`pool/main/a/aalib/aalib_1.4p5.orig.tar.gz`)
}
md5sum, _ = hex.DecodeString("1d276558e27a29e2d0bbe6deac1788dc")
sha1sum, _ = hex.DecodeString("bfe56ce2a2171c6602f4d34a4d548a20deb2e628")
sha256sum, _ = hex.DecodeString("0b606e2bf1826e77c73c0efb9b0cb2f5f89ea422cc02a10fa00866075635cf2c")
fi = &FileInfo{
path: "pool/main/a/aalib/aalib_1.4p5-41.debian.tar.gz",
size: 16718,
md5sum: md5sum,
sha1sum: sha1sum,
sha256sum: sha256sum,
}
if !containsFileInfo(fi, fil) {
t.Error(`pool/main/a/aalib/aalib_1.4p5-41.debian.tar.gz`)
}
md5sum, _ = hex.DecodeString("7dedd7a510fcf4cd2b0def4b45ab94a7")
sha1sum, _ = hex.DecodeString("fcaf0374f5f054c2884dbab6f126b8187ba66181")
sha256sum, _ = hex.DecodeString("8f44b8be08a562ac7bee3bd5e0273e6a860bfe1a434ea2d93d42e94d339cacf4")
fi = &FileInfo{
path: "pool/main/z/zsh/zsh_5.0.2-3ubuntu6.dsc",
size: 2911,
md5sum: md5sum,
sha1sum: sha1sum,
sha256sum: sha256sum,
}
if !containsFileInfo(fi, fil) {
t.Error(`pool/main/z/zsh/zsh_5.0.2-3ubuntu6.dsc`)
}
}
// TestGetFilesFromIndex checks file extraction from an i18n/Index
// fixture, whose entries carry SHA1 checksums only.
func TestGetFilesFromIndex(t *testing.T) {
t.Parallel()
f, err := os.Open("testdata/af/Index")
if err != nil {
t.Fatal(err)
}
defer f.Close()
fil, _, err := ExtractFileInfo("ubuntu/dists/trusty/main/i18n/Index", f)
if err != nil {
t.Fatal(err)
}
if len(fil) != 53 {
t.Error(`len(fil) != 53`)
}
sha1sum, _ := hex.DecodeString("f03d5f043a7daea0662a110d6e5d3f85783a5a1b")
fi := &FileInfo{
path: "ubuntu/dists/trusty/main/i18n/Translation-bg.bz2",
size: 7257,
sha1sum: sha1sum,
}
if !containsFileInfo(fi, fil) {
t.Error(`ubuntu/dists/trusty/main/i18n/Translation-bg.bz2`)
}
sha1sum, _ = hex.DecodeString("1572e835b4a67a49f79bbee408c82af2357662a7")
fi = &FileInfo{
path: "ubuntu/dists/trusty/main/i18n/Translation-zh_TW.bz2",
size: 85235,
sha1sum: sha1sum,
}
if !containsFileInfo(fi, fil) {
t.Error(`ubuntu/dists/trusty/main/i18n/Translation-zh_TW.bz2`)
}
}
// TestExtractFileInfo checks that a .gpg meta file yields no entries
// (the ".gpg" extension is accepted but carries no checksum sections).
func TestExtractFileInfo(t *testing.T) {
t.Parallel()
f, err := os.Open("testdata/af/Packages")
if err != nil {
t.Fatal(err)
}
defer f.Close()
fil, _, err := ExtractFileInfo("ubuntu/dists/testing/Release.gpg", f)
if err != nil {
t.Fatal(err)
}
if len(fil) != 0 {
t.Error(`len(fil) != 0`)
}
}
// TestExtractFileInfoWithXZ checks that xz-compressed Packages files
// are decompressed and parsed.
func TestExtractFileInfoWithXZ(t *testing.T) {
t.Parallel()
f, err := os.Open("testdata/af/Packages.xz")
if err != nil {
t.Fatal(err)
}
defer f.Close()
fil, _, err := ExtractFileInfo("ubuntu/dists/testing/Packages.xz", f)
if err != nil {
t.Fatal(err)
}
sha1sum, _ := hex.DecodeString("903b3305c86e872db25985f2b686ef8d1c3760cf")
fi := &FileInfo{
path: "pool/c/cybozu-abc_0.2.2-1_amd64.deb",
size: 102369852,
sha1sum: sha1sum,
}
if !containsFileInfo(fi, fil) {
t.Error("pool/c/cybozu-abc_0.2.2-1_amd64.deb")
}
}

114
src/apt/parser.go Normal file
View File

@ -0,0 +1,114 @@
package apt
// This file implements a generic debian control file parser.
//
// Specifications of files are:
// https://wiki.debian.org/RepositoryFormat
// https://www.debian.org/doc/debian-policy/ch-controlfields.html
//
// According to Debian policy 5.1, folded fields are used in few
// fields such as Uploaders or Binary that we are not insterested in.
// This parser treats them just the same as multiline fields.
import (
"bufio"
"errors"
"io"
"strings"
)
const (
	// maxScanTokenSize caps the length of a single input line (1 MiB).
	maxScanTokenSize = 1 * 1024 * 1024
	// startBufSize is the initial scanner buffer allocation,
	// matching bufio's default.
	startBufSize = 4096
)

// Paragraph maps field names to their values.
//
// Each value is a list of strings: simple fields have exactly one
// element, while multiline (and folded) fields contribute one element
// per line with newlines stripped. Folded fields are treated just the
// same as multiline fields.
type Paragraph map[string][]string

// Parser reads a debian control file and returns its paragraphs one
// by one.
//
// PGP preambles and signatures, if present, are skipped.
type Parser struct {
	s         *bufio.Scanner
	lastField string // most recently seen field name, for continuation lines
	err       error  // sticky error; once set, Read keeps returning it
	isPGP     bool   // set after a PGP signed-message preamble is seen
}

// NewParser creates a Parser that reads from r.
func NewParser(r io.Reader) *Parser {
	s := bufio.NewScanner(r)
	s.Buffer(make([]byte, startBufSize), maxScanTokenSize)
	return &Parser{s: s}
}
// Read reads a paragraph.
//
// It returns io.EOF if no more paragraph can be read.
func (p *Parser) Read() (Paragraph, error) {
	// Errors (including io.EOF) are sticky: once recorded, every
	// subsequent call fails the same way.
	if p.err != nil {
		return nil, p.err
	}
	ret := make(Paragraph)
L:
	for p.s.Scan() {
		switch l := p.s.Text(); {
		case len(l) == 0:
			// A blank line terminates the current paragraph.
			break L
		case l[0] == '#':
			// Comment line; skipped entirely.
			continue
		case l == "-----BEGIN PGP SIGNED MESSAGE-----":
			// Clearsigned file: consume the PGP preamble, which ends
			// at the first empty line, then resume normal parsing.
			p.isPGP = true
			for p.s.Scan() {
				if l2 := p.s.Text(); len(l2) == 0 {
					break
				}
			}
			continue
		case p.isPGP && l == "-----BEGIN PGP SIGNATURE-----":
			// The trailing signature carries no field data;
			// skip to EOF and end the paragraph.
			for p.s.Scan() {
			}
			break L
		case l[0] == ' ' || l[0] == '\t':
			// Continuation of a multiline (or folded) field; it must
			// follow a "Name: value" line within the same paragraph.
			if p.lastField == "" {
				p.err = errors.New("invalid line: " + l)
				return nil, p.err
			}
			ret[p.lastField] = append(ret[p.lastField], strings.Trim(l, " \t"))
		case strings.ContainsRune(l, ':'):
			// "Name: value" line. The value may be empty when the
			// actual data follows on continuation lines.
			t := strings.SplitN(l, ":", 2)
			k := t[0]
			v := strings.Trim(t[1], " \t")
			p.lastField = k
			if len(v) == 0 {
				// ignore empty value field
				continue
			}
			ret[k] = append(ret[k], v)
		default:
			p.err = errors.New("invalid line: " + l)
			return nil, p.err
		}
	}
	// Continuation lines never cross a paragraph boundary.
	p.lastField = ""
	if err := p.s.Err(); err != nil {
		p.err = err
	} else if len(ret) == 0 {
		// Nothing was read: end of input.
		p.err = io.EOF
	}
	if p.err != nil {
		return nil, p.err
	}
	return ret, nil
}

180
src/apt/parser_test.go Normal file
View File

@ -0,0 +1,180 @@
package apt
import (
"io"
"os"
"testing"
)
// TestParserRelease parses a plain (unsigned) Release file and checks
// simple fields plus the three multiline checksum fields.
func TestParserRelease(t *testing.T) {
	t.Parallel()
	f, err := os.Open("testdata/af/Release")
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	p := NewParser(f)
	d, err := p.Read()
	if err != nil {
		t.Fatal(err)
	}
	// Simple single-value fields.
	if codename, ok := d["Codename"]; !ok {
		t.Error(`codename, ok := d["Codename"]; !ok`)
	} else if codename[0] != "testing" {
		t.Error(`codename != "testing"`)
	}
	if archs, ok := d["Architectures"]; !ok {
		t.Error(`archs, ok := d["Architectures"]; !ok`)
	} else if archs[0] != "amd64 i386" {
		t.Error(`archs[0] != "amd64 i386"`)
	}
	// Multiline checksum fields: one list element per indented line.
	if md5, ok := d["MD5Sum"]; !ok {
		t.Error(`md5, ok := d["MD5Sum"]; !ok`)
	} else {
		if len(md5) != 9 {
			t.Fatal(`len(md5) != 9`)
		}
		if md5[0] != "5c30f072d01cde094a5c07fccd217cf3 3098 main/binary-all/Packages" {
			t.Error(`md5[0] != "5c30f072d01cde094a5c07fccd217cf3 3098 main/binary-all/Packages"`)
		}
		if md5[1] != "4ed86bda6871fd3825a65e95bb714ef0 1259 main/binary-all/Packages.bz2" {
			t.Error(`md5[1] != "4ed86bda6871fd3825a65e95bb714ef0 1259 main/binary-all/Packages.bz2"`)
		}
	}
	if sha1, ok := d["SHA1"]; !ok {
		t.Error(`sha1, ok := d["SHA1"]; !ok`)
	} else {
		if len(sha1) != 9 {
			t.Fatal(`len(sha1) != 9`)
		}
		if sha1[0] != "e3c9a2028a6938e49fc240cdd55c2f4b0b75dfde 3098 main/binary-all/Packages" {
			t.Error(`sha1[0] != "e3c9a2028a6938e49fc240cdd55c2f4b0b75dfde 3098 main/binary-all/Packages"`)
		}
		if sha1[1] != "eb2c25b19facbc8c103a7e14ae5b768e5e47157e 1259 main/binary-all/Packages.bz2" {
			t.Error(`sha1[1] != "eb2c25b19facbc8c103a7e14ae5b768e5e47157e 1259 main/binary-all/Packages.bz2"`)
		}
	}
	if sha256, ok := d["SHA256"]; !ok {
		t.Error(`sha256, ok := d["SHA256"]; !ok`)
	} else {
		if len(sha256) != 9 {
			t.Fatal(`len(sha256) != 9`)
		}
		if sha256[0] != "e3b1e5a6951881bca3ee230e5f3215534eb07f602a2f0415af3b182468468104 3098 main/binary-all/Packages" {
			t.Error(`sha256[0] != "e3b1e5a6951881bca3ee230e5f3215534eb07f602a2f0415af3b182468468104 3098 main/binary-all/Packages"`)
		}
		if sha256[8] != "a6972328347cc787f4f8c2e20a930ec965bd520380b0449e610995b6b0f1e3c5 1059 main/binary-i386/Packages.gz" {
			t.Error(`sha256[8] != "a6972328347cc787f4f8c2e20a930ec965bd520380b0449e610995b6b0f1e3c5 1059 main/binary-i386/Packages.gz"`)
		}
	}
	// The fixture contains a single paragraph; the next read is EOF.
	_, err = p.Read()
	if err != io.EOF {
		t.Error(`err != io.EOF`)
	}
}
// TestParserInRelease parses a clearsigned InRelease file, checking
// that the PGP framing is skipped and the fields still come through.
func TestParserInRelease(t *testing.T) {
	t.Parallel()

	f, err := os.Open("testdata/af/InRelease")
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()

	p := NewParser(f)
	d, err := p.Read()
	if err != nil {
		t.Fatal(err)
	}

	codename, ok := d["Codename"]
	switch {
	case !ok:
		t.Error(`codename, ok := d["Codename"]; !ok`)
	case codename[0] != "xenial":
		t.Error(`codename != "xenial"`)
	}

	components, ok := d["Components"]
	switch {
	case !ok:
		t.Error(`components, ok := d["Components"]; !ok`)
	case components[0] != "main restricted universe multiverse":
		t.Error(`components[0] != "main restricted universe multiverse"`)
	}

	if sha256, ok := d["SHA256"]; !ok {
		t.Error(`sha256, ok := d["SHA256"]; !ok`)
	} else {
		// The last checksum line must survive up to the signature block.
		last := sha256[len(sha256)-1]
		if last != "aefe5a7388a3e638df10ac8f0cd42e6c2947cc766c2f33a3944a5b4900369d1e 7727612 universe/source/Sources.xz" {
			t.Error(`sha256[len(sha256)-1] != "aefe5a7388a3e638df10ac8f0cd42e6c2947cc766c2f33a3944a5b4900369d1e 7727612 universe/source/Sources.xz"`)
		}
	}

	if _, err := p.Read(); err != io.EOF {
		t.Error(`err != io.EOF`)
	}
}
// TestParserPackages reads a Packages file paragraph by paragraph,
// verifying the first two package stanzas and the final EOF.
func TestParserPackages(t *testing.T) {
	t.Parallel()

	f, err := os.Open("testdata/af/Packages")
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	p := NewParser(f)

	expected := []struct {
		pkg      string
		filename string
		size     string
	}{
		{"cybozu-abc", "pool/c/cybozu-abc_0.2.2-1_amd64.deb", "102369852"},
		{"cybozu-fuga", "pool/c/cybozu-fuga_2.0.0.2-1_all.deb", "1018650"},
	}
	for _, want := range expected {
		d, err := p.Read()
		if err != nil {
			t.Fatal(err)
		}
		if pkg, ok := d["Package"]; !ok {
			t.Error(`pkg, ok := d["Package"]; !ok`)
		} else if pkg[0] != want.pkg {
			t.Error(`pkg[0] != "` + want.pkg + `"`)
		}
		if filename, ok := d["Filename"]; !ok {
			t.Error(`filename, ok := d["Filename"]; !ok`)
		} else if filename[0] != want.filename {
			t.Error(`filename[0] != "` + want.filename + `"`)
		}
		if size, ok := d["Size"]; !ok {
			t.Error(`size, ok := d["Size"]; !ok`)
		} else if size[0] != want.size {
			t.Error(`size[0] != "` + want.size + `"`)
		}
	}

	// A third paragraph exists; after it the parser reports EOF.
	if _, err := p.Read(); err != nil {
		t.Fatal(err)
	}
	if _, err := p.Read(); err != io.EOF {
		t.Error(`err != io.EOF`)
	}
}

2548
src/apt/testdata/af/InRelease vendored Normal file

File diff suppressed because it is too large Load Diff

54
src/apt/testdata/af/Index vendored Normal file
View File

@ -0,0 +1,54 @@
SHA1:
8b868183cd667d017818dda0a8599d76fe5ca74b 856 Translation-ar.bz2
453637d99a3343dde00046b8c360e491d047f859 14149 Translation-ast.bz2
f03d5f043a7daea0662a110d6e5d3f85783a5a1b 7257 Translation-bg.bz2
5a4386d4b4c8dfb5faa9550848f0b7afa78e6309 916 Translation-bs.bz2
6eb50da0085b50bdfc87cbbd53be99ddc287ef9d 10967 Translation-ca.bz2
53762b3a01d1c8103154b5e2ff34961cc40fc93f 19581 Translation-cs.bz2
50eca8689bea2f00b965f515250d2a11947e6c05 490494 Translation-da.bz2
a9aae2eaeb43d57e49981343e3be7d82611a715a 672104 Translation-de.bz2
03968fd323510fea1950644e91622c310c05009e 59765 Translation-el.bz2
8aa7a170afdf02c587c700b63d090c6edd794a02 762361 Translation-en.bz2
c938df80101cabca0522c2b59200b8fe41f19214 2649 Translation-en_AU.bz2
068cf4f59df8b71cd49fe873662a9620634f8004 7324 Translation-en_CA.bz2
4a35cc5c9c73de746a768aa8d609677196d535c1 96784 Translation-en_GB.bz2
2ac3830674f0033d114aa4cb5193a14d76057986 2760 Translation-eo.bz2
762492c94dd863b8fa0b27488acaeab9ed96ac86 590670 Translation-es.bz2
227c9e5b32cda41aa974e8c3c5da60d69fc41da5 13712 Translation-eu.bz2
96fe2ec3fac7a13ec4fa4148f4d58acee0b2d4d0 931 Translation-fa.bz2
7a093e820cd73aaf2c4eccb3f409a66532481703 120574 Translation-fi.bz2
199d40698a7a184acc22670532eea8d6bbd9dbfc 813647 Translation-fr.bz2
07e018025d0d99c0e6e2cb124b8d418e39ff52cd 562467 Translation-gl.bz2
678fb41919ad9e9b34a0cc74757dc959f8b5264a 13252 Translation-hr.bz2
b8f91185c4c1eb46d26ff99c9b854c4e66f9d5df 95638 Translation-hu.bz2
b4297ff6412fa3e4165ad6fa4b81890fd8483b12 8569 Translation-id.bz2
f870c92531897864413e15fb807087ac101a5d60 4990 Translation-is.bz2
8d5bb61f3a4e0beecf7034a7a4898be48a321beb 593382 Translation-it.bz2
a1ddaafb181d13e9a5f35b9665abebb380294fff 352875 Translation-ja.bz2
aa828d90cb60a17c8a98dcf7388c4a39dd0c0871 365 Translation-ka.bz2
6baa740db8d2ecdba8c068a648bc78663a8bc402 7312 Translation-km.bz2
7e6d6a5021dbfc3a3d404cd3767266290c6b1b08 85604 Translation-ko.bz2
7d7e6e9d6bf1ef646b7da63d2501410bc8756a49 391 Translation-lt.bz2
f3816be65bd89ef7dd1d413638af67d3fc255466 5059 Translation-ms.bz2
a8c1adef2df957cc978c13606181c38be13897cf 2183 Translation-my.bz2
c41d78e7a20ead0cfb0993b76b656e8d3a22bfb0 8653 Translation-nb.bz2
3efb989bf381afc515e75aae07f771c316a298d2 132864 Translation-nl.bz2
9b09bbad3b265d32a6536987489aff64384d988f 5267 Translation-oc.bz2
099821e538ad51c1cf0f6be457d4cb2724244239 314845 Translation-pl.bz2
9416b913cf3e3803cef97db551cabffcdd38f108 264814 Translation-pt.bz2
4b2be6b049f05e978e414a291383ee6d92b7678d 293620 Translation-pt_BR.bz2
509e0f31e975de692c88fece54b8affccd10ed44 9666 Translation-ro.bz2
1d6c7a600a18c37c54be2807a76eb53a335cb263 522671 Translation-ru.bz2
7472cdf90caa4207eb8164597077fba3c301d415 1624 Translation-si.bz2
f5703105da6114593cd19cefde8abca6a4053f03 265973 Translation-sk.bz2
aa8e1a3d3d9d1400d800d55a931fd41847e54421 500377 Translation-sl.bz2
c9cac7ac1b13bfc98b6f685b29e441f355515e1a 873 Translation-sq.bz2
e7ee84e1d2fa874703486eda8512d1b58a3cfe01 61689 Translation-sr.bz2
38e456e31b38f658beda73392b1b9cf297bb3b41 70215 Translation-sv.bz2
1b287697c473ca8bde1baf1e35a827f369aa969b 848 Translation-th.bz2
654f0931faa5a1bc0a9c212a22d6eea73867827c 77768 Translation-tr.bz2
fdc965a77b3b8145ea41c8a51fd9cffa39ef2854 403661 Translation-uk.bz2
b48d9906528d4c1de56bcecaca959e10d130e81c 19929 Translation-vi.bz2
6671d636a19dfdfae44ddb00985e2814ca072303 84392 Translation-zh_CN.bz2
5f9d1a9379aec804502d6b087a7613a9bfbecfee 419 Translation-zh_HK.bz2
1572e835b4a67a49f79bbee408c82af2357662a7 85235 Translation-zh_TW.bz2

36
src/apt/testdata/af/Packages vendored Normal file

File diff suppressed because one or more lines are too long

BIN
src/apt/testdata/af/Packages.xz vendored Normal file

Binary file not shown.

37
src/apt/testdata/af/Release vendored Normal file
View File

@ -0,0 +1,37 @@
Origin: Artifactory
Label: Artifactory
Suite: testing
Codename: testing
Date: Fri, 03 Jun 2016 00:38:19 UTC
Component: main
Architectures: amd64 i386
MD5Sum:
5c30f072d01cde094a5c07fccd217cf3 3098 main/binary-all/Packages
4ed86bda6871fd3825a65e95bb714ef0 1259 main/binary-all/Packages.bz2
c451dc7898107a946f3bd085e602a11e 1059 main/binary-all/Packages.gz
181e90d6c94c35d22d931625612bdb49 15278 main/binary-amd64/Packages
6c35d0e61d74a9a533652b13052fe28c 4588 main/binary-amd64/Packages.bz2
3f71c3b19ec6f926c71504cf147f3574 4418 main/binary-amd64/Packages.gz
5c30f072d01cde094a5c07fccd217cf3 3098 main/binary-i386/Packages
4ed86bda6871fd3825a65e95bb714ef0 1259 main/binary-i386/Packages.bz2
c451dc7898107a946f3bd085e602a11e 1059 main/binary-i386/Packages.gz
SHA1:
e3c9a2028a6938e49fc240cdd55c2f4b0b75dfde 3098 main/binary-all/Packages
eb2c25b19facbc8c103a7e14ae5b768e5e47157e 1259 main/binary-all/Packages.bz2
85740a100431a4b71d4ddf5f9290b0a6d4737959 1059 main/binary-all/Packages.gz
10c64fafa30f1a79ec1e09c877e7dd26d53239f1 15278 main/binary-amd64/Packages
70324c75e0a9814dab9ccf66348c1e77f0d96f32 4588 main/binary-amd64/Packages.bz2
64a566a5b6a92c1fefde9630d1b8ecb6e9352523 4418 main/binary-amd64/Packages.gz
e3c9a2028a6938e49fc240cdd55c2f4b0b75dfde 3098 main/binary-i386/Packages
eb2c25b19facbc8c103a7e14ae5b768e5e47157e 1259 main/binary-i386/Packages.bz2
85740a100431a4b71d4ddf5f9290b0a6d4737959 1059 main/binary-i386/Packages.gz
SHA256:
e3b1e5a6951881bca3ee230e5f3215534eb07f602a2f0415af3b182468468104 3098 main/binary-all/Packages
bb870366ea454d3e809604b0e5ca2bd978244bd08d115e03eac467d1d1fe5533 1259 main/binary-all/Packages.bz2
a6972328347cc787f4f8c2e20a930ec965bd520380b0449e610995b6b0f1e3c5 1059 main/binary-all/Packages.gz
0a530f1364240f5bf922fc26404589d171b30f18494a5143e251fa3b8239f86b 15278 main/binary-amd64/Packages
6e0c6d858e81de1b0cb142a47b99a3dce1c0a68a1988cefad370aeabdba14729 4588 main/binary-amd64/Packages.bz2
78fa82404a432d7b56761ccdbf275f4a338c8779a9cec17480b91672c28682aa 4418 main/binary-amd64/Packages.gz
e3b1e5a6951881bca3ee230e5f3215534eb07f602a2f0415af3b182468468104 3098 main/binary-i386/Packages
bb870366ea454d3e809604b0e5ca2bd978244bd08d115e03eac467d1d1fe5533 1259 main/binary-i386/Packages.bz2
a6972328347cc787f4f8c2e20a930ec965bd520380b0449e610995b6b0f1e3c5 1059 main/binary-i386/Packages.gz

BIN
src/apt/testdata/af/Sources.gz vendored Normal file

Binary file not shown.

BIN
src/apt/testdata/hash/Contents-amd64.gz vendored Normal file

Binary file not shown.

BIN
src/apt/testdata/hash/Contents-i386.gz vendored Normal file

Binary file not shown.

25
src/apt/testdata/hash/Release vendored Normal file
View File

@ -0,0 +1,25 @@
Origin: Ubuntu
Label: Ubuntu
Suite: xenial-updates
Version: 16.04
Codename: xenial
Date: Wed, 12 Jul 2017 1:20:54 UTC
Architectures: amd64 arm64 armhf i386 powerpc ppc64el s390x
Components: main restricted universe multiverse
Description: Ubuntu Xenial Updates
MD5Sum:
d9b3b8be9917380c20557ec175ffcaf9 20458922 Contents-amd64.gz
b9215f9d24105a27435ae757403588de 19037680 Contents-i386.gz
71dd618e3737fffdcdd8169ead197e8c 730354 main/binary-amd64/Packages.gz
228f8d529179a601e0f896789529ea44 705363 main/binary-i386/Packages.gz
SHA1:
03b46cc8d2ca61612e55f4c83678c18c4db6b4bd 20458922 Contents-amd64.gz
443562673c6a668395774934ddd32e74e8867131 19037680 Contents-i386.gz
ccca199fc619992718df18bc8ad75b8987860ae5 730354 main/binary-amd64/Packages.gz
d055bb3cf1ffe4badbeee69fc7d4c304266b820c 705363 main/binary-i386/Packages.gz
SHA256:
eb6fce18a77014c3f68b93805056f539f3c7dded3ff1d14ce3460dec75833304 20458922 Contents-amd64.gz
78d74be881a5595e199f893e2e4b674d9e2c7dcecd691a852e1812310f229d07 19037680 Contents-i386.gz
bb1ad06735cf1f0961e16dfdae78dee649ecb43a19b0f1aee1782178bbb45273 730354 main/binary-amd64/Packages.gz
d7c16ab8eb172d1c53d3a7d1076e4e1684b27217fe3a3dfb207a18fab81eed28 705363 main/binary-i386/Packages.gz
Acquire-By-Hash: yes

View File

@ -0,0 +1 @@
../../Contents-i386.gz

View File

@ -0,0 +1 @@
../../Contents-amd64.gz

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1 @@
../../Packages-old.gz

View File

@ -0,0 +1 @@
../../Packages-old.gz

Binary file not shown.