From fece2c1a66b168b7db606bed98e0a3c0236e830e Mon Sep 17 00:00:00 2001
From: Will Roden
Date: Tue, 4 Jul 2023 13:48:43 -0500
Subject: [PATCH] fix compressed files

---
 internal/bindown/extract.go   | 16 +++++++--
 internal/builddep/builddep.go | 65 ++++++++++++++++++++++-----------
 internal/builddep/depgroup.go |  2 +-
 internal/builddep/dlfile.go   | 15 +++++++-
 4 files changed, 74 insertions(+), 24 deletions(-)

diff --git a/internal/bindown/extract.go b/internal/bindown/extract.go
index a74901f..7846cb7 100644
--- a/internal/bindown/extract.go
+++ b/internal/bindown/extract.go
@@ -3,6 +3,7 @@ package bindown
 import (
 	"os"
 	"path/filepath"
+	"strings"
 
 	"github.com/mholt/archiver/v3"
 	"github.com/willabides/bindown/v4/internal/cache"
@@ -55,9 +56,20 @@ func extract(archivePath, extractDir string) error {
 		return err
 	}
 	tarPath := filepath.Join(downloadDir, dlName)
-	_, err = archiver.ByExtension(dlName)
+	byExt, err := archiver.ByExtension(dlName)
 	if err != nil {
 		return copyFile(tarPath, filepath.Join(extractDir, dlName))
 	}
-	return archiver.Unarchive(tarPath, extractDir)
+	switch x := byExt.(type) {
+	case archiver.Unarchiver:
+		return x.Unarchive(tarPath, extractDir)
+	case archiver.Decompressor:
+		dest := filepath.Join(
+			extractDir,
+			strings.TrimSuffix(dlName, filepath.Ext(dlName)),
+		)
+		return archiver.FileCompressor{Decompressor: x}.DecompressFile(tarPath, dest)
+	default:
+		return copyFile(tarPath, filepath.Join(extractDir, dlName))
+	}
 }
diff --git a/internal/builddep/builddep.go b/internal/builddep/builddep.go
index 85b0eb9..d8d385a 100644
--- a/internal/builddep/builddep.go
+++ b/internal/builddep/builddep.go
@@ -244,6 +244,9 @@ var archiveSuffixes = []string{
 	".tzst",
 	".rar",
 	".zip",
+}
+
+var compressSuffixes = []string{
 	".br",
 	".gz",
 	".bz2",
@@ -278,6 +281,7 @@ func parseDownload(dlURL, version string, systems []bindown.System) (*dlFile, bo
 		return nil, false
 	}
 	isArchive := false
+	isCompress := false
 	suffix := ""
 	for _, s := range archiveSuffixes {
 		if strings.HasSuffix(dlURL, s) {
@@ -286,6 +290,15 @@ func parseDownload(dlURL, version string, systems []bindown.System) (*dlFile, bo
 			break
 		}
 	}
+	if !isArchive {
+		for _, s := range compressSuffixes {
+			if strings.HasSuffix(dlURL, s) {
+				suffix = s
+				isCompress = true
+				break
+			}
+		}
+	}
 	if strings.HasSuffix(dlURL, ".exe") {
 		suffix = ".exe"
 	}
@@ -301,13 +314,14 @@ func parseDownload(dlURL, version string, systems []bindown.System) (*dlFile, bo
 		priority += archSub.priority
 	}
 	return &dlFile{
-		origUrl:   dlURL,
-		url:       tmpl,
-		osSub:     osSub,
-		archSub:   archSub,
-		suffix:    suffix,
-		isArchive: isArchive,
-		priority:  priority,
+		origUrl:    dlURL,
+		url:        tmpl,
+		osSub:      osSub,
+		archSub:    archSub,
+		suffix:     suffix,
+		isArchive:  isArchive,
+		isCompress: isCompress,
+		priority:   priority,
 	}, true
 }
 
@@ -361,24 +375,35 @@ func parseDownloads(dlUrls []string, binName, version string, allowedSystems []b
 		if len(systemFiles[system]) == 1 {
 			continue
 		}
-		// prefer archives
-		slices.SortFunc(systemFiles[system], func(a, b *dlFile) bool {
-			return a.isArchive && !b.isArchive
-		})
-		cutOff = slices.IndexFunc(systemFiles[system], func(f *dlFile) bool {
-			return !f.isArchive
-		})
-		if cutOff != -1 {
-			systemFiles[system] = systemFiles[system][:cutOff]
+		// prefer archives then compresses
+		var archiveSystems, compressSystems, plainSystems []*dlFile
+		for _, file := range systemFiles[system] {
+			switch {
+			case file.isArchive:
+				archiveSystems = append(archiveSystems, file)
+			case file.isCompress:
+				compressSystems = append(compressSystems, file)
+			default:
+				plainSystems = append(plainSystems, file)
+			}
 		}
-		if len(systemFiles[system]) == 1 {
-			continue
+		var sf []*dlFile
+		switch {
+		case len(archiveSystems) > 0:
+			sf = archiveSystems
+		case len(compressSystems) > 0:
+			sf = compressSystems
+		default:
+			sf = plainSystems
+		}
+		if len(sf) < 2 {
+			systemFiles[system] = sf
 		}
 		// now arbitrarily pick the first one alphabetically by origUrl
-		slices.SortFunc(systemFiles[system], func(a, b *dlFile) bool {
+		slices.SortFunc(sf, func(a, b *dlFile) bool {
 			return a.origUrl < b.origUrl
 		})
-		systemFiles[system] = systemFiles[system][:1]
+		systemFiles[system] = sf[:1]
 	}
 
 	templates := maps.Keys(urlFrequency)
diff --git a/internal/builddep/depgroup.go b/internal/builddep/depgroup.go
index 2c6722d..edfbe9d 100644
--- a/internal/builddep/depgroup.go
+++ b/internal/builddep/depgroup.go
@@ -50,7 +50,7 @@ func (g *depGroup) regroupByArchivePath(ctx context.Context, binName, version st
 		return []*depGroup{gr}, nil
 	}
 	// trust that if the first isn't an archive, none of them are
-	if !gr.files[0].isArchive {
+	if !gr.files[0].isArchive && !gr.files[0].isCompress {
 		gr.archivePath = path.Base(gr.files[0].url)
 		return []*depGroup{gr}, nil
 	}
diff --git a/internal/builddep/dlfile.go b/internal/builddep/dlfile.go
index a454bd4..5ff86c0 100644
--- a/internal/builddep/dlfile.go
+++ b/internal/builddep/dlfile.go
@@ -25,6 +25,7 @@ type dlFile struct {
 	archSub      *systemSub
 	suffix       string
 	isArchive    bool
+	isCompress   bool
 	priority     int
 	archiveFiles []*archiveFile
 	checksum     string
@@ -45,7 +46,7 @@ func (f *dlFile) clone() *dlFile {
 }
 
 func (f *dlFile) setArchiveFiles(ctx context.Context, binName, version string) error {
-	if !f.isArchive {
+	if !f.isArchive && !f.isCompress {
 		return nil
 	}
 	parsedUrl, err := url.Parse(f.origUrl)
@@ -53,6 +54,18 @@ func (f *dlFile) setArchiveFiles(ctx context.Context, binName, version string) e
 		return err
 	}
 	filename := path.Base(parsedUrl.EscapedPath())
+	if f.isCompress {
+		f.archiveFiles = []*archiveFile{parseArchiveFile(
+			strings.TrimSuffix(filename, f.suffix),
+			binName,
+			f.osSub.val,
+			f.archSub.val,
+			version,
+			true,
+		)}
+		return nil
+	}
+
 	req, err := http.NewRequestWithContext(ctx, http.MethodGet, f.origUrl, http.NoBody)
 	if err != nil {
 		return err
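
For context on the archiver/v3 API the extract.go hunk relies on: archiver.ByExtension picks an implementation from the file name, and the result is type-switched to choose between unpacking an archive and decompressing a single file. Below is a minimal, standalone sketch of that dispatch, assuming the illustrative function name extractOrDecompress and example paths; unlike bindown's extract(), which falls back to copying the file unchanged, the sketch simply reports unsupported inputs as errors.

package main

import (
	"fmt"
	"path/filepath"
	"strings"

	"github.com/mholt/archiver/v3"
)

// extractOrDecompress mirrors the dispatch added to extract(): archives are
// unpacked into destDir, while bare compressed files are decompressed to a
// single file named after the source minus its compression extension.
func extractOrDecompress(srcPath, destDir string) error {
	name := filepath.Base(srcPath)
	byExt, err := archiver.ByExtension(name)
	if err != nil {
		// extract() copies the file as-is here; the sketch just reports it.
		return fmt.Errorf("no archiver implementation for %s: %w", name, err)
	}
	switch x := byExt.(type) {
	case archiver.Unarchiver: // e.g. .tar.gz, .zip
		return x.Unarchive(srcPath, destDir)
	case archiver.Decompressor: // e.g. .gz, .bz2
		dest := filepath.Join(destDir, strings.TrimSuffix(name, filepath.Ext(name)))
		return archiver.FileCompressor{Decompressor: x}.DecompressFile(srcPath, dest)
	default:
		return fmt.Errorf("%s is neither an archive nor a compressed file", name)
	}
}

func main() {
	// e.g. a gzip-compressed single binary: downloads/foo.gz -> extracted/foo
	if err := extractOrDecompress("downloads/foo.gz", "extracted"); err != nil {
		fmt.Println("error:", err)
	}
}

The resulting name, the download name minus its compression suffix, is also what the new isCompress branch in dlfile.go's setArchiveFiles assumes when it builds the archiveFiles entry for a compressed download.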