2023-07-29 01:05:33 +01:00
|
|
|
//go:build mage
|
|
|
|
|
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
|
|
|
"net/http"
|
|
|
|
"os"
|
|
|
|
"path/filepath"
|
|
|
|
|
|
|
|
"github.com/magefile/mage/sh"
|
|
|
|
)
|
|
|
|
|
|
|
|
// Download downloads the binaries for a given service.
|
|
|
|
func Download(name string) error {
|
|
|
|
cfg, err := newConfig(configFile)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("unable to load the configuration; %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
switch name {
|
|
|
|
case "forgejo":
|
|
|
|
if err := downloadForgejo(cfg.Forgejo.Version); err != nil {
|
|
|
|
return fmt.Errorf("an error occurred whilst getting the forgejo binary; %w", err)
|
|
|
|
}
|
|
|
|
case "gotosocial":
|
|
|
|
if err := downloadGoToSocial(cfg.GoToSocial.Version); err != nil {
|
|
|
|
return fmt.Errorf("an error occurred whilst getting the packages for GoToSocial; %w", err)
|
|
|
|
}
|
2023-07-31 00:40:19 +01:00
|
|
|
case "woodpecker":
|
|
|
|
if err := downloadWoodpecker(cfg.Woodpecker.Version); err != nil {
|
|
|
|
return fmt.Errorf("an error occurred whilst getting the packages for Woodpecker; %w", err)
|
|
|
|
}
|
2023-07-29 01:05:33 +01:00
|
|
|
default:
|
2023-08-24 15:30:14 +01:00
|
|
|
fmt.Printf("There's no files to download for %q.\n", name)
|
2023-07-29 01:05:33 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-07-31 00:40:19 +01:00
|
|
|
// downloadWoodpecker downloads and validates the files for the Woodpecker deployment.
|
|
|
|
func downloadWoodpecker(version string) error {
|
|
|
|
destinationDir := filepath.Join(rootBuildDir, "woodpecker")
|
|
|
|
|
|
|
|
binaryTarUrl := fmt.Sprintf(
|
|
|
|
"https://github.com/woodpecker-ci/woodpecker/releases/download/v%s/woodpecker-server_linux_amd64.tar.gz",
|
|
|
|
version,
|
|
|
|
)
|
|
|
|
|
|
|
|
binaryTarFilepath := filepath.Join(
|
|
|
|
destinationDir,
|
|
|
|
fmt.Sprintf("woodpecker-server-%s_linux_amd64.tar.gz", version),
|
|
|
|
)
|
|
|
|
|
|
|
|
checksumUrl := fmt.Sprintf(
|
|
|
|
"https://github.com/woodpecker-ci/woodpecker/releases/download/v%s/checksums.txt",
|
|
|
|
version,
|
|
|
|
)
|
|
|
|
|
|
|
|
checksumFilePath := filepath.Join(
|
|
|
|
destinationDir,
|
|
|
|
fmt.Sprintf("woodpecker_%s_checksums.txt", version),
|
|
|
|
)
|
|
|
|
|
|
|
|
pack := downloadPack{
|
|
|
|
destinationDir: destinationDir,
|
|
|
|
packages: []pack{
|
|
|
|
{
|
|
|
|
file: object{
|
|
|
|
source: binaryTarUrl,
|
|
|
|
destination: binaryTarFilepath,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
validateGPGSignature: false,
|
|
|
|
checksum: object{
|
|
|
|
source: checksumUrl,
|
|
|
|
destination: checksumFilePath,
|
|
|
|
},
|
|
|
|
validateChecksum: false,
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := download(pack); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
// downloadForgejo downloads and validates the Forgejo files.
|
2023-07-29 01:05:33 +01:00
|
|
|
func downloadForgejo(version string) error {
|
2023-07-30 22:37:51 +01:00
|
|
|
var (
|
|
|
|
forgejoBinaryFileFormat = "forgejo-%s-linux-amd64"
|
|
|
|
forgejoDigestExtension = ".sha256"
|
|
|
|
forgejoSignatureExtension = ".asc"
|
|
|
|
forgejoJson = "./magefiles/forgejo.json"
|
|
|
|
)
|
2023-07-29 01:05:33 +01:00
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
destinationDir := filepath.Join(rootBuildDir, "forgejo")
|
2023-07-29 01:05:33 +01:00
|
|
|
|
|
|
|
binaryPath := filepath.Join(
|
2023-07-30 22:37:51 +01:00
|
|
|
destinationDir,
|
2023-07-29 01:05:33 +01:00
|
|
|
fmt.Sprintf(forgejoBinaryFileFormat, version),
|
|
|
|
)
|
2023-07-30 22:37:51 +01:00
|
|
|
|
2023-07-29 01:05:33 +01:00
|
|
|
signaturePath := binaryPath + forgejoSignatureExtension
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
checksumPath := binaryPath + forgejoDigestExtension
|
2023-07-29 01:05:33 +01:00
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
data, err := newForgejoInfo(forgejoJson)
|
2023-07-29 01:05:33 +01:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
pack := downloadPack{
|
|
|
|
destinationDir: destinationDir,
|
|
|
|
packages: []pack{
|
|
|
|
{
|
|
|
|
file: object{
|
|
|
|
source: data.Downloads[version].Binary,
|
|
|
|
destination: binaryPath,
|
|
|
|
},
|
|
|
|
gpgSignature: object{
|
|
|
|
source: data.Downloads[version].Signature,
|
|
|
|
destination: signaturePath,
|
|
|
|
},
|
|
|
|
},
|
2023-07-29 01:05:33 +01:00
|
|
|
},
|
2023-07-30 22:37:51 +01:00
|
|
|
validateGPGSignature: true,
|
|
|
|
checksum: object{
|
|
|
|
source: data.Downloads[version].Digest,
|
|
|
|
destination: checksumPath,
|
2023-07-29 01:05:33 +01:00
|
|
|
},
|
2023-07-30 22:37:51 +01:00
|
|
|
validateChecksum: true,
|
2023-07-29 01:05:33 +01:00
|
|
|
}
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
if err := download(pack); err != nil {
|
2023-07-29 01:05:33 +01:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
// downloadGoToSocial downloads and validates the files for GoToSocial.
|
2023-07-29 01:05:33 +01:00
|
|
|
func downloadGoToSocial(version string) error {
|
2023-07-30 22:37:51 +01:00
|
|
|
destinationDir := filepath.Join(rootBuildDir, "gotosocial")
|
2023-07-29 01:05:33 +01:00
|
|
|
|
|
|
|
binaryTarUrl := fmt.Sprintf(
|
|
|
|
"https://github.com/superseriousbusiness/gotosocial/releases/download/v%s/gotosocial_%s_linux_amd64.tar.gz",
|
|
|
|
version,
|
|
|
|
version,
|
|
|
|
)
|
2023-07-30 22:37:51 +01:00
|
|
|
binaryTarFilepath := filepath.Join(destinationDir, fmt.Sprintf("gotosocial_%s_linux_amd64.tar.gz", version))
|
2023-07-29 01:05:33 +01:00
|
|
|
|
|
|
|
webAssetsTarUrl := fmt.Sprintf(
|
|
|
|
"https://github.com/superseriousbusiness/gotosocial/releases/download/v%s/gotosocial_%s_web-assets.tar.gz",
|
|
|
|
version,
|
|
|
|
version,
|
|
|
|
)
|
2023-07-30 22:37:51 +01:00
|
|
|
webAssetsFilepath := filepath.Join(destinationDir, fmt.Sprintf("gotosocial_%s_web-assets.tar.gz", version))
|
2023-07-29 01:05:33 +01:00
|
|
|
|
|
|
|
checksumUrl := fmt.Sprintf(
|
|
|
|
"https://github.com/superseriousbusiness/gotosocial/releases/download/v%s/checksums.txt",
|
|
|
|
version,
|
|
|
|
)
|
2023-07-30 22:37:51 +01:00
|
|
|
checksumFilePath := filepath.Join(destinationDir, fmt.Sprintf("gotosocial_%s_checksums.txt", version))
|
|
|
|
|
|
|
|
pack := downloadPack{
|
|
|
|
destinationDir: destinationDir,
|
|
|
|
packages: []pack{
|
|
|
|
{
|
|
|
|
file: object{
|
|
|
|
source: binaryTarUrl,
|
|
|
|
destination: binaryTarFilepath,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
file: object{
|
|
|
|
source: webAssetsTarUrl,
|
|
|
|
destination: webAssetsFilepath,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
validateGPGSignature: false,
|
|
|
|
checksum: object{
|
|
|
|
source: checksumUrl,
|
|
|
|
destination: checksumFilePath,
|
|
|
|
},
|
|
|
|
validateChecksum: true,
|
|
|
|
}
|
2023-07-29 01:05:33 +01:00
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
if err := download(pack); err != nil {
|
|
|
|
return err
|
2023-07-29 01:05:33 +01:00
|
|
|
}
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// downloadPack describes a set of files to download for a single service,
// together with the validation to apply to them after downloading.
type downloadPack struct {
	// destinationDir is the directory the files are downloaded into;
	// it is created (mode 0o750) if it does not already exist.
	destinationDir string
	// packages lists the files (and, optionally, their detached GPG
	// signatures) to download.
	packages []pack
	// checksum is the checksum file covering the downloaded packages.
	// It is only downloaded and used when validateChecksum is true.
	checksum object
	// validateGPGSignature, when true, downloads each package's GPG
	// signature and verifies it with `gpg --verify`.
	validateGPGSignature bool
	// validateChecksum, when true, verifies the downloads with
	// `sha256sum --check --ignore-missing` against the checksum file.
	validateChecksum bool
}
|
|
|
|
|
|
|
|
// pack groups a downloadable file with its (optional) detached GPG signature.
type pack struct {
	// file is the file to download.
	file object
	// gpgSignature is the detached signature for file; only fetched and
	// used when the enclosing downloadPack has validateGPGSignature set.
	gpgSignature object
}
|
|
|
|
|
|
|
|
// object pairs a source URL with the local path the download is written to.
type object struct {
	// source is the URL the file is fetched from.
	source string
	// destination is the local file path the download is saved as.
	destination string
}
|
|
|
|
|
|
|
|
// download downloads all the files in the download pack,
|
|
|
|
// verifies all the GPG signatures (if enabled) and
|
|
|
|
// verifies the checksums (if enabled).
|
|
|
|
func download(pack downloadPack) error {
|
|
|
|
if err := os.MkdirAll(pack.destinationDir, 0o750); err != nil {
|
|
|
|
return fmt.Errorf("unable to make '%s'; %w", pack.destinationDir, err)
|
2023-07-29 01:05:33 +01:00
|
|
|
}
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
var objects []object
|
|
|
|
|
|
|
|
for i := range pack.packages {
|
|
|
|
objects = append(objects, pack.packages[i].file)
|
|
|
|
if pack.validateGPGSignature {
|
|
|
|
objects = append(objects, pack.packages[i].gpgSignature)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if pack.validateChecksum {
|
|
|
|
objects = append(objects, pack.checksum)
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, object := range objects {
|
2023-07-29 01:05:33 +01:00
|
|
|
if err := func() error {
|
2023-07-30 22:37:51 +01:00
|
|
|
_, err := os.Stat(object.destination)
|
|
|
|
if err == nil {
|
|
|
|
fmt.Printf("%s is already downloaded.\n", object.destination)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
file, err := os.Create(object.destination)
|
2023-07-29 01:05:33 +01:00
|
|
|
if err != nil {
|
2023-07-30 22:37:51 +01:00
|
|
|
return fmt.Errorf("unable to create %s; %w", object.destination, err)
|
2023-07-29 01:05:33 +01:00
|
|
|
}
|
2023-07-30 22:37:51 +01:00
|
|
|
defer file.Close()
|
2023-07-29 01:05:33 +01:00
|
|
|
|
|
|
|
client := http.Client{
|
|
|
|
CheckRedirect: func(r *http.Request, _ []*http.Request) error {
|
|
|
|
r.URL.Opaque = r.URL.Path
|
|
|
|
return nil
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
resp, err := client.Get(object.source)
|
2023-07-29 01:05:33 +01:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
size, err := io.Copy(file, resp.Body)
|
2023-07-29 01:05:33 +01:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
fmt.Printf("Downloaded %s with size %d.\n", object.destination, size)
|
2023-07-29 01:05:33 +01:00
|
|
|
|
|
|
|
return nil
|
|
|
|
}(); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
if pack.validateGPGSignature {
|
|
|
|
for i := range pack.packages {
|
|
|
|
if err := sh.Run("gpg", "--verify", pack.packages[i].gpgSignature.destination, pack.packages[i].file.destination); err != nil {
|
|
|
|
return fmt.Errorf("GPG verification failed for '%s'; %w", pack.packages[i].file.destination, err)
|
|
|
|
}
|
|
|
|
}
|
2023-07-29 01:05:33 +01:00
|
|
|
}
|
|
|
|
|
2023-07-30 22:37:51 +01:00
|
|
|
if pack.validateChecksum {
|
|
|
|
startDir, err := os.Getwd()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := os.Chdir(pack.destinationDir); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
checksum := filepath.Base(pack.checksum.destination)
|
|
|
|
|
|
|
|
if err := sh.Run("sha256sum", "--check", "--ignore-missing", checksum); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := os.Chdir(startDir); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-07-29 01:05:33 +01:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
2023-07-30 22:37:51 +01:00
|
|
|
|
|
|
|
type forgejoInfo struct {
|
|
|
|
Downloads map[string]forgejoFiles `json:"downloads"`
|
|
|
|
}
|
|
|
|
|
|
|
|
type forgejoFiles struct {
|
|
|
|
Binary string `json:"binary"`
|
|
|
|
Signature string `json:"signature"`
|
|
|
|
Digest string `json:"digest"`
|
|
|
|
}
|
|
|
|
|
|
|
|
func newForgejoInfo(path string) (forgejoInfo, error) {
|
|
|
|
var info forgejoInfo
|
|
|
|
|
|
|
|
f, err := os.Open(path)
|
|
|
|
if err != nil {
|
|
|
|
return info, err
|
|
|
|
}
|
|
|
|
defer f.Close()
|
|
|
|
|
|
|
|
decoder := json.NewDecoder(f)
|
|
|
|
|
|
|
|
if err = decoder.Decode(&info); err != nil {
|
|
|
|
return info, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return info, nil
|
|
|
|
}
|