Update release find to go-bds

2024-04-25 22:29:47 -03:00
parent b0429fb00e
commit 0698775e9a
11 changed files with 3865 additions and 4491 deletions

.gitignore vendored Normal file (1 line changed)

@@ -0,0 +1 @@
/dists/

@@ -1,233 +0,0 @@
package bds
import (
"archive/tar"
"archive/zip"
"compress/gzip"
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"time"
"sirherobrine23.org/minecraft-server/bedrockfetch/internal/convert"
"sirherobrine23.org/minecraft-server/bedrockfetch/internal/request"
gitea "sirherobrine23.org/minecraft-server/bedrockfetch/internal/tea"
)
type BedrockVersionsTarget struct {
NodePlatform string `json:"Platform"` // Nodejs Platform string
NodeArch string `json:"Arch"` // Nodejs Archs
ZipFile string `json:"zip"` // Local file and http to remote
ZipSHA1 string `json:"zipSHA1"` // File SHA1
TarFile string `json:"tar"` // Local file and http to remote
TarSHA1 string `json:"tarSHA1"` // File SHA1
}
type BedrockVersions struct {
Version string `json:"version"`
DateRelease time.Time `json:"releaseDate"`
ReleaseType string `json:"type"`
Targets []BedrockVersionsTarget `json:"targets"`
}
var (
VersionsPathFile, _ = filepath.Abs("./versions.json")
Versions = make(map[string]BedrockVersions)
)
func init() {
LoadLocalVersions()
}
func AddNewVersion(v BedrockVersions) {
Versions[v.Version] = v
}
func LoadLocalVersions() error {
file, err := os.Open(VersionsPathFile)
if err != nil {
return err
}
defer file.Close()
localEnv := []BedrockVersions{}
err = json.NewDecoder(file).Decode(&localEnv)
if err != nil {
return err
}
for _, ver := range localEnv {
Versions[ver.Version] = ver
}
return nil
}
func SaveLocalVersions() error {
doArray := []BedrockVersions{}
for _, ver := range Versions {
doArray = append(doArray, ver)
}
data, err := json.MarshalIndent(&doArray, "", " ")
if err != nil {
return err
}
return os.WriteFile(VersionsPathFile, data, os.FileMode(0o666))
}
func (bdsRelease *BedrockVersions) UploadRelease() error {
tea, err := gitea.CreateRelease(gitea.GiteaRelease{
AuthToken: gitea.Token,
TagName: bdsRelease.Version,
Name: bdsRelease.Version,
IsPrerelease: bdsRelease.ReleaseType != "release",
IsDraft: true,
FileAssets: []gitea.GiteaAsset{},
})
if err != nil {
return err
}
fmt.Printf("Uploading %s\n", bdsRelease.Version)
for targetIndex, target := range bdsRelease.Targets {
// Zip upload
zipStr, err := os.Open(target.ZipFile)
if err != nil {
return err
}
filename := fmt.Sprintf("%s-%s.zip", target.NodeArch, target.NodePlatform)
fmt.Printf("Uploading %s: %s\n", bdsRelease.Version, filename)
infoZip, err := tea.UploadAsset(filename, zipStr)
if err != nil {
return err
}
bdsRelease.Targets[targetIndex].ZipFile = infoZip.FileUrl
// Tar upload
tarStr, err := os.Open(target.TarFile)
if err != nil {
return err
}
filename = fmt.Sprintf("%s-%s.tgz", target.NodeArch, target.NodePlatform)
fmt.Printf("Uploading %s: %s\n", bdsRelease.Version, filename)
infoTar, err := tea.UploadAsset(filename, tarStr)
if err != nil {
return err
}
bdsRelease.Targets[targetIndex].TarFile = infoTar.FileUrl
}
tea.IsDraft = false
err = tea.UpdateRelease()
if err != nil {
return err
}
return nil
}
// Download zip file and convert to .tgz and set file path to ZipFile and TarFile
func (assert *BedrockVersionsTarget) Download() error {
zipSplited := strings.Split(assert.ZipFile, "/")[len(strings.Split(assert.ZipFile, "/"))-2:]
zipFile := filepath.Join(os.TempDir(), "bdsfetch", filepath.Join(zipSplited...))
tarFile := strings.Replace(zipFile, ".zip", ".tgz", 1)
os.MkdirAll(filepath.Dir(zipFile), os.FileMode(0o666))
// Get file stream
_, res, err := request.Request(request.RequestOptions{
Url: assert.ZipFile,
HttpError: true,
Method: "GET",
})
defer res.Body.Close()
if err != nil {
return err
}
fmt.Printf("Downloading %s, size %s\n", assert.ZipFile, convert.ByteCountSI(res.ContentLength))
// Create zip file
zipFileStream, err := os.Create(zipFile)
if err != nil {
return err
}
// Copy to file
_, err = io.Copy(zipFileStream, res.Body)
if err != nil {
return err
}
fmt.Printf("Download %q complete\n", assert.ZipFile)
// Create tar.gz file
tarFileStream, err := os.Create(tarFile)
if err != nil {
return err
}
gzStr := gzip.NewWriter(tarFileStream)
tarStr := tar.NewWriter(gzStr)
defer (func() {
tarStr.Close()
gzStr.Close()
tarFileStream.Close()
})()
zipStats, _ := zipFileStream.Stat()
zipStr, err := zip.NewReader(zipFileStream, zipStats.Size())
if err != nil {
return err
}
fmt.Printf("zip (%q) to tgz (%q)\n", filepath.Base(zipFile), filepath.Base(tarFile))
for _, fileEntry := range zipStr.File {
fileInfo := fileEntry.FileInfo()
tarhead := tar.Header{
Name: fileEntry.Name,
ChangeTime: fileEntry.Modified,
ModTime: fileEntry.Modified,
Size: fileInfo.Size(),
Mode: int64(fileInfo.Mode()),
}
if fileInfo.IsDir() {
tarhead.Size = 0
}
if err = tarStr.WriteHeader(&tarhead); err != nil {
return fmt.Errorf("unable to get entry content: %s", err)
}
// fmt.Printf("zip (%q) to tgz (%q): %s, size: %s\n", filepath.Base(zipFile), filepath.Base(tarFile), fileEntry.Name, convert.ByteCountSI(fileInfo.Size()))
if !fileInfo.IsDir() {
fileContent, _err := fileEntry.Open()
if _err != nil {
return fmt.Errorf("unable to get entry content: %s", _err)
}
_, err = io.Copy(tarStr, fileContent)
if !(err == nil || err == io.EOF) {
return fmt.Errorf("cannot copy file (%s) content: %s", fileEntry.Name, err)
}
fileContent.Close()
}
}
// Save zip info
assert.ZipFile = zipFile
assert.ZipSHA1, _ = convert.FileSHA1(zipFile)
// Save tgz info
assert.TarFile = tarFile
assert.TarSHA1, _ = convert.FileSHA1(tarFile)
return nil
}

@@ -1,63 +0,0 @@
package bds
import (
"regexp"
"sirherobrine23.org/minecraft-server/bedrockfetch/internal/request"
)
var fileMatch = regexp.MustCompile(`(?m)bin-(?P<Platform>win|linux)-?(?P<isPreview>preview)?\/bedrock-server-(?P<Version>[0-9\.]+)\.zip$`)
// FindAllGroups returns a map with each match group. The map key corresponds to the match group name.
// A nil return value indicates no matches.
func FindAllGroups(re *regexp.Regexp, s string) map[string]string {
matches := re.FindStringSubmatch(s)
subnames := re.SubexpNames()
if matches == nil || len(matches) != len(subnames) {
return nil
}
matchMap := map[string]string{}
for i := 1; i < len(matches); i++ {
matchMap[subnames[i]] = matches[i]
}
return matchMap
}
type FileInfo struct {
FileUrl string // Zip file url
Platform string // Target
IsPreview bool // Is preview server release
}
func MinecraftFetch() (map[string][]FileInfo, error) {
links, _, _, err := request.RequestHtmlLinks(request.RequestOptions{
Url: "https://www.minecraft.net/en-us/download/server/bedrock",
HttpError: true,
})
if err != nil {
return nil, err
}
// Filter files
zipFiles := map[string][]FileInfo{}
for _, file := range links {
if fileMatch.MatchString(file) {
info := FindAllGroups(fileMatch, file)
if _, existVer := zipFiles[info["Version"]]; !existVer {
zipFiles[info["Version"]] = []FileInfo{}
}
zipFiles[info["Version"]] = append(zipFiles[info["Version"]], FileInfo{
FileUrl: file,
Platform: info["Platform"],
IsPreview: info["isPreview"] != "",
})
}
}
return zipFiles, nil
}

@@ -1,11 +1,8 @@
package convert
import (
"bytes"
"compress/gzip"
"crypto/sha1"
"encoding/hex"
"fmt"
"io"
"os"
)
@@ -23,51 +20,3 @@ func FileSHA1(filePath string) (string, error) {
}
return SHA1(file), nil
}
func GunzipData(data []byte) (resData []byte, err error) {
b := bytes.NewBuffer(data)
var r io.Reader
r, err = gzip.NewReader(b)
if err != nil {
return
}
var resB bytes.Buffer
_, err = resB.ReadFrom(r)
if err != nil {
return
}
resData = resB.Bytes()
return
}
func ByteCountSI(b int64) string {
const unit = 1000
if b < unit {
return fmt.Sprintf("%d B", b)
}
div, exp := int64(unit), 0
for n := b / unit; n >= unit; n /= unit {
div *= unit
exp++
}
return fmt.Sprintf("%.1f %cB",
float64(b)/float64(div), "kMGTPE"[exp])
}
func ByteCountIEC(b int64) string {
const unit = 1024
if b < unit {
return fmt.Sprintf("%d B", b)
}
div, exp := int64(unit), 0
for n := b / unit; n >= unit; n /= unit {
div *= unit
exp++
}
return fmt.Sprintf("%.1f %ciB",
float64(b)/float64(div), "KMGTPE"[exp])
}

internal/fetch.go Normal file (175 lines changed)

@@ -0,0 +1,175 @@
package internal
import (
"archive/tar"
"archive/zip"
"compress/gzip"
"crypto/sha1"
"encoding/hex"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"sirherobrine23.org/minecraft-server/bedrockfetch/internal/convert"
"sirherobrine23.org/minecraft-server/bedrockfetch/internal/regex"
"sirherobrine23.org/minecraft-server/bedrockfetch/internal/request"
"sirherobrine23.org/minecraft-server/bedrockfetch/internal/tea"
)
var fileMatch = regex.MustCompile(`(?m)bin-(?P<Platform>win|linux)-?(?P<isPreview>preview)?\/bedrock-server-(?P<Version>[0-9\.]+)\.zip$`)
var bedrockRequest *request.RequestOptions = &request.RequestOptions{
HttpError: true,
Method: "GET",
Url: "https://minecraft.net/en-us/download/server/bedrock",
}
func (versions Versions) Find() error {
links, _, err := bedrockRequest.Links()
if err != nil {
return err
}
for _, urlString := range links {
req := request.RequestOptions{HttpError: true, Url: urlString}
if !strings.HasSuffix(urlString, ".zip") {
continue
} else if fileMatch.MatchString(urlString) {
match := fileMatch.FindAllGroups(urlString)
if match == nil {
continue
}
session, ok := versions[match["Version"]]
if !ok {
session = Version{
Platforms: map[string]VersionPlatform{},
}
}
if match["isPreview"] != "" { // the group key is always present, so test for a non-empty value
session.IsPreview = true
}
if len(match["Platform"]) == 0 {
match["Platform"] = "unknown"
}
zipPath := filepath.Join("./dists", match["Version"], fmt.Sprintf("%s%s", match["Platform"], filepath.Base(urlString)))
versionInfo := VersionPlatform{}
versionInfo.ZipFile = urlString
fmt.Printf("Downloading %q ...", urlString)
if err = req.File(zipPath); err != nil {
return err
} else if versionInfo.ZipSHA1, err = convert.FileSHA1(zipPath); err != nil {
return err
}
fmt.Printf(" done\nconverting to tar.gz ... ")
ziped, err := zip.OpenReader(zipPath)
if err != nil {
return err
}
tarFilePath := strings.Replace(zipPath, ".zip", ".tgz", 1)
tgzFile, err := os.Create(tarFilePath)
if err != nil {
return err
}
sha1Tgz := sha1.New()
gz := gzip.NewWriter(io.MultiWriter(tgzFile, sha1Tgz))
tarWriter := tar.NewWriter(gz)
defer tarWriter.Close()
defer gz.Close()
defer tgzFile.Close()
for _, file := range ziped.File {
info := file.FileInfo()
if strings.HasSuffix(file.Name, "bedrock_server") || strings.HasSuffix(file.Name, "bedrock-server") {
versionInfo.ReleaseDate = info.ModTime()
}
// Derive the tar header from the zip entry's FileInfo so type flags,
// permissions and sizes are correct for files and directories alike
tarHeader, err := tar.FileInfoHeader(info, "")
if err != nil {
return err
}
tarHeader.Name = file.Name
if err = tarWriter.WriteHeader(tarHeader); err != nil {
return err
}
if !info.IsDir() {
fileRaw, err := file.Open()
if err != nil {
return err
} else if _, err = io.Copy(tarWriter, fileRaw); err != nil {
return err
}
}
}
tarWriter.Close() // End tar writer
gz.Close() // Close Gzip
tgzFile.Close() // Close file
versionInfo.TarSHA1 = hex.EncodeToString(sha1Tgz.Sum(nil)) // Get sha1 and convert to hex string
versionInfo.TarFile = tarFilePath
fmt.Println("Done")
if match["Platform"] == "linux" {
session.Platforms["linux/amd64"] = versionInfo
} else if match["Platform"] == "win" {
session.Platforms["windows/amd64"] = versionInfo
} else if match["Platform"] == "darwin" {
session.Platforms["darwin/arm64"] = versionInfo
}
versions[match["Version"]] = session
} else {
fmt.Println(urlString)
}
}
// Upload tar release
fmt.Println("Checking to upload release files")
for version, release := range versions {
postUpdate := false
giteaRelease := tea.GiteaRelease{TagName: version, IsDraft: true, IsPrerelease: release.IsPreview}
// Create the release only when the tag does not exist yet
if giteaRelease.GetRelease() != nil {
err = giteaRelease.CreateRelease()
if err != nil {
return err
}
postUpdate = true
}
for platform, info := range release.Platforms {
// Skip if the file was already uploaded to the release
if strings.HasPrefix(info.TarFile, "http") {
continue
}
// Open file
localFile, err := os.Open(info.TarFile)
if err != nil {
return err
}
defer localFile.Close()
asset, err := giteaRelease.UploadAsset(fmt.Sprintf("%s.tgz", strings.ReplaceAll(platform, "/", "_")), localFile)
if err != nil {
return err
}
info.TarFile = asset.FileUrl
// info is a copy of the map value, so write the updated entry back
release.Platforms[platform] = info
}
if postUpdate {
giteaRelease.IsDraft = false
giteaRelease.UpdateRelease()
}
}
return nil
}

internal/regex/regex.go Normal file (79 lines changed)

@@ -0,0 +1,79 @@
package regex
import "regexp"
type Xpp struct {
regexp.Regexp
}
// FindAllGroups returns a map with each match group. The map key corresponds to the match group name.
// A nil return value indicates no matches.
func (re *Xpp) FindAllGroups(s string) map[string]string {
matches := re.FindStringSubmatch(s)
subnames := re.SubexpNames()
if matches == nil || len(matches) != len(subnames) {
return nil
}
matchMap := map[string]string{}
for i := 1; i < len(matches); i++ {
matchMap[subnames[i]] = matches[i]
}
return matchMap
}
// Compile parses a regular expression and returns, if successful,
// a [Regexp] object that can be used to match against text.
//
// When matching against text, the regexp returns a match that
// begins as early as possible in the input (leftmost), and among those
// it chooses the one that a backtracking search would have found first.
// This so-called leftmost-first matching is the same semantics
// that Perl, Python, and other implementations use, although this
// package implements it without the expense of backtracking.
// For POSIX leftmost-longest matching, see [CompilePOSIX].
func Compile(expr string) (*Xpp, error) {
re, err := regexp.Compile(expr)
if err != nil {
return nil, err
}
return &Xpp{*re}, nil
}
// CompilePOSIX is like [Compile] but restricts the regular expression
// to POSIX ERE (egrep) syntax and changes the match semantics to
// leftmost-longest.
//
// That is, when matching against text, the regexp returns a match that
// begins as early as possible in the input (leftmost), and among those
// it chooses a match that is as long as possible.
// This so-called leftmost-longest matching is the same semantics
// that early regular expression implementations used and that POSIX
// specifies.
//
// However, there can be multiple leftmost-longest matches, with different
// submatch choices, and here this package diverges from POSIX.
// Among the possible leftmost-longest matches, this package chooses
// the one that a backtracking search would have found first, while POSIX
// specifies that the match be chosen to maximize the length of the first
// subexpression, then the second, and so on from left to right.
// The POSIX rule is computationally prohibitive and not even well-defined.
// See https://swtch.com/~rsc/regexp/regexp2.html#posix for details.
func CompilePOSIX(expr string) (*Xpp, error) {
re, err := regexp.CompilePOSIX(expr)
if err != nil {
return nil, err
}
return &Xpp{*re}, nil
}
// MustCompile is like [Compile] but panics if the expression cannot be parsed.
// It simplifies safe initialization of global variables holding compiled regular
// expressions.
func MustCompile(expr string) *Xpp {
re := regexp.MustCompile(expr)
return &Xpp{*re}
}
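
For reference, a minimal standalone sketch of what the named-group extraction in FindAllGroups produces for a download link shaped like the ones fileMatch targets. It is written against the standard regexp package so it runs outside this module, and the URL below is made up:

package main

import (
	"fmt"
	"regexp"
)

var fileMatch = regexp.MustCompile(`(?m)bin-(?P<Platform>win|linux)-?(?P<isPreview>preview)?\/bedrock-server-(?P<Version>[0-9\.]+)\.zip$`)

// findAllGroups mirrors Xpp.FindAllGroups: one map entry per named group.
func findAllGroups(re *regexp.Regexp, s string) map[string]string {
	matches := re.FindStringSubmatch(s)
	if matches == nil {
		return nil
	}
	groups := map[string]string{}
	for i, name := range re.SubexpNames() {
		if i == 0 || name == "" {
			continue
		}
		groups[name] = matches[i]
	}
	return groups
}

func main() {
	link := "https://example.com/bin-linux/bedrock-server-1.20.81.01.zip" // made-up link
	fmt.Println(findAllGroups(fileMatch, link))
	// Expected shape: map[Platform:linux Version:1.20.81.01 isPreview:]
	// isPreview is present but empty for non-preview links, which is why callers
	// should test match["isPreview"] != "" rather than key presence.
}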

@@ -1,19 +1,32 @@
package request
import (
"compress/flate"
"archive/tar"
"compress/gzip"
"crypto/md5"
"crypto/sha1"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"os"
"path/filepath"
"strings"
"golang.org/x/net/html"
)
var (
ErrNoUrl = errors.New("no url informed")
ErrPageNotExist = errors.New("page does not exist")
)
var DefaultHeader = http.Header{
"Accept": {"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7"},
"Accept": {"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7"},
// "Accept-Encoding": {"gzip, deflate"},
"Accept-Language": {"en-US;q=0.9,en;q=0.8"},
"Sec-Ch-Ua": {`"Google Chrome";v="123", "Not:A-Brand";v="8", "Chromium";v="123\"`},
@@ -27,26 +40,68 @@ var DefaultHeader = http.Header{
"User-Agent": {"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36"},
}
type RequestOptions struct {
Url string // Request url
Method string // Request Method
Body io.Reader // Body Reader
Headers http.Header // Extra HTTP Headers
HttpError bool // Return if status code is equal or then 300
func arrayHas(arr []int, is int) bool {
for _, k := range arr {
if k == is {
return true
}
}
return false
}
// Make custom request and return request, response and error if exist
func Request(opt RequestOptions) (http.Request, http.Response, error) {
type RequestOptions struct {
Url string // Request URL
HttpError bool // Return an error when the status code is >= 300
Method string // Request method
Body io.Reader // Body reader
Headers http.Header // Extra HTTP headers
Querys map[string]string // Default query parameters to set in the URL
CodesRetrys []int // Status codes that trigger a retry for GET requests
}
func (w *RequestOptions) ToUrl() (*url.URL, error) {
if len(w.Url) == 0 {
return nil, ErrNoUrl
}
urlParsed, err := url.Parse(w.Url)
if err != nil {
return nil, err
}
if len(w.Querys) > 0 {
query := urlParsed.Query()
for key, value := range w.Querys {
query.Set(key, value)
}
urlParsed.RawQuery = query.Encode()
}
return urlParsed, nil
}
func (w *RequestOptions) String() (string, error) {
s, err := w.ToUrl()
if err != nil {
return "", err
}
return s.String(), nil
}
func (opt *RequestOptions) Request() (http.Response, error) {
if len(opt.Method) == 0 {
opt.Method = "GET"
}
// Create request
req, err := http.NewRequest(opt.Method, opt.Url, opt.Body)
// Check for request error
urlRequest, err := opt.ToUrl()
if err != nil {
return *req, http.Response{}, err
return http.Response{}, err
}
// Create request
req, err := http.NewRequest(opt.Method, urlRequest.String(), opt.Body)
if err != nil {
return http.Response{}, err
}
// Project headers
@@ -60,63 +115,193 @@ func Request(opt RequestOptions) (http.Request, http.Response, error) {
}
// Create response from request
client := &http.Client{}
res, err := client.Do(req)
// Check that the server actually sent compressed data
var reader io.ReadCloser
switch res.Header.Get("Content-Encoding") {
case "gzip":
reader, err = gzip.NewReader(res.Body)
// defer reader.Close()
case "deflate":
reader = flate.NewReader(res.Body)
// defer reader.Close()
default:
reader = res.Body
res, err := (&http.Client{}).Do(req)
if err != nil {
return *res, err
}
// Replace reader
res.Body = reader
if opt.HttpError && res.StatusCode >= 300 {
err = fmt.Errorf("response non < 299, code %d", res.StatusCode)
if opt.Method == "GET" && arrayHas(opt.CodesRetrys, res.StatusCode) {
return opt.Request()
} else if res.StatusCode == 404 {
err = ErrPageNotExist
} else {
err = fmt.Errorf("response status code %d is not below 300, url: %q", res.StatusCode, opt.Url)
}
}
// Error handling is left to the caller
return *req, *res, err
return *res, err
}
func RequestHtmlLinks(opt RequestOptions) ([]string, http.Request, http.Response, error) {
req, res, err := Request(opt)
func (opt *RequestOptions) Do(jsonInterface any) (http.Response, error) {
res, err := opt.Request()
if err != nil {
return res, err
}
defer res.Body.Close()
return res, json.NewDecoder(res.Body).Decode(jsonInterface)
}
func (opt *RequestOptions) WriteStream(writer io.Writer) error {
res, err := opt.Request()
if err != nil {
return err
}
defer res.Body.Close()
_, err = io.Copy(writer, res.Body)
if err != nil {
return err
}
return nil
}
func (opt *RequestOptions) File(pathToSave string) error {
os.MkdirAll(filepath.Dir(pathToSave), 0o755) // directories need the execute bit to be usable
// Create file
localFile, err := os.Create(pathToSave)
if err != nil {
return err
}
defer localFile.Close()
// Create Request
res, err := opt.Request()
if err != nil {
return err
}
defer res.Body.Close()
// Copy data
if _, err = io.Copy(localFile, res.Body); err != nil {
return err
}
return nil
}
// Create request and calculate MD5 for the response body
func (opt *RequestOptions) MD5() (string, error) {
res, err := opt.Request()
if err != nil {
return "", err
}
defer res.Body.Close()
sumWriter := md5.New()
if _, err = io.Copy(sumWriter, res.Body); err != nil {
return "", err
}
return hex.EncodeToString(sumWriter.Sum(nil)), nil
}
// Create request and calculate SHA1 for the response body; SHA256 is recommended instead
func (opt *RequestOptions) SHA1() (string, error) {
res, err := opt.Request()
if err != nil {
return "", err
}
defer res.Body.Close()
sumWriter := sha1.New()
if _, err = io.Copy(sumWriter, res.Body); err != nil {
return "", err
}
return hex.EncodeToString(sumWriter.Sum(nil)), nil
}
// Create request and calculate SHA256 for the response body
func (opt *RequestOptions) SHA256() (string, error) {
res, err := opt.Request()
if err != nil {
return "", err
}
defer res.Body.Close()
sumWriter := sha256.New()
if _, err = io.Copy(sumWriter, res.Body); err != nil {
return "", err
}
return hex.EncodeToString(sumWriter.Sum(nil)), nil
}
func (opt *RequestOptions) Links() ([]string, http.Response, error) {
res, err := opt.Request()
if err != nil {
return []string{}, res, err
}
defer res.Body.Close()
doc, err := html.Parse(res.Body)
if err != nil {
return []string{}, res, err
}
urls := []string{}
if err == nil {
doc, err := html.Parse(res.Body)
if err != nil {
return urls, req, res, err
}
var find func(*html.Node)
find = func(n *html.Node) {
for _, v := range n.Attr {
if v.Key == "src" || v.Key == "href" {
urls = append(urls, v.Val)
}
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
find(c)
var find func(*html.Node)
find = func(n *html.Node) {
for _, v := range n.Attr {
if v.Key == "src" || v.Key == "href" {
urls = append(urls, v.Val)
}
}
find(doc)
for c := n.FirstChild; c != nil; c = c.NextSibling {
find(c)
}
}
find(doc)
urlParsed, _ := url.Parse(opt.Url)
for urlIndex, value := range urls {
if strings.HasPrefix(value, "/") {
urls[urlIndex] = fmt.Sprintf("%s://%s%s", urlParsed.Scheme, urlParsed.Host, value)
rootUrl, _ := opt.ToUrl()
for urlIndex, reqPath := range urls {
if strings.HasPrefix(reqPath, "/") {
// Host-root relative link: resolve it against the request URL's scheme and host
resolved, err := rootUrl.Parse(reqPath)
if err != nil {
return []string{}, res, err
}
urls[urlIndex] = resolved.String()
}
}
return urls, req, res, err
return urls, res, err
}
func RequestGithub(Repo, Release, Root string) error {
if err := os.MkdirAll(Root, 0o755); err != nil {
return err
}
req := RequestOptions{HttpError: true, Url: fmt.Sprintf("https://github.com/%s/archive/%s.tar.gz", Repo, Release)}
res, err := req.Request()
if err != nil {
return err
}
defer res.Body.Close()
gz, err := gzip.NewReader(res.Body)
if err != nil {
return err
}
defer gz.Close()
tarball := tar.NewReader(gz)
for {
head, err := tarball.Next()
if err != nil {
if err == io.EOF {
break
}
return err
}
// GitHub archives wrap everything in a "<repo>-<ref>/" folder; strip that prefix
nameParts := strings.SplitN(filepath.ToSlash(head.Name), "/", 2)
if len(nameParts) < 2 || nameParts[1] == "" {
continue
}
target := filepath.Join(Root, filepath.FromSlash(nameParts[1]))
fileinfo := head.FileInfo()
if fileinfo.IsDir() {
if err := os.MkdirAll(target, fileinfo.Mode()); err != nil {
return err
}
continue
}
if err := os.MkdirAll(filepath.Dir(target), 0o755); err != nil {
return err
}
file, err := os.OpenFile(target, os.O_RDWR|os.O_CREATE|os.O_TRUNC, fileinfo.Mode())
if err != nil {
return err
}
if _, err = io.Copy(file, tarball); err != nil {
file.Close()
return err
}
file.Close()
}
return nil
}
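
As a quick orientation for the reworked request package, a small usage sketch of the new RequestOptions helpers; it assumes the module's internal/request import path from this diff, and the URLs and output path are only illustrative:

package main

import (
	"fmt"

	"sirherobrine23.org/minecraft-server/bedrockfetch/internal/request"
)

func main() {
	page := request.RequestOptions{
		Url:         "https://www.minecraft.net/en-us/download/server/bedrock",
		HttpError:   true,
		CodesRetrys: []int{502, 503}, // retry GETs on transient upstream errors
	}

	// Links collects every href/src attribute from the response body.
	links, _, err := page.Links()
	if err != nil {
		panic(err)
	}
	fmt.Printf("found %d links\n", len(links))
	if len(links) == 0 {
		return
	}

	// File streams a response straight to disk; SHA256 hashes a second request.
	asset := request.RequestOptions{Url: links[0], HttpError: true}
	if err := asset.File("./dists/example.bin"); err != nil {
		panic(err)
	}
	sum, err := asset.SHA256()
	if err != nil {
		panic(err)
	}
	fmt.Println("sha256:", sum)
}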

@@ -1,4 +1,4 @@
package gitea
package tea
import (
"bytes"
@@ -51,54 +51,61 @@ type GiteaRelease struct {
FileAssets []GiteaAsset `json:"assets"`
}
func CreateRelease(Release GiteaRelease) (GiteaRelease, error) {
// GetRelease loads the existing release for TagName into the struct (pointer receiver so ReleaseId is kept)
func (Release *GiteaRelease) GetRelease() error {
if len(Release.TagName) == 0 {
return GiteaRelease{}, fmt.Errorf("set release tag name")
return fmt.Errorf("set release tag name")
}
if len(Release.AuthToken) == 0 {
Release.AuthToken = Token
}
_, err := (&request.RequestOptions{
HttpError: true,
Method: "GET",
Url: fmt.Sprintf("%s/tags/%s", GiteaReleaseAPI, Release.TagName),
Headers: http.Header{"Authorization": {fmt.Sprintf("token %s", Release.AuthToken)}},
}).Do(&Release)
return err
}
// CreateRelease recreates the tagged release and keeps the returned data, ReleaseId included, on the receiver
func (Release *GiteaRelease) CreateRelease() error {
if len(Release.AuthToken) == 0 {
Release.AuthToken = Token
}
if len(Release.TagName) == 0 {
return fmt.Errorf("set release tag name")
}
req := request.RequestOptions{
HttpError: false,
Method: "GET",
Url: fmt.Sprintf("%s/tags/%s", GiteaReleaseAPI, Release.TagName),
Headers: http.Header{"Authorization": {fmt.Sprintf("token %s", Release.AuthToken)}},
}
if res, _ := req.Request(); res.StatusCode == 200 {
req.Method = "DELETE"
req.Request()
}
bodyBytes, err := json.Marshal(&Release)
if err != nil {
return GiteaRelease{}, err
return err
}
if _, res, _ := request.Request(request.RequestOptions{Url: fmt.Sprintf("%s/tags/%s", GiteaReleaseAPI, Release.TagName), Method: "GET", HttpError: false, Headers: http.Header{"Authorization": {fmt.Sprintf("token %s", Release.AuthToken)}}}); res.StatusCode == 200 {
request.Request(request.RequestOptions{
Url: fmt.Sprintf("%s/tags/%s", GiteaReleaseAPI, Release.TagName),
Method: "DELETE",
HttpError: false,
Headers: http.Header{
"Authorization": {fmt.Sprintf("token %s", Release.AuthToken)},
},
})
}
_, res, err := request.Request(request.RequestOptions{
Url: GiteaReleaseAPI,
Method: "POST",
Body: bytes.NewReader(bodyBytes),
HttpError: true,
Headers: http.Header{
"Authorization": {fmt.Sprintf("token %s", Release.AuthToken)},
"Content-Type": {"application/json"},
},
})
defer res.Body.Close()
if res.StatusCode < 300 {
data, err := io.ReadAll(res.Body)
if err != nil {
return Release, err
}
err = json.Unmarshal(data, &Release)
if err != nil {
return Release, err
}
}
return Release, err
req.HttpError = true
req.Method = "POST"
req.Body = bytes.NewReader(bodyBytes)
req.Headers.Set("Content-Type", "application/json")
_, err = req.Do(&Release)
return err
}
// Update release
func (Release *GiteaRelease) UpdateRelease() error {
if len(Release.AuthToken) == 0 {
Release.AuthToken = Token
}
if len(Release.TagName) == 0 {
return fmt.Errorf("set release tag name")
}
@@ -108,32 +115,20 @@ func (Release *GiteaRelease) UpdateRelease() error {
return err
}
_, res, err := request.Request(request.RequestOptions{
Url: fmt.Sprintf("%s/%d", GiteaReleaseAPI, Release.ReleaseId),
_, err = (&request.RequestOptions{
HttpError: true,
Method: "PATCH",
Body: bytes.NewReader(bodyBytes),
Url: fmt.Sprintf("%s/%d", GiteaReleaseAPI, Release.ReleaseId),
Headers: http.Header{
"Authorization": {fmt.Sprintf("token %s", Release.AuthToken)},
"Content-Type": {"application/json"},
},
})
defer res.Body.Close()
if res.StatusCode < 300 {
data, err := io.ReadAll(res.Body)
if err != nil {
return err
}
err = json.Unmarshal(data, &Release)
if err != nil {
return err
}
}
}).Do(&Release)
return err
}
// Get the asset file index; returns -1 if the asset does not exist
func (Release *GiteaRelease) FileAssetExist(fileName string) int {
for assetIndex, v := range Release.FileAssets {
if v.FileName == fileName {
@@ -144,8 +139,9 @@ func (Release *GiteaRelease) FileAssetExist(fileName string) int {
}
func (Release *GiteaRelease) UploadAsset(fileName string, fileStream io.Reader) (GiteaAsset, error) {
assetExistIndex := Release.FileAssetExist(fileName)
if len(Release.AuthToken) == 0 {
Release.AuthToken = Token
}
body := new(bytes.Buffer)
writer := multipart.NewWriter(body)
part, err := writer.CreateFormFile("attachment", fileName)
@@ -161,36 +157,31 @@ func (Release *GiteaRelease) UploadAsset(fileName string, fileStream io.Reader)
}
var ResData GiteaAsset
_, res, err := request.Request(request.RequestOptions{
Url: fmt.Sprintf("%s/%d/assets", GiteaReleaseAPI, Release.ReleaseId),
HttpError: true,
Method: "POST",
Body: body,
Headers: http.Header{
"Authorization": {fmt.Sprintf("token %s", Release.AuthToken)},
"Content-Type": {writer.FormDataContentType()},
},
})
defer res.Body.Close()
for i := 5; i > 0; i-- {
res, err := (&request.RequestOptions{
Url: fmt.Sprintf("%s/%d/assets", GiteaReleaseAPI, Release.ReleaseId),
HttpError: false,
Method: "POST",
Body: body,
Headers: http.Header{
"Authorization": {fmt.Sprintf("token %s", Release.AuthToken)},
"Content-Type": {writer.FormDataContentType()},
},
}).Do(&ResData)
// Attempt to upload
if err != nil {
io.Copy(os.Stderr, res.Body)
return GiteaAsset{}, err
}
if res.StatusCode < 300 {
data, err := io.ReadAll(res.Body)
if err != nil {
return GiteaAsset{}, err
}
err = json.Unmarshal(data, &ResData)
// Attempt to upload
if err != nil {
return GiteaAsset{}, err
} else if res.StatusCode != 201 {
if res.StatusCode >= 500 {
continue
}
return GiteaAsset{}, fmt.Errorf("cannot upload release, code status: %d (%s)", res.StatusCode, res.Status)
}
}
assetExistIndex := Release.FileAssetExist(fileName)
Release.FileAssets = append(Release.FileAssets, ResData)
if assetExistIndex != -1 {
Release.FileAssets = append(Release.FileAssets[:assetExistIndex], Release.FileAssets[assetExistIndex+1:]...)

internal/versions.go Normal file (17 lines changed)

@@ -0,0 +1,17 @@
package internal
import "time"
type VersionPlatform struct {
ZipFile string `json:"zipFile"` // Minecraft server zip file URL or local path
ZipSHA1 string `json:"zipSHA1"` // SHA1 to verify the integrity of the zip file
TarFile string `json:"tarFile"` // Minecraft server tar.gz URL or local path
TarSHA1 string `json:"tarSHA1"` // SHA1 to verify the integrity of the tar file
ReleaseDate time.Time `json:"releaseDate"` // Platform release/build date
}
type Version struct {
IsPreview bool `json:"preview"` // Preview server
Platforms map[string]VersionPlatform `json:"platforms"` // Targets keyed by Go platform (os/arch)
}
type Versions map[string]Version
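
To show how these struct tags serialize, a sketch that builds one Versions entry and prints it as JSON; the version number, URLs and hashes are placeholders, and the import path is the module path used elsewhere in this diff:

package main

import (
	"encoding/json"
	"os"
	"time"

	"sirherobrine23.org/minecraft-server/bedrockfetch/internal"
)

func main() {
	versions := internal.Versions{
		"1.20.81.01": { // placeholder version
			IsPreview: false,
			Platforms: map[string]internal.VersionPlatform{
				"linux/amd64": {
					ZipFile:     "https://example.com/bedrock-server-1.20.81.01.zip",
					ZipSHA1:     "da39a3ee5e6b4b0d3255bfef95601890afd80709", // placeholder hash
					TarFile:     "https://example.com/linux_amd64.tgz",
					TarSHA1:     "da39a3ee5e6b4b0d3255bfef95601890afd80709", // placeholder hash
					ReleaseDate: time.Date(2024, 4, 25, 0, 0, 0, 0, time.UTC),
				},
			},
		},
	}
	// Mirrors the encoder setup in main.go: versions.json is a map keyed by version,
	// each value carrying "preview" and a "platforms" map in Go os/arch notation.
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", " ")
	if err := enc.Encode(&versions); err != nil {
		panic(err)
	}
}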

main.go (79 lines changed)

@@ -1,68 +1,31 @@
package main
import (
"fmt"
"log"
"time"
"encoding/json"
"os"
"path/filepath"
"sirherobrine23.org/minecraft-server/bedrockfetch/bds"
"sirherobrine23.org/minecraft-server/bedrockfetch/internal"
)
func main() {
files, err := bds.MinecraftFetch()
localReleases := internal.Versions{}
VersionsPathFile, _ := filepath.Abs("./versions.json")
file, err := os.Open(VersionsPathFile)
if err != nil {
log.Fatalln(err)
panic(err)
}
for version, versionTargets := range files {
if _, exist := bds.Versions[version]; exist {
fmt.Printf("%s ared exist, skip\n", version)
continue
}
release := bds.BedrockVersions{
Version: version,
ReleaseType: "release",
DateRelease: time.Time{},
Targets: []bds.BedrockVersionsTarget{},
}
if versionTargets[0].IsPreview {
release.ReleaseType = "preview"
}
fmt.Printf("Minecraft server, version: %s\n", version)
for _, target := range versionTargets {
targetRelease := bds.BedrockVersionsTarget{
ZipFile: target.FileUrl,
NodeArch: "x64",
NodePlatform: "unknown",
}
if target.Platform == "linux" {
targetRelease.NodePlatform = "linux"
} else if target.Platform == "win" {
targetRelease.NodePlatform = "win32"
}
// Download file and convert
err = targetRelease.Download()
if err != nil {
log.Fatalln(err)
}
release.Targets = append(release.Targets, targetRelease)
}
// Upload files to release
err = release.UploadRelease()
if err != nil {
log.Fatalln(err)
}
// Add to global versions
bds.AddNewVersion(release)
defer file.Close()
if err = json.NewDecoder(file).Decode(&localReleases); err != nil {
panic(err)
}
bds.SaveLocalVersions()
}
file.Close()
if err = localReleases.Find(); err != nil {
panic(err)
}
file, err = os.Create(VersionsPathFile)
if err != nil {
panic(err)
}
et := json.NewEncoder(file)
et.SetIndent("", " ")
if err = et.Encode(&localReleases); err != nil {
panic(err)
}
}

File diff suppressed because it is too large Load Diff