Mirror of https://github.com/go-gitea/gitea.git, synced 2024-12-02 23:24:12 +08:00

Backport #27787 by @KN4CK3R

Add missing `.Close()` calls. The current code does not delete the temporary files if the data grows over 32mb.

Co-authored-by: KN4CK3R <admin@oldschoolhack.me>

This commit is contained in:
parent 9dd8f34707
commit 25acbfed36
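
For context, a minimal sketch of the pattern the diff below enforces. It assumes the `packages_module` alias resolves to Gitea's `code.gitea.io/gitea/modules/packages` package and uses only the calls visible in the hunks (`NewHashedBuffer`, writing through `io.Copy`, `Close`); the `writeIndex` helper is hypothetical and only illustrative. Per the commit message, a `HashedBuffer` spills to a temporary file once its data grows past the in-memory limit (32 MB by default), and only `Close` removes that file, so every buffer gets a `defer ...Close()` right after it is created.

package main

import (
	"io"
	"log"
	"strings"

	// Assumed import path for the buffer type shown in the diff.
	packages_module "code.gitea.io/gitea/modules/packages"
)

// writeIndex (hypothetical) shows the fix pattern: create the buffer and
// defer Close immediately, so the temporary backing file is deleted even
// when a later step returns an error.
func writeIndex(r io.Reader) error {
	content, err := packages_module.NewHashedBuffer()
	if err != nil {
		return err
	}
	defer content.Close() // without this, a buffer spilled to disk leaks its temp file

	_, err = io.Copy(content, r)
	return err
}

func main() {
	if err := writeIndex(strings.NewReader("example index payload")); err != nil {
		log.Fatal(err)
	}
}
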
@@ -32,7 +32,7 @@ func NewHashedBuffer() (*HashedBuffer, error) {
 	return NewHashedBufferWithSize(DefaultMemorySize)
 }
 
-// NewHashedBuffer creates a hashed buffer with a specific memory size
+// NewHashedBufferWithSize creates a hashed buffer with a specific memory size
 func NewHashedBufferWithSize(maxMemorySize int) (*HashedBuffer, error) {
 	b, err := filebuffer.New(maxMemorySize)
 	if err != nil {

@@ -238,6 +238,8 @@ func buildPackagesIndex(ctx context.Context, ownerID int64, repoVersion *package
 	}
 
 	unsignedIndexContent, _ := packages_module.NewHashedBuffer()
+	defer unsignedIndexContent.Close()
+
 	h := sha1.New()
 
 	if err := writeGzipStream(io.MultiWriter(unsignedIndexContent, h), "APKINDEX", buf.Bytes(), true); err != nil {

@@ -275,6 +277,7 @@ func buildPackagesIndex(ctx context.Context, ownerID int64, repoVersion *package
 	}
 
 	signedIndexContent, _ := packages_module.NewHashedBuffer()
+	defer signedIndexContent.Close()
 
 	if err := writeGzipStream(
 		signedIndexContent,

@@ -196,11 +196,16 @@ func buildPackagesIndices(ctx context.Context, ownerID int64, repoVersion *packa
 	}
 
 	packagesContent, _ := packages_module.NewHashedBuffer()
+	defer packagesContent.Close()
 
 	packagesGzipContent, _ := packages_module.NewHashedBuffer()
+	defer packagesGzipContent.Close()
+
 	gzw := gzip.NewWriter(packagesGzipContent)
 
 	packagesXzContent, _ := packages_module.NewHashedBuffer()
+	defer packagesXzContent.Close()
+
 	xzw, _ := xz.NewWriter(packagesXzContent)
 
 	w := io.MultiWriter(packagesContent, gzw, xzw)

@@ -323,6 +328,8 @@ func buildReleaseFiles(ctx context.Context, ownerID int64, repoVersion *packages
 	}
 
 	inReleaseContent, _ := packages_module.NewHashedBuffer()
+	defer inReleaseContent.Close()
+
 	sw, err := clearsign.Encode(inReleaseContent, e.PrivateKey, nil)
 	if err != nil {
 		return err

@@ -367,11 +374,14 @@ func buildReleaseFiles(ctx context.Context, ownerID int64, repoVersion *packages
 	sw.Close()
 
 	releaseGpgContent, _ := packages_module.NewHashedBuffer()
+	defer releaseGpgContent.Close()
+
 	if err := openpgp.ArmoredDetachSign(releaseGpgContent, e, bytes.NewReader(buf.Bytes()), nil); err != nil {
 		return err
 	}
 
 	releaseContent, _ := packages_module.CreateHashedBufferFromReader(&buf)
+	defer releaseContent.Close()
 
 	for _, file := range []struct {
 		Name string

@@ -258,11 +258,14 @@ func buildRepomd(ctx context.Context, pv *packages_model.PackageVersion, ownerID
 	}
 
 	repomdAscContent, _ := packages_module.NewHashedBuffer()
+	defer repomdAscContent.Close()
+
 	if err := openpgp.ArmoredDetachSign(repomdAscContent, e, bytes.NewReader(buf.Bytes()), nil); err != nil {
 		return err
 	}
 
 	repomdContent, _ := packages_module.CreateHashedBufferFromReader(&buf)
+	defer repomdContent.Close()
 
 	for _, file := range []struct {
 		Name string

@@ -548,6 +551,8 @@ func (wc *writtenCounter) Written() int64 {
 
 func addDataAsFileToRepo(ctx context.Context, pv *packages_model.PackageVersion, filetype string, obj any) (*repoData, error) {
	content, _ := packages_module.NewHashedBuffer()
+	defer content.Close()
+
 	gzw := gzip.NewWriter(content)
 	wc := &writtenCounter{}
 	h := sha256.New()