Improve memory usage of dump by not loading all files in memory prior to adding them to the zip

This commit is contained in:
kolaente 2020-06-20 11:48:45 +02:00
parent c12bac0c96
commit db0126968a
Signed by untrusted user: konrad
GPG Key ID: F40E70337AB24C9B
2 changed files with 17 additions and 10 deletions

View File

@@ -16,27 +16,25 @@
 package files

-import "bytes"
+import (
+	"io"
+)
 // Dump dumps all saved files
 // This only includes the raw files, no db entries.
-func Dump() (allFiles map[int64][]byte, err error) {
+func Dump() (allFiles map[int64]io.ReadCloser, err error) {
 	files := []*File{}
 	err = x.Find(&files)
 	if err != nil {
 		return
 	}

-	allFiles = make(map[int64][]byte, len(files))
+	allFiles = make(map[int64]io.ReadCloser, len(files))
 	for _, file := range files {
 		if err := file.LoadFileByID(); err != nil {
 			return nil, err
 		}
-		var buf bytes.Buffer
-		if _, err := buf.ReadFrom(file.File); err != nil {
-			return nil, err
-		}
-		allFiles[file.ID] = buf.Bytes()
+		allFiles[file.ID] = file.File
 	}

 	return

View File

@@ -80,11 +80,20 @@ func Dump(filename string) error {
 	if err != nil {
 		return fmt.Errorf("error saving file: %s", err)
 	}

-	for fid, fcontent := range allFiles {
-		err = writeBytesToZip("files/"+strconv.FormatInt(fid, 10), fcontent, dumpWriter)
+	for fid, file := range allFiles {
+		header := &zip.FileHeader{
+			Name:   "files/" + strconv.FormatInt(fid, 10),
+			Method: compressionUsed,
+		}
+		w, err := dumpWriter.CreateHeader(header)
+		if err != nil {
+			return err
+		}
+		_, err = io.Copy(w, file)
 		if err != nil {
 			return fmt.Errorf("error writing file %d: %s", fid, err)
 		}
+		_ = file.Close()
 	}

 	log.Infof("Dumped files")