Add util package

Jan Eitzinger 2023-05-12 15:09:39 +02:00
parent 35bc674b43
commit 6aea486891
5 changed files with 224 additions and 137 deletions

internal/util/compress.go Normal file

@@ -0,0 +1,73 @@
package util

import (
    "compress/gzip"
    "io"
    "os"

    "github.com/ClusterCockpit/cc-backend/pkg/log"
)

// CompressFile gzips fileIn into fileOut and removes fileIn on success.
func CompressFile(fileIn string, fileOut string) error {
    originalFile, err := os.Open(fileIn)
    if err != nil {
        log.Errorf("CompressFile() error: %v", err)
        return err
    }
    defer originalFile.Close()

    gzippedFile, err := os.Create(fileOut)
    if err != nil {
        log.Errorf("CompressFile() error: %v", err)
        return err
    }
    defer gzippedFile.Close()

    gzipWriter := gzip.NewWriter(gzippedFile)
    defer gzipWriter.Close()

    _, err = io.Copy(gzipWriter, originalFile)
    if err != nil {
        log.Errorf("CompressFile() error: %v", err)
        return err
    }
    gzipWriter.Flush()

    if err := os.Remove(fileIn); err != nil {
        log.Errorf("CompressFile() error: %v", err)
        return err
    }

    return nil
}

// UncompressFile gunzips fileIn into fileOut and removes fileIn on success.
func UncompressFile(fileIn string, fileOut string) error {
    gzippedFile, err := os.Open(fileIn)
    if err != nil {
        log.Errorf("UncompressFile() error: %v", err)
        return err
    }
    defer gzippedFile.Close()

    gzipReader, err := gzip.NewReader(gzippedFile)
    if err != nil {
        log.Errorf("UncompressFile() error: %v", err)
        return err
    }
    defer gzipReader.Close()

    uncompressedFile, err := os.Create(fileOut)
    if err != nil {
        log.Errorf("UncompressFile() error: %v", err)
        return err
    }
    defer uncompressedFile.Close()

    _, err = io.Copy(uncompressedFile, gzipReader)
    if err != nil {
        log.Errorf("UncompressFile() error: %v", err)
        return err
    }

    if err := os.Remove(fileIn); err != nil {
        log.Errorf("UncompressFile() error: %v", err)
        return err
    }

    return nil
}
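
A minimal usage sketch for the two helpers (the file names are placeholders). Both functions delete fileIn once the output has been written, so a compress/uncompress pair moves the data rather than duplicating it:

package main

import "github.com/ClusterCockpit/cc-backend/internal/util"

func main() {
    // Gzip data.json into data.json.gz; data.json is removed on success.
    if err := util.CompressFile("data.json", "data.json.gz"); err != nil {
        panic(err)
    }

    // Restore the plain file; data.json.gz is removed on success.
    if err := util.UncompressFile("data.json.gz", "data.json"); err != nil {
        panic(err)
    }
}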

internal/util/copy.go Normal file

@@ -0,0 +1,106 @@
package util

import (
    "fmt"
    "io"
    "io/ioutil"
    "os"
    "path/filepath"
)

// CopyFile copies the contents of src to dst and applies src's file mode to dst.
func CopyFile(src, dst string) (err error) {
    in, err := os.Open(src)
    if err != nil {
        return
    }
    defer in.Close()

    out, err := os.Create(dst)
    if err != nil {
        return
    }
    defer func() {
        if e := out.Close(); e != nil {
            err = e
        }
    }()

    _, err = io.Copy(out, in)
    if err != nil {
        return
    }

    err = out.Sync()
    if err != nil {
        return
    }

    si, err := os.Stat(src)
    if err != nil {
        return
    }
    err = os.Chmod(dst, si.Mode())
    if err != nil {
        return
    }

    return
}

// CopyDir recursively copies a directory tree, attempting to preserve permissions.
// The source directory must exist; the destination directory must *not* exist.
// Symlinks are ignored and skipped.
func CopyDir(src string, dst string) (err error) {
    src = filepath.Clean(src)
    dst = filepath.Clean(dst)

    si, err := os.Stat(src)
    if err != nil {
        return err
    }
    if !si.IsDir() {
        return fmt.Errorf("source is not a directory")
    }

    _, err = os.Stat(dst)
    if err != nil && !os.IsNotExist(err) {
        return
    }
    if err == nil {
        return fmt.Errorf("destination already exists")
    }

    err = os.MkdirAll(dst, si.Mode())
    if err != nil {
        return
    }

    entries, err := ioutil.ReadDir(src)
    if err != nil {
        return
    }

    for _, entry := range entries {
        srcPath := filepath.Join(src, entry.Name())
        dstPath := filepath.Join(dst, entry.Name())

        if entry.IsDir() {
            err = CopyDir(srcPath, dstPath)
            if err != nil {
                return
            }
        } else {
            // Skip symlinks.
            if entry.Mode()&os.ModeSymlink != 0 {
                continue
            }

            err = CopyFile(srcPath, dstPath)
            if err != nil {
                return
            }
        }
    }

    return
}
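
A sketch of how CopyDir might be called (the destination path is a placeholder); CopyDir refuses to overwrite, so dst must not exist yet:

package main

import (
    "log"
    "os"
    "path/filepath"

    "github.com/ClusterCockpit/cc-backend/internal/util"
)

func main() {
    dst := filepath.Join(os.TempDir(), "archive-copy") // placeholder; must not exist
    if err := util.CopyDir("./testdata/archive/", dst); err != nil {
        log.Fatal(err)
    }
}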

internal/util/fstat.go Normal file

@@ -0,0 +1,30 @@
package util

import (
    "errors"
    "os"

    "github.com/ClusterCockpit/cc-backend/pkg/log"
)

// CheckFileExists reports whether filePath exists. Stat errors other than
// ErrNotExist are treated as "exists".
func CheckFileExists(filePath string) bool {
    _, err := os.Stat(filePath)
    return !errors.Is(err, os.ErrNotExist)
}

// GetFilesize returns the size of filePath in bytes, or 0 if it cannot be stat'ed.
func GetFilesize(filePath string) int64 {
    fileInfo, err := os.Stat(filePath)
    if err != nil {
        log.Errorf("Error on Stat %s: %v", filePath, err)
        return 0
    }
    return fileInfo.Size()
}

// GetFilecount returns the number of directory entries in path, or 0 on error.
func GetFilecount(path string) int {
    files, err := os.ReadDir(path)
    if err != nil {
        log.Errorf("Error on ReadDir %s: %v", path, err)
        return 0
    }
    return len(files)
}
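
A short sketch combining the three helpers (the path is a placeholder). Note that CheckFileExists treats any Stat error other than ErrNotExist as "exists", so it errs on the side of reporting a file as present:

package main

import (
    "fmt"
    "path/filepath"

    "github.com/ClusterCockpit/cc-backend/internal/util"
)

func main() {
    path := "some/dir/meta.json" // placeholder
    if util.CheckFileExists(path) {
        fmt.Printf("%s: %d bytes\n", path, util.GetFilesize(path))
    }
    fmt.Printf("entries in %s: %d\n", filepath.Dir(path), util.GetFilecount(filepath.Dir(path)))
}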


@@ -7,120 +7,21 @@ package archive_test
 import (
     "encoding/json"
     "fmt"
-    "io"
-    "io/ioutil"
-    "os"
     "path/filepath"
     "testing"
     "time"

+    "github.com/ClusterCockpit/cc-backend/internal/util"
     "github.com/ClusterCockpit/cc-backend/pkg/archive"
     "github.com/ClusterCockpit/cc-backend/pkg/schema"
 )

-func CopyFile(src, dst string) (err error) {
-    in, err := os.Open(src)
-    if err != nil {
-        return
-    }
-    defer in.Close()
-
-    out, err := os.Create(dst)
-    if err != nil {
-        return
-    }
-    defer func() {
-        if e := out.Close(); e != nil {
-            err = e
-        }
-    }()
-
-    _, err = io.Copy(out, in)
-    if err != nil {
-        return
-    }
-
-    err = out.Sync()
-    if err != nil {
-        return
-    }
-
-    si, err := os.Stat(src)
-    if err != nil {
-        return
-    }
-    err = os.Chmod(dst, si.Mode())
-    if err != nil {
-        return
-    }
-
-    return
-}
-
-// CopyDir recursively copies a directory tree, attempting to preserve permissions.
-// Source directory must exist, destination directory must *not* exist.
-// Symlinks are ignored and skipped.
-func CopyDir(src string, dst string) (err error) {
-    src = filepath.Clean(src)
-    dst = filepath.Clean(dst)
-
-    si, err := os.Stat(src)
-    if err != nil {
-        return err
-    }
-    if !si.IsDir() {
-        return fmt.Errorf("source is not a directory")
-    }
-
-    _, err = os.Stat(dst)
-    if err != nil && !os.IsNotExist(err) {
-        return
-    }
-    if err == nil {
-        return fmt.Errorf("destination already exists")
-    }
-
-    err = os.MkdirAll(dst, si.Mode())
-    if err != nil {
-        return
-    }
-
-    entries, err := ioutil.ReadDir(src)
-    if err != nil {
-        return
-    }
-
-    for _, entry := range entries {
-        srcPath := filepath.Join(src, entry.Name())
-        dstPath := filepath.Join(dst, entry.Name())
-
-        if entry.IsDir() {
-            err = CopyDir(srcPath, dstPath)
-            if err != nil {
-                return
-            }
-        } else {
-            // Skip symlinks.
-            if entry.Mode()&os.ModeSymlink != 0 {
-                continue
-            }
-
-            err = CopyFile(srcPath, dstPath)
-            if err != nil {
-                return
-            }
-        }
-    }
-
-    return
-}
-
 var jobs []*schema.Job

 func setup(t *testing.T) archive.ArchiveBackend {
     tmpdir := t.TempDir()
     jobarchive := filepath.Join(tmpdir, "job-archive")
-    CopyDir("./testdata/archive/", jobarchive)
+    util.CopyDir("./testdata/archive/", jobarchive)
     archiveCfg := fmt.Sprintf("{\"kind\": \"file\",\"path\": \"%s\"}", jobarchive)
     if err := archive.Init(json.RawMessage(archiveCfg), false); err != nil {


@@ -11,7 +11,6 @@ import (
     "encoding/json"
     "errors"
     "fmt"
-    "io"
     "os"
     "path"
     "path/filepath"
@@ -20,6 +19,7 @@ import (
     "time"

     "github.com/ClusterCockpit/cc-backend/internal/config"
+    "github.com/ClusterCockpit/cc-backend/internal/util"
     "github.com/ClusterCockpit/cc-backend/pkg/log"
     "github.com/ClusterCockpit/cc-backend/pkg/schema"
     "github.com/santhosh-tekuri/jsonschema/v5"
@@ -34,11 +34,6 @@ type FsArchive struct {
     clusters []string
 }

-func checkFileExists(filePath string) bool {
-    _, err := os.Stat(filePath)
-    return !errors.Is(err, os.ErrNotExist)
-}
-
 func getDirectory(
     job *schema.Job,
     rootPath string,
@@ -169,39 +164,21 @@ func (fsa *FsArchive) CleanUp(jobs []*schema.Job) {
         if err := os.RemoveAll(dir); err != nil {
             log.Errorf("JobArchive Cleanup() error: %v", err)
         }
+
+        parent := filepath.Clean(filepath.Join(dir, ".."))
+        if util.GetFilecount(parent) == 0 {
+            if err := os.Remove(parent); err != nil {
+                log.Errorf("JobArchive Cleanup() error: %v", err)
+            }
+        }
     }
 }

 func (fsa *FsArchive) Compress(jobs []*schema.Job) {
     for _, job := range jobs {
         fileIn := getPath(job, fsa.path, "data.json")
-        if !checkFileExists(fileIn) && (job.Duration > 600 || job.NumNodes > 4) {
-            originalFile, err := os.Open(fileIn)
-            if err != nil {
-                log.Errorf("JobArchive Compress() error: %v", err)
-            }
-            defer originalFile.Close()
-
-            fileOut := getPath(job, fsa.path, "data.json.gz")
-            gzippedFile, err := os.Create(fileOut)
-            if err != nil {
-                log.Errorf("JobArchive Compress() error: %v", err)
-            }
-            defer gzippedFile.Close()
-
-            gzipWriter := gzip.NewWriter(gzippedFile)
-            defer gzipWriter.Close()
-
-            _, err = io.Copy(gzipWriter, originalFile)
-            if err != nil {
-                log.Errorf("JobArchive Compress() error: %v", err)
-            }
-            gzipWriter.Flush()
-
-            if err := os.Remove(fileIn); err != nil {
-                log.Errorf("JobArchive Compress() error: %v", err)
-            }
+        if util.CheckFileExists(fileIn) && util.GetFilesize(fileIn) > 2000 {
+            util.CompressFile(fileIn, getPath(job, fsa.path, "data.json.gz"))
         }
     }
 }
@@ -209,7 +186,8 @@ func (fsa *FsArchive) Compress(jobs []*schema.Job) {
 func (fsa *FsArchive) LoadJobData(job *schema.Job) (schema.JobData, error) {
     var isCompressed bool = true
     filename := getPath(job, fsa.path, "data.json.gz")
-    if !checkFileExists(filename) {
+
+    if !util.CheckFileExists(filename) {
         filename = getPath(job, fsa.path, "data.json")
         isCompressed = false
     }
@@ -218,7 +196,6 @@ func (fsa *FsArchive) LoadJobData(job *schema.Job) (schema.JobData, error) {
 }

 func (fsa *FsArchive) LoadJobMeta(job *schema.Job) (*schema.JobMeta, error) {
     filename := getPath(job, fsa.path, "meta.json")
-
     return loadJobMeta(filename)
 }
@@ -285,7 +262,7 @@ func (fsa *FsArchive) Iter(loadMetricData bool) <-chan JobContainer {
             var isCompressed bool = true
             filename := filepath.Join(dirpath, startTimeDir.Name(), "data.json.gz")
-            if !checkFileExists(filename) {
+            if !util.CheckFileExists(filename) {
                 filename = filepath.Join(dirpath, startTimeDir.Name(), "data.json")
                 isCompressed = false
             }
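
Taken together, the refactor makes compression transparent to readers. A sketch, assuming an initialized FsArchive and a job whose data.json exceeds the 2000-byte threshold (roundTrip is a hypothetical helper, not part of the commit):

// roundTrip compresses a job's data file and reads it back.
func roundTrip(fsa *FsArchive, job *schema.Job) (schema.JobData, error) {
    // Replaces data.json with data.json.gz via util.CompressFile.
    fsa.Compress([]*schema.Job{job})

    // LoadJobData probes for data.json.gz first and falls back to data.json,
    // so the caller never needs to know whether the job was compressed.
    return fsa.LoadJobData(job)
}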