Backup commit

Jan Eitzinger 2023-02-28 09:33:55 +01:00
parent 52d424b332
commit c6c217adb0
5 changed files with 27 additions and 87 deletions

.gitignore

@@ -10,4 +10,3 @@
/web/frontend/public/build
/web/frontend/node_modules
.vscode/settings.json
sanitize-archive


@@ -49,10 +49,6 @@ type MetricConfig struct {
Scope MetricScope `json:"scope"`
Aggregation *string `json:"aggregation"`
Timestep int `json:"timestep"`
Peak *float64 `json:"peak"`
Normal *float64 `json:"normal"`
Caution *float64 `json:"caution"`
Alert *float64 `json:"alert"`
SubClusters []*SubClusterConfig `json:"subClusters"`
}
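The removed per-metric thresholds presumably move into the per-subcluster overrides referenced by the remaining SubClusters field; under that assumption, SubClusterConfig would look roughly like this (a sketch, not taken from this commit):

type SubClusterConfig struct {
	Name    string  `json:"name"`
	Peak    float64 `json:"peak"`
	Normal  float64 `json:"normal"`
	Caution float64 `json:"caution"`
	Alert   float64 `json:"alert"`
}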


@@ -20,7 +20,13 @@ else
cp ./configs/env-template.txt .env
cp ./docs/config.json config.json
go build ./cmd/cc-backend
<<<<<<< Updated upstream
./cc-backend --server --dev --init-db --add-user demo:admin:AdminDev
fi
=======
./cc-backend --server --dev --init-db --add-user demo:admin:AdminDev
fi
>>>>>>> Stashed changes
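Both sides of the stashed conflict are identical, so resolving it leaves the script unchanged apart from the markers:

./cc-backend --server --dev --init-db --add-user demo:admin:AdminDev
fi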


@@ -10,10 +10,8 @@ import (
"encoding/json"
"fmt"
"os"
"path"
"path/filepath"
"strconv"
"time"
"github.com/ClusterCockpit/cc-backend/pkg/log"
)
@@ -28,7 +26,7 @@ type FsArchive struct
}
func getPath(
job *Job,
job *JobMeta,
rootPath string,
file string) string {
@@ -37,7 +35,7 @@ func getPath(
rootPath,
job.Cluster,
lvl1, lvl2,
strconv.FormatInt(job.StartTime.Unix(), 10), file)
strconv.FormatInt(job.StartTime, 10), file)
}
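After this change callers pass the archive metadata directly and the Unix timestamp is formatted as-is; roughly (a sketch, with jm standing in for a *JobMeta obtained elsewhere):

// jm.StartTime is already an int64 Unix timestamp, so no time.Time conversion is needed
p := getPath(jm, fsa.path, "data.json")
// p ends in .../<cluster>/<lvl1>/<lvl2>/<start time>/data.json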
func loadJobMeta(filename string) (*JobMeta, error) {
@@ -79,25 +77,6 @@ func (fsa *FsArchive) Init(rawConfig json.RawMessage) error {
return nil
}
func (fsa *FsArchive) LoadJobData(job *Job) (JobData, error) {
filename := getPath(job, fsa.path, "data.json")
f, err := os.Open(filename)
if err != nil {
log.Errorf("fsBackend LoadJobData()- %v", err)
return nil, err
}
defer f.Close()
return DecodeJobData(bufio.NewReader(f), filename)
}
func (fsa *FsArchive) LoadJobMeta(job *Job) (*JobMeta, error) {
filename := getPath(job, fsa.path, "meta.json")
return loadJobMeta(filename)
}
func (fsa *FsArchive) LoadClusterCfg(name string) (*Cluster, error) {
b, err := os.ReadFile(filepath.Join(fsa.path, name, "cluster.json"))
@@ -159,63 +138,7 @@ func (fsa *FsArchive) Iter() <-chan *JobMeta {
return ch
}
func (fsa *FsArchive) StoreJobMeta(jobMeta *JobMeta) error {
job := Job{
BaseJob: jobMeta.BaseJob,
StartTime: time.Unix(jobMeta.StartTime, 0),
StartTimeUnix: jobMeta.StartTime,
}
f, err := os.Create(getPath(&job, fsa.path, "meta.json"))
if err != nil {
return err
}
if err := EncodeJobMeta(f, jobMeta); err != nil {
return err
}
if err := f.Close(); err != nil {
return err
}
return nil
}
func (fsa *FsArchive) GetClusters() []string {
return fsa.clusters
}
func (fsa *FsArchive) ImportJob(
jobMeta *JobMeta,
jobData *JobData) error {
job := Job{
BaseJob: jobMeta.BaseJob,
StartTime: time.Unix(jobMeta.StartTime, 0),
StartTimeUnix: jobMeta.StartTime,
}
dir := getPath(&job, fsa.path, "")
if err := os.MkdirAll(dir, 0777); err != nil {
return err
}
f, err := os.Create(path.Join(dir, "meta.json"))
if err != nil {
return err
}
if err := EncodeJobMeta(f, jobMeta); err != nil {
return err
}
if err := f.Close(); err != nil {
return err
}
f, err = os.Create(path.Join(dir, "data.json"))
if err != nil {
return err
}
if err := EncodeJobData(f, jobData); err != nil {
return err
}
return f.Close()
}
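With getPath now taking *JobMeta and a plain Unix timestamp, a store routine no longer needs the intermediate Job wrapper; a minimal sketch of what a replacement could look like (not part of this commit):

func (fsa *FsArchive) StoreJobMeta(jobMeta *JobMeta) error {
	// build the target path directly from the metadata
	f, err := os.Create(getPath(jobMeta, fsa.path, "meta.json"))
	if err != nil {
		return err
	}
	if err := EncodeJobMeta(f, jobMeta); err != nil {
		f.Close()
		return err
	}
	return f.Close()
}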


@@ -17,6 +17,14 @@ import (
var ar FsArchive
func deepCopyJobMeta(j *JobMeta) schema.JobMeta {
var jn schema.JobMeta
jn.StartTime = j.StartTime
jn.BaseJob = j.BaseJob
return jn
}
func deepCopyClusterConfig(co *Cluster) schema.Cluster {
var cn schema.Cluster
@@ -84,7 +92,15 @@ func main() {
}
}
// for job := range ar.Iter() {
// fmt.Printf("Job %d\n", job.JobID)
// }
for job := range ar.Iter() {
fmt.Printf("Job %d\n", job.JobID)
root := fmt.Sprintf("%s/%s/", dstPath, job.Cluster)
f, err := os.Create(getPath(job, root, "meta.json"))
if err != nil {
log.Fatal(err)
}
jmn := deepCopyJobMeta(job)
}
}
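The new loop creates meta.json but never writes or closes it, and jmn is left unused; a possible completion (a sketch, assuming encoding/json is imported and the target directories already exist):

for job := range ar.Iter() {
	fmt.Printf("Job %d\n", job.JobID)
	root := fmt.Sprintf("%s/%s/", dstPath, job.Cluster)
	f, err := os.Create(getPath(job, root, "meta.json"))
	if err != nil {
		log.Fatal(err)
	}
	jmn := deepCopyJobMeta(job)
	// serialize the copied metadata and release the file handle
	if err := json.NewEncoder(f).Encode(&jmn); err != nil {
		log.Fatal(err)
	}
	if err := f.Close(); err != nil {
		log.Fatal(err)
	}
}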