Remove jobMeta and use job struct everywhere

2025-05-28 15:59:21 +02:00
parent eef48ac3a3
commit 3efee22536
29 changed files with 163 additions and 785 deletions
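For orientation, this commit collapses the former schema.JobMeta into schema.Job. The sketch below is illustrative only, assembled from the fields visible in the hunks of this commit rather than copied from pkg/schema: the merged struct carries the metadata fields directly, and StartTime becomes a plain Unix timestamp (int64) instead of a time.Time.

// Illustrative sketch, not part of this commit: rough shape of the merged
// schema.Job after JobMeta is removed. Field list inferred from the hunks
// below; the real definition in pkg/schema carries more fields.
package schema

type Job struct {
	JobID            int64  // batch scheduler job id
	Cluster          string // cluster the job ran on, e.g. "emmy"
	SubCluster       string // set by archive.AssignSubCluster
	StartTime        int64  // Unix seconds; previously a time.Time
	Exclusive        int32  // set explicitly in the test fixtures below
	MonitoringStatus int32  // e.g. schema.MonitoringStatusRunningOrArchiving
	// resources, tags and statistics fields omitted
}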

View File

@@ -23,7 +23,7 @@ type ArchiveBackend interface {
Exists(job *schema.Job) bool
LoadJobMeta(job *schema.Job) (*schema.JobMeta, error)
LoadJobMeta(job *schema.Job) (*schema.Job, error)
LoadJobData(job *schema.Job) (schema.JobData, error)
@@ -31,9 +31,9 @@ type ArchiveBackend interface {
LoadClusterCfg(name string) (*schema.Cluster, error)
StoreJobMeta(jobMeta *schema.JobMeta) error
StoreJobMeta(jobMeta *schema.Job) error
ImportJob(jobMeta *schema.JobMeta, jobData *schema.JobData) error
ImportJob(jobMeta *schema.Job, jobData *schema.JobData) error
GetClusters() []string
@@ -51,7 +51,7 @@ type ArchiveBackend interface {
}
type JobContainer struct {
Meta *schema.JobMeta
Meta *schema.Job
Data *schema.JobData
}
@@ -162,7 +162,6 @@ func LoadScopedStatsFromArchive(
metrics []string,
scopes []schema.MetricScope,
) (schema.ScopedJobStats, error) {
data, err := ar.LoadJobStats(job)
if err != nil {
log.Errorf("Error while loading job stats from archiveBackend: %s", err.Error())

View File

@@ -9,7 +9,6 @@ import (
"fmt"
"path/filepath"
"testing"
"time"
"github.com/ClusterCockpit/cc-backend/internal/util"
"github.com/ClusterCockpit/cc-backend/pkg/archive"
@@ -32,12 +31,12 @@ func setup(t *testing.T) archive.ArchiveBackend {
jobs[0] = &schema.Job{}
jobs[0].JobID = 1403244
jobs[0].Cluster = "emmy"
jobs[0].StartTime = time.Unix(1608923076, 0)
jobs[0].StartTime = 1608923076
jobs[1] = &schema.Job{}
jobs[0].JobID = 1404397
jobs[0].Cluster = "emmy"
jobs[0].StartTime = time.Unix(1609300556, 0)
jobs[0].StartTime = 1609300556
return archive.GetHandle()
}
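Since StartTime is now stored as Unix seconds, the fixtures assign the integer directly; any caller that still needs a time.Time converts on demand. A hedged sketch of that conversion (the helper name is made up):

// Illustrative sketch, not part of this commit.
package example

import (
	"time"

	"github.com/ClusterCockpit/cc-backend/pkg/schema"
)

// startTimeOf converts the job's int64 Unix timestamp back into a
// time.Time where one is still needed, e.g. for formatting.
func startTimeOf(job *schema.Job) time.Time {
	return time.Unix(job.StartTime, 0)
}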

View File

@@ -223,7 +223,7 @@ func GetMetricConfig(cluster, metric string) *schema.MetricConfig {
// AssignSubCluster sets the `job.subcluster` property of the job based
// on its cluster and resources.
func AssignSubCluster(job *schema.BaseJob) error {
func AssignSubCluster(job *schema.Job) error {
cluster := GetCluster(job.Cluster)
if cluster == nil {
return fmt.Errorf("ARCHIVE/CLUSTERCONFIG > unkown cluster: %v", job.Cluster)

View File

@@ -53,7 +53,7 @@ func getDirectory(
rootPath,
job.Cluster,
lvl1, lvl2,
strconv.FormatInt(job.StartTime.Unix(), 10))
strconv.FormatInt(job.StartTime, 10))
}
func getPath(
@@ -65,15 +65,15 @@ func getPath(
getDirectory(job, rootPath), file)
}
func loadJobMeta(filename string) (*schema.JobMeta, error) {
func loadJobMeta(filename string) (*schema.Job, error) {
b, err := os.ReadFile(filename)
if err != nil {
log.Errorf("loadJobMeta() > open file error: %v", err)
return &schema.JobMeta{}, err
return nil, err
}
if config.Keys.Validate {
if err := schema.Validate(schema.Meta, bytes.NewReader(b)); err != nil {
return &schema.JobMeta{}, fmt.Errorf("validate job meta: %v", err)
return nil, fmt.Errorf("validate job meta: %v", err)
}
}
@@ -429,7 +429,7 @@ func (fsa *FsArchive) LoadJobStats(job *schema.Job) (schema.ScopedJobStats, erro
return loadJobStats(filename, isCompressed)
}
func (fsa *FsArchive) LoadJobMeta(job *schema.Job) (*schema.JobMeta, error) {
func (fsa *FsArchive) LoadJobMeta(job *schema.Job) (*schema.Job, error) {
filename := getPath(job, fsa.path, "meta.json")
return loadJobMeta(filename)
}
@@ -518,18 +518,13 @@ func (fsa *FsArchive) Iter(loadMetricData bool) <-chan JobContainer {
return ch
}
func (fsa *FsArchive) StoreJobMeta(jobMeta *schema.JobMeta) error {
job := schema.Job{
BaseJob: jobMeta.BaseJob,
StartTime: time.Unix(jobMeta.StartTime, 0),
StartTimeUnix: jobMeta.StartTime,
}
f, err := os.Create(getPath(&job, fsa.path, "meta.json"))
func (fsa *FsArchive) StoreJobMeta(job *schema.Job) error {
f, err := os.Create(getPath(job, fsa.path, "meta.json"))
if err != nil {
log.Error("Error while creating filepath for meta.json")
return err
}
if err := EncodeJobMeta(f, jobMeta); err != nil {
if err := EncodeJobMeta(f, job); err != nil {
log.Error("Error while encoding job metadata to meta.json file")
return err
}
@@ -546,15 +541,10 @@ func (fsa *FsArchive) GetClusters() []string {
}
func (fsa *FsArchive) ImportJob(
jobMeta *schema.JobMeta,
jobMeta *schema.Job,
jobData *schema.JobData,
) error {
job := schema.Job{
BaseJob: jobMeta.BaseJob,
StartTime: time.Unix(jobMeta.StartTime, 0),
StartTimeUnix: jobMeta.StartTime,
}
dir := getPath(&job, fsa.path, "")
dir := getPath(jobMeta, fsa.path, "")
if err := os.MkdirAll(dir, 0777); err != nil {
log.Error("Error while creating job archive path")
return err
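Because StartTime is already an int64, the archive path can be derived straight from the job, which is what lets StoreJobMeta and ImportJob drop the intermediate schema.Job they used to build. A sketch of that path construction follows; the jobID/1000 and jobID%1000 directory split is an assumption for illustration, only the StartTime segment is taken from the hunk above.

// Illustrative sketch, not part of this commit.
package example

import (
	"fmt"
	"path/filepath"
	"strconv"

	"github.com/ClusterCockpit/cc-backend/pkg/schema"
)

// archiveDir mirrors the shape of getDirectory after the change: the last
// path element is the raw int64 StartTime, no time.Time conversion needed.
// The lvl1/lvl2 split of the job ID is assumed for illustration.
func archiveDir(rootPath string, job *schema.Job) string {
	lvl1 := fmt.Sprintf("%d", job.JobID/1000)
	lvl2 := fmt.Sprintf("%03d", job.JobID%1000)
	return filepath.Join(
		rootPath,
		job.Cluster,
		lvl1, lvl2,
		strconv.FormatInt(job.StartTime, 10))
}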

View File

@@ -9,7 +9,6 @@ import (
"fmt"
"path/filepath"
"testing"
"time"
"github.com/ClusterCockpit/cc-backend/internal/util"
"github.com/ClusterCockpit/cc-backend/pkg/schema"
@@ -86,8 +85,11 @@ func TestLoadJobMeta(t *testing.T) {
t.Fatal(err)
}
jobIn := schema.Job{BaseJob: schema.JobDefaults}
jobIn.StartTime = time.Unix(1608923076, 0)
jobIn := schema.Job{
Exclusive: 1,
MonitoringStatus: schema.MonitoringStatusRunningOrArchiving,
}
jobIn.StartTime = 1608923076
jobIn.JobID = 1403244
jobIn.Cluster = "emmy"
@@ -114,8 +116,11 @@ func TestLoadJobData(t *testing.T) {
t.Fatal(err)
}
jobIn := schema.Job{BaseJob: schema.JobDefaults}
jobIn.StartTime = time.Unix(1608923076, 0)
jobIn := schema.Job{
Exclusive: 1,
MonitoringStatus: schema.MonitoringStatusRunningOrArchiving,
}
jobIn.StartTime = 1608923076
jobIn.JobID = 1403244
jobIn.Cluster = "emmy"
@@ -142,8 +147,11 @@ func BenchmarkLoadJobData(b *testing.B) {
var fsa FsArchive
fsa.Init(json.RawMessage(archiveCfg))
jobIn := schema.Job{BaseJob: schema.JobDefaults}
jobIn.StartTime = time.Unix(1608923076, 0)
jobIn := schema.Job{
Exclusive: 1,
MonitoringStatus: schema.MonitoringStatusRunningOrArchiving,
}
jobIn.StartTime = 1608923076
jobIn.JobID = 1403244
jobIn.Cluster = "emmy"
@@ -165,8 +173,11 @@ func BenchmarkLoadJobDataCompressed(b *testing.B) {
var fsa FsArchive
fsa.Init(json.RawMessage(archiveCfg))
jobIn := schema.Job{BaseJob: schema.JobDefaults}
jobIn.StartTime = time.Unix(1608923076, 0)
jobIn := schema.Job{
Exclusive: 1,
MonitoringStatus: schema.MonitoringStatusRunningOrArchiving,
}
jobIn.StartTime = 1608923076
jobIn.JobID = 1403244
jobIn.Cluster = "emmy"
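With BaseJob and schema.JobDefaults gone, the test fixtures spell out the formerly defaulted fields (Exclusive, MonitoringStatus) at every site. A hypothetical helper, not present in the commit, could centralize that repeated setup:

// Illustrative sketch, not part of this commit.
package example

import "github.com/ClusterCockpit/cc-backend/pkg/schema"

// testJob builds the fixture used throughout these tests; the explicit
// Exclusive and MonitoringStatus values replace the old schema.JobDefaults.
func testJob(jobID int64, cluster string, startTime int64) schema.Job {
	return schema.Job{
		JobID:            jobID,
		Cluster:          cluster,
		StartTime:        startTime,
		Exclusive:        1,
		MonitoringStatus: schema.MonitoringStatusRunningOrArchiving,
	}
}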

View File

@@ -69,8 +69,8 @@ func DecodeJobStats(r io.Reader, k string) (schema.ScopedJobStats, error) {
return nil, err
}
func DecodeJobMeta(r io.Reader) (*schema.JobMeta, error) {
var d schema.JobMeta
func DecodeJobMeta(r io.Reader) (*schema.Job, error) {
var d schema.Job
if err := json.NewDecoder(r).Decode(&d); err != nil {
log.Warn("Error while decoding raw job meta json")
return &d, err
@@ -103,7 +103,7 @@ func EncodeJobData(w io.Writer, d *schema.JobData) error {
return nil
}
func EncodeJobMeta(w io.Writer, d *schema.JobMeta) error {
func EncodeJobMeta(w io.Writer, d *schema.Job) error {
// Sanitize parameters
if err := json.NewEncoder(w).Encode(d); err != nil {
log.Warn("Error while encoding new job meta json")