mirror of https://github.com/ClusterCockpit/cc-backend (synced 2024-12-26 05:19:05 +01:00)
Bugs fixed in unit tests and archiver init
parent db5809d522
commit f305863616
@@ -156,6 +156,7 @@ func setup(t *testing.T) *api.RestApi {
 		t.Fatal(err)
 	}
 
+	archiver.Start(repository.GetJobRepository())
 	auth.Init()
 	graph.Init()
 
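The test-setup hunk is the unit-test half of the fix: the archiver now has to be started explicitly with a job repository before the API tests exercise any code path that archives jobs, otherwise the nil-channel guard added later in this commit aborts the test binary. A minimal sketch of the resulting init order; the import paths, the helper name initServices, and the t.Helper() call are assumptions based on the identifiers visible in the hunk, not a verbatim copy of the test file.

package api_test

import (
	"testing"

	// Import paths are assumed from the identifiers in the hunk above.
	"github.com/ClusterCockpit/cc-backend/internal/archiver"
	"github.com/ClusterCockpit/cc-backend/internal/auth"
	"github.com/ClusterCockpit/cc-backend/internal/graph"
	"github.com/ClusterCockpit/cc-backend/internal/repository"
)

// initServices shows only the ordering: the archiver worker is started with
// the shared job repository before auth and the GraphQL layer are initialized.
func initServices(t *testing.T) {
	t.Helper()

	archiver.Start(repository.GetJobRepository())
	auth.Init()
	graph.Init()
}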
@@ -17,11 +17,12 @@ import (
 var (
 	archivePending sync.WaitGroup
 	archiveChannel chan *schema.Job
-	r              *repository.JobRepository
+	jobRepo        *repository.JobRepository
 )
 
-func Start(jobRepo *repository.JobRepository) {
+func Start(r *repository.JobRepository) {
 	archiveChannel = make(chan *schema.Job, 128)
+	jobRepo = r
 
 	go archivingWorker()
 }
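This is the "archiver init" bug from the commit message: in the old code Start received a parameter named jobRepo but never assigned it to the package-level handle r, so the worker's repository stayed nil. The hunk swaps the names and adds the missing jobRepo = r assignment. A self-contained sketch of the pattern, with stand-in types instead of the real repository and schema packages:

package archiver

import "sync"

// Stand-ins for *repository.JobRepository and *schema.Job; illustration only.
type JobRepository struct{}
type Job struct{ ID int64 }

var (
	archivePending sync.WaitGroup
	archiveChannel chan *Job
	jobRepo        *JobRepository // set once in Start, read by the worker goroutine
)

// Start stores the repository handle in the package-level variable and
// creates the channel before launching the worker, so the goroutine never
// sees a nil repository or a nil channel.
func Start(r *JobRepository) {
	archiveChannel = make(chan *Job, 128)
	jobRepo = r
	go archivingWorker()
}

func archivingWorker() {
	for job := range archiveChannel {
		_ = jobRepo // the worker reaches the repository only through jobRepo
		_ = job
	}
}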
@@ -37,9 +38,9 @@ func archivingWorker() {
 			start := time.Now()
 			// not using meta data, called to load JobMeta into Cache?
 			// will fail if job meta not in repository
-			if _, err := r.FetchMetadata(job); err != nil {
+			if _, err := jobRepo.FetchMetadata(job); err != nil {
 				log.Errorf("archiving job (dbid: %d) failed at check metadata step: %s", job.ID, err.Error())
-				r.UpdateMonitoringStatus(job.ID, schema.MonitoringStatusArchivingFailed)
+				jobRepo.UpdateMonitoringStatus(job.ID, schema.MonitoringStatusArchivingFailed)
 				continue
 			}
 
@@ -48,16 +49,16 @@ func archivingWorker() {
 			jobMeta, err := ArchiveJob(job, context.Background())
 			if err != nil {
 				log.Errorf("archiving job (dbid: %d) failed at archiving job step: %s", job.ID, err.Error())
-				r.UpdateMonitoringStatus(job.ID, schema.MonitoringStatusArchivingFailed)
+				jobRepo.UpdateMonitoringStatus(job.ID, schema.MonitoringStatusArchivingFailed)
 				continue
 			}
 
-			if err := r.UpdateFootprint(jobMeta); err != nil {
+			if err := jobRepo.UpdateFootprint(jobMeta); err != nil {
 				log.Errorf("archiving job (dbid: %d) failed at update Footprint step: %s", job.ID, err.Error())
 				continue
 			}
 			// Update the jobs database entry one last time:
-			if err := r.MarkArchived(jobMeta, schema.MonitoringStatusArchivingSuccessful); err != nil {
+			if err := jobRepo.MarkArchived(jobMeta, schema.MonitoringStatusArchivingSuccessful); err != nil {
 				log.Errorf("archiving job (dbid: %d) failed at marking archived step: %s", job.ID, err.Error())
 				continue
 			}
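Aside from the rename to jobRepo, these two hunks show the worker's error policy: every step that fails is logged, the job is flagged as failed where that makes sense, and the loop continues with the next job instead of stopping the worker. A compact, self-contained sketch of that control flow; the step functions are stubs, not the real repository calls.

package main

import "log"

type job struct{ id int64 }

// Stub steps standing in for FetchMetadata, ArchiveJob and MarkArchived.
func checkMetadata(j *job) error { return nil }
func archive(j *job) error       { return nil }
func markArchived(j *job) error  { return nil }

func markFailed(j *job) { log.Printf("job %d marked as archiving-failed", j.id) }

// archiveLoop mirrors archivingWorker's shape: a failing step logs,
// optionally flags the job, and continues so one bad job cannot stall the worker.
func archiveLoop(jobs <-chan *job) {
	for j := range jobs {
		if err := checkMetadata(j); err != nil {
			log.Printf("archiving job (dbid: %d) failed at check metadata step: %v", j.id, err)
			markFailed(j)
			continue
		}
		if err := archive(j); err != nil {
			log.Printf("archiving job (dbid: %d) failed at archiving job step: %v", j.id, err)
			markFailed(j)
			continue
		}
		if err := markArchived(j); err != nil {
			log.Printf("archiving job (dbid: %d) failed at marking archived step: %v", j.id, err)
			continue
		}
	}
}

func main() {
	jobs := make(chan *job, 1)
	jobs <- &job{id: 42}
	close(jobs)
	archiveLoop(jobs)
}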
@@ -70,6 +71,10 @@ func archivingWorker() {
 
 // Trigger async archiving
 func TriggerArchiving(job *schema.Job) {
+	if archiveChannel == nil {
+		log.Fatal("Cannot archive without archiving channel. Did you Start the archiver?")
+	}
+
 	archivePending.Add(1)
 	archiveChannel <- job
 }
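The new guard in TriggerArchiving turns a silent failure mode into a loud one: a send on a nil channel blocks forever, so calling TriggerArchiving before Start used to hang the caller, whereas it now aborts with a clear message. A self-contained sketch of the guard and the intended call order; the Job type and the worker body are stand-ins, and repository wiring is omitted.

package main

import (
	"fmt"
	"log"
	"sync"
)

type Job struct{ ID int64 } // stand-in for *schema.Job

var (
	archivePending sync.WaitGroup
	archiveChannel chan *Job
)

// Start creates the channel that the guard below checks (repository wiring omitted).
func Start() {
	archiveChannel = make(chan *Job, 128)
	go func() {
		for job := range archiveChannel {
			fmt.Println("archiving job", job.ID) // the real worker archives the job here
			archivePending.Done()
		}
	}()
}

// TriggerArchiving mirrors the guarded version above: without the check, a
// send on a nil archiveChannel would block forever instead of failing fast.
func TriggerArchiving(job *Job) {
	if archiveChannel == nil {
		log.Fatal("Cannot archive without archiving channel. Did you Start the archiver?")
	}
	archivePending.Add(1)
	archiveChannel <- job
}

func main() {
	Start() // forgetting this call now fails fast inside TriggerArchiving
	TriggerArchiving(&Job{ID: 1})
	archivePending.Wait() // block until the worker has processed the job
}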
@@ -94,7 +94,7 @@
 },
 "scope": "hwthread",
 "aggregation": "sum",
-"energy": true,
+"energy": "power",
 "timestep": 60,
 "peak": 500,
 "normal": 250,
@@ -136,7 +136,7 @@
 },
 "scope": "accelerator",
 "aggregation": "sum",
-"energy": true,
+"energy": "power",
 "timestep": 60,
 "peak": 400,
 "normal": 200,
@@ -190,7 +190,7 @@
 },
 "scope": "socket",
 "aggregation": "sum",
-"energy": true,
+"energy": "power",
 "timestep": 60,
 "peak": 500,
 "normal": 250,
@@ -256,7 +256,7 @@
 "normal": 250,
 "caution": 100,
 "alert": 50,
-"energy": true
+"energy": "power"
 },
 {
 "name": "mem_power",
@@ -270,7 +270,7 @@
 "normal": 50,
 "caution": 20,
 "alert": 10,
-"energy": true
+"energy": "power"
 },
 {
 "name": "ipc",
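The remaining hunks all touch the test cluster configuration in the same way: the per-metric "energy" field changes from a boolean flag to a string value ("power" here). A minimal decoding sketch under that assumption; the MetricConfig struct below is illustrative and not cc-backend's actual schema type, with the field set reduced to what the hunks show.

package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative struct only; values below are borrowed loosely from the hunks.
type MetricConfig struct {
	Name        string  `json:"name"`
	Scope       string  `json:"scope"`
	Aggregation string  `json:"aggregation"`
	Energy      string  `json:"energy"` // was a bool; now a string such as "power"
	Timestep    int     `json:"timestep"`
	Peak        float64 `json:"peak"`
	Normal      float64 `json:"normal"`
}

func main() {
	raw := []byte(`{
		"name": "mem_power",
		"scope": "socket",
		"aggregation": "sum",
		"energy": "power",
		"timestep": 60,
		"peak": 500,
		"normal": 250
	}`)

	var mc MetricConfig
	if err := json.Unmarshal(raw, &mc); err != nil {
		panic(err) // a leftover boolean ("energy": true) would fail to decode here
	}
	fmt.Printf("%s: energy=%q\n", mc.Name, mc.Energy)
}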