Mirror of https://github.com/ClusterCockpit/cc-backend, synced 2024-12-24 12:29:05 +01:00

Add LoadData Benchmark
commit c6dceb1265 (parent 6aea486891)
@@ -7,9 +7,11 @@ package archive
 import (
 	"encoding/json"
 	"fmt"
+	"path/filepath"
 	"testing"
 	"time"
 
+	"github.com/ClusterCockpit/cc-backend/internal/util"
 	"github.com/ClusterCockpit/cc-backend/pkg/schema"
 )
 
@@ -106,7 +108,7 @@ func TestLoadJobMeta(t *testing.T) {
 
 func TestLoadJobData(t *testing.T) {
 	var fsa FsArchive
-	_, err := fsa.Init(json.RawMessage("{\"path\":\"testdata/archive\"}"))
+	_, err := fsa.Init(json.RawMessage("{\"path\": \"testdata/archive\"}"))
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -121,8 +123,8 @@ func TestLoadJobData(t *testing.T) {
 		t.Fatal(err)
 	}
 
-	for name, scopes := range data {
-		fmt.Printf("Metric name: %s\n", name)
+	for _, scopes := range data {
+		// fmt.Printf("Metric name: %s\n", name)
 
 		if _, exists := scopes[schema.MetricScopeNode]; !exists {
 			t.Fail()
@@ -130,6 +132,48 @@ func TestLoadJobData(t *testing.T) {
 	}
 }
 
+func BenchmarkLoadJobData(b *testing.B) {
+
+	tmpdir := b.TempDir()
+	jobarchive := filepath.Join(tmpdir, "job-archive")
+	util.CopyDir("./testdata/archive/", jobarchive)
+	archiveCfg := fmt.Sprintf("{\"path\": \"%s\"}", jobarchive)
+
+	var fsa FsArchive
+	fsa.Init(json.RawMessage(archiveCfg))
+
+	jobIn := schema.Job{BaseJob: schema.JobDefaults}
+	jobIn.StartTime = time.Unix(1608923076, 0)
+	jobIn.JobID = 1403244
+	jobIn.Cluster = "emmy"
+
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		fsa.LoadJobData(&jobIn)
+	}
+}
+
+func BenchmarkLoadJobDataCompressed(b *testing.B) {
+
+	tmpdir := b.TempDir()
+	jobarchive := filepath.Join(tmpdir, "job-archive")
+	util.CopyDir("./testdata/archive/", jobarchive)
+	archiveCfg := fmt.Sprintf("{\"path\": \"%s\"}", jobarchive)
+
+	var fsa FsArchive
+	fsa.Init(json.RawMessage(archiveCfg))
+
+	jobIn := schema.Job{BaseJob: schema.JobDefaults}
+	jobIn.StartTime = time.Unix(1608923076, 0)
+	jobIn.JobID = 1403244
+	jobIn.Cluster = "emmy"
+
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		fsa.LoadJobData(&jobIn)
+	}
+}
+
 func TestLoadCluster(t *testing.T) {
 	var fsa FsArchive
 	_, err := fsa.Init(json.RawMessage("{\"path\":\"testdata/archive\"}"))
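
The new benchmarks can be run with `go test -bench=LoadJobData` in the package directory. Note that, as committed here, BenchmarkLoadJobDataCompressed has exactly the same body as BenchmarkLoadJobData: nothing in its setup compresses the copied test archive, so both currently measure the uncompressed path. Below is a minimal, hypothetical sketch of a setup helper that could make the compressed variant meaningful, assuming the archive stores per-job data as a plain data.json file and that FsArchive.LoadJobData can also read a .gz copy (neither assumption is confirmed by this diff). It uses only the Go standard library; the helper name gzipFile is invented for illustration.

// Hypothetical helper, not part of this commit: gzip-compress a file so that
// a "Compressed" benchmark would actually read *.gz input. Whether
// FsArchive.LoadJobData picks up a data.json.gz file is an assumption.
package archive

import (
	"compress/gzip"
	"io"
	"os"
)

// gzipFile writes a gzip-compressed copy of src to src+".gz" and removes src.
func gzipFile(src string) error {
	in, err := os.Open(src)
	if err != nil {
		return err
	}
	defer in.Close()

	out, err := os.Create(src + ".gz")
	if err != nil {
		return err
	}
	defer out.Close()

	gz := gzip.NewWriter(out)
	if _, err := io.Copy(gz, in); err != nil {
		return err
	}
	if err := gz.Close(); err != nil {
		return err
	}
	return os.Remove(src)
}

In BenchmarkLoadJobDataCompressed, such a helper would be called on the job's data file under jobarchive before b.ResetTimer(), so that only the decompressing load is timed, not the setup.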