More URL filter presets; Some tweaks

Lou Knauer 2022-03-21 13:30:19 +01:00
parent 8ebf00d980
commit 92349708ae
3 changed files with 42 additions and 9 deletions
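
With these presets in place, a job-list URL could now carry filters like the example below. The path prefix is an assumption for illustration; only the query parameter names (the now multi-valued state, plus the duration and numAccelerators from-to ranges) come from buildFilterPresets in the diff below.

/monitoring/jobs/?state=running&state=failed&duration=3600-7200&numNodes=1-8&numAccelerators=2-4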

View File

@@ -162,9 +162,9 @@ func ArchiveJob(job *schema.Job, ctx context.Context) (*schema.JobMeta, error) {
allMetrics = append(allMetrics, mc.Name)
}
// TODO: For now: Only single-node-jobs get archived in full resolution
// TODO: Talk about this! What resolutions to store data at...
scopes := []schema.MetricScope{schema.MetricScopeNode}
if job.NumNodes == 1 {
if job.NumNodes <= 8 {
scopes = append(scopes, schema.MetricScopeCore)
}
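
For context, a minimal self-contained sketch of the new archiving scope selection. The schema types are replaced by stand-ins here; only the MetricScopeNode/MetricScopeCore names and the NumNodes threshold mirror the diff above:

package main

import "fmt"

// Stand-in for schema.MetricScope; only the constant names mirror the diff.
type MetricScope string

const (
	MetricScopeNode MetricScope = "node"
	MetricScopeCore MetricScope = "core"
)

// scopesToArchive mirrors the changed condition: core-scope data is now
// archived for jobs with up to 8 nodes instead of single-node jobs only.
func scopesToArchive(numNodes int) []MetricScope {
	scopes := []MetricScope{MetricScopeNode}
	if numNodes <= 8 { // previously: numNodes == 1
		scopes = append(scopes, MetricScopeCore)
	}
	return scopes
}

func main() {
	fmt.Println(scopesToArchive(1))  // [node core]
	fmt.Println(scopesToArchive(8))  // [node core]
	fmt.Println(scopesToArchive(64)) // [node]
}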

View File

@@ -63,7 +63,7 @@ var cache *lrucache.Cache = lrucache.New(512 * 1024 * 1024)
// Fetches the metric data for a job.
func LoadData(job *schema.Job, metrics []string, scopes []schema.MetricScope, ctx context.Context) (schema.JobData, error) {
data := cache.Get(cacheKey(job, metrics, scopes), func() (interface{}, time.Duration, int) {
data := cache.Get(cacheKey(job, metrics, scopes), func() (_ interface{}, ttl time.Duration, size int) {
var jd schema.JobData
var err error
if job.State == schema.JobStateRunning ||
@@ -93,30 +93,43 @@ func LoadData(job *schema.Job, metrics []string, scopes []schema.MetricScope, ct
return err, 0, 0
}
}
size = jd.Size()
} else {
jd, err = loadFromArchive(job)
if err != nil {
return err, 0, 0
}
// Avoid sending unrequested data to the client:
if metrics != nil {
res := schema.JobData{}
for _, metric := range metrics {
if metricdata, ok := jd[metric]; ok {
res[metric] = metricdata
if perscope, ok := jd[metric]; ok {
if len(scopes) > 1 {
subset := make(map[schema.MetricScope]*schema.JobMetric)
for _, scope := range scopes {
if jm, ok := perscope[scope]; ok {
subset[scope] = jm
}
}
perscope = subset
}
res[metric] = perscope
}
}
jd = res
}
size = 1 // loadFromArchive() caches in the same cache.
}
ttl := 5 * time.Hour
ttl = 5 * time.Hour
if job.State == schema.JobStateRunning {
ttl = 2 * time.Minute
}
prepareJobData(job, jd, scopes)
return jd, ttl, jd.Size()
return jd, ttl, size
})
if err, ok := data.(error); ok {
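
The main change in this hunk is the subsetting of archived job data: the cache closure now uses named returns (ttl, size) so the archive branch can report size = 1 (loadFromArchive() caches in the same cache), and the requested metrics and scopes are filtered out of the full archived JobData. A self-contained sketch of that filtering; JobData, JobMetric and the len(scopes) > 1 condition mirror the diff, everything else is a stand-in:

package main

import "fmt"

// Stand-ins for the real schema types: JobData maps metric name -> scope -> data.
type (
	MetricScope string
	JobMetric   struct{ Unit string } // placeholder payload
	JobData     map[string]map[MetricScope]*JobMetric
)

// filterArchivedData mirrors the new subsetting for archived jobs: keep only the
// requested metrics and, when more than one scope was requested, only those scopes.
func filterArchivedData(jd JobData, metrics []string, scopes []MetricScope) JobData {
	if metrics == nil {
		return jd
	}
	res := JobData{}
	for _, metric := range metrics {
		if perscope, ok := jd[metric]; ok {
			if len(scopes) > 1 {
				subset := make(map[MetricScope]*JobMetric)
				for _, scope := range scopes {
					if jm, ok := perscope[scope]; ok {
						subset[scope] = jm
					}
				}
				perscope = subset
			}
			res[metric] = perscope
		}
	}
	return res
}

func main() {
	jd := JobData{
		"flops_any": {"node": {}, "core": {}},
		"mem_bw":    {"node": {}},
	}
	res := filterArchivedData(jd, []string{"flops_any"}, []MetricScope{"node", "core"})
	fmt.Println(len(res), len(res["flops_any"])) // 1 2
}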

View File

@@ -174,8 +174,8 @@ func buildFilterPresets(query url.Values) map[string]interface{} {
filterPresets["user"] = query.Get("user")
filterPresets["userMatch"] = "eq"
}
if query.Get("state") != "" && schema.JobState(query.Get("state")).Valid() {
filterPresets["state"] = query.Get("state")
if len(query["state"]) != 0 {
filterPresets["state"] = query["state"]
}
if rawtags, ok := query["tag"]; ok {
tags := make([]int, len(rawtags))
@@ -188,6 +188,16 @@ func buildFilterPresets(query url.Values) map[string]interface{} {
}
filterPresets["tags"] = tags
}
if query.Get("duration") != "" {
parts := strings.Split(query.Get("duration"), "-")
if len(parts) == 2 {
a, e1 := strconv.Atoi(parts[0])
b, e2 := strconv.Atoi(parts[1])
if e1 == nil && e2 == nil {
filterPresets["duration"] = map[string]int{"from": a, "to": b}
}
}
}
if query.Get("numNodes") != "" {
parts := strings.Split(query.Get("numNodes"), "-")
if len(parts) == 2 {
@@ -198,6 +208,16 @@ func buildFilterPresets(query url.Values) map[string]interface{} {
}
}
}
if query.Get("numAccelerators") != "" {
parts := strings.Split(query.Get("numAccelerators"), "-")
if len(parts) == 2 {
a, e1 := strconv.Atoi(parts[0])
b, e2 := strconv.Atoi(parts[1])
if e1 == nil && e2 == nil {
filterPresets["numAccelerators"] = map[string]int{"from": a, "to": b}
}
}
}
if query.Get("jobId") != "" {
filterPresets["jobId"] = query.Get("jobId")
}
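
The duration, numNodes and numAccelerators presets all repeat the same "from-to" parsing. A hypothetical helper (not part of this commit) showing how that pattern could be factored out; the name and signature are illustrative only:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseRange captures the shared pattern: split on "-", require exactly two
// integer parts, otherwise report ok=false so the preset is simply skipped.
func parseRange(s string) (map[string]int, bool) {
	parts := strings.Split(s, "-")
	if len(parts) != 2 {
		return nil, false
	}
	from, e1 := strconv.Atoi(parts[0])
	to, e2 := strconv.Atoi(parts[1])
	if e1 != nil || e2 != nil {
		return nil, false
	}
	return map[string]int{"from": from, "to": to}, true
}

func main() {
	fmt.Println(parseRange("3600-7200")) // map[from:3600 to:7200] true
	fmt.Println(parseRange("x-7200"))    // map[] false
	fmt.Println(parseRange("64"))        // map[] false
}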