Change listQuery to jobId array, adapt filter pipe

Christoph Kluge 2023-06-30 16:55:34 +02:00
parent 4729905322
commit 2f471dc192
4 changed files with 27 additions and 20 deletions


@@ -10,7 +10,6 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
-	"math"
 	"strconv"
 	"sync"
 	"time"
@@ -18,7 +17,6 @@ import (
 	"github.com/ClusterCockpit/cc-backend/internal/auth"
 	"github.com/ClusterCockpit/cc-backend/internal/graph/model"
 	"github.com/ClusterCockpit/cc-backend/internal/metricdata"
-	"github.com/ClusterCockpit/cc-backend/internal/util"
 	"github.com/ClusterCockpit/cc-backend/pkg/log"
 	"github.com/ClusterCockpit/cc-backend/pkg/lrucache"
 	"github.com/ClusterCockpit/cc-backend/pkg/schema"
@@ -318,7 +316,7 @@ func (r *JobRepository) FindConcurrentJobs(
 		stopTime = startTime + int64(job.Duration)
 	}
-	// Add 5m overlap for jobs start time at the end
+	// Add 200s overlap for jobs start time at the end
 	startTimeTail := startTime + 10
 	stopTimeTail := stopTime - 200
 	startTimeFront := startTime + 200
@@ -338,8 +336,7 @@ func (r *JobRepository) FindConcurrentJobs(
 	}

 	items := make([]*model.JobLink, 0, 10)
-	minStart := int64(math.MaxInt64)
-	maxStart := int64(0)
+	queryString := fmt.Sprintf("cluster=%s", job.Cluster)

 	for rows.Next() {
 		var id, jobId, startTime sql.NullInt64
@@ -350,9 +347,7 @@ func (r *JobRepository) FindConcurrentJobs(
 		}

 		if id.Valid {
-			minStart = util.Min(minStart, startTime.Int64)
-			maxStart = util.Max(maxStart, startTime.Int64)
+			queryString += fmt.Sprintf("&jobId=%d", int(jobId.Int64))
 			items = append(items,
 				&model.JobLink{
 					ID: fmt.Sprint(id.Int64),
@@ -376,9 +371,7 @@ func (r *JobRepository) FindConcurrentJobs(
 		}

 		if id.Valid {
-			minStart = util.Min(minStart, startTime.Int64)
-			maxStart = util.Max(maxStart, startTime.Int64)
+			queryString += fmt.Sprintf("&jobId=%d", int(jobId.Int64))
 			items = append(items,
 				&model.JobLink{
 					ID: fmt.Sprint(id.Int64),
@@ -388,8 +381,6 @@ func (r *JobRepository) FindConcurrentJobs(
 	}

 	cnt := len(items)
-	queryString := fmt.Sprintf("cluster=%s&startTime=%d-%d&node=%s",
-		job.Cluster, minStart, maxStart, hostname)

 	return &model.JobLinkResultList{
 		ListQuery: &queryString,
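
For context: the ListQuery returned here no longer encodes a startTime window and node, but the cluster plus one jobId pair per concurrent job. A minimal, self-contained Go sketch of that accumulation pattern (buildJobLinkQuery and the values are hypothetical, not part of the commit):

package main

import "fmt"

// buildJobLinkQuery mirrors the string building in FindConcurrentJobs:
// start from the cluster, then append one &jobId=... pair per match.
func buildJobLinkQuery(cluster string, jobIds []int64) string {
	queryString := fmt.Sprintf("cluster=%s", cluster)
	for _, id := range jobIds {
		queryString += fmt.Sprintf("&jobId=%d", id)
	}
	return queryString
}

func main() {
	// Prints: cluster=fritz&jobId=101&jobId=102
	fmt.Println(buildJobLinkQuery("fritz", []int64{101, 102}))
}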


@@ -263,11 +263,11 @@ func buildStringCondition(field string, cond *model.StringInput, query sq.Select
 		return query.Where(field+" LIKE ?", fmt.Sprint("%", *cond.Contains, "%"))
 	}
 	if cond.In != nil {
-		queryUsers := make([]string, len(cond.In))
+		queryElements := make([]string, len(cond.In))
 		for i, val := range cond.In {
-			queryUsers[i] = val
+			queryElements[i] = val
 		}
-		return query.Where(sq.Or{sq.Eq{"job.user": queryUsers}})
+		return query.Where(sq.Or{sq.Eq{field: queryElements}})
 	}
 	return query
 }
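
The point of binding field instead of the hard-coded "job.user": in the squirrel query builder (imported as sq here, presumably github.com/Masterminds/squirrel), sq.Eq renders a slice value as an SQL IN clause, so the same In condition now works for any string field, including the job ID. A minimal sketch with illustrative table and column names:

package main

import (
	"fmt"

	sq "github.com/Masterminds/squirrel"
)

func main() {
	// sq.Eq with a slice value expands to an SQL IN clause, which is
	// what makes sq.Eq{field: queryElements} generic over fields.
	query := sq.Select("job.id").From("job").
		Where(sq.Or{sq.Eq{"job.job_id": []string{"101", "102"}}})

	sql, args, _ := query.ToSql()
	fmt.Println(sql)  // SELECT job.id FROM job WHERE (job.job_id IN (?,?))
	fmt.Println(args) // [101 102]
}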


@@ -208,7 +208,13 @@ func buildFilterPresets(query url.Values) map[string]interface{} {
 		}
 	}
 	if query.Get("jobId") != "" {
-		filterPresets["jobId"] = query.Get("jobId")
+		if len(query["jobId"]) == 1 {
+			filterPresets["jobId"] = query.Get("jobId")
+			filterPresets["jobIdMatch"] = "eq"
+		} else {
+			filterPresets["jobId"] = query["jobId"]
+			filterPresets["jobIdMatch"] = "in"
+		}
 	}
 	if query.Get("arrayJobId") != "" {
 		if num, err := strconv.Atoi(query.Get("arrayJobId")); err == nil {
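
The one-vs-many branch above leans on a net/url detail: query.Get returns only the first value of a repeated parameter, while indexing the url.Values map returns all of them. A quick illustration with hypothetical values:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Two jobId parameters, as the frontend emits for an "in" match.
	query, _ := url.ParseQuery("cluster=fritz&jobId=101&jobId=102")

	fmt.Println(query.Get("jobId"))  // 101 (first value only)
	fmt.Println(query["jobId"])      // [101 102] (all values)
	fmt.Println(len(query["jobId"])) // 2, so the preset becomes jobIdMatch=in
}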


@@ -34,6 +34,7 @@
 	let filters = {
 		projectMatch: filterPresets.projectMatch || 'contains',
 		userMatch: filterPresets.userMatch || 'contains',
+		jobIdMatch: filterPresets.jobIdMatch || 'eq',

 		cluster: filterPresets.cluster || null,
 		partition: filterPresets.partition || null,
@@ -88,7 +89,7 @@
 		if (filters.duration.from || filters.duration.to)
 			items.push({ duration: { from: filters.duration.from, to: filters.duration.to } })
 		if (filters.jobId)
-			items.push({ jobId: { eq: filters.jobId } })
+			items.push({ jobId: { [filters.jobIdMatch]: filters.jobId } })
 		if (filters.arrayJobId != null)
 			items.push({ arrayJobId: filters.arrayJobId })
 		if (filters.numNodes.from != null || filters.numNodes.to != null)
@@ -130,6 +131,15 @@
 		// } else {
 			opts.push(`startTime=${dateToUnixEpoch(filters.startTime.from)}-${dateToUnixEpoch(filters.startTime.to)}`)
 		// }
+		if (filters.jobId.length != 0)
+			if (filters.jobIdMatch != 'in') {
+				opts.push(`jobId=${filters.jobId}`)
+			} else {
+				for (let singleJobId of filters.jobId)
+					opts.push(`jobId=${singleJobId}`)
+			}
+		if (filters.jobIdMatch != 'eq')
+			opts.push(`jobIdMatch=${filters.jobIdMatch}`)
 		for (let tag of filters.tags)
 			opts.push(`tag=${tag}`)
 		if (filters.duration.from && filters.duration.to)
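
Putting both ends together: for multiple IDs the opts logic above emits one jobId pair per ID plus an explicit jobIdMatch=in, which buildFilterPresets then folds back into presets. A sketch of the resulting URL query, built with Go's net/url purely for illustration (values hypothetical):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Mirror of the Svelte opts logic for an "in" match.
	opts := url.Values{}
	opts.Set("cluster", "fritz")
	for _, id := range []string{"101", "102"} {
		opts.Add("jobId", id)
	}
	opts.Set("jobIdMatch", "in")

	// Prints: cluster=fritz&jobId=101&jobId=102&jobIdMatch=in
	fmt.Println(opts.Encode())
}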