mirror of https://github.com/ClusterCockpit/cc-backend
synced 2025-02-04 15:25:17 +01:00

Merge branch 'dev' into add_detailed_nodelist

This commit is contained in:
commit 571652c314
@@ -245,7 +245,7 @@ type Query {
jobsFootprints(filter: [JobFilter!], metrics: [String!]!): Footprints

jobs(filter: [JobFilter!], page: PageRequest, order: OrderByInput): JobResultList!
jobsStatistics(filter: [JobFilter!], metrics: [String!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate): [JobsStatistics!]!
jobsStatistics(filter: [JobFilter!], metrics: [String!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate, numDurationBins: String, numMetricBins: Int): [JobsStatistics!]!

rooflineHeatmap(filter: [JobFilter!]!, rows: Int!, cols: Int!, minX: Float!, minY: Float!, maxX: Float!, maxY: Float!): [[Float!]!]!
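
Usage note (not part of this commit): a minimal Go sketch of a client exercising the two new jobsStatistics arguments shown in the schema hunk above. The /query endpoint path, the localhost address, and the selected response fields are assumptions for illustration only.

// Hedged sketch: posts the extended jobsStatistics query as standard GraphQL-over-HTTP.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	payload := map[string]interface{}{
		"query": `query($numDurationBins: String, $numMetricBins: Int) {
			jobsStatistics(numDurationBins: $numDurationBins, numMetricBins: $numMetricBins) {
				totalJobs
				histDuration { value count }
			}
		}`,
		"variables": map[string]interface{}{
			"numDurationBins": "1h", // one of "1m", "10m", "1h", "6h", "12h"
			"numMetricBins":   10,
		},
	}

	body, _ := json.Marshal(payload)
	// Endpoint and port are assumptions; adjust to the running cc-backend instance.
	resp, err := http.Post("http://localhost:8080/query", "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
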
go.mod
@@ -1,6 +1,6 @@
module github.com/ClusterCockpit/cc-backend

go 1.23
go 1.23.0

require (
github.com/99designs/gqlgen v0.17.57
@@ -48,8 +48,7 @@ type ResolverRoot interface {
SubCluster() SubClusterResolver
}

type DirectiveRoot struct {
}
type DirectiveRoot struct{}

type ComplexityRoot struct {
Accelerator struct {
@@ -266,7 +265,7 @@ type ComplexityRoot struct {
JobMetrics func(childComplexity int, id string, metrics []string, scopes []schema.MetricScope, resolution *int) int
Jobs func(childComplexity int, filter []*model.JobFilter, page *model.PageRequest, order *model.OrderByInput) int
JobsFootprints func(childComplexity int, filter []*model.JobFilter, metrics []string) int
JobsStatistics func(childComplexity int, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) int
JobsStatistics func(childComplexity int, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate, numDurationBins *string, numMetricBins *int) int
NodeMetrics func(childComplexity int, cluster string, nodes []string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time) int
NodeMetricsList func(childComplexity int, cluster string, subCluster string, nodeFilter string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time, page *model.PageRequest, resolution *int) int
RooflineHeatmap func(childComplexity int, filter []*model.JobFilter, rows int, cols int, minX float64, minY float64, maxX float64, maxY float64) int
@@ -392,7 +391,7 @@ type QueryResolver interface {
JobMetrics(ctx context.Context, id string, metrics []string, scopes []schema.MetricScope, resolution *int) ([]*model.JobMetricWithName, error)
JobsFootprints(ctx context.Context, filter []*model.JobFilter, metrics []string) (*model.Footprints, error)
Jobs(ctx context.Context, filter []*model.JobFilter, page *model.PageRequest, order *model.OrderByInput) (*model.JobResultList, error)
JobsStatistics(ctx context.Context, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) ([]*model.JobsStatistics, error)
JobsStatistics(ctx context.Context, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate, numDurationBins *string, numMetricBins *int) ([]*model.JobsStatistics, error)
RooflineHeatmap(ctx context.Context, filter []*model.JobFilter, rows int, cols int, minX float64, minY float64, maxX float64, maxY float64) ([][]float64, error)
NodeMetrics(ctx context.Context, cluster string, nodes []string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time) ([]*model.NodeMetrics, error)
NodeMetricsList(ctx context.Context, cluster string, subCluster string, nodeFilter string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time, page *model.PageRequest, resolution *int) (*model.NodesResultList, error)
@@ -1425,7 +1424,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return 0, false
}

return e.complexity.Query.JobsStatistics(childComplexity, args["filter"].([]*model.JobFilter), args["metrics"].([]string), args["page"].(*model.PageRequest), args["sortBy"].(*model.SortByAggregate), args["groupBy"].(*model.Aggregate)), true
return e.complexity.Query.JobsStatistics(childComplexity, args["filter"].([]*model.JobFilter), args["metrics"].([]string), args["page"].(*model.PageRequest), args["sortBy"].(*model.SortByAggregate), args["groupBy"].(*model.Aggregate), args["numDurationBins"].(*string), args["numMetricBins"].(*int)), true

case "Query.nodeMetrics":
if e.complexity.Query.NodeMetrics == nil {
@@ -2206,7 +2205,7 @@ type Query {
jobsFootprints(filter: [JobFilter!], metrics: [String!]!): Footprints

jobs(filter: [JobFilter!], page: PageRequest, order: OrderByInput): JobResultList!
jobsStatistics(filter: [JobFilter!], metrics: [String!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate): [JobsStatistics!]!
jobsStatistics(filter: [JobFilter!], metrics: [String!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate, numDurationBins: String, numMetricBins: Int): [JobsStatistics!]!

rooflineHeatmap(filter: [JobFilter!]!, rows: Int!, cols: Int!, minX: Float!, minY: Float!, maxX: Float!, maxY: Float!): [[Float!]!]!
@@ -2359,6 +2358,7 @@ func (ec *executionContext) field_Mutation_addTagsToJob_args(ctx context.Context
args["tagIds"] = arg1
return args, nil
}

func (ec *executionContext) field_Mutation_addTagsToJob_argsJob(
ctx context.Context,
rawArgs map[string]interface{},
@@ -2423,6 +2423,7 @@ func (ec *executionContext) field_Mutation_createTag_args(ctx context.Context, r
args["scope"] = arg2
return args, nil
}

func (ec *executionContext) field_Mutation_createTag_argsType(
ctx context.Context,
rawArgs map[string]interface{},
@@ -2499,6 +2500,7 @@ func (ec *executionContext) field_Mutation_deleteTag_args(ctx context.Context, r
args["id"] = arg0
return args, nil
}

func (ec *executionContext) field_Mutation_deleteTag_argsID(
ctx context.Context,
rawArgs map[string]interface{},
@@ -2536,6 +2538,7 @@ func (ec *executionContext) field_Mutation_removeTagsFromJob_args(ctx context.Co
args["tagIds"] = arg1
return args, nil
}

func (ec *executionContext) field_Mutation_removeTagsFromJob_argsJob(
ctx context.Context,
rawArgs map[string]interface{},
@@ -2595,6 +2598,7 @@ func (ec *executionContext) field_Mutation_updateConfiguration_args(ctx context.
args["value"] = arg1
return args, nil
}

func (ec *executionContext) field_Mutation_updateConfiguration_argsName(
ctx context.Context,
rawArgs map[string]interface{},
@@ -2649,6 +2653,7 @@ func (ec *executionContext) field_Query___type_args(ctx context.Context, rawArgs
args["name"] = arg0
return args, nil
}

func (ec *executionContext) field_Query___type_argsName(
ctx context.Context,
rawArgs map[string]interface{},
@@ -2681,6 +2686,7 @@ func (ec *executionContext) field_Query_allocatedNodes_args(ctx context.Context,
args["cluster"] = arg0
return args, nil
}

func (ec *executionContext) field_Query_allocatedNodes_argsCluster(
ctx context.Context,
rawArgs map[string]interface{},
@@ -2728,6 +2734,7 @@ func (ec *executionContext) field_Query_jobMetrics_args(ctx context.Context, raw
args["resolution"] = arg3
return args, nil
}

func (ec *executionContext) field_Query_jobMetrics_argsID(
ctx context.Context,
rawArgs map[string]interface{},
@@ -2826,6 +2833,7 @@ func (ec *executionContext) field_Query_job_args(ctx context.Context, rawArgs ma
args["id"] = arg0
return args, nil
}

func (ec *executionContext) field_Query_job_argsID(
ctx context.Context,
rawArgs map[string]interface{},
@@ -2863,6 +2871,7 @@ func (ec *executionContext) field_Query_jobsFootprints_args(ctx context.Context,
args["metrics"] = arg1
return args, nil
}

func (ec *executionContext) field_Query_jobsFootprints_argsFilter(
ctx context.Context,
rawArgs map[string]interface{},
@@ -2935,8 +2944,19 @@ func (ec *executionContext) field_Query_jobsStatistics_args(ctx context.Context,
return nil, err
}
args["groupBy"] = arg4
arg5, err := ec.field_Query_jobsStatistics_argsNumDurationBins(ctx, rawArgs)
if err != nil {
return nil, err
}
args["numDurationBins"] = arg5
arg6, err := ec.field_Query_jobsStatistics_argsNumMetricBins(ctx, rawArgs)
if err != nil {
return nil, err
}
args["numMetricBins"] = arg6
return args, nil
}

func (ec *executionContext) field_Query_jobsStatistics_argsFilter(
ctx context.Context,
rawArgs map[string]interface{},
@@ -3047,6 +3067,50 @@ func (ec *executionContext) field_Query_jobsStatistics_argsGroupBy(
return zeroVal, nil
}

func (ec *executionContext) field_Query_jobsStatistics_argsNumDurationBins(
ctx context.Context,
rawArgs map[string]interface{},
) (*string, error) {
// We won't call the directive if the argument is null.
// Set call_argument_directives_with_null to true to call directives
// even if the argument is null.
_, ok := rawArgs["numDurationBins"]
if !ok {
var zeroVal *string
return zeroVal, nil
}

ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("numDurationBins"))
if tmp, ok := rawArgs["numDurationBins"]; ok {
return ec.unmarshalOString2ᚖstring(ctx, tmp)
}

var zeroVal *string
return zeroVal, nil
}

func (ec *executionContext) field_Query_jobsStatistics_argsNumMetricBins(
ctx context.Context,
rawArgs map[string]interface{},
) (*int, error) {
// We won't call the directive if the argument is null.
// Set call_argument_directives_with_null to true to call directives
// even if the argument is null.
_, ok := rawArgs["numMetricBins"]
if !ok {
var zeroVal *int
return zeroVal, nil
}

ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("numMetricBins"))
if tmp, ok := rawArgs["numMetricBins"]; ok {
return ec.unmarshalOInt2ᚖint(ctx, tmp)
}

var zeroVal *int
return zeroVal, nil
}

func (ec *executionContext) field_Query_jobs_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
@@ -3067,6 +3131,7 @@ func (ec *executionContext) field_Query_jobs_args(ctx context.Context, rawArgs m
args["order"] = arg2
return args, nil
}

func (ec *executionContext) field_Query_jobs_argsFilter(
ctx context.Context,
rawArgs map[string]interface{},
@@ -3183,6 +3248,7 @@ func (ec *executionContext) field_Query_nodeMetricsList_args(ctx context.Context
args["resolution"] = arg8
return args, nil
}

func (ec *executionContext) field_Query_nodeMetricsList_argsCluster(
ctx context.Context,
rawArgs map[string]interface{},
@@ -3416,6 +3482,7 @@ func (ec *executionContext) field_Query_nodeMetrics_args(ctx context.Context, ra
args["to"] = arg5
return args, nil
}

func (ec *executionContext) field_Query_nodeMetrics_argsCluster(
ctx context.Context,
rawArgs map[string]interface{},
@@ -3588,6 +3655,7 @@ func (ec *executionContext) field_Query_rooflineHeatmap_args(ctx context.Context
args["maxY"] = arg6
return args, nil
}

func (ec *executionContext) field_Query_rooflineHeatmap_argsFilter(
ctx context.Context,
rawArgs map[string]interface{},
@@ -3752,6 +3820,7 @@ func (ec *executionContext) field_Query_user_args(ctx context.Context, rawArgs m
args["username"] = arg0
return args, nil
}

func (ec *executionContext) field_Query_user_argsUsername(
ctx context.Context,
rawArgs map[string]interface{},
@@ -3784,6 +3853,7 @@ func (ec *executionContext) field___Type_enumValues_args(ctx context.Context, ra
args["includeDeprecated"] = arg0
return args, nil
}

func (ec *executionContext) field___Type_enumValues_argsIncludeDeprecated(
ctx context.Context,
rawArgs map[string]interface{},
@@ -3816,6 +3886,7 @@ func (ec *executionContext) field___Type_fields_args(ctx context.Context, rawArg
args["includeDeprecated"] = arg0
return args, nil
}

func (ec *executionContext) field___Type_fields_argsIncludeDeprecated(
ctx context.Context,
rawArgs map[string]interface{},
@@ -10355,7 +10426,7 @@ func (ec *executionContext) _Query_jobsStatistics(ctx context.Context, field gra
}()
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().JobsStatistics(rctx, fc.Args["filter"].([]*model.JobFilter), fc.Args["metrics"].([]string), fc.Args["page"].(*model.PageRequest), fc.Args["sortBy"].(*model.SortByAggregate), fc.Args["groupBy"].(*model.Aggregate))
return ec.resolvers.Query().JobsStatistics(rctx, fc.Args["filter"].([]*model.JobFilter), fc.Args["metrics"].([]string), fc.Args["page"].(*model.PageRequest), fc.Args["sortBy"].(*model.SortByAggregate), fc.Args["groupBy"].(*model.Aggregate), fc.Args["numDurationBins"].(*string), fc.Args["numMetricBins"].(*int))
})
if err != nil {
ec.Error(ctx, err)
@@ -19995,7 +20066,7 @@ func (ec *executionContext) unmarshalOAggregate2ᚖgithubᚗcomᚋClusterCockpit
if v == nil {
return nil, nil
}
var res = new(model.Aggregate)
res := new(model.Aggregate)
err := res.UnmarshalGQL(v)
return res, graphql.ErrorOnPath(ctx, err)
}
@@ -20594,7 +20665,7 @@ func (ec *executionContext) unmarshalOSortByAggregate2ᚖgithubᚗcomᚋClusterC
if v == nil {
return nil, nil
}
var res = new(model.SortByAggregate)
res := new(model.SortByAggregate)
err := res.UnmarshalGQL(v)
return res, graphql.ErrorOnPath(ctx, err)
}
@@ -354,10 +354,14 @@ func (r *queryResolver) Jobs(ctx context.Context, filter []*model.JobFilter, pag
}

// JobsStatistics is the resolver for the jobsStatistics field.
func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) ([]*model.JobsStatistics, error) {
func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate, numDurationBins *string, numMetricBins *int) ([]*model.JobsStatistics, error) {
var err error
var stats []*model.JobsStatistics

// Top Level Defaults
var defaultDurationBins string = "1h"
var defaultMetricBins int = 10

if requireField(ctx, "totalJobs") || requireField(ctx, "totalWalltime") || requireField(ctx, "totalNodes") || requireField(ctx, "totalCores") ||
requireField(ctx, "totalAccs") || requireField(ctx, "totalNodeHours") || requireField(ctx, "totalCoreHours") || requireField(ctx, "totalAccHours") {
if groupBy == nil {
@@ -391,8 +395,13 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
}

if requireField(ctx, "histDuration") || requireField(ctx, "histNumNodes") || requireField(ctx, "histNumCores") || requireField(ctx, "histNumAccs") {

if numDurationBins == nil {
numDurationBins = &defaultDurationBins
}

if groupBy == nil {
stats[0], err = r.Repo.AddHistograms(ctx, filter, stats[0])
stats[0], err = r.Repo.AddHistograms(ctx, filter, stats[0], numDurationBins)
if err != nil {
return nil, err
}
@@ -402,8 +411,13 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
}

if requireField(ctx, "histMetrics") {

if numMetricBins == nil {
numMetricBins = &defaultMetricBins
}

if groupBy == nil {
stats[0], err = r.Repo.AddMetricHistograms(ctx, filter, metrics, stats[0])
stats[0], err = r.Repo.AddMetricHistograms(ctx, filter, metrics, stats[0], numMetricBins)
if err != nil {
return nil, err
}
@@ -8,7 +8,6 @@ import (
"context"
"database/sql"
"fmt"
"math"
"time"

"github.com/ClusterCockpit/cc-backend/internal/config"
@@ -447,15 +446,40 @@ func (r *JobRepository) AddHistograms(
ctx context.Context,
filter []*model.JobFilter,
stat *model.JobsStatistics,
durationBins *string,
) (*model.JobsStatistics, error) {
start := time.Now()

var targetBinCount int
var targetBinSize int
switch {
case *durationBins == "1m": // 1 Minute Bins + Max 60 Bins -> Max 60 Minutes
targetBinCount = 60
targetBinSize = 60
case *durationBins == "10m": // 10 Minute Bins + Max 72 Bins -> Max 12 Hours
targetBinCount = 72
targetBinSize = 600
case *durationBins == "1h": // 1 Hour Bins + Max 48 Bins -> Max 48 Hours
targetBinCount = 48
targetBinSize = 3600
case *durationBins == "6h": // 6 Hour Bins + Max 12 Bins -> Max 3 Days
targetBinCount = 12
targetBinSize = 21600
case *durationBins == "12h": // 12 hour Bins + Max 14 Bins -> Max 7 Days
targetBinCount = 14
targetBinSize = 43200
default: // 24h
targetBinCount = 24
targetBinSize = 3600
}

castType := r.getCastType()
var err error
value := fmt.Sprintf(`CAST(ROUND((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) / 3600) as %s) as value`, time.Now().Unix(), castType)
stat.HistDuration, err = r.jobsStatisticsHistogram(ctx, value, filter)
// Return X-Values always as seconds, will be formatted into minutes and hours in frontend
value := fmt.Sprintf(`CAST(ROUND(((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) / %d) + 1) as %s) as value`, time.Now().Unix(), targetBinSize, castType)
stat.HistDuration, err = r.jobsDurationStatisticsHistogram(ctx, value, filter, targetBinSize, &targetBinCount)
if err != nil {
log.Warn("Error while loading job statistics histogram: running jobs")
log.Warn("Error while loading job statistics histogram: job duration")
return nil, err
}
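
Aside (not part of the commit): the switch above maps a bin-size key to a bin count and a bin width in seconds. A small standalone sketch that mirrors that mapping and prints the maximum runtime each setting can cover; the helper names here are illustrative only.

// Hedged sketch of the duration-bin mapping introduced in AddHistograms above.
package main

import "fmt"

// binConfig is an illustrative helper type, not part of cc-backend.
type binConfig struct {
	count int // number of bins
	size  int // bin width in seconds
}

func durationBinConfig(key string) binConfig {
	switch key {
	case "1m":
		return binConfig{count: 60, size: 60} // covers up to 60 minutes
	case "10m":
		return binConfig{count: 72, size: 600} // covers up to 12 hours
	case "1h":
		return binConfig{count: 48, size: 3600} // covers up to 48 hours
	case "6h":
		return binConfig{count: 12, size: 21600} // covers up to 3 days
	case "12h":
		return binConfig{count: 14, size: 43200} // covers up to 7 days
	default:
		return binConfig{count: 24, size: 3600} // fallback used by the commit
	}
}

func main() {
	for _, key := range []string{"1m", "10m", "1h", "6h", "12h"} {
		c := durationBinConfig(key)
		fmt.Printf("%-4s -> %2d bins x %5ds = %6ds max\n", key, c.count, c.size, c.count*c.size)
	}
}
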
@@ -487,6 +511,7 @@ func (r *JobRepository) AddMetricHistograms(
filter []*model.JobFilter,
metrics []string,
stat *model.JobsStatistics,
targetBinCount *int,
) (*model.JobsStatistics, error) {
start := time.Now()

@@ -494,7 +519,7 @@
for _, f := range filter {
if f.State != nil {
if len(f.State) == 1 && f.State[0] == "running" {
stat.HistMetrics = r.runningJobsMetricStatisticsHistogram(ctx, metrics, filter)
stat.HistMetrics = r.runningJobsMetricStatisticsHistogram(ctx, metrics, filter, targetBinCount)
log.Debugf("Timer AddMetricHistograms %s", time.Since(start))
return stat, nil
}
@@ -503,7 +528,7 @@

// All other cases: Query and make bins in sqlite directly
for _, m := range metrics {
metricHisto, err := r.jobsMetricStatisticsHistogram(ctx, m, filter)
metricHisto, err := r.jobsMetricStatisticsHistogram(ctx, m, filter, targetBinCount)
if err != nil {
log.Warnf("Error while loading job metric statistics histogram: %s", m)
continue
@@ -540,6 +565,7 @@ func (r *JobRepository) jobsStatisticsHistogram(
}

points := make([]*model.HistoPoint, 0)
// is it possible to introduce zero values here? requires info about bincount
for rows.Next() {
point := model.HistoPoint{}
if err := rows.Scan(&point.Value, &point.Count); err != nil {
@@ -553,10 +579,66 @@
return points, nil
}

func (r *JobRepository) jobsDurationStatisticsHistogram(
ctx context.Context,
value string,
filters []*model.JobFilter,
binSizeSeconds int,
targetBinCount *int,
) ([]*model.HistoPoint, error) {
start := time.Now()
query, qerr := SecurityCheck(ctx,
sq.Select(value, "COUNT(job.id) AS count").From("job"))

if qerr != nil {
return nil, qerr
}

// Setup Array
points := make([]*model.HistoPoint, 0)
for i := 1; i <= *targetBinCount; i++ {
point := model.HistoPoint{Value: i * binSizeSeconds, Count: 0}
points = append(points, &point)
}

for _, f := range filters {
query = BuildWhereClause(f, query)
}

rows, err := query.GroupBy("value").RunWith(r.DB).Query()
if err != nil {
log.Error("Error while running query")
return nil, err
}

// Fill Array at matching $Value
for rows.Next() {
point := model.HistoPoint{}
if err := rows.Scan(&point.Value, &point.Count); err != nil {
log.Warn("Error while scanning rows")
return nil, err
}

for _, e := range points {
if e.Value == (point.Value * binSizeSeconds) {
// Note:
// Matching on unmodified integer value (and multiplying point.Value by binSizeSeconds after match)
// causes frontend to loop into highest targetBinCount, due to zoom condition instantly being fullfilled (cause unknown)
e.Count = point.Count
break
}
}
}

log.Debugf("Timer jobsStatisticsHistogram %s", time.Since(start))
return points, nil
}

func (r *JobRepository) jobsMetricStatisticsHistogram(
ctx context.Context,
metric string,
filters []*model.JobFilter,
bins *int,
) (*model.MetricHistoPoints, error) {
// Get specific Peak or largest Peak
var metricConfig *schema.MetricConfig
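
Aside (not part of the commit): the new duration histogram pre-fills one HistoPoint per bin with Count 0 and then only overwrites the counts the grouped query actually returns, so empty bins stay visible as zeros. A self-contained sketch of that pattern, with plain ints standing in for the model types and a simulated query result:

// Hedged sketch of the "pre-fill, then match" pattern used by
// jobsDurationStatisticsHistogram above.
package main

import "fmt"

type histoPoint struct {
	Value int // upper bound of the bin in seconds
	Count int
}

func main() {
	const binSize = 3600 // "1h" bins
	const binCount = 48

	// Pre-fill: one zeroed point per bin, Value = i * binSize.
	points := make([]*histoPoint, 0, binCount)
	for i := 1; i <= binCount; i++ {
		points = append(points, &histoPoint{Value: i * binSize})
	}

	// Simulated query rows: Value carries the bin index as computed in SQL.
	rows := []histoPoint{{Value: 1, Count: 12}, {Value: 3, Count: 4}}

	// Fill counts at the matching bin; untouched bins keep Count 0.
	for _, row := range rows {
		for _, e := range points {
			if e.Value == row.Value*binSize {
				e.Count = row.Count
				break
			}
		}
	}

	for _, p := range points[:4] {
		fmt.Printf("<= %5ds: %d jobs\n", p.Value, p.Count)
	}
}
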
@@ -624,16 +706,15 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
return nil, sqlerr
}

bins := 10
binQuery := fmt.Sprintf(`CAST( (case when %s = value.max
then value.max*0.999999999 else %s end - value.min) / (value.max -
value.min) * %d as INTEGER )`, jm, jm, bins)
value.min) * %v as INTEGER )`, jm, jm, *bins)

mainQuery := sq.Select(
fmt.Sprintf(`%s + 1 as bin`, binQuery),
fmt.Sprintf(`count(%s) as count`, jm),
fmt.Sprintf(`CAST(((value.max / %d) * (%s )) as INTEGER ) as min`, bins, binQuery),
fmt.Sprintf(`CAST(((value.max / %d) * (%s + 1 )) as INTEGER ) as max`, bins, binQuery),
fmt.Sprintf(`CAST(((value.max / %d) * (%v )) as INTEGER ) as min`, *bins, binQuery),
fmt.Sprintf(`CAST(((value.max / %d) * (%v + 1 )) as INTEGER ) as max`, *bins, binQuery),
).From("job").CrossJoin(
fmt.Sprintf(`(%s) as value`, crossJoinQuerySql), crossJoinQueryArgs...,
).Where(fmt.Sprintf(`%s is not null and %s <= %f`, jm, jm, peak))
@@ -657,7 +738,15 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
return nil, err
}

// Setup Array
points := make([]*model.MetricHistoPoint, 0)
for i := 1; i <= *bins; i++ {
binMax := ((int(peak) / *bins) * i)
binMin := ((int(peak) / *bins) * (i - 1))
point := model.MetricHistoPoint{Bin: &i, Count: 0, Min: &binMin, Max: &binMax}
points = append(points, &point)
}

for rows.Next() {
point := model.MetricHistoPoint{}
if err := rows.Scan(&point.Bin, &point.Count, &point.Min, &point.Max); err != nil {
@@ -665,7 +754,20 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
return nil, err // Totally bricks cc-backend if returned and if all metrics requested?
}

points = append(points, &point)
for _, e := range points {
if e.Bin != nil && point.Bin != nil {
if *e.Bin == *point.Bin {
e.Count = point.Count
if point.Min != nil {
e.Min = point.Min
}
if point.Max != nil {
e.Max = point.Max
}
break
}
}
}
}

result := model.MetricHistoPoints{Metric: metric, Unit: unit, Stat: &footprintStat, Data: points}
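
Aside (not part of the commit): the bin index computed by the SQL above is essentially floor((x - min) / (max - min) * bins) + 1, with x == max nudged down by the 0.999999999 factor so the peak value still lands in the top bin. A hedged arithmetic sketch of that formula, independent of the SQL:

// Hedged sketch of the bin arithmetic expressed by binQuery above.
package main

import "fmt"

func metricBin(x, min, max float64, bins int) int {
	if x == max {
		x = max * 0.999999999 // same trick as the SQL CASE expression
	}
	return int((x-min)/(max-min)*float64(bins)) + 1
}

func main() {
	const peak = 100.0 // e.g. a metric's configured peak
	bins := 10
	for _, v := range []float64{0, 9.9, 10, 55, 100} {
		fmt.Printf("value %6.1f -> bin %d of %d\n", v, metricBin(v, 0, peak, bins), bins)
	}
}
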
@@ -678,7 +780,9 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
ctx context.Context,
metrics []string,
filters []*model.JobFilter,
bins *int,
) []*model.MetricHistoPoints {

// Get Jobs
jobs, err := r.QueryJobs(ctx, filters, &model.PageRequest{Page: 1, ItemsPerPage: 500 + 1}, nil)
if err != nil {
@@ -720,7 +824,6 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric)
peak = metricConfig.Peak
unit = metricConfig.Unit.Prefix + metricConfig.Unit.Base
log.Debugf("Cluster %s filter found with peak %f for %s", *f.Cluster.Eq, peak, metric)
}
}

@@ -740,28 +843,24 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
}

// Make and fill bins
bins := 10.0
peakBin := peak / bins
peakBin := int(peak) / *bins

points := make([]*model.MetricHistoPoint, 0)
for b := 0; b < 10; b++ {
for b := 0; b < *bins; b++ {
count := 0
bindex := b + 1
bmin := math.Round(peakBin * float64(b))
bmax := math.Round(peakBin * (float64(b) + 1.0))
bmin := peakBin * b
bmax := peakBin * (b + 1)

// Iterate AVG values for indexed metric and count for bins
for _, val := range avgs[idx] {
if float64(val) >= bmin && float64(val) < bmax {
if int(val) >= bmin && int(val) < bmax {
count += 1
}
}

bminint := int(bmin)
bmaxint := int(bmax)

// Append Bin to Metric Result Array
point := model.MetricHistoPoint{Bin: &bindex, Count: count, Min: &bminint, Max: &bmaxint}
point := model.MetricHistoPoint{Bin: &bindex, Count: count, Min: &bmin, Max: &bmax}
points = append(points, &point)
}
web/frontend/package-lock.json (generated)
@@ -1,12 +1,12 @@
{
"name": "cc-frontend",
"version": "1.0.2",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "cc-frontend",
"version": "1.0.2",
"version": "1.0.0",
"license": "MIT",
"dependencies": {
"@rollup/plugin-replace": "^5.0.7",
@@ -174,6 +174,7 @@
},
});

// Note: Different footprints than those saved in DB per Job -> Caused by Legacy Naming
$: footprintsQuery = queryStore({
client: client,
query: gql`
@@ -470,10 +471,12 @@
height={300}
data={convert2uplot($statsQuery.data.stats[0].histDuration)}
title="Duration Distribution"
xlabel="Current Runtimes"
xunit="Hours"
xlabel="Current Job Runtimes"
xunit="Runtime"
ylabel="Number of Jobs"
yunit="Jobs"
usesBins
xtime
/>
{/key}
</Col>
@@ -463,7 +463,7 @@

<hr />

<!-- Usage Stats as Histograms -->
<!-- User and Project Stats as Pie-Charts -->

<Row cols={{ lg: 4, md: 2, sm: 1 }}>
<Col class="p-2">
@@ -587,17 +587,23 @@
{/key}
</Col>
</Row>

<hr class="my-2" />

<!-- Static Stats as Histograms : Running Duration && Allocated Hardware Counts-->

<Row cols={{ lg: 2, md: 1 }}>
<Col class="p-2">
{#key $mainQuery.data.stats}
<Histogram
data={convert2uplot($mainQuery.data.stats[0].histDuration)}
title="Duration Distribution"
xlabel="Current Runtimes"
xunit="Hours"
xlabel="Current Job Runtimes"
xunit="Runtime"
ylabel="Number of Jobs"
yunit="Jobs"
usesBins
xtime
/>
{/key}
</Col>
@@ -640,7 +646,11 @@
{/key}
</Col>
</Row>

<hr class="my-2" />

<!-- Selectable Stats as Histograms : Average Values of Running Jobs -->

{#if metricsInHistograms}
{#key $mainQuery.data.stats[0].histMetrics}
<PlotGrid
@@ -17,6 +17,9 @@
Icon,
Card,
Spinner,
Input,
InputGroup,
InputGroupText
} from "@sveltestrap/sveltestrap";
import {
queryStore,
@@ -59,6 +62,11 @@
let showFootprint = filterPresets.cluster
? !!ccconfig[`plot_list_showFootprint:${filterPresets.cluster}`]
: !!ccconfig.plot_list_showFootprint;

let numDurationBins = "1h";
let numMetricBins = 10;
let durationBinOptions = ["1m","10m","1h","6h","12h"];
let metricBinOptions = [10, 20, 50, 100];

$: metricsInHistograms = selectedCluster
? ccconfig[`user_view_histogramMetrics:${selectedCluster}`] || []
@@ -68,8 +76,8 @@
$: stats = queryStore({
client: client,
query: gql`
query ($jobFilters: [JobFilter!]!, $metricsInHistograms: [String!]) {
jobsStatistics(filter: $jobFilters, metrics: $metricsInHistograms) {
query ($jobFilters: [JobFilter!]!, $metricsInHistograms: [String!], $numDurationBins: String, $numMetricBins: Int) {
jobsStatistics(filter: $jobFilters, metrics: $metricsInHistograms, numDurationBins: $numDurationBins , numMetricBins: $numMetricBins ) {
totalJobs
shortJobs
totalWalltime
@@ -96,7 +104,7 @@
}
}
`,
variables: { jobFilters, metricsInHistograms },
variables: { jobFilters, metricsInHistograms, numDurationBins, numMetricBins },
});

onMount(() => filterComponent.updateFilters());
@@ -118,8 +126,8 @@
{/if}

<!-- ROW2: Tools-->
<Row cols={{ xs: 1, md: 2, lg: 4}} class="mb-3">
<Col lg="2" class="mb-2 mb-lg-0">
<Row cols={{ xs: 1, md: 2, lg: 6}} class="mb-3">
<Col class="mb-2 mb-lg-0">
<ButtonGroup class="w-100">
<Button outline color="primary" on:click={() => (isSortingOpen = true)}>
<Icon name="sort-up" /> Sorting
@@ -133,7 +141,7 @@
</Button>
</ButtonGroup>
</Col>
<Col lg="4" xl="6" class="mb-1 mb-lg-0">
<Col lg="4" class="mb-1 mb-lg-0">
<Filters
{filterPresets}
{matchedJobs}
@@ -148,12 +156,27 @@
}}
/>
</Col>
<Col lg="3" xl="2" class="mb-2 mb-lg-0">
<Col class="mb-2 mb-lg-0">
<InputGroup>
<InputGroupText>
<Icon name="bar-chart-line-fill" />
</InputGroupText>
<InputGroupText>
Duration Bin Size
</InputGroupText>
<Input type="select" bind:value={numDurationBins} style="max-width: 120px;">
{#each durationBinOptions as dbin}
<option value={dbin}>{dbin}</option>
{/each}
</Input>
</InputGroup>
</Col>
<Col class="mb-2 mb-lg-0">
<TextFilter
on:set-filter={({ detail }) => filterComponent.updateFilters(detail)}
/>
</Col>
<Col lg="3" xl="2" class="mb-1 mb-lg-0">
<Col class="mb-1 mb-lg-0">
<Refresher on:refresh={() => {
jobList.refreshJobs()
jobList.refreshAllMetrics()
@@ -215,10 +238,12 @@
<Histogram
data={convert2uplot($stats.data.jobsStatistics[0].histDuration)}
title="Duration Distribution"
xlabel="Current Runtimes"
xunit="Hours"
xlabel="Job Runtimes"
xunit="Runtime"
ylabel="Number of Jobs"
yunit="Jobs"
usesBins
xtime
/>
{/key}
</Col>
@@ -238,16 +263,32 @@
</Row>

<!-- ROW4+5: Selectable Histograms -->
<Row cols={{ xs: 1, md: 5}}>
<Col>
<Row>
<Col xs="12" md="3" lg="2" class="mb-2 mb-md-0">
<Button
outline
color="secondary"
class="w-100"
on:click={() => (isHistogramSelectionOpen = true)}
>
<Icon name="bar-chart-line" /> Select Histograms
</Button>
</Col>
<Col xs="12" md="9" lg="10" class="mb-2 mb-md-0">
<InputGroup>
<InputGroupText>
<Icon name="bar-chart-line-fill" />
</InputGroupText>
<InputGroupText>
Metric Bins
</InputGroupText>
<Input type="select" bind:value={numMetricBins} style="max-width: 120px;">
{#each metricBinOptions as mbin}
<option value={mbin}>{mbin}</option>
{/each}
</Input>
</InputGroup>
</Col>
</Row>
{#if metricsInHistograms?.length > 0}
{#if $stats.error}
@@ -272,12 +313,12 @@
>
<Histogram
data={convert2uplot(item.data)}
usesBins={true}
title="Distribution of '{item.metric} ({item.stat})' footprints"
xlabel={`${item.metric} bin maximum ${item?.unit ? `[${item.unit}]` : ``}`}
xunit={item.unit}
ylabel="Number of Jobs"
yunit="Jobs"
usesBins
/>
</PlotGrid>
{/key}
@@ -18,6 +18,7 @@
const ccconfig = getContext("cc-config");
let message = { msg: "", target: "", color: "#d63384" };
let displayMessage = false;
let cbmode = ccconfig?.plot_general_colorblindMode || false;

async function handleSettingSubmit(event) {
const selector = event.detail.selector
@@ -28,6 +29,9 @@
const res = await fetch(form.action, { method: "POST", body: formData });
if (res.ok) {
let text = await res.text();
if (formData.get("key") === "plot_general_colorblindMode") {
cbmode = JSON.parse(formData.get("value"));
}
popMessage(text, target, "#048109");
} else {
let text = await res.text();
@@ -51,4 +55,4 @@

<UserOptions config={ccconfig} {username} {isApi} bind:message bind:displayMessage on:update-config={(e) => handleSettingSubmit(e)}/>
<PlotRenderOptions config={ccconfig} bind:message bind:displayMessage on:update-config={(e) => handleSettingSubmit(e)}/>
<PlotColorScheme config={ccconfig} bind:message bind:displayMessage on:update-config={(e) => handleSettingSubmit(e)}/>
<PlotColorScheme config={ccconfig} bind:cbmode bind:message bind:displayMessage on:update-config={(e) => handleSettingSubmit(e)}/>
@@ -24,6 +24,7 @@
export let config;
export let message;
export let displayMessage;
export let cbmode = false;

const dispatch = createEventDispatcher();
function updateSetting(selector, target) {
@@ -265,6 +266,62 @@
],
};

// https://personal.sron.nl/~pault/
// https://tsitsul.in/blog/coloropt/
const cvdschemes = {
HighContrast: [
"rgb(221,170,51)",
"rgb(187,85,102)",
"rgb(0,68,136)",
"rgb(0,0,0)",
],
Bright: [
"rgb(68,119,170)",
"rgb(102,204,238)",
"rgb(34,136,51)",
"rgb(204,187,68)",
"rgb(238,102,119)",
"rgb(170,51,119)",
"rgb(187,187,187)",
],
Muted: [
"rgb(51,34,136)",
"rgb(136,204,238)",
"rgb(68,170,153)",
"rgb(17,119,51)",
"rgb(153,153,51)",
"rgb(221,204,119)",
"rgb(204,102,119)",
"rgb(136,34,85)",
"rgb(170,68,153)",
"rgb(221,221,221)",
],
NormalSixColor: [
"rgb(64,83,211)",
"rgb(221,179,16)",
"rgb(181,29,20)",
"rgb(0,190,255)",
"rgb(251,73,176)",
"rgb(0,178,93)",
"rgb(202,202,202)",
],
NormalTwelveColor: [
"rgb(235,172,35)",
"rgb(184,0,88)",
"rgb(0,140,249)",
"rgb(0,110,0)",
"rgb(0,187,173)",
"rgb(209,99,230)",
"rgb(178,69,2)",
"rgb(255,146,135)",
"rgb(89,84,214)",
"rgb(0,198,248)",
"rgb(135,133,0)",
"rgb(0,167,108)",
"rgb(189,189,189)",
]
}

</script>

<Row cols={1} class="p-2 g-2">
@@ -281,7 +338,7 @@
<CardTitle
style="margin-bottom: 1em; display: flex; align-items: center;"
>
<div>Color Scheme for Timeseries Plots</div>
<div>Color Scheme for Timeseries Plots {cbmode ? `(Color Blind Friendly Palettes)` : ``}</div>
{#if displayMessage && message.target == "cs"}<div
style="margin-left: auto; font-size: 0.9em;"
>
@@ -293,7 +350,7 @@
<input type="hidden" name="key" value="plot_general_colorscheme" />
<Table hover>
<tbody>
{#each Object.entries(colorschemes) as [name, rgbrow]}
{#each Object.entries(cbmode ? cvdschemes : colorschemes) as [name, rgbrow]}
<tr>
<th scope="col">{name}</th>
<td>
@@ -333,8 +390,9 @@

<style>
.color-dot {
height: 10px;
width: 10px;
margin-left: 1px;
height: 12px;
width: 12px;
border-radius: 50%;
display: inline-block;
}
@@ -129,8 +129,8 @@
>

<!-- BACKGROUND -->
<Col
><Card class="h-100">
<Col class="d-flex justify-content-between"
><Card class="h-100" style="width: 49%;">
<form
id="backgrounds-form"
method="post"
@@ -173,6 +173,50 @@
</div>
<Button color="primary" type="submit">Submit</Button>
</form>
</Card></Col
>
</Card>
<Card class="h-100" style="width: 49%;">
<form
id="colorblindmode-form"
method="post"
action="/frontend/configuration/"
class="card-body"
on:submit|preventDefault={() =>
updateSetting("#colorblindmode-form", "cbm")}
>
<!-- Svelte 'class' directive only on DOMs directly, normal 'class="xxx"' does not work, so style-array it is. -->
<CardTitle
style="margin-bottom: 1em; display: flex; align-items: center;"
>
<div>Color Blind Mode</div>
{#if displayMessage && message.target == "cbm"}<div
style="margin-left: auto; font-size: 0.9em;"
>
<code style="color: {message.color};" out:fade
>Update: {message.msg}</code
>
</div>{/if}
</CardTitle>
<input type="hidden" name="key" value="plot_general_colorblindMode" />
<div class="mb-3">
<div>
{#if config?.plot_general_colorblindMode}
<input type="radio" id="cbm-true-checked" name="value" value="true" checked />
{:else}
<input type="radio" id="cbm-true" name="value" value="true" />
{/if}
<label for="true">Yes</label>
</div>
<div>
{#if config?.plot_general_colorblindMode}
<input type="radio" id="cbm-false" name="value" value="false" />
{:else}
<input type="radio" id="cbm-false-checked" name="value" value="false" checked />
{/if}
<label for="false">No</label>
</div>
</div>
<Button color="primary" type="submit">Submit</Button>
</form>
</Card>
</Col>
</Row>
@@ -15,8 +15,8 @@

<script>
import uPlot from "uplot";
import { formatNumber } from "../units.js";
import { onMount, onDestroy } from "svelte";
import { formatNumber } from "../units.js";
import { Card } from "@sveltestrap/sveltestrap";

export let data;
@@ -26,16 +26,31 @@
export let title = "";
export let xlabel = "";
export let xunit = "";
export let xtime = false;
export let ylabel = "";
export let yunit = "";

const { bars } = uPlot.paths;

const drawStyles = {
bars: 1,
points: 2,
};

function formatTime(t) {
if (t !== null) {
if (isNaN(t)) {
return t;
} else {
const tAbs = Math.abs(t);
const h = Math.floor(tAbs / 3600);
const m = Math.floor((tAbs % 3600) / 60);
if (h == 0) return `${m}m`;
else if (m == 0) return `${h}h`;
else return `${h}:${m}h`;
}
}
}

function paths(u, seriesIdx, idx0, idx1, extendGap, buildClip) {
let s = u.series[seriesIdx];
let style = s.drawStyle;
@@ -139,7 +154,7 @@
label: xlabel,
labelGap: 10,
size: 25,
incrs: [1, 2, 5, 6, 10, 12, 50, 100, 500, 1000, 5000, 10000],
incrs: xtime ? [60, 120, 300, 600, 1800, 3600, 7200, 14400, 18000, 21600, 43200, 86400] : [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000],
border: {
show: true,
stroke: "#000000",
@@ -149,7 +164,13 @@
size: 5 / devicePixelRatio,
stroke: "#000000",
},
values: (_, t) => t.map((v) => formatNumber(v)),
values: (_, t) => t.map((v) => {
if (xtime) {
return formatTime(v);
} else {
return formatNumber(v)
}
}),
},
{
stroke: "#000000",
@@ -166,17 +187,25 @@
size: 5 / devicePixelRatio,
stroke: "#000000",
},
values: (_, t) => t.map((v) => formatNumber(v)),
values: (_, t) => t.map((v) => {
return formatNumber(v)
}),
},
],
series: [
{
label: xunit !== "" ? xunit : null,
value: (u, ts, sidx, didx) => {
if (usesBins) {
if (usesBins && xtime) {
const min = u.data[sidx][didx - 1] ? formatTime(u.data[sidx][didx - 1]) : 0;
const max = formatTime(u.data[sidx][didx]);
ts = min + " - " + max; // narrow spaces
} else if (usesBins) {
const min = u.data[sidx][didx - 1] ? u.data[sidx][didx - 1] : 0;
const max = u.data[sidx][didx];
ts = min + " - " + max; // narrow spaces
} else if (xtime) {
ts = formatTime(ts);
}
return ts;
},
@@ -191,6 +220,7 @@
},
{
drawStyle: drawStyles.bars,
width: 1, // 1 / lastBinCount,
lineInterpolation: null,
stroke: "#85abce",
fill: "#85abce", // + "1A", // Transparent Fill
@@ -152,10 +152,12 @@
const lineWidth =
clusterCockpitConfig.plot_general_lineWidth / window.devicePixelRatio;
const lineColors = clusterCockpitConfig.plot_general_colorscheme;
const cbmode = clusterCockpitConfig?.plot_general_colorblindMode || false;

const backgroundColors = {
normal: "rgba(255, 255, 255, 1.0)",
caution: "rgba(255, 128, 0, 0.3)",
alert: "rgba(255, 0, 0, 0.3)",
caution: cbmode ? "rgba(239, 230, 69, 0.3)" : "rgba(255, 128, 0, 0.3)",
alert: cbmode ? "rgba(225, 86, 44, 0.3)" : "rgba(255, 0, 0, 0.3)",
};
const thresholds = findJobAggregationThresholds(
subClusterTopology,
@@ -348,13 +350,13 @@
label: "min",
scale: "y",
width: lineWidth,
stroke: "red",
stroke: cbmode ? "rgb(0,255,0)" : "red",
});
plotSeries.push({
label: "max",
scale: "y",
width: lineWidth,
stroke: "green",
stroke: cbmode ? "rgb(0,0,255)" : "green",
});
plotSeries.push({
label: usesMeanStatsSeries ? "mean" : "median",
@@ -364,8 +366,8 @@
});

plotBands = [
{ series: [2, 3], fill: "rgba(0,255,0,0.1)" },
{ series: [3, 1], fill: "rgba(255,0,0,0.1)" },
{ series: [2, 3], fill: cbmode ? "rgba(0,0,255,0.1)" : "rgba(0,255,0,0.1)" },
{ series: [3, 1], fill: cbmode ? "rgba(0,255,0,0.1)" : "rgba(255,0,0,0.1)" },
];
} else {
for (let i = 0; i < series.length; i++) {
@@ -40,6 +40,7 @@
let timeoutId = null;

const lineWidth = clusterCockpitConfig.plot_general_lineWidth;
const cbmode = clusterCockpitConfig?.plot_general_colorblindMode || false;

// Helpers
function getGradientR(x) {
@@ -61,7 +62,7 @@
return Math.floor(x * 255.0);
}
function getRGB(c) {
return `rgb(${getGradientR(c)}, ${getGradientG(c)}, ${getGradientB(c)})`;
return `rgb(${cbmode ? '0' : getGradientR(c)}, ${getGradientG(c)}, ${getGradientB(c)})`;
}
function nearestThousand(num) {
return Math.ceil(num / 1000) * 1000;
@@ -449,7 +449,7 @@ function getMetricConfigDeep(metric, cluster, subCluster) {
}
}

export function convert2uplot(canvasData) {
export function convert2uplot(canvasData, secondsToMinutes = false, secondsToHours = false) {
// Prep: Uplot Data Structure
let uplotData = [[],[]] // [X, Y1, Y2, ...]
// Iterate if exists
@@ -457,11 +457,21 @@ export function convert2uplot(canvasData) {
canvasData.forEach( cd => {
if (Object.keys(cd).length == 4) { // MetricHisto Datafromat
uplotData[0].push(cd?.max ? cd.max : 0)
uplotData[1].push(cd?.count ? cd.count : 0)
} else { // Default -> Fill Histodata with zero values on unused value placing -> maybe allows zoom trigger as known
if (secondsToHours) {
let hours = cd.value / 3600
console.log("x seconds to y hours", cd.value, hours)
uplotData[0].push(hours)
} else if (secondsToMinutes) {
let minutes = cd.value / 60
console.log("x seconds to y minutes", cd.value, minutes)
uplotData[0].push(minutes)
} else {
uplotData[0].push(cd.value)
}
uplotData[1].push(cd.count)
} else { // Default
uplotData[0].push(cd.value)
uplotData[1].push(cd.count)
}
}
})
}
return uplotData