Mirror of https://github.com/ClusterCockpit/cc-backend, synced 2024-12-26 05:19:05 +01:00

Merge pull request #233 from ClusterCockpit/214_user_status_histograms

214 user status histograms

Commit 63f3dc926c
@@ -203,7 +203,7 @@ type Query {
   jobsFootprints(filter: [JobFilter!], metrics: [String!]!): Footprints

   jobs(filter: [JobFilter!], page: PageRequest, order: OrderByInput): JobResultList!
-  jobsStatistics(filter: [JobFilter!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate): [JobsStatistics!]!
+  jobsStatistics(filter: [JobFilter!], metrics: [String!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate): [JobsStatistics!]!

   rooflineHeatmap(filter: [JobFilter!]!, rows: Int!, cols: Int!, minX: Float!, minY: Float!, maxX: Float!, maxY: Float!): [[Float!]!]!

@@ -291,6 +291,19 @@ type HistoPoint {
   value: Int!
 }

+type MetricHistoPoints {
+  metric: String!
+  unit: String!
+  data: [MetricHistoPoint!]
+}
+
+type MetricHistoPoint {
+  bin: Int
+  count: Int!
+  min: Int
+  max: Int
+}
+
 type JobsStatistics {
   id: ID! # If `groupBy` was used, ID of the user/project/cluster
   name: String! # if User-Statistics: Given Name of Account (ID) Owner
@@ -308,6 +321,7 @@ type JobsStatistics {
   histNumNodes: [HistoPoint!]! # value: number of nodes, count: number of jobs with that number of nodes
   histNumCores: [HistoPoint!]! # value: number of cores, count: number of jobs with that number of cores
   histNumAccs: [HistoPoint!]!  # value: number of accs, count: number of jobs with that number of accs
+  histMetrics: [MetricHistoPoints!]! # metric: metricname, data array of histopoints: value: metric average bin, count: number of jobs with that metric average
 }

 input PageRequest {
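Taken together, the schema hunks above add an optional `metrics` argument to jobsStatistics and a histMetrics result field. A minimal sketch of a client query exercising both, posted from Go; the endpoint path and the metric names are illustrative assumptions, not taken from this PR:

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
)

func main() {
    // histMetrics is only populated when the new `metrics` argument is set;
    // "cpu_load" and "mem_used" are example metric names.
    query := `query {
      jobsStatistics(metrics: ["cpu_load", "mem_used"]) {
        totalJobs
        histMetrics { metric unit data { bin count min max } }
      }
    }`
    body, _ := json.Marshal(map[string]string{"query": query})
    // "/query" is gqlgen's conventional GraphQL endpoint; adjust to your deployment.
    resp, err := http.Post("http://localhost:8080/query", "application/json", bytes.NewReader(body))
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
    fmt.Println(resp.Status)
}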
@@ -25,6 +25,7 @@ import (
 // NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
 func NewExecutableSchema(cfg Config) graphql.ExecutableSchema {
     return &executableSchema{
+        schema:     cfg.Schema,
         resolvers:  cfg.Resolvers,
         directives: cfg.Directives,
         complexity: cfg.Complexity,
@@ -32,6 +33,7 @@ func NewExecutableSchema(cfg Config) graphql.ExecutableSchema {
 }

 type Config struct {
+    Schema     *ast.Schema
     Resolvers  ResolverRoot
     Directives DirectiveRoot
     Complexity ComplexityRoot
@@ -145,6 +147,7 @@ type ComplexityRoot struct {

     JobsStatistics struct {
         HistDuration func(childComplexity int) int
+        HistMetrics  func(childComplexity int) int
         HistNumAccs  func(childComplexity int) int
         HistNumCores func(childComplexity int) int
         HistNumNodes func(childComplexity int) int
@@ -180,6 +183,19 @@ type ComplexityRoot struct {
         Metric func(childComplexity int) int
     }

+    MetricHistoPoint struct {
+        Bin   func(childComplexity int) int
+        Count func(childComplexity int) int
+        Max   func(childComplexity int) int
+        Min   func(childComplexity int) int
+    }
+
+    MetricHistoPoints struct {
+        Data   func(childComplexity int) int
+        Metric func(childComplexity int) int
+        Unit   func(childComplexity int) int
+    }
+
     MetricStatistics struct {
         Avg func(childComplexity int) int
         Max func(childComplexity int) int
@@ -212,7 +228,7 @@ type ComplexityRoot struct {
         JobMetrics      func(childComplexity int, id string, metrics []string, scopes []schema.MetricScope) int
         Jobs            func(childComplexity int, filter []*model.JobFilter, page *model.PageRequest, order *model.OrderByInput) int
         JobsFootprints  func(childComplexity int, filter []*model.JobFilter, metrics []string) int
-        JobsStatistics  func(childComplexity int, filter []*model.JobFilter, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) int
+        JobsStatistics  func(childComplexity int, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) int
         NodeMetrics     func(childComplexity int, cluster string, nodes []string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time) int
         RooflineHeatmap func(childComplexity int, filter []*model.JobFilter, rows int, cols int, minX float64, minY float64, maxX float64, maxY float64) int
         Tags            func(childComplexity int) int
@@ -327,7 +343,7 @@ type QueryResolver interface {
     JobMetrics(ctx context.Context, id string, metrics []string, scopes []schema.MetricScope) ([]*model.JobMetricWithName, error)
     JobsFootprints(ctx context.Context, filter []*model.JobFilter, metrics []string) (*model.Footprints, error)
     Jobs(ctx context.Context, filter []*model.JobFilter, page *model.PageRequest, order *model.OrderByInput) (*model.JobResultList, error)
-    JobsStatistics(ctx context.Context, filter []*model.JobFilter, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) ([]*model.JobsStatistics, error)
+    JobsStatistics(ctx context.Context, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) ([]*model.JobsStatistics, error)
     RooflineHeatmap(ctx context.Context, filter []*model.JobFilter, rows int, cols int, minX float64, minY float64, maxX float64, maxY float64) ([][]float64, error)
     NodeMetrics(ctx context.Context, cluster string, nodes []string, scopes []schema.MetricScope, metrics []string, from time.Time, to time.Time) ([]*model.NodeMetrics, error)
 }
@@ -336,12 +352,16 @@ type SubClusterResolver interface {
 }

 type executableSchema struct {
+    schema     *ast.Schema
     resolvers  ResolverRoot
     directives DirectiveRoot
     complexity ComplexityRoot
 }

 func (e *executableSchema) Schema() *ast.Schema {
+    if e.schema != nil {
+        return e.schema
+    }
     return parsedSchema
 }

@@ -763,6 +783,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {

         return e.complexity.JobsStatistics.HistDuration(childComplexity), true

+    case "JobsStatistics.histMetrics":
+        if e.complexity.JobsStatistics.HistMetrics == nil {
+            break
+        }
+
+        return e.complexity.JobsStatistics.HistMetrics(childComplexity), true
+
     case "JobsStatistics.histNumAccs":
         if e.complexity.JobsStatistics.HistNumAccs == nil {
             break
@@ -952,6 +979,55 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {

         return e.complexity.MetricFootprints.Metric(childComplexity), true

+    case "MetricHistoPoint.bin":
+        if e.complexity.MetricHistoPoint.Bin == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoint.Bin(childComplexity), true
+
+    case "MetricHistoPoint.count":
+        if e.complexity.MetricHistoPoint.Count == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoint.Count(childComplexity), true
+
+    case "MetricHistoPoint.max":
+        if e.complexity.MetricHistoPoint.Max == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoint.Max(childComplexity), true
+
+    case "MetricHistoPoint.min":
+        if e.complexity.MetricHistoPoint.Min == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoint.Min(childComplexity), true
+
+    case "MetricHistoPoints.data":
+        if e.complexity.MetricHistoPoints.Data == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoints.Data(childComplexity), true
+
+    case "MetricHistoPoints.metric":
+        if e.complexity.MetricHistoPoints.Metric == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoints.Metric(childComplexity), true
+
+    case "MetricHistoPoints.unit":
+        if e.complexity.MetricHistoPoints.Unit == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoints.Unit(childComplexity), true
+
     case "MetricStatistics.avg":
         if e.complexity.MetricStatistics.Avg == nil {
             break
@@ -1145,7 +1221,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
             return 0, false
         }

-        return e.complexity.Query.JobsStatistics(childComplexity, args["filter"].([]*model.JobFilter), args["page"].(*model.PageRequest), args["sortBy"].(*model.SortByAggregate), args["groupBy"].(*model.Aggregate)), true
+        return e.complexity.Query.JobsStatistics(childComplexity, args["filter"].([]*model.JobFilter), args["metrics"].([]string), args["page"].(*model.PageRequest), args["sortBy"].(*model.SortByAggregate), args["groupBy"].(*model.Aggregate)), true

     case "Query.nodeMetrics":
         if e.complexity.Query.NodeMetrics == nil {
@@ -1620,14 +1696,14 @@ func (ec *executionContext) introspectSchema() (*introspection.Schema, error) {
     if ec.DisableIntrospection {
         return nil, errors.New("introspection disabled")
     }
-    return introspection.WrapSchema(parsedSchema), nil
+    return introspection.WrapSchema(ec.Schema()), nil
 }

 func (ec *executionContext) introspectType(name string) (*introspection.Type, error) {
     if ec.DisableIntrospection {
         return nil, errors.New("introspection disabled")
     }
-    return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name]), nil
+    return introspection.WrapTypeFromDef(ec.Schema(), ec.Schema().Types[name]), nil
 }

 var sources = []*ast.Source{
@@ -1836,7 +1912,7 @@ type Query {
   jobsFootprints(filter: [JobFilter!], metrics: [String!]!): Footprints

   jobs(filter: [JobFilter!], page: PageRequest, order: OrderByInput): JobResultList!
-  jobsStatistics(filter: [JobFilter!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate): [JobsStatistics!]!
+  jobsStatistics(filter: [JobFilter!], metrics: [String!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate): [JobsStatistics!]!

   rooflineHeatmap(filter: [JobFilter!]!, rows: Int!, cols: Int!, minX: Float!, minY: Float!, maxX: Float!, maxY: Float!): [[Float!]!]!

@@ -1924,6 +2000,19 @@ type HistoPoint {
   value: Int!
 }

+type MetricHistoPoints {
+  metric: String!
+  unit: String!
+  data: [MetricHistoPoint!]
+}
+
+type MetricHistoPoint {
+  bin: Int
+  count: Int!
+  min: Int
+  max: Int
+}
+
 type JobsStatistics {
   id: ID! # If ` + "`" + `groupBy` + "`" + ` was used, ID of the user/project/cluster
   name: String! # if User-Statistics: Given Name of Account (ID) Owner
@@ -1941,6 +2030,7 @@ type JobsStatistics {
   histNumNodes: [HistoPoint!]! # value: number of nodes, count: number of jobs with that number of nodes
   histNumCores: [HistoPoint!]! # value: number of cores, count: number of jobs with that number of cores
   histNumAccs: [HistoPoint!]!  # value: number of accs, count: number of jobs with that number of accs
+  histMetrics: [MetricHistoPoints!]! # metric: metricname, data array of histopoints: value: metric average bin, count: number of jobs with that metric average
 }

 input PageRequest {
@@ -2180,33 +2270,42 @@ func (ec *executionContext) field_Query_jobsStatistics_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
         }
     }
     args["filter"] = arg0
-    var arg1 *model.PageRequest
+    var arg1 []string
+    if tmp, ok := rawArgs["metrics"]; ok {
+        ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("metrics"))
+        arg1, err = ec.unmarshalOString2ᚕstringᚄ(ctx, tmp)
+        if err != nil {
+            return nil, err
+        }
+    }
+    args["metrics"] = arg1
+    var arg2 *model.PageRequest
     if tmp, ok := rawArgs["page"]; ok {
         ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("page"))
-        arg1, err = ec.unmarshalOPageRequest2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐPageRequest(ctx, tmp)
+        arg2, err = ec.unmarshalOPageRequest2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐPageRequest(ctx, tmp)
         if err != nil {
             return nil, err
         }
     }
-    args["page"] = arg1
-    var arg2 *model.SortByAggregate
+    args["page"] = arg2
+    var arg3 *model.SortByAggregate
     if tmp, ok := rawArgs["sortBy"]; ok {
         ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sortBy"))
-        arg2, err = ec.unmarshalOSortByAggregate2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐSortByAggregate(ctx, tmp)
+        arg3, err = ec.unmarshalOSortByAggregate2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐSortByAggregate(ctx, tmp)
         if err != nil {
             return nil, err
         }
     }
-    args["sortBy"] = arg2
-    var arg3 *model.Aggregate
+    args["sortBy"] = arg3
+    var arg4 *model.Aggregate
     if tmp, ok := rawArgs["groupBy"]; ok {
         ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("groupBy"))
-        arg3, err = ec.unmarshalOAggregate2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐAggregate(ctx, tmp)
+        arg4, err = ec.unmarshalOAggregate2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐAggregate(ctx, tmp)
         if err != nil {
             return nil, err
         }
     }
-    args["groupBy"] = arg3
+    args["groupBy"] = arg4
     return args, nil
 }

@@ -5850,6 +5949,58 @@ func (ec *executionContext) fieldContext_JobsStatistics_histNumAccs(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
     return fc, nil
 }

+func (ec *executionContext) _JobsStatistics_histMetrics(ctx context.Context, field graphql.CollectedField, obj *model.JobsStatistics) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_JobsStatistics_histMetrics(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.HistMetrics, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        if !graphql.HasFieldError(ctx, fc) {
+            ec.Errorf(ctx, "must not be null")
+        }
+        return graphql.Null
+    }
+    res := resTmp.([]*model.MetricHistoPoints)
+    fc.Result = res
+    return ec.marshalNMetricHistoPoints2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPointsᚄ(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_JobsStatistics_histMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "JobsStatistics",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            switch field.Name {
+            case "metric":
+                return ec.fieldContext_MetricHistoPoints_metric(ctx, field)
+            case "unit":
+                return ec.fieldContext_MetricHistoPoints_unit(ctx, field)
+            case "data":
+                return ec.fieldContext_MetricHistoPoints_data(ctx, field)
+            }
+            return nil, fmt.Errorf("no field named %q was found under type MetricHistoPoints", field.Name)
+        },
+    }
+    return fc, nil
+}
+
 func (ec *executionContext) _MetricConfig_name(ctx context.Context, field graphql.CollectedField, obj *schema.MetricConfig) (ret graphql.Marshaler) {
     fc, err := ec.fieldContext_MetricConfig_name(ctx, field)
     if err != nil {
@@ -6395,6 +6546,312 @@ func (ec *executionContext) fieldContext_MetricFootprints_data(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
     return fc, nil
 }

+func (ec *executionContext) _MetricHistoPoint_bin(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoint) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoint_bin(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Bin, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        return graphql.Null
+    }
+    res := resTmp.(*int)
+    fc.Result = res
+    return ec.marshalOInt2ᚖint(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoint_bin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoint",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type Int does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
+func (ec *executionContext) _MetricHistoPoint_count(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoint) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoint_count(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Count, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        if !graphql.HasFieldError(ctx, fc) {
+            ec.Errorf(ctx, "must not be null")
+        }
+        return graphql.Null
+    }
+    res := resTmp.(int)
+    fc.Result = res
+    return ec.marshalNInt2int(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoint_count(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoint",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type Int does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
+func (ec *executionContext) _MetricHistoPoint_min(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoint) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoint_min(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Min, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        return graphql.Null
+    }
+    res := resTmp.(*int)
+    fc.Result = res
+    return ec.marshalOInt2ᚖint(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoint_min(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoint",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type Int does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
+func (ec *executionContext) _MetricHistoPoint_max(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoint) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoint_max(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Max, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        return graphql.Null
+    }
+    res := resTmp.(*int)
+    fc.Result = res
+    return ec.marshalOInt2ᚖint(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoint_max(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoint",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type Int does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
+func (ec *executionContext) _MetricHistoPoints_metric(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoints) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoints_metric(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Metric, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        if !graphql.HasFieldError(ctx, fc) {
+            ec.Errorf(ctx, "must not be null")
+        }
+        return graphql.Null
+    }
+    res := resTmp.(string)
+    fc.Result = res
+    return ec.marshalNString2string(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoints_metric(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoints",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type String does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
+func (ec *executionContext) _MetricHistoPoints_unit(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoints) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoints_unit(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Unit, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        if !graphql.HasFieldError(ctx, fc) {
+            ec.Errorf(ctx, "must not be null")
+        }
+        return graphql.Null
+    }
+    res := resTmp.(string)
+    fc.Result = res
+    return ec.marshalNString2string(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoints_unit(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoints",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type String does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
+func (ec *executionContext) _MetricHistoPoints_data(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoints) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoints_data(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Data, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        return graphql.Null
+    }
+    res := resTmp.([]*model.MetricHistoPoint)
+    fc.Result = res
+    return ec.marshalOMetricHistoPoint2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPointᚄ(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoints_data(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoints",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            switch field.Name {
+            case "bin":
+                return ec.fieldContext_MetricHistoPoint_bin(ctx, field)
+            case "count":
+                return ec.fieldContext_MetricHistoPoint_count(ctx, field)
+            case "min":
+                return ec.fieldContext_MetricHistoPoint_min(ctx, field)
+            case "max":
+                return ec.fieldContext_MetricHistoPoint_max(ctx, field)
+            }
+            return nil, fmt.Errorf("no field named %q was found under type MetricHistoPoint", field.Name)
+        },
+    }
+    return fc, nil
+}
+
 func (ec *executionContext) _MetricStatistics_avg(ctx context.Context, field graphql.CollectedField, obj *schema.MetricStatistics) (ret graphql.Marshaler) {
     fc, err := ec.fieldContext_MetricStatistics_avg(ctx, field)
     if err != nil {
@@ -7592,7 +8049,7 @@ func (ec *executionContext) _Query_jobsStatistics(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
     }()
     resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
         ctx = rctx // use context from middleware stack in children
-        return ec.resolvers.Query().JobsStatistics(rctx, fc.Args["filter"].([]*model.JobFilter), fc.Args["page"].(*model.PageRequest), fc.Args["sortBy"].(*model.SortByAggregate), fc.Args["groupBy"].(*model.Aggregate))
+        return ec.resolvers.Query().JobsStatistics(rctx, fc.Args["filter"].([]*model.JobFilter), fc.Args["metrics"].([]string), fc.Args["page"].(*model.PageRequest), fc.Args["sortBy"].(*model.SortByAggregate), fc.Args["groupBy"].(*model.Aggregate))
     })
     if err != nil {
         ec.Error(ctx, err)
|
|||||||
return ec.fieldContext_JobsStatistics_histNumCores(ctx, field)
|
return ec.fieldContext_JobsStatistics_histNumCores(ctx, field)
|
||||||
case "histNumAccs":
|
case "histNumAccs":
|
||||||
return ec.fieldContext_JobsStatistics_histNumAccs(ctx, field)
|
return ec.fieldContext_JobsStatistics_histNumAccs(ctx, field)
|
||||||
|
case "histMetrics":
|
||||||
|
return ec.fieldContext_JobsStatistics_histMetrics(ctx, field)
|
||||||
}
|
}
|
||||||
return nil, fmt.Errorf("no field named %q was found under type JobsStatistics", field.Name)
|
return nil, fmt.Errorf("no field named %q was found under type JobsStatistics", field.Name)
|
||||||
},
|
},
|
||||||
@@ -13136,6 +13595,11 @@ func (ec *executionContext) _JobsStatistics(ctx context.Context, sel ast.SelectionSet, obj *model.JobsStatistics) graphql.Marshaler {
             if out.Values[i] == graphql.Null {
                 out.Invalids++
             }
+        case "histMetrics":
+            out.Values[i] = ec._JobsStatistics_histMetrics(ctx, field, obj)
+            if out.Values[i] == graphql.Null {
+                out.Invalids++
+            }
         default:
             panic("unknown field " + strconv.Quote(field.Name))
         }
@@ -13284,6 +13748,97 @@ func (ec *executionContext) _MetricFootprints(ctx context.Context, sel ast.SelectionSet, obj *model.MetricFootprints) graphql.Marshaler {
     return out
 }

+var metricHistoPointImplementors = []string{"MetricHistoPoint"}
+
+func (ec *executionContext) _MetricHistoPoint(ctx context.Context, sel ast.SelectionSet, obj *model.MetricHistoPoint) graphql.Marshaler {
+    fields := graphql.CollectFields(ec.OperationContext, sel, metricHistoPointImplementors)
+
+    out := graphql.NewFieldSet(fields)
+    deferred := make(map[string]*graphql.FieldSet)
+    for i, field := range fields {
+        switch field.Name {
+        case "__typename":
+            out.Values[i] = graphql.MarshalString("MetricHistoPoint")
+        case "bin":
+            out.Values[i] = ec._MetricHistoPoint_bin(ctx, field, obj)
+        case "count":
+            out.Values[i] = ec._MetricHistoPoint_count(ctx, field, obj)
+            if out.Values[i] == graphql.Null {
+                out.Invalids++
+            }
+        case "min":
+            out.Values[i] = ec._MetricHistoPoint_min(ctx, field, obj)
+        case "max":
+            out.Values[i] = ec._MetricHistoPoint_max(ctx, field, obj)
+        default:
+            panic("unknown field " + strconv.Quote(field.Name))
+        }
+    }
+    out.Dispatch(ctx)
+    if out.Invalids > 0 {
+        return graphql.Null
+    }
+
+    atomic.AddInt32(&ec.deferred, int32(len(deferred)))
+
+    for label, dfs := range deferred {
+        ec.processDeferredGroup(graphql.DeferredGroup{
+            Label:    label,
+            Path:     graphql.GetPath(ctx),
+            FieldSet: dfs,
+            Context:  ctx,
+        })
+    }
+
+    return out
+}
+
+var metricHistoPointsImplementors = []string{"MetricHistoPoints"}
+
+func (ec *executionContext) _MetricHistoPoints(ctx context.Context, sel ast.SelectionSet, obj *model.MetricHistoPoints) graphql.Marshaler {
+    fields := graphql.CollectFields(ec.OperationContext, sel, metricHistoPointsImplementors)
+
+    out := graphql.NewFieldSet(fields)
+    deferred := make(map[string]*graphql.FieldSet)
+    for i, field := range fields {
+        switch field.Name {
+        case "__typename":
+            out.Values[i] = graphql.MarshalString("MetricHistoPoints")
+        case "metric":
+            out.Values[i] = ec._MetricHistoPoints_metric(ctx, field, obj)
+            if out.Values[i] == graphql.Null {
+                out.Invalids++
+            }
+        case "unit":
+            out.Values[i] = ec._MetricHistoPoints_unit(ctx, field, obj)
+            if out.Values[i] == graphql.Null {
+                out.Invalids++
+            }
+        case "data":
+            out.Values[i] = ec._MetricHistoPoints_data(ctx, field, obj)
+        default:
+            panic("unknown field " + strconv.Quote(field.Name))
+        }
+    }
+    out.Dispatch(ctx)
+    if out.Invalids > 0 {
+        return graphql.Null
+    }
+
+    atomic.AddInt32(&ec.deferred, int32(len(deferred)))
+
+    for label, dfs := range deferred {
+        ec.processDeferredGroup(graphql.DeferredGroup{
+            Label:    label,
+            Path:     graphql.GetPath(ctx),
+            FieldSet: dfs,
+            Context:  ctx,
+        })
+    }
+
+    return out
+}
+
 var metricStatisticsImplementors = []string{"MetricStatistics"}

 func (ec *executionContext) _MetricStatistics(ctx context.Context, sel ast.SelectionSet, obj *schema.MetricStatistics) graphql.Marshaler {
@@ -15536,6 +16091,70 @@ func (ec *executionContext) marshalNMetricFootprints2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricFootprints(ctx context.Context, sel ast.SelectionSet, v *model.MetricFootprints) graphql.Marshaler {
     return ec._MetricFootprints(ctx, sel, v)
 }

+func (ec *executionContext) marshalNMetricHistoPoint2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPoint(ctx context.Context, sel ast.SelectionSet, v *model.MetricHistoPoint) graphql.Marshaler {
+    if v == nil {
+        if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
+            ec.Errorf(ctx, "the requested element is null which the schema does not allow")
+        }
+        return graphql.Null
+    }
+    return ec._MetricHistoPoint(ctx, sel, v)
+}
+
+func (ec *executionContext) marshalNMetricHistoPoints2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPointsᚄ(ctx context.Context, sel ast.SelectionSet, v []*model.MetricHistoPoints) graphql.Marshaler {
+    ret := make(graphql.Array, len(v))
+    var wg sync.WaitGroup
+    isLen1 := len(v) == 1
+    if !isLen1 {
+        wg.Add(len(v))
+    }
+    for i := range v {
+        i := i
+        fc := &graphql.FieldContext{
+            Index:  &i,
+            Result: &v[i],
+        }
+        ctx := graphql.WithFieldContext(ctx, fc)
+        f := func(i int) {
+            defer func() {
+                if r := recover(); r != nil {
+                    ec.Error(ctx, ec.Recover(ctx, r))
+                    ret = nil
+                }
+            }()
+            if !isLen1 {
+                defer wg.Done()
+            }
+            ret[i] = ec.marshalNMetricHistoPoints2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPoints(ctx, sel, v[i])
+        }
+        if isLen1 {
+            f(i)
+        } else {
+            go f(i)
+        }
+
+    }
+    wg.Wait()
+
+    for _, e := range ret {
+        if e == graphql.Null {
+            return graphql.Null
+        }
+    }
+
+    return ret
+}
+
+func (ec *executionContext) marshalNMetricHistoPoints2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPoints(ctx context.Context, sel ast.SelectionSet, v *model.MetricHistoPoints) graphql.Marshaler {
+    if v == nil {
+        if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
+            ec.Errorf(ctx, "the requested element is null which the schema does not allow")
+        }
+        return graphql.Null
+    }
+    return ec._MetricHistoPoints(ctx, sel, v)
+}
+
 func (ec *executionContext) unmarshalNMetricScope2githubᚗcomᚋClusterCockpitᚋccᚑbackendᚋpkgᚋschemaᚐMetricScope(ctx context.Context, v interface{}) (schema.MetricScope, error) {
     var res schema.MetricScope
     err := res.UnmarshalGQL(v)
@@ -16591,6 +17210,53 @@ func (ec *executionContext) marshalOJobState2ᚕgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋpkgᚋschemaᚐJobStateᚄ(ctx context.Context, sel ast.SelectionSet, v []schema.JobState) graphql.Marshaler {
     return ret
 }

+func (ec *executionContext) marshalOMetricHistoPoint2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPointᚄ(ctx context.Context, sel ast.SelectionSet, v []*model.MetricHistoPoint) graphql.Marshaler {
+    if v == nil {
+        return graphql.Null
+    }
+    ret := make(graphql.Array, len(v))
+    var wg sync.WaitGroup
+    isLen1 := len(v) == 1
+    if !isLen1 {
+        wg.Add(len(v))
+    }
+    for i := range v {
+        i := i
+        fc := &graphql.FieldContext{
+            Index:  &i,
+            Result: &v[i],
+        }
+        ctx := graphql.WithFieldContext(ctx, fc)
+        f := func(i int) {
+            defer func() {
+                if r := recover(); r != nil {
+                    ec.Error(ctx, ec.Recover(ctx, r))
+                    ret = nil
+                }
+            }()
+            if !isLen1 {
+                defer wg.Done()
+            }
+            ret[i] = ec.marshalNMetricHistoPoint2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPoint(ctx, sel, v[i])
+        }
+        if isLen1 {
+            f(i)
+        } else {
+            go f(i)
+        }
+
+    }
+    wg.Wait()
+
+    for _, e := range ret {
+        if e == graphql.Null {
+            return graphql.Null
+        }
+    }
+
+    return ret
+}
+
 func (ec *executionContext) unmarshalOMetricScope2ᚕgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋpkgᚋschemaᚐMetricScopeᚄ(ctx context.Context, v interface{}) ([]schema.MetricScope, error) {
     if v == nil {
         return nil, nil
@@ -85,22 +85,23 @@ type JobResultList struct {
 }

 type JobsStatistics struct {
     ID             string               `json:"id"`
     Name           string               `json:"name"`
     TotalJobs      int                  `json:"totalJobs"`
     RunningJobs    int                  `json:"runningJobs"`
     ShortJobs      int                  `json:"shortJobs"`
     TotalWalltime  int                  `json:"totalWalltime"`
     TotalNodes     int                  `json:"totalNodes"`
     TotalNodeHours int                  `json:"totalNodeHours"`
     TotalCores     int                  `json:"totalCores"`
     TotalCoreHours int                  `json:"totalCoreHours"`
     TotalAccs      int                  `json:"totalAccs"`
     TotalAccHours  int                  `json:"totalAccHours"`
     HistDuration   []*HistoPoint        `json:"histDuration"`
     HistNumNodes   []*HistoPoint        `json:"histNumNodes"`
     HistNumCores   []*HistoPoint        `json:"histNumCores"`
     HistNumAccs    []*HistoPoint        `json:"histNumAccs"`
+    HistMetrics    []*MetricHistoPoints `json:"histMetrics"`
 }

 type MetricFootprints struct {
|
|||||||
Data []schema.Float `json:"data"`
|
Data []schema.Float `json:"data"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type MetricHistoPoint struct {
|
||||||
|
Bin *int `json:"bin,omitempty"`
|
||||||
|
Count int `json:"count"`
|
||||||
|
Min *int `json:"min,omitempty"`
|
||||||
|
Max *int `json:"max,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type MetricHistoPoints struct {
|
||||||
|
Metric string `json:"metric"`
|
||||||
|
Unit string `json:"unit"`
|
||||||
|
Data []*MetricHistoPoint `json:"data,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
type NodeMetrics struct {
|
type NodeMetrics struct {
|
||||||
Host string `json:"host"`
|
Host string `json:"host"`
|
||||||
SubCluster string `json:"subCluster"`
|
SubCluster string `json:"subCluster"`
|
||||||
|
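Because bin, min, and max are pointers with omitempty tags, they are dropped from the JSON only when nil, not when zero. A self-contained sketch of that behavior (the struct declarations are copied locally so it runs on its own; the metric name and unit are examples):

package main

import (
    "encoding/json"
    "fmt"
)

// Local copies of the generated model types from the hunk above.
type MetricHistoPoint struct {
    Bin   *int `json:"bin,omitempty"`
    Count int  `json:"count"`
    Min   *int `json:"min,omitempty"`
    Max   *int `json:"max,omitempty"`
}

type MetricHistoPoints struct {
    Metric string              `json:"metric"`
    Unit   string              `json:"unit"`
    Data   []*MetricHistoPoint `json:"data,omitempty"`
}

func main() {
    bin, min, max := 1, 0, 12
    h := MetricHistoPoints{
        Metric: "cpu_load", // example name
        Unit:   "load",     // example unit
        Data:   []*MetricHistoPoint{{Bin: &bin, Count: 42, Min: &min, Max: &max}},
    }
    out, _ := json.Marshal(h)
    fmt.Println(string(out))
    // Prints: {"metric":"cpu_load","unit":"load","data":[{"bin":1,"count":42,"min":0,"max":12}]}
    // Note that "min":0 is kept: omitempty tests the pointer, not the value.
}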
@@ -2,7 +2,7 @@ package graph

 // This file will be automatically regenerated based on the schema, any resolver implementations
 // will be copied through when generating and any unknown code will be moved to the end.
-// Code generated by github.com/99designs/gqlgen version v0.17.36
+// Code generated by github.com/99designs/gqlgen version v0.17.40

 import (
     "context"
@@ -244,7 +244,7 @@ func (r *queryResolver) Jobs(ctx context.Context, filter []*model.JobFilter, page *model.PageRequest, order *model.OrderByInput) (*model.JobResultList, error) {
 }

 // JobsStatistics is the resolver for the jobsStatistics field.
-func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobFilter, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) ([]*model.JobsStatistics, error) {
+func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) ([]*model.JobsStatistics, error) {
     var err error
     var stats []*model.JobsStatistics

@@ -291,6 +291,17 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobFilter, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) ([]*model.JobsStatistics, error) {
         }
     }

+    if requireField(ctx, "histMetrics") {
+        if groupBy == nil {
+            stats[0], err = r.Repo.AddMetricHistograms(ctx, filter, metrics, stats[0])
+            if err != nil {
+                return nil, err
+            }
+        } else {
+            return nil, errors.New("metric histograms only implemented without groupBy argument")
+        }
+    }
+
     return stats, nil
 }

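requireField makes the extra histogram queries conditional on the client actually selecting histMetrics. It is defined elsewhere in this package; as a sketch, such a guard can be built on gqlgen's field-collection helper (an assumption for illustration: checking the top-level selection set is treated as sufficient, which need not match the package's exact implementation):

package graph

import (
    "context"

    "github.com/99designs/gqlgen/graphql"
)

// requireField reports whether the current operation's selection set
// includes the named field, so expensive work can be skipped otherwise.
func requireField(ctx context.Context, name string) bool {
    for _, f := range graphql.CollectAllFields(ctx) {
        if f == name {
            return true
        }
    }
    return false
}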
@@ -96,7 +96,7 @@ func SecurityCheck(ctx context.Context, query sq.SelectBuilder) (sq.SelectBuilder, error) {
     user := GetUserFromContext(ctx)
     if user == nil {
         var qnil sq.SelectBuilder
-        return qnil, fmt.Errorf("user context is nil!")
+        return qnil, fmt.Errorf("user context is nil")
     } else if user.HasAnyRole([]schema.Role{schema.RoleAdmin, schema.RoleSupport, schema.RoleApi}) { // Admin & Co. : All jobs
         return query, nil
     } else if user.HasRole(schema.RoleManager) { // Manager : Add filter for managed projects' jobs only + personal jobs
@@ -8,11 +8,15 @@ import (
     "context"
     "database/sql"
     "fmt"
+    "math"
     "time"

     "github.com/ClusterCockpit/cc-backend/internal/config"
     "github.com/ClusterCockpit/cc-backend/internal/graph/model"
+    "github.com/ClusterCockpit/cc-backend/internal/metricdata"
+    "github.com/ClusterCockpit/cc-backend/pkg/archive"
     "github.com/ClusterCockpit/cc-backend/pkg/log"
+    "github.com/ClusterCockpit/cc-backend/pkg/schema"
     sq "github.com/Masterminds/squirrel"
 )

@@ -450,6 +454,39 @@ func (r *JobRepository) AddHistograms(
     return stat, nil
 }

+// Requires thresholds for metric from config for cluster? Of all clusters and use largest? split to 10 + 1 for artifacts?
+func (r *JobRepository) AddMetricHistograms(
+    ctx context.Context,
+    filter []*model.JobFilter,
+    metrics []string,
+    stat *model.JobsStatistics) (*model.JobsStatistics, error) {
+    start := time.Now()
+
+    // Running Jobs Only: First query jobdata from sqlite, then query data and make bins
+    for _, f := range filter {
+        if f.State != nil {
+            if len(f.State) == 1 && f.State[0] == "running" {
+                stat.HistMetrics = r.runningJobsMetricStatisticsHistogram(ctx, metrics, filter)
+                log.Debugf("Timer AddMetricHistograms %s", time.Since(start))
+                return stat, nil
+            }
+        }
+    }
+
+    // All other cases: Query and make bins in sqlite directly
+    for _, m := range metrics {
+        metricHisto, err := r.jobsMetricStatisticsHistogram(ctx, m, filter)
+        if err != nil {
+            log.Warnf("Error while loading job metric statistics histogram: %s", m)
+            continue
+        }
+        stat.HistMetrics = append(stat.HistMetrics, metricHisto)
+    }
+
+    log.Debugf("Timer AddMetricHistograms %s", time.Since(start))
+    return stat, nil
+}
+
 // `value` must be the column grouped by, but renamed to "value"
 func (r *JobRepository) jobsStatisticsHistogram(
     ctx context.Context,
@@ -487,3 +524,231 @@ func (r *JobRepository) jobsStatisticsHistogram(
 	log.Debugf("Timer jobsStatisticsHistogram %s", time.Since(start))
 	return points, nil
 }
+
+func (r *JobRepository) jobsMetricStatisticsHistogram(
+	ctx context.Context,
+	metric string,
+	filters []*model.JobFilter) (*model.MetricHistoPoints, error) {
+
+	// Map the requested metric onto the matching footprint column of the job table
+	var dbMetric string
+	switch metric {
+	case "cpu_load":
+		dbMetric = "load_avg"
+	case "flops_any":
+		dbMetric = "flops_any_avg"
+	case "mem_bw":
+		dbMetric = "mem_bw_avg"
+	case "mem_used":
+		dbMetric = "mem_used_max"
+	case "net_bw":
+		dbMetric = "net_bw_avg"
+	case "file_bw":
+		dbMetric = "file_bw_avg"
+	default:
+		return nil, fmt.Errorf("%s not implemented", metric)
+	}
+
+	// Get the cluster-specific peak, or the largest peak across all clusters
+	var metricConfig *schema.MetricConfig
+	var peak float64 = 0.0
+	var unit string = ""
+
+	for _, f := range filters {
+		if f.Cluster != nil {
+			metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric)
+			peak = metricConfig.Peak
+			unit = metricConfig.Unit.Prefix + metricConfig.Unit.Base
+			log.Debugf("Cluster %s filter found with peak %f for %s", *f.Cluster.Eq, peak, metric)
+		}
+	}
+
+	if peak == 0.0 {
+		for _, c := range archive.Clusters {
+			for _, m := range c.MetricConfig {
+				if m.Name == metric {
+					if m.Peak > peak {
+						peak = m.Peak
+					}
+					if unit == "" {
+						unit = m.Unit.Prefix + m.Unit.Base
+					}
+				}
+			}
+		}
+	}
+
+	// log.Debugf("Metric %s: DB %s, Peak %f, Unit %s", metric, dbMetric, peak, unit)
+	// Make bins, see https://jereze.com/code/sql-histogram/
+	start := time.Now()
+
+	crossJoinQuery := sq.Select(
+		fmt.Sprintf(`max(%s) as max`, dbMetric),
+		fmt.Sprintf(`min(%s) as min`, dbMetric),
+	).From("job").Where(
+		fmt.Sprintf(`%s is not null`, dbMetric),
+	).Where(
+		fmt.Sprintf(`%s <= %f`, dbMetric, peak),
+	)
+
+	crossJoinQuery, cjqerr := SecurityCheck(ctx, crossJoinQuery)
+	if cjqerr != nil {
+		return nil, cjqerr
+	}
+
+	for _, f := range filters {
+		crossJoinQuery = BuildWhereClause(f, crossJoinQuery)
+	}
+
+	crossJoinQuerySql, crossJoinQueryArgs, sqlerr := crossJoinQuery.ToSql()
+	if sqlerr != nil {
+		return nil, sqlerr
+	}
+
+	bins := 10
+	binQuery := fmt.Sprintf(`CAST( (case when job.%s = value.max then value.max*0.999999999 else job.%s end - value.min) / (value.max - value.min) * %d as INTEGER )`, dbMetric, dbMetric, bins)
+
+	mainQuery := sq.Select(
+		fmt.Sprintf(`%s + 1 as bin`, binQuery),
+		fmt.Sprintf(`count(job.%s) as count`, dbMetric),
+		fmt.Sprintf(`CAST(((value.max / %d) * (%s )) as INTEGER ) as min`, bins, binQuery),
+		fmt.Sprintf(`CAST(((value.max / %d) * (%s + 1 )) as INTEGER ) as max`, bins, binQuery),
+	).From("job").CrossJoin(
+		fmt.Sprintf(`(%s) as value`, crossJoinQuerySql), crossJoinQueryArgs...,
+	).Where(fmt.Sprintf(`job.%s is not null and job.%s <= %f`, dbMetric, dbMetric, peak))
+
+	mainQuery, qerr := SecurityCheck(ctx, mainQuery)
+	if qerr != nil {
+		return nil, qerr
+	}
+
+	for _, f := range filters {
+		mainQuery = BuildWhereClause(f, mainQuery)
+	}
+
+	// Finalize query with grouping and ordering
+	mainQuery = mainQuery.GroupBy("bin").OrderBy("bin")
+
+	rows, err := mainQuery.RunWith(r.DB).Query()
+	if err != nil {
+		log.Errorf("Error while running mainQuery: %s", err)
+		return nil, err
+	}
+
+	points := make([]*model.MetricHistoPoint, 0)
+	for rows.Next() {
+		point := model.MetricHistoPoint{}
+		if err := rows.Scan(&point.Bin, &point.Count, &point.Min, &point.Max); err != nil {
+			log.Warnf("Error while scanning rows for %s", metric)
+			return nil, err // FIXME: Totally bricks cc-backend if returned when all metrics are requested?
+		}
+
+		points = append(points, &point)
+	}
+
+	result := model.MetricHistoPoints{Metric: metric, Unit: unit, Data: points}
+
+	log.Debugf("Timer jobsMetricStatisticsHistogram %s", time.Since(start))
+	return &result, nil
+}
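The `binQuery` expression implements the usual SQL histogram trick: normalize each value into `[0, bins)` relative to the observed min/max and truncate; the `value.max*0.999999999` nudge keeps rows exactly at the maximum inside the last bin instead of spilling into an eleventh. The same arithmetic in plain Go, as a standalone sketch (not repository code):

package main

import "fmt"

// binIndex mirrors the SQL expression built in binQuery: values are mapped
// onto `bins` equal-width buckets between min and max; the exact maximum is
// nudged down so it lands in the last bucket rather than creating an extra
// one. Indices are 1-based, matching the `bin + 1` in the query.
func binIndex(x, min, max float64, bins int) int {
	if x == max {
		x = max * 0.999999999
	}
	return int((x-min)/(max-min)*float64(bins)) + 1
}

func main() {
	// With min=0, max=100 and 10 bins, both 99.9 and the exact peak land in bin 10:
	fmt.Println(binIndex(99.9, 0, 100, 10)) // 10
	fmt.Println(binIndex(100, 0, 100, 10))  // 10
	fmt.Println(binIndex(0, 0, 100, 10))    // 1
}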
+
+func (r *JobRepository) runningJobsMetricStatisticsHistogram(
+	ctx context.Context,
+	metrics []string,
+	filters []*model.JobFilter) []*model.MetricHistoPoints {
+
+	// Get jobs (hard cap of 500 matches to bound the metric-repository load)
+	jobs, err := r.QueryJobs(ctx, filters, &model.PageRequest{Page: 1, ItemsPerPage: 500 + 1}, nil)
+	if err != nil {
+		log.Errorf("Error while querying jobs for histogram: %s", err)
+		return nil
+	}
+	if len(jobs) > 500 {
+		log.Errorf("too many jobs matched (max: %d)", 500)
+		return nil
+	}
+
+	// Get averages from the metric repository
+	avgs := make([][]schema.Float, len(metrics))
+	for i := range avgs {
+		avgs[i] = make([]schema.Float, 0, len(jobs))
+	}
+
+	for _, job := range jobs {
+		if job.MonitoringStatus == schema.MonitoringStatusDisabled || job.MonitoringStatus == schema.MonitoringStatusArchivingFailed {
+			continue
+		}
+
+		if err := metricdata.LoadAverages(job, metrics, avgs, ctx); err != nil {
+			log.Errorf("Error while loading averages for histogram: %s", err)
+			return nil
+		}
+	}
+
+	// Iterate metrics to fill the end result
+	data := make([]*model.MetricHistoPoints, 0)
+	for idx, metric := range metrics {
+		// Get the cluster-specific peak, or the largest peak across all clusters
+		var metricConfig *schema.MetricConfig
+		var peak float64 = 0.0
+		var unit string = ""
+
+		for _, f := range filters {
+			if f.Cluster != nil {
+				metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric)
+				peak = metricConfig.Peak
+				unit = metricConfig.Unit.Prefix + metricConfig.Unit.Base
+				log.Debugf("Cluster %s filter found with peak %f for %s", *f.Cluster.Eq, peak, metric)
+			}
+		}
+
+		if peak == 0.0 {
+			for _, c := range archive.Clusters {
+				for _, m := range c.MetricConfig {
+					if m.Name == metric {
+						if m.Peak > peak {
+							peak = m.Peak
+						}
+						if unit == "" {
+							unit = m.Unit.Prefix + m.Unit.Base
+						}
+					}
+				}
+			}
+		}
+
+		// Make and fill bins
+		bins := 10.0
+		peakBin := peak / bins
+
+		points := make([]*model.MetricHistoPoint, 0)
+		for b := 0; b < 10; b++ {
+			count := 0
+			bindex := b + 1
+			bmin := math.Round(peakBin * float64(b))
+			bmax := math.Round(peakBin * (float64(b) + 1.0))
+
+			// Iterate average values for the indexed metric and count into bins
+			for _, val := range avgs[idx] {
+				if float64(val) >= bmin && float64(val) < bmax {
+					count += 1
+				}
+			}
+
+			bminint := int(bmin)
+			bmaxint := int(bmax)
+
+			// Append bin to the metric's result array
+			point := model.MetricHistoPoint{Bin: &bindex, Count: count, Min: &bminint, Max: &bmaxint}
+			points = append(points, &point)
+		}
+
+		// Append the metric's result array to the final results array
+		result := model.MetricHistoPoints{Metric: metric, Unit: unit, Data: points}
+		data = append(data, &result)
+	}
+
+	return data
+}
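Unlike the SQL path, the running-jobs path bins into ten fixed-width slices of `[0, peak]` rather than data-driven min/max bounds. A standalone sketch of that counting loop, with plain `float64` in place of `schema.Float` (illustrative only):

package main

import (
	"fmt"
	"math"
)

// histogram counts values into ten equal-width bins spanning [0, peak],
// mirroring the running-jobs path: bin b covers
// [round(peak/10*b), round(peak/10*(b+1))).
// Note: a value exactly equal to peak is not counted (v < bmax is strict).
func histogram(values []float64, peak float64) []int {
	counts := make([]int, 10)
	peakBin := peak / 10.0
	for b := 0; b < 10; b++ {
		bmin := math.Round(peakBin * float64(b))
		bmax := math.Round(peakBin * float64(b+1))
		for _, v := range values {
			if v >= bmin && v < bmax {
				counts[b]++
			}
		}
	}
	return counts
}

func main() {
	// cpu_load averages of five hypothetical jobs on a machine with peak 72:
	fmt.Println(histogram([]float64{3.5, 10.2, 11.0, 35.9, 70.1}, 72))
	// Output: [1 2 0 0 1 0 0 0 0 1]
}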
@@ -8,8 +8,8 @@ import (
 	"errors"
 	"fmt"
 
-	"github.com/ClusterCockpit/cc-backend/pkg/schema"
 	"github.com/ClusterCockpit/cc-backend/pkg/log"
+	"github.com/ClusterCockpit/cc-backend/pkg/schema"
 )
 
 var Clusters []*schema.Cluster
web/frontend/package-lock.json (generated): 18 changes
@@ -369,12 +369,9 @@
 			}
 		},
 		"node_modules/chart.js": {
-			"version": "4.4.0",
-			"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.0.tgz",
-			"integrity": "sha512-vQEj6d+z0dcsKLlQvbKIMYFHd3t8W/7L2vfJIbYcfyPcRx92CsHqECpueN8qVGNlKyDcr5wBrYAYKnfu/9Q1hQ==",
-			"version": "4.4.0",
-			"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.0.tgz",
-			"integrity": "sha512-vQEj6d+z0dcsKLlQvbKIMYFHd3t8W/7L2vfJIbYcfyPcRx92CsHqECpueN8qVGNlKyDcr5wBrYAYKnfu/9Q1hQ==",
+			"version": "4.4.1",
+			"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.1.tgz",
+			"integrity": "sha512-C74QN1bxwV1v2PEujhmKjOZ7iUM4w6BWs23Md/6aOZZSlwMzeCIDGuZay++rBgChYru7/+QFeoQW0fQoP534Dg==",
 			"dependencies": {
 				"@kurkle/color": "^0.3.0"
 			},
@@ -903,12 +900,9 @@
 			}
 		},
 		"node_modules/terser": {
-			"version": "5.24.0",
-			"resolved": "https://registry.npmjs.org/terser/-/terser-5.24.0.tgz",
-			"integrity": "sha512-ZpGR4Hy3+wBEzVEnHvstMvqpD/nABNelQn/z2r0fjVWGQsN3bpOLzQlqDxmb4CDZnXq5lpjnQ+mHQLAOpfM5iw==",
-			"version": "5.24.0",
-			"resolved": "https://registry.npmjs.org/terser/-/terser-5.24.0.tgz",
-			"integrity": "sha512-ZpGR4Hy3+wBEzVEnHvstMvqpD/nABNelQn/z2r0fjVWGQsN3bpOLzQlqDxmb4CDZnXq5lpjnQ+mHQLAOpfM5iw==",
+			"version": "5.25.0",
+			"resolved": "https://registry.npmjs.org/terser/-/terser-5.25.0.tgz",
+			"integrity": "sha512-we0I9SIsfvNUMP77zC9HG+MylwYYsGFSBG8qm+13oud2Yh+O104y614FRbyjpxys16jZwot72Fpi827YvGzuqg==",
 			"dev": true,
 			"dependencies": {
 				"@jridgewell/source-map": "^0.3.3",
@@ -389,9 +389,10 @@
                     <Histogram
                         data={convert2uplot(item.bins)}
                         width={width} height={250}
+                        usesBins={true}
                         title="Average Distribution of '{item.metric}'"
-                        xlabel={`${item.metric} bin maximum [${(metricConfig(cluster.name, item.metric)?.unit?.prefix ? metricConfig(cluster.name, item.metric)?.unit?.prefix : '') +
-                            (metricConfig(cluster.name, item.metric)?.unit?.base ? metricConfig(cluster.name, item.metric)?.unit?.base : '')}]`}
+                        xlabel={`${item.metric} bin maximum ${(metricConfig(cluster.name, item.metric)?.unit?.prefix ? '[' + metricConfig(cluster.name, item.metric)?.unit?.prefix : '') +
+                            (metricConfig(cluster.name, item.metric)?.unit?.base ? metricConfig(cluster.name, item.metric)?.unit?.base + ']' : '')}`}
                         xunit={`${(metricConfig(cluster.name, item.metric)?.unit?.prefix ? metricConfig(cluster.name, item.metric)?.unit?.prefix : '') +
                             (metricConfig(cluster.name, item.metric)?.unit?.base ? metricConfig(cluster.name, item.metric)?.unit?.base : '')}`}
                         ylabel="Normalized Hours"
web/frontend/src/HistogramSelection.svelte (new file): 65 lines
@@ -0,0 +1,65 @@
+<script>
+    import { Modal, ModalBody, ModalHeader, ModalFooter,
+             Button, ListGroup, ListGroupItem } from 'sveltestrap'
+    import { gql, getContextClient, mutationStore } from '@urql/svelte'
+
+    export let cluster
+    export let metricsInHistograms
+    export let isOpen
+
+    let availableMetrics = ['cpu_load', 'flops_any', 'mem_used', 'mem_bw', 'net_bw', 'file_bw']
+    let pendingMetrics = [...metricsInHistograms] // Copy
+    const client = getContextClient()
+
+    const updateConfigurationMutation = ({ name, value }) => {
+        return mutationStore({
+            client: client,
+            query: gql`mutation($name: String!, $value: String!) {
+                updateConfiguration(name: $name, value: $value)
+            }`,
+            variables: { name, value }
+        })
+    }
+
+    function updateConfiguration(data) {
+        updateConfigurationMutation({
+            name: data.name,
+            value: JSON.stringify(data.value)
+        }).subscribe(res => {
+            if (res.fetching === false && res.error) {
+                throw res.error
+                // console.log('Error on subscription: ' + res.error)
+            }
+        })
+    }
+
+    function closeAndApply() {
+        metricsInHistograms = [...pendingMetrics] // Set for parent
+        isOpen = !isOpen
+        updateConfiguration({
+            name: cluster ? `user_view_histogramMetrics:${cluster}` : 'user_view_histogramMetrics',
+            value: metricsInHistograms
+        })
+    }
+</script>
+
+<Modal {isOpen}
+    toggle={() => (isOpen = !isOpen)}>
+    <ModalHeader>
+        Select metrics presented in histograms
+    </ModalHeader>
+    <ModalBody>
+        <ListGroup>
+            {#each availableMetrics as metric (metric)}
+                <ListGroupItem>
+                    <input type="checkbox" bind:group={pendingMetrics} value={metric}>
+                    {metric}
+                </ListGroupItem>
+            {/each}
+        </ListGroup>
+    </ModalBody>
+    <ModalFooter>
+        <Button color="primary" on:click={closeAndApply}> Close & Apply </Button>
+        <Button color="secondary" on:click={() => (isOpen = !isOpen)}> Close </Button>
+    </ModalFooter>
+</Modal>
@@ -15,6 +15,7 @@
         Table,
         Progress,
         Icon,
+        Button
     } from "sveltestrap";
     import { init, convert2uplot, transformPerNodeDataForRoofline } from "./utils.js";
     import { scaleNumbers } from "./units.js";
@@ -24,6 +25,8 @@
         getContextClient,
         mutationStore,
     } from "@urql/svelte";
+    import PlotTable from './PlotTable.svelte'
+    import HistogramSelection from './HistogramSelection.svelte'
 
     const { query: initq } = init();
     const ccconfig = getContext("cc-config");
@@ -63,6 +66,9 @@
         option.key == ccconfig.status_view_selectedTopUserCategory
     );
 
+    let isHistogramSelectionOpen = false
+    $: metricsInHistograms = cluster ? ccconfig[`user_view_histogramMetrics:${cluster}`] : (ccconfig.user_view_histogramMetrics || [])
+
     const client = getContextClient();
     $: mainQuery = queryStore({
         client: client,
@@ -73,6 +79,7 @@
             $metrics: [String!]
             $from: Time!
             $to: Time!
+            $metricsInHistograms: [String!]
         ) {
             nodeMetrics(
                 cluster: $cluster
@@ -98,7 +105,7 @@
                 }
             }
 
-            stats: jobsStatistics(filter: $filter) {
+            stats: jobsStatistics(filter: $filter, metrics: $metricsInHistograms) {
                 histDuration {
                     count
                     value
@@ -115,6 +122,16 @@
                     count
                     value
                 }
+                histMetrics {
+                    metric
+                    unit
+                    data {
+                        min
+                        max
+                        count
+                        bin
+                    }
+                }
             }
 
             allocatedNodes(cluster: $cluster) {
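For reference, the `histMetrics` selection above deserializes into the same model types the repository code earlier in this diff fills. A sketch of what the gqlgen-generated Go types plausibly look like, inferred from the field usage in `jobsMetricStatisticsHistogram` (pointer fields mirror the optional values scanned there; json tags are assumptions, not copied from the repo):

// Hypothetical shape of the generated model types (sketch only).
package model

type MetricHistoPoints struct {
	Metric string              `json:"metric"`
	Unit   string              `json:"unit"`
	Data   []*MetricHistoPoint `json:"data,omitempty"`
}

type MetricHistoPoint struct {
	Bin   *int `json:"bin,omitempty"`
	Count int  `json:"count"`
	Min   *int `json:"min,omitempty"`
	Max   *int `json:"max,omitempty"`
}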
@@ -129,6 +146,7 @@
             from: from.toISOString(),
             to: to.toISOString(),
             filter: [{ state: ["running"] }, { cluster: { eq: cluster } }],
+            metricsInHistograms: metricsInHistograms
         },
     });
 
@@ -311,7 +329,7 @@
         <Col xs="auto" style="align-self: flex-end;">
             <h4 class="mb-0">Current utilization of cluster "{cluster}"</h4>
         </Col>
-        <Col xs="auto">
+        <Col xs="auto" style="margin-left: 0.25rem;">
             {#if $initq.fetching || $mainQuery.fetching}
                 <Spinner />
             {:else if $initq.error}
@@ -321,6 +339,13 @@
             {/if}
         </Col>
         <Col xs="auto" style="margin-left: auto;">
+            <Button
+                outline color="secondary"
+                on:click={() => (isHistogramSelectionOpen = true)}>
+                <Icon name="bar-chart-line"/> Select Histograms
+            </Button>
+        </Col>
+        <Col xs="auto" style="margin-left: 0.25rem;">
             <Refresher
                 initially={120}
                 on:reload={() => {
@@ -666,4 +691,35 @@
         {/key}
         </Col>
     </Row>
+    <hr class="my-2" />
+    {#if metricsInHistograms}
+        <Row>
+            <Col>
+                {#key $mainQuery.data.stats[0].histMetrics}
+                    <PlotTable
+                        let:item
+                        let:width
+                        renderFor="user"
+                        items={$mainQuery.data.stats[0].histMetrics}
+                        itemsPerRow={3}>
+
+                        <Histogram
+                            data={convert2uplot(item.data)}
+                            usesBins={true}
+                            width={width} height={250}
+                            title="Distribution of '{item.metric}' averages"
+                            xlabel={`${item.metric} bin maximum ${item?.unit ? `[${item.unit}]` : ``}`}
+                            xunit={item.unit}
+                            ylabel="Number of Jobs"
+                            yunit="Jobs"/>
+                    </PlotTable>
+                {/key}
+            </Col>
+        </Row>
+    {/if}
 {/if}
+
+<HistogramSelection
+    bind:cluster={cluster}
+    bind:metricsInHistograms={metricsInHistograms}
+    bind:isOpen={isHistogramSelectionOpen} />
@@ -1,7 +1,7 @@
 <script>
     import { onMount, getContext } from 'svelte'
    import { init, convert2uplot } from './utils.js'
-    import { Table, Row, Col, Button, Icon, Card, Spinner, Input } from 'sveltestrap'
+    import { Table, Row, Col, Button, Icon, Card, Spinner } from 'sveltestrap'
     import { queryStore, gql, getContextClient } from '@urql/svelte'
     import Filters from './filters/Filters.svelte'
     import JobList from './joblist/JobList.svelte'
@@ -9,6 +9,8 @@
     import Refresher from './joblist/Refresher.svelte'
     import Histogram from './plots/Histogram.svelte'
     import MetricSelection from './MetricSelection.svelte'
+    import HistogramSelection from './HistogramSelection.svelte'
+    import PlotTable from './PlotTable.svelte'
     import { scramble, scrambleNames } from './joblist/JobInfo.svelte'
 
     const { query: initq } = init()
@@ -23,26 +25,29 @@
     let jobFilters = [];
     let sorting = { field: 'startTime', order: 'DESC' }, isSortingOpen = false
     let metrics = ccconfig.plot_list_selectedMetrics, isMetricsSelectionOpen = false
-    let w1, w2, histogramHeight = 250
+    let w1, w2, histogramHeight = 250, isHistogramSelectionOpen = false
     let selectedCluster = filterPresets?.cluster ? filterPresets.cluster : null
     let showFootprint = filterPresets.cluster
         ? !!ccconfig[`plot_list_showFootprint:${filterPresets.cluster}`]
         : !!ccconfig.plot_list_showFootprint
 
+    $: metricsInHistograms = selectedCluster ? ccconfig[`user_view_histogramMetrics:${selectedCluster}`] : (ccconfig.user_view_histogramMetrics || [])
+
     const client = getContextClient();
     $: stats = queryStore({
         client: client,
         query: gql`
-        query($jobFilters: [JobFilter!]!) {
-            jobsStatistics(filter: $jobFilters) {
+        query($jobFilters: [JobFilter!]!, $metricsInHistograms: [String!]) {
+            jobsStatistics(filter: $jobFilters, metrics: $metricsInHistograms) {
                 totalJobs
                 shortJobs
                 totalWalltime
                 totalCoreHours
                 histDuration { count, value }
                 histNumNodes { count, value }
+                histMetrics { metric, unit, data { min, max, count, bin } }
             }}`,
-        variables: { jobFilters }
+        variables: { jobFilters, metricsInHistograms }
     })
 
     onMount(() => filterComponent.update())
@@ -71,6 +76,12 @@
                 on:click={() => (isMetricsSelectionOpen = true)}>
                 <Icon name="graph-up"/> Metrics
             </Button>
+
+            <Button
+                outline color="secondary"
+                on:click={() => (isHistogramSelectionOpen = true)}>
+                <Icon name="bar-chart-line"/> Select Histograms
+            </Button>
         </Col>
         <Col xs="auto">
             <Filters
@@ -162,6 +173,41 @@
         </div>
     {/if}
     </Row>
+    {#if metricsInHistograms}
+        <Row>
+            {#if $stats.error}
+                <Col>
+                    <Card body color="danger">{$stats.error.message}</Card>
+                </Col>
+            {:else if !$stats.data}
+                <Col>
+                    <Spinner secondary />
+                </Col>
+            {:else}
+                <Col>
+                    {#key $stats.data.jobsStatistics[0].histMetrics}
+                        <PlotTable
+                            let:item
+                            let:width
+                            renderFor="user"
+                            items={$stats.data.jobsStatistics[0].histMetrics}
+                            itemsPerRow={3}>
+
+                            <Histogram
+                                data={convert2uplot(item.data)}
+                                usesBins={true}
+                                width={width} height={250}
+                                title="Distribution of '{item.metric}' averages"
+                                xlabel={`${item.metric} bin maximum ${item?.unit ? `[${item.unit}]` : ``}`}
+                                xunit={item.unit}
+                                ylabel="Number of Jobs"
+                                yunit="Jobs"/>
+                        </PlotTable>
+                    {/key}
+                </Col>
+            {/if}
+        </Row>
+    {/if}
     <br/>
     <Row>
         <Col>
|
|||||||
bind:metrics={metrics}
|
bind:metrics={metrics}
|
||||||
bind:isOpen={isMetricsSelectionOpen}
|
bind:isOpen={isMetricsSelectionOpen}
|
||||||
bind:showFootprint={showFootprint}
|
bind:showFootprint={showFootprint}
|
||||||
view='list'/>
|
view='list'/>
|
||||||
|
|
||||||
|
<HistogramSelection
|
||||||
|
bind:cluster={selectedCluster}
|
||||||
|
bind:metricsInHistograms={metricsInHistograms}
|
||||||
|
bind:isOpen={isHistogramSelectionOpen} />
|
||||||
|
@ -11,6 +11,7 @@
|
|||||||
import { Card } from 'sveltestrap'
|
import { Card } from 'sveltestrap'
|
||||||
|
|
||||||
export let data
|
export let data
|
||||||
|
export let usesBins = false
|
||||||
export let width = 500
|
export let width = 500
|
||||||
export let height = 300
|
export let height = 300
|
||||||
export let title = ''
|
export let title = ''
|
||||||
@ -160,6 +161,14 @@
|
|||||||
series: [
|
series: [
|
||||||
{
|
{
|
||||||
label: xunit !== '' ? xunit : null,
|
label: xunit !== '' ? xunit : null,
|
||||||
|
value: (u, ts, sidx, didx) => {
|
||||||
|
if (usesBins) {
|
||||||
|
const min = u.data[sidx][didx - 1] ? u.data[sidx][didx - 1] : 0
|
||||||
|
const max = u.data[sidx][didx]
|
||||||
|
ts = min + ' - ' + max // narrow spaces
|
||||||
|
}
|
||||||
|
return ts
|
||||||
|
}
|
||||||
},
|
},
|
||||||
Object.assign({
|
Object.assign({
|
||||||
label: yunit !== '' ? yunit : null,
|
label: yunit !== '' ? yunit : null,
|
||||||
|
@ -316,11 +316,17 @@ export function checkMetricDisabled(m, c, s) { //[m]etric, [c]luster, [s]ubclust
|
|||||||
}
|
}
|
||||||
|
|
||||||
export function convert2uplot(canvasData) {
|
export function convert2uplot(canvasData) {
|
||||||
// initial use: Canvas Histogram Data to Uplot
|
// Prep: Uplot Data Structure
|
||||||
let uplotData = [[],[]] // [X, Y1, Y2, ...]
|
let uplotData = [[],[]] // [X, Y1, Y2, ...]
|
||||||
canvasData.forEach( pair => {
|
// Iterate
|
||||||
uplotData[0].push(pair.value)
|
canvasData.forEach( cd => {
|
||||||
uplotData[1].push(pair.count)
|
if (Object.keys(cd).length == 4) { // MetricHisto Datafromat
|
||||||
|
uplotData[0].push(cd?.max ? cd.max : 0)
|
||||||
|
uplotData[1].push(cd.count)
|
||||||
|
} else { // Default
|
||||||
|
uplotData[0].push(cd.value)
|
||||||
|
uplotData[1].push(cd.count)
|
||||||
|
}
|
||||||
})
|
})
|
||||||
return uplotData
|
return uplotData
|
||||||
}
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user