Repository: https://github.com/ClusterCockpit/cc-backend

Commit b5b355c16c (parent 9bc36152d9): Finished backend sql query and gql resolve
@@ -288,7 +288,14 @@ type HistoPoint {
 
 type MetricHistoPoints {
   metric: String!
-  data: [HistoPoint!]
+  data: [MetricHistoPoint!]
 }
 
+type MetricHistoPoint {
+  min: Int!
+  max: Int!
+  count: Int!
+  bin: Int!
+}
+
 type JobsStatistics {
@@ -179,6 +179,13 @@ type ComplexityRoot struct {
         Metric func(childComplexity int) int
     }
 
+    MetricHistoPoint struct {
+        Bin   func(childComplexity int) int
+        Count func(childComplexity int) int
+        Max   func(childComplexity int) int
+        Min   func(childComplexity int) int
+    }
+
     MetricHistoPoints struct {
         Data   func(childComplexity int) int
         Metric func(childComplexity int) int
@@ -938,6 +945,34 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 
         return e.complexity.MetricFootprints.Metric(childComplexity), true
 
+    case "MetricHistoPoint.bin":
+        if e.complexity.MetricHistoPoint.Bin == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoint.Bin(childComplexity), true
+
+    case "MetricHistoPoint.count":
+        if e.complexity.MetricHistoPoint.Count == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoint.Count(childComplexity), true
+
+    case "MetricHistoPoint.max":
+        if e.complexity.MetricHistoPoint.Max == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoint.Max(childComplexity), true
+
+    case "MetricHistoPoint.min":
+        if e.complexity.MetricHistoPoint.Min == nil {
+            break
+        }
+
+        return e.complexity.MetricHistoPoint.Min(childComplexity), true
+
     case "MetricHistoPoints.data":
         if e.complexity.MetricHistoPoints.Data == nil {
             break
@@ -1921,7 +1956,14 @@ type HistoPoint {
 
 type MetricHistoPoints {
   metric: String!
-  data: [HistoPoint!]
+  data: [MetricHistoPoint!]
 }
 
+type MetricHistoPoint {
+  min: Int!
+  max: Int!
+  count: Int!
+  bin: Int!
+}
+
 type JobsStatistics {
@@ -1941,7 +1983,7 @@ type JobsStatistics {
   histNumNodes: [HistoPoint!]! # value: number of nodes, count: number of jobs with that number of nodes
   histNumCores: [HistoPoint!]! # value: number of cores, count: number of jobs with that number of cores
   histNumAccs: [HistoPoint!]! # value: number of accs, count: number of jobs with that number of accs
-  histMetrics: [MetricHistoPoints!]! # value: metric average bin, count: number of jobs with that metric average
+  histMetrics: [MetricHistoPoints!]! # metric: metricname, data array of histopoints: value: metric average bin, count: number of jobs with that metric average
 }
 
 input PageRequest {
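A tiny standalone sketch, not part of the commit: the selection a client can now make against the new histMetrics field. It mirrors the gql document the analysis frontend uses in the hunk near the end of this diff; it is wrapped in a Go constant only to keep all added examples in one language, and the constant name is illustrative.

package main

import "fmt"

// Example GraphQL selection for the per-metric histograms added above.
const histMetricsSelection = `
query ($jobFilters: [JobFilter!]!, $metricsInHistograms: [String!]) {
  jobsStatistics(filter: $jobFilters, metrics: $metricsInHistograms) {
    histMetrics { metric, data { min, max, count, bin } }
  }
}`

func main() {
    // A real client would POST this document to the cc-backend GraphQL
    // endpoint; here it is only printed.
    fmt.Println(histMetricsSelection)
}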
@@ -6283,6 +6325,182 @@ func (ec *executionContext) fieldContext_MetricFootprints_data(ctx context.Conte
     return fc, nil
 }
 
+func (ec *executionContext) _MetricHistoPoint_min(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoint) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoint_min(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Min, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        if !graphql.HasFieldError(ctx, fc) {
+            ec.Errorf(ctx, "must not be null")
+        }
+        return graphql.Null
+    }
+    res := resTmp.(int)
+    fc.Result = res
+    return ec.marshalNInt2int(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoint_min(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoint",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type Int does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
+func (ec *executionContext) _MetricHistoPoint_max(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoint) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoint_max(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Max, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        if !graphql.HasFieldError(ctx, fc) {
+            ec.Errorf(ctx, "must not be null")
+        }
+        return graphql.Null
+    }
+    res := resTmp.(int)
+    fc.Result = res
+    return ec.marshalNInt2int(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoint_max(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoint",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type Int does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
+func (ec *executionContext) _MetricHistoPoint_count(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoint) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoint_count(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Count, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        if !graphql.HasFieldError(ctx, fc) {
+            ec.Errorf(ctx, "must not be null")
+        }
+        return graphql.Null
+    }
+    res := resTmp.(int)
+    fc.Result = res
+    return ec.marshalNInt2int(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoint_count(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoint",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type Int does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
+func (ec *executionContext) _MetricHistoPoint_bin(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoint) (ret graphql.Marshaler) {
+    fc, err := ec.fieldContext_MetricHistoPoint_bin(ctx, field)
+    if err != nil {
+        return graphql.Null
+    }
+    ctx = graphql.WithFieldContext(ctx, fc)
+    defer func() {
+        if r := recover(); r != nil {
+            ec.Error(ctx, ec.Recover(ctx, r))
+            ret = graphql.Null
+        }
+    }()
+    resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+        ctx = rctx // use context from middleware stack in children
+        return obj.Bin, nil
+    })
+    if err != nil {
+        ec.Error(ctx, err)
+        return graphql.Null
+    }
+    if resTmp == nil {
+        if !graphql.HasFieldError(ctx, fc) {
+            ec.Errorf(ctx, "must not be null")
+        }
+        return graphql.Null
+    }
+    res := resTmp.(int)
+    fc.Result = res
+    return ec.marshalNInt2int(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_MetricHistoPoint_bin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+    fc = &graphql.FieldContext{
+        Object:     "MetricHistoPoint",
+        Field:      field,
+        IsMethod:   false,
+        IsResolver: false,
+        Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+            return nil, errors.New("field of type Int does not have child fields")
+        },
+    }
+    return fc, nil
+}
+
 func (ec *executionContext) _MetricHistoPoints_metric(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoints) (ret graphql.Marshaler) {
     fc, err := ec.fieldContext_MetricHistoPoints_metric(ctx, field)
     if err != nil {
@@ -6350,9 +6568,9 @@ func (ec *executionContext) _MetricHistoPoints_data(ctx context.Context, field g
     if resTmp == nil {
         return graphql.Null
     }
-    res := resTmp.([]*model.HistoPoint)
+    res := resTmp.([]*model.MetricHistoPoint)
     fc.Result = res
-    return ec.marshalOHistoPoint2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐHistoPointᚄ(ctx, field.Selections, res)
+    return ec.marshalOMetricHistoPoint2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPointᚄ(ctx, field.Selections, res)
 }
 
 func (ec *executionContext) fieldContext_MetricHistoPoints_data(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
@@ -6363,12 +6581,16 @@ func (ec *executionContext) fieldContext_MetricHistoPoints_data(ctx context.Cont
         IsResolver: false,
         Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
             switch field.Name {
+            case "min":
+                return ec.fieldContext_MetricHistoPoint_min(ctx, field)
+            case "max":
+                return ec.fieldContext_MetricHistoPoint_max(ctx, field)
             case "count":
-                return ec.fieldContext_HistoPoint_count(ctx, field)
-            case "value":
-                return ec.fieldContext_HistoPoint_value(ctx, field)
+                return ec.fieldContext_MetricHistoPoint_count(ctx, field)
+            case "bin":
+                return ec.fieldContext_MetricHistoPoint_bin(ctx, field)
             }
-            return nil, fmt.Errorf("no field named %q was found under type HistoPoint", field.Name)
+            return nil, fmt.Errorf("no field named %q was found under type MetricHistoPoint", field.Name)
         },
     }
     return fc, nil
@@ -13254,6 +13476,60 @@ func (ec *executionContext) _MetricFootprints(ctx context.Context, sel ast.Selec
     return out
 }
 
+var metricHistoPointImplementors = []string{"MetricHistoPoint"}
+
+func (ec *executionContext) _MetricHistoPoint(ctx context.Context, sel ast.SelectionSet, obj *model.MetricHistoPoint) graphql.Marshaler {
+    fields := graphql.CollectFields(ec.OperationContext, sel, metricHistoPointImplementors)
+
+    out := graphql.NewFieldSet(fields)
+    deferred := make(map[string]*graphql.FieldSet)
+    for i, field := range fields {
+        switch field.Name {
+        case "__typename":
+            out.Values[i] = graphql.MarshalString("MetricHistoPoint")
+        case "min":
+            out.Values[i] = ec._MetricHistoPoint_min(ctx, field, obj)
+            if out.Values[i] == graphql.Null {
+                out.Invalids++
+            }
+        case "max":
+            out.Values[i] = ec._MetricHistoPoint_max(ctx, field, obj)
+            if out.Values[i] == graphql.Null {
+                out.Invalids++
+            }
+        case "count":
+            out.Values[i] = ec._MetricHistoPoint_count(ctx, field, obj)
+            if out.Values[i] == graphql.Null {
+                out.Invalids++
+            }
+        case "bin":
+            out.Values[i] = ec._MetricHistoPoint_bin(ctx, field, obj)
+            if out.Values[i] == graphql.Null {
+                out.Invalids++
+            }
+        default:
+            panic("unknown field " + strconv.Quote(field.Name))
+        }
+    }
+    out.Dispatch(ctx)
+    if out.Invalids > 0 {
+        return graphql.Null
+    }
+
+    atomic.AddInt32(&ec.deferred, int32(len(deferred)))
+
+    for label, dfs := range deferred {
+        ec.processDeferredGroup(graphql.DeferredGroup{
+            Label:    label,
+            Path:     graphql.GetPath(ctx),
+            FieldSet: dfs,
+            Context:  ctx,
+        })
+    }
+
+    return out
+}
+
 var metricHistoPointsImplementors = []string{"MetricHistoPoints"}
 
 func (ec *executionContext) _MetricHistoPoints(ctx context.Context, sel ast.SelectionSet, obj *model.MetricHistoPoints) graphql.Marshaler {
@@ -15547,6 +15823,16 @@ func (ec *executionContext) marshalNMetricFootprints2ᚖgithubᚗcomᚋClusterCo
     return ec._MetricFootprints(ctx, sel, v)
 }
 
+func (ec *executionContext) marshalNMetricHistoPoint2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPoint(ctx context.Context, sel ast.SelectionSet, v *model.MetricHistoPoint) graphql.Marshaler {
+    if v == nil {
+        if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
+            ec.Errorf(ctx, "the requested element is null which the schema does not allow")
+        }
+        return graphql.Null
+    }
+    return ec._MetricHistoPoint(ctx, sel, v)
+}
+
 func (ec *executionContext) marshalNMetricHistoPoints2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPointsᚄ(ctx context.Context, sel ast.SelectionSet, v []*model.MetricHistoPoints) graphql.Marshaler {
     ret := make(graphql.Array, len(v))
     var wg sync.WaitGroup
@@ -16408,53 +16694,6 @@ func (ec *executionContext) marshalOFootprints2ᚖgithubᚗcomᚋClusterCockpit
     return ec._Footprints(ctx, sel, v)
 }
 
-func (ec *executionContext) marshalOHistoPoint2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐHistoPointᚄ(ctx context.Context, sel ast.SelectionSet, v []*model.HistoPoint) graphql.Marshaler {
-    if v == nil {
-        return graphql.Null
-    }
-    ret := make(graphql.Array, len(v))
-    var wg sync.WaitGroup
-    isLen1 := len(v) == 1
-    if !isLen1 {
-        wg.Add(len(v))
-    }
-    for i := range v {
-        i := i
-        fc := &graphql.FieldContext{
-            Index:  &i,
-            Result: &v[i],
-        }
-        ctx := graphql.WithFieldContext(ctx, fc)
-        f := func(i int) {
-            defer func() {
-                if r := recover(); r != nil {
-                    ec.Error(ctx, ec.Recover(ctx, r))
-                    ret = nil
-                }
-            }()
-            if !isLen1 {
-                defer wg.Done()
-            }
-            ret[i] = ec.marshalNHistoPoint2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐHistoPoint(ctx, sel, v[i])
-        }
-        if isLen1 {
-            f(i)
-        } else {
-            go f(i)
-        }
-
-    }
-    wg.Wait()
-
-    for _, e := range ret {
-        if e == graphql.Null {
-            return graphql.Null
-        }
-    }
-
-    return ret
-}
-
 func (ec *executionContext) unmarshalOID2ᚕstringᚄ(ctx context.Context, v interface{}) ([]string, error) {
     if v == nil {
         return nil, nil
@@ -16703,6 +16942,53 @@ func (ec *executionContext) marshalOJobState2ᚕgithubᚗcomᚋClusterCockpitᚋ
     return ret
 }
 
+func (ec *executionContext) marshalOMetricHistoPoint2ᚕᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPointᚄ(ctx context.Context, sel ast.SelectionSet, v []*model.MetricHistoPoint) graphql.Marshaler {
+    if v == nil {
+        return graphql.Null
+    }
+    ret := make(graphql.Array, len(v))
+    var wg sync.WaitGroup
+    isLen1 := len(v) == 1
+    if !isLen1 {
+        wg.Add(len(v))
+    }
+    for i := range v {
+        i := i
+        fc := &graphql.FieldContext{
+            Index:  &i,
+            Result: &v[i],
+        }
+        ctx := graphql.WithFieldContext(ctx, fc)
+        f := func(i int) {
+            defer func() {
+                if r := recover(); r != nil {
+                    ec.Error(ctx, ec.Recover(ctx, r))
+                    ret = nil
+                }
+            }()
+            if !isLen1 {
+                defer wg.Done()
+            }
+            ret[i] = ec.marshalNMetricHistoPoint2ᚖgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐMetricHistoPoint(ctx, sel, v[i])
+        }
+        if isLen1 {
+            f(i)
+        } else {
+            go f(i)
+        }
+
+    }
+    wg.Wait()
+
+    for _, e := range ret {
+        if e == graphql.Null {
+            return graphql.Null
+        }
+    }
+
+    return ret
+}
+
 func (ec *executionContext) unmarshalOMetricScope2ᚕgithubᚗcomᚋClusterCockpitᚋccᚑbackendᚋpkgᚋschemaᚐMetricScopeᚄ(ctx context.Context, v interface{}) ([]schema.MetricScope, error) {
     if v == nil {
         return nil, nil
|
@ -109,9 +109,16 @@ type MetricFootprints struct {
|
|||||||
Data []schema.Float `json:"data"`
|
Data []schema.Float `json:"data"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type MetricHistoPoint struct {
|
||||||
|
Min int `json:"min"`
|
||||||
|
Max int `json:"max"`
|
||||||
|
Count int `json:"count"`
|
||||||
|
Bin int `json:"bin"`
|
||||||
|
}
|
||||||
|
|
||||||
type MetricHistoPoints struct {
|
type MetricHistoPoints struct {
|
||||||
Metric string `json:"metric"`
|
Metric string `json:"metric"`
|
||||||
Data []*HistoPoint `json:"data,omitempty"`
|
Data []*MetricHistoPoint `json:"data,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type NodeMetrics struct {
|
type NodeMetrics struct {
|
||||||
|
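A standalone sketch, not part of the commit, showing the JSON that the json tags above produce for one element of histMetrics, i.e. the shape the frontend query near the end of this diff consumes. The struct definitions are copied from the hunk above; the numbers are invented sample data.

package main

import (
    "encoding/json"
    "fmt"
)

// Copies of the generated model types from the hunk above.
type MetricHistoPoint struct {
    Min   int `json:"min"`
    Max   int `json:"max"`
    Count int `json:"count"`
    Bin   int `json:"bin"`
}

type MetricHistoPoints struct {
    Metric string              `json:"metric"`
    Data   []*MetricHistoPoint `json:"data,omitempty"`
}

func main() {
    // One histMetrics entry with two made-up bins.
    entry := MetricHistoPoints{
        Metric: "cpu_load",
        Data: []*MetricHistoPoint{
            {Min: 0, Max: 7, Count: 12, Bin: 1},
            {Min: 8, Max: 15, Count: 3, Bin: 2},
        },
    }
    out, _ := json.MarshalIndent(entry, "", "  ")
    // Prints a "metric" name plus a "data" array of {min, max, count, bin} objects.
    fmt.Println(string(out))
}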
@@ -521,65 +521,89 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
     metric string,
     filters []*model.JobFilter) (*model.MetricHistoPoints, error) {
 
-    // "job.load_avg as value"
-    // switch m {
-    // case "cpu_load":
+    var dbMetric string
+    switch metric {
+    case "cpu_load":
+        dbMetric = "load_avg"
+    case "flops_any":
+        dbMetric = "flops_any_avg"
+    case "mem_bw":
+        dbMetric = "mem_bw_avg"
+    default:
+        return nil, fmt.Errorf("%s not implemented", metric)
+    }
 
     // Get specific Peak or largest Peak
     var metricConfig *schema.MetricConfig
     var peak float64 = 0.0
 
     for _, f := range filters {
         if f.Cluster != nil {
             metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric)
             peak = metricConfig.Peak
-        } else {
-            for _, c := range archive.Clusters {
-                for _, m := range c.MetricConfig {
-                    if m.Name == metric {
-                        if m.Peak > peak {
-                            peak = m.Peak
-                        }
-                    }
-                }
-            }
-        }
-    }
+            log.Debugf("Cluster %s filter found with peak %f for %s", *f.Cluster.Eq, peak, metric)
+        }
+    }
+
+    if peak == 0.0 {
+        for _, c := range archive.Clusters {
+            for _, m := range c.MetricConfig {
+                if m.Name == metric {
+                    if m.Peak > peak {
+                        peak = m.Peak
+                    }
+                }
+            }
+        }
+    }
 
-    // Make bins
+    // Make bins, see https://jereze.com/code/sql-histogram/
+    // Diffs:
+    // CAST(X AS INTEGER) instead of floor(X), used also for Min, Max selection
+    // renamed to bin for simplicity and model struct
+    // Ditched rename from job to data, as it conflicts with security check afterwards
     start := time.Now()
-    query, qerr := SecurityCheck(ctx,
-        sq.Select(value, "COUNT(job.id) AS count").From("job"))
+    prepQuery := sq.Select(
+        fmt.Sprintf(`CAST(min(job.%s) as INTEGER) as min`, dbMetric),
+        fmt.Sprintf(`CAST(max(job.%s) as INTEGER) as max`, dbMetric),
+        fmt.Sprintf(`count(job.%s) as count`, dbMetric),
+        fmt.Sprintf(`CAST((case when job.%s = value.max then value.max*0.999999999 else job.%s end - value.min) / (value.max - value.min) * 10 as INTEGER) +1 as bin`, dbMetric, dbMetric))
+    prepQuery = prepQuery.From("job")
+    prepQuery = prepQuery.CrossJoin(fmt.Sprintf(`(select max(%s) as max, min(%s) as min from job where %s is not null and %s < %f) as value`, dbMetric, dbMetric, dbMetric, dbMetric, peak))
+    prepQuery = prepQuery.Where(fmt.Sprintf(`job.%s is not null and job.%s < %f`, dbMetric, dbMetric, peak))
+
+    query, qerr := SecurityCheck(ctx, prepQuery)
 
     if qerr != nil {
         return nil, qerr
     }
 
     for _, f := range filters {
-        if f.Cluster != nil {
-            metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric)
-            peak = metricConfig.Peak
-        }
         query = BuildWhereClause(f, query)
     }
 
-    rows, err := query.GroupBy("value").RunWith(r.DB).Query()
+    // Finalize query with Grouping and Ordering
+    query = query.GroupBy("bin").OrderBy("bin")
+
+    rows, err := query.RunWith(r.DB).Query()
     if err != nil {
-        log.Error("Error while running query")
+        log.Errorf("Error while running query: %s", err)
         return nil, err
     }
 
-    points := make([]*model.HistoPoint, 0)
+    points := make([]*model.MetricHistoPoint, 0)
     for rows.Next() {
-        point := model.HistoPoint{}
-        if err := rows.Scan(&point.Value, &point.Count); err != nil {
+        point := model.MetricHistoPoint{}
+        if err := rows.Scan(&point.Min, &point.Max, &point.Count, &point.Bin); err != nil {
             log.Warn("Error while scanning rows")
             return nil, err
         }
 
         points = append(points, &point)
     }
 
+    result := model.MetricHistoPoints{Metric: metric, Data: points}
+
     log.Debugf("Timer jobsStatisticsHistogram %s", time.Since(start))
-    return points, nil
+    return &result, nil
 }
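A standalone sketch, not part of the commit, that rebuilds the squirrel query from the hunk above with hard-coded placeholder values ("load_avg" and a peak of 128.0 are made up) and prints the SQL it generates; SecurityCheck and BuildWhereClause are left out. The helper binOf only mirrors the bin expression in plain Go so the ten-bin arithmetic is easier to follow.

package main

import (
    "fmt"

    sq "github.com/Masterminds/squirrel"
)

// binOf mirrors the SQL bin expression: values are spread over ten equally
// wide bins between the observed min and max, and the exact maximum is
// nudged down so it lands in bin 10 rather than an eleventh bin
// (assumes max > min, as the SQL does).
func binOf(v, min, max float64) int {
    if v == max {
        v = max * 0.999999999
    }
    return int((v-min)/(max-min)*10) + 1
}

func main() {
    dbMetric := "load_avg" // placeholder, as chosen for metric == "cpu_load"
    peak := 128.0          // placeholder peak taken from the metric config

    q := sq.Select(
        fmt.Sprintf(`CAST(min(job.%s) as INTEGER) as min`, dbMetric),
        fmt.Sprintf(`CAST(max(job.%s) as INTEGER) as max`, dbMetric),
        fmt.Sprintf(`count(job.%s) as count`, dbMetric),
        fmt.Sprintf(`CAST((case when job.%s = value.max then value.max*0.999999999 else job.%s end - value.min) / (value.max - value.min) * 10 as INTEGER) +1 as bin`, dbMetric, dbMetric),
    ).
        From("job").
        CrossJoin(fmt.Sprintf(`(select max(%s) as max, min(%s) as min from job where %s is not null and %s < %f) as value`, dbMetric, dbMetric, dbMetric, dbMetric, peak)).
        Where(fmt.Sprintf(`job.%s is not null and job.%s < %f`, dbMetric, dbMetric, peak)).
        GroupBy("bin").
        OrderBy("bin")

    sql, args, err := q.ToSql()
    fmt.Println(sql, args, err)

    fmt.Println(binOf(37.5, 0, 75)) // value 37.5 between 0 and 75 falls into bin 6
}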
@@ -28,13 +28,13 @@
     let metrics = ccconfig.plot_list_selectedMetrics, isMetricsSelectionOpen = false
     let w1, w2, histogramHeight = 250
     let selectedCluster = filterPresets?.cluster ? filterPresets.cluster : null
-    let metricsInHistograms = ccconfig[`user_view_histogramMetrics:${cluster}`] || ccconfig.user_view_histogramMetrics
+    let metricsInHistograms = ccconfig[`user_view_histogramMetrics:${selectedCluster}`] || ccconfig.user_view_histogramMetrics || []
 
     const client = getContextClient();
     $: stats = queryStore({
         client: client,
         query: gql`
-        query($jobFilters: [JobFilter!]!, $metricsInHistograms: [String!]!) {
+        query($jobFilters: [JobFilter!]!, $metricsInHistograms: [String!]) {
             jobsStatistics(filter: $jobFilters, metrics: $metricsInHistograms) {
                 totalJobs
                 shortJobs
@@ -42,7 +42,7 @@
                 totalCoreHours
                 histDuration { count, value }
                 histNumNodes { count, value }
-                histMetrics { metric, data { count, value } }
+                histMetrics { metric, data { min, max, count, bin } }
             }}`,
         variables: { jobFilters, metricsInHistograms}
     })
@@ -169,29 +169,43 @@
         </div>
     {/if}
 </Row>
-<Row>
-    <Col>
-        <PlotTable
-            let:item
-            let:width
-            renderFor="analysis"
-            items={$stats.data.jobsStatistics[0].hostMetrics}>
+{#if metricsInHistograms}
+    <Row>
+        {#if $stats.error}
+            <Col>
+                <Card body color="danger">{$stats.error.message}</Card>
+            </Col>
+        {:else if !$stats.data}
+            <Col>
+                <Spinner secondary />
+            </Col>
+        {:else}
+            <Col>
+                {#key $stats.data.jobsStatistics[0].histMetrics}
+                    <PlotTable
+                        let:item
+                        let:width
+                        renderFor="analysis"
+                        items={$stats.data.jobsStatistics[0].histMetrics}>
 
         {item}
 
         <!-- <Histogram
             data={convert2uplot(item.bins)}
             width={width} height={250}
             title="Average Distribution of '{item.metric}'"
             xlabel={`${item.metric} bin maximum [${(metricConfig(selectedCluster, item.metric)?.unit?.prefix ? metricConfig(selectedCluster, item.metric)?.unit?.prefix : '') +
                 (metricConfig(selectedCluster, item.metric)?.unit?.base ? metricConfig(selectedCluster, item.metric)?.unit?.base : '')}]`}
             xunit={`${(metricConfig(selectedCluster, item.metric)?.unit?.prefix ? metricConfig(selectedCluster, item.metric)?.unit?.prefix : '') +
                 (metricConfig(selectedCluster, item.metric)?.unit?.base ? metricConfig(selectedCluster, item.metric)?.unit?.base : '')}`}
             ylabel="Normalized Hours"
             yunit="Hours"/> -->
         </PlotTable>
-    </Col>
-</Row>
+                    {/key}
+                </Col>
+            {/if}
+        </Row>
+    {/if}
 <br/>
 <Row>
     <Col>