Refactor Svelte frontend

- Adapt to new metricConfig logic
- Generalize footprint metrics for the bar card
- Use footprint metrics in the stats filter and sorting
- Frontend always uses GQL, except adminOptions
- Job View loads scopes for all metrics on request
Christoph Kluge
2024-07-22 15:41:33 +02:00
parent c4d93e492b
commit 6a1cb51c2f
38 changed files with 627 additions and 810 deletions


@@ -119,7 +119,6 @@ func (api *RestApi) MountFrontendApiRoutes(r *mux.Router) {
if api.Authentication != nil {
r.HandleFunc("/jwt/", api.getJWT).Methods(http.MethodGet)
r.HandleFunc("/configuration/", api.updateConfiguration).Methods(http.MethodPost)
r.HandleFunc("/jobs/metrics/{id}", api.getJobMetrics).Methods(http.MethodGet) // Fetched in Job.svelte: Needs All-User-Access-Session-Auth
}
}


@@ -211,6 +211,7 @@ type ComplexityRoot struct {
MetricHistoPoints struct {
Data func(childComplexity int) int
Metric func(childComplexity int) int
Stat func(childComplexity int) int
Unit func(childComplexity int) int
}
@@ -1104,6 +1105,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.MetricHistoPoints.Metric(childComplexity), true
case "MetricHistoPoints.stat":
if e.complexity.MetricHistoPoints.Stat == nil {
break
}
return e.complexity.MetricHistoPoints.Stat(childComplexity), true
case "MetricHistoPoints.unit":
if e.complexity.MetricHistoPoints.Unit == nil {
break
@@ -2100,6 +2108,7 @@ input JobFilter {
input OrderByInput {
field: String!
type: String!,
order: SortDirectionEnum! = ASC
}
@@ -2147,6 +2156,7 @@ type HistoPoint {
type MetricHistoPoints {
metric: String!
unit: String!
stat: String
data: [MetricHistoPoint!]
}
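
For illustration, two query sketches a client could send against the extended schema. The jobs and jobsStatistics root fields, their arguments, and the MetricHistoPoint subfields are not part of this diff and are assumed from the wider schema; the cluster, metric, and field names are made up.

// Sketch only: root fields, arguments, and subfields outside the hunks
// above are assumptions, not guaranteed by this diff.
package main

import "fmt"

const jobsQuery = `
query {
  jobs(
    filter: [{ cluster: { eq: "fritz" } }]
    order: { field: "mem_used_max", type: "foot", order: DESC }
  ) {
    count
  }
}`

const histMetricsQuery = `
query {
  jobsStatistics(filter: [{ cluster: { eq: "fritz" } }], metrics: ["flops_any"]) {
    histMetrics {
      metric
      unit
      stat  # new: footprint statistic (e.g. "avg") behind the histogram bins
      data { bin count min max }
    }
  }
}`

func main() {
	fmt.Println(jobsQuery)
	fmt.Println(histMetricsQuery)
}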
@@ -6445,6 +6455,8 @@ func (ec *executionContext) fieldContext_JobsStatistics_histMetrics(_ context.Co
return ec.fieldContext_MetricHistoPoints_metric(ctx, field)
case "unit":
return ec.fieldContext_MetricHistoPoints_unit(ctx, field)
case "stat":
return ec.fieldContext_MetricHistoPoints_stat(ctx, field)
case "data":
return ec.fieldContext_MetricHistoPoints_data(ctx, field)
}
@@ -7295,6 +7307,47 @@ func (ec *executionContext) fieldContext_MetricHistoPoints_unit(_ context.Contex
return fc, nil
}
func (ec *executionContext) _MetricHistoPoints_stat(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoints) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_MetricHistoPoints_stat(ctx, field)
if err != nil {
return graphql.Null
}
ctx = graphql.WithFieldContext(ctx, fc)
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Stat, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.(*string)
fc.Result = res
return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
func (ec *executionContext) fieldContext_MetricHistoPoints_stat(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
fc = &graphql.FieldContext{
Object: "MetricHistoPoints",
Field: field,
IsMethod: false,
IsResolver: false,
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
return nil, errors.New("field of type String does not have child fields")
},
}
return fc, nil
}
func (ec *executionContext) _MetricHistoPoints_data(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoints) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_MetricHistoPoints_data(ctx, field)
if err != nil {
@@ -13217,7 +13270,7 @@ func (ec *executionContext) unmarshalInputOrderByInput(ctx context.Context, obj
asMap["order"] = "ASC"
}
fieldsInOrder := [...]string{"field", "order"}
fieldsInOrder := [...]string{"field", "type", "order"}
for _, k := range fieldsInOrder {
v, ok := asMap[k]
if !ok {
@@ -13231,6 +13284,13 @@ func (ec *executionContext) unmarshalInputOrderByInput(ctx context.Context, obj
return it, err
}
it.Field = data
case "type":
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type"))
data, err := ec.unmarshalNString2string(ctx, v)
if err != nil {
return it, err
}
it.Type = data
case "order":
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("order"))
data, err := ec.unmarshalNSortDirectionEnum2githubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐSortDirectionEnum(ctx, v)
@@ -14673,6 +14733,8 @@ func (ec *executionContext) _MetricHistoPoints(ctx context.Context, sel ast.Sele
if out.Values[i] == graphql.Null {
out.Invalids++
}
case "stat":
out.Values[i] = ec._MetricHistoPoints_stat(ctx, field, obj)
case "data":
out.Values[i] = ec._MetricHistoPoints_data(ctx, field, obj)
default:


@@ -123,6 +123,7 @@ type MetricHistoPoint struct {
type MetricHistoPoints struct {
Metric string `json:"metric"`
Unit string `json:"unit"`
Stat *string `json:"stat,omitempty"`
Data []*MetricHistoPoint `json:"data,omitempty"`
}
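
A minimal sketch of how the extended struct serializes; MetricHistoPoint's own fields are elided and the metric, unit, and stat values are made up.

package main

import (
	"encoding/json"
	"fmt"
)

type MetricHistoPoint struct{} // fields elided in this sketch

// Copied from the hunk above.
type MetricHistoPoints struct {
	Metric string              `json:"metric"`
	Unit   string              `json:"unit"`
	Stat   *string             `json:"stat,omitempty"`
	Data   []*MetricHistoPoint `json:"data,omitempty"`
}

func main() {
	stat := "avg" // hypothetical footprint statistic
	withStat, _ := json.Marshal(MetricHistoPoints{Metric: "flops_any", Unit: "F/s", Stat: &stat})
	withoutStat, _ := json.Marshal(MetricHistoPoints{Metric: "flops_any", Unit: "F/s"})
	fmt.Println(string(withStat))    // {"metric":"flops_any","unit":"F/s","stat":"avg"}
	fmt.Println(string(withoutStat)) // {"metric":"flops_any","unit":"F/s"}
}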
@@ -142,6 +143,7 @@ type NodeMetrics struct {
type OrderByInput struct {
Field string `json:"field"`
Type string `json:"type"`
Order SortDirectionEnum `json:"order"`
}


@@ -31,14 +31,28 @@ func (r *JobRepository) QueryJobs(
if order != nil {
field := toSnakeCase(order.Field)
switch order.Order {
case model.SortDirectionEnumAsc:
query = query.OrderBy(fmt.Sprintf("job.%s ASC", field))
case model.SortDirectionEnumDesc:
query = query.OrderBy(fmt.Sprintf("job.%s DESC", field))
default:
return nil, errors.New("REPOSITORY/QUERY > invalid sorting order")
if order.Type == "col" {
// "col": Fixed column name query
switch order.Order {
case model.SortDirectionEnumAsc:
query = query.OrderBy(fmt.Sprintf("job.%s ASC", field))
case model.SortDirectionEnumDesc:
query = query.OrderBy(fmt.Sprintf("job.%s DESC", field))
default:
return nil, errors.New("REPOSITORY/QUERY > invalid sorting order for column")
}
} else {
// "foot": Order by footprint JSON field values
// Only order by footprint values of jobs with a valid JSON footprint
query = query.Where("JSON_VALID(footprint)")
switch order.Order {
case model.SortDirectionEnumAsc:
query = query.OrderBy(fmt.Sprintf("JSON_EXTRACT(footprint, \"$.%s\") ASC", field))
case model.SortDirectionEnumDesc:
query = query.OrderBy(fmt.Sprintf("JSON_EXTRACT(footprint, \"$.%s\") DESC", field))
default:
return nil, errors.New("REPOSITORY/QUERY > invalid sorting order for footprint")
}
}
}
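
A standalone sketch of the SQL the two branches produce, assuming the Masterminds/squirrel builder used above; the column name and footprint key are hypothetical.

package main

import (
	"fmt"

	sq "github.com/Masterminds/squirrel"
)

func main() {
	// type == "col": order by a fixed job table column.
	colSQL, _, _ := sq.Select("job.id").From("job").
		OrderBy("job.duration ASC").ToSql()
	fmt.Println(colSQL)
	// SELECT job.id FROM job ORDER BY job.duration ASC

	// type != "col" ("foot"): order by a key inside the footprint JSON column,
	// restricted to rows whose footprint parses as valid JSON.
	footSQL, _, _ := sq.Select("job.id").From("job").
		Where("JSON_VALID(footprint)").
		OrderBy(`JSON_EXTRACT(footprint, "$.mem_used_avg") DESC`).ToSql()
	fmt.Println(footSQL)
	// SELECT job.id FROM job WHERE JSON_VALID(footprint)
	//   ORDER BY JSON_EXTRACT(footprint, "$.mem_used_avg") DESC
}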
@@ -177,8 +191,8 @@ func BuildWhereClause(filter *model.JobFilter, query sq.SelectBuilder) sq.Select
query = buildStringCondition("job.resources", filter.Node, query)
}
if filter.MetricStats != nil {
for _, m := range filter.MetricStats {
query = buildFloatJsonCondition("job.metric_stats", m.Range, query)
for _, ms := range filter.MetricStats {
query = buildFloatJsonCondition(ms.MetricName, ms.Range, query)
}
}
return query
@@ -200,8 +214,10 @@ func buildTimeCondition(field string, cond *schema.TimeRange, query sq.SelectBui
}
}
func buildFloatJsonCondition(field string, cond *model.FloatRange, query sq.SelectBuilder) sq.SelectBuilder {
return query.Where("JSON_EXTRACT(footprint, '$."+field+"') BETWEEN ? AND ?", cond.From, cond.To)
func buildFloatJsonCondition(condName string, condRange *model.FloatRange, query sq.SelectBuilder) sq.SelectBuilder {
// Only consider jobs whose footprint is valid JSON
query = query.Where("JSON_VALID(footprint)")
return query.Where("JSON_EXTRACT(footprint, \"$."+condName+"\") BETWEEN ? AND ?", condRange.From, condRange.To)
}
func buildStringCondition(field string, cond *model.StringInput, query sq.SelectBuilder) sq.SelectBuilder {
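
Similarly, a sketch of the condition the reworked helper adds for one metric-stats filter entry; the filter's MetricName is spliced directly into the JSON path, here with a hypothetical key and range.

package main

import (
	"fmt"

	sq "github.com/Masterminds/squirrel"
)

func main() {
	// Hypothetical filter entry: MetricName "flops_any_avg", Range 100..500.
	query := sq.Select("job.id").From("job").
		Where("JSON_VALID(footprint)").
		Where(`JSON_EXTRACT(footprint, "$.flops_any_avg") BETWEEN ? AND ?`, 100.0, 500.0)
	sqlStr, args, _ := query.ToSql()
	fmt.Println(sqlStr, args)
	// SELECT job.id FROM job WHERE JSON_VALID(footprint)
	//   AND JSON_EXTRACT(footprint, "$.flops_any_avg") BETWEEN ? AND ? [100 500]
}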


@@ -552,12 +552,14 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
var metricConfig *schema.MetricConfig
var peak float64 = 0.0
var unit string = ""
var footprintStat string = ""
for _, f := range filters {
if f.Cluster != nil {
metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric)
peak = metricConfig.Peak
unit = metricConfig.Unit.Prefix + metricConfig.Unit.Base
footprintStat = metricConfig.Footprint
log.Debugf("Cluster %s filter found with peak %f for %s", *f.Cluster.Eq, peak, metric)
}
}
@@ -572,21 +574,26 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
if unit == "" {
unit = m.Unit.Prefix + m.Unit.Base
}
if footprintStat == "" {
footprintStat = m.Footprint
}
}
}
}
}
// log.Debugf("Metric %s: DB %s, Peak %f, Unit %s", metric, dbMetric, peak, unit)
// log.Debugf("Metric %s, Peak %f, Unit %s, Aggregation %s", metric, peak, unit, aggreg)
// Make bins, see https://jereze.com/code/sql-histogram/
start := time.Now()
jm := fmt.Sprintf(`json_extract(footprint, "$.%s")`, metric)
jm := fmt.Sprintf(`json_extract(footprint, "$.%s")`, (metric + "_" + footprintStat))
crossJoinQuery := sq.Select(
fmt.Sprintf(`max(%s) as max`, jm),
fmt.Sprintf(`min(%s) as min`, jm),
).From("job").Where(
"JSON_VALID(footprint)",
).Where(
fmt.Sprintf(`%s is not null`, jm),
).Where(
fmt.Sprintf(`%s <= %f`, jm, peak),
@@ -651,7 +658,7 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
points = append(points, &point)
}
result := model.MetricHistoPoints{Metric: metric, Unit: unit, Data: points}
result := model.MetricHistoPoints{Metric: metric, Unit: unit, Stat: &footprintStat, Data: points}
log.Debugf("Timer jobsStatisticsHistogram %s", time.Since(start))
return &result, nil
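
Finally, a short sketch of how the extraction target for the histogram is now assembled from the metric name and its configured footprint statistic; both values here are hypothetical.

package main

import "fmt"

func main() {
	metric := "flops_any"  // requested metric (hypothetical)
	footprintStat := "avg" // metricConfig.Footprint for that metric (hypothetical)

	// The histogram now extracts "<metric>_<stat>" from the footprint JSON
	// instead of the bare metric name used before this change.
	jm := fmt.Sprintf(`json_extract(footprint, "$.%s")`, metric+"_"+footprintStat)
	fmt.Println(jm)
	// json_extract(footprint, "$.flops_any_avg")
}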