add zoom in metric histograms for running and completed states

- keeping last zoom state does not work
This commit is contained in:
Christoph Kluge 2025-01-23 17:48:45 +01:00
parent 6683a350aa
commit a3e5c424fd
4 changed files with 48 additions and 32 deletions

View File

@ -361,6 +361,10 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
var err error
var stats []*model.JobsStatistics
// Top Level Defaults
var defaultDurationBins int = 24
var defaultMetricBins int = 10
if requireField(ctx, "totalJobs") || requireField(ctx, "totalWalltime") || requireField(ctx, "totalNodes") || requireField(ctx, "totalCores") ||
requireField(ctx, "totalAccs") || requireField(ctx, "totalNodeHours") || requireField(ctx, "totalCoreHours") || requireField(ctx, "totalAccHours") {
if groupBy == nil {
@ -396,8 +400,7 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
if requireField(ctx, "histDuration") || requireField(ctx, "histNumNodes") || requireField(ctx, "histNumCores") || requireField(ctx, "histNumAccs") {
if numDurationBins == nil {
binCount := 24
numDurationBins = &binCount
numDurationBins = &defaultDurationBins
}
if groupBy == nil {
@ -413,8 +416,7 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
if requireField(ctx, "histMetrics") {
if numMetricBins == nil {
binCount := 10
numMetricBins = &binCount
numMetricBins = &defaultMetricBins
}
if groupBy == nil {

View File

@ -8,7 +8,6 @@ import (
"context"
"database/sql"
"fmt"
"math"
"time"
"github.com/ClusterCockpit/cc-backend/internal/config"
@ -504,8 +503,6 @@ func (r *JobRepository) AddMetricHistograms(
) (*model.JobsStatistics, error) {
start := time.Now()
log.Debugf(">>> HELLO ADD HISTO Metrics: Target %d", *targetBinCount)
// Running Jobs Only: First query jobdata from sqlite, then query data and make bins
for _, f := range filter {
if f.State != nil {
@ -699,13 +696,13 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
binQuery := fmt.Sprintf(`CAST( (case when %s = value.max
then value.max*0.999999999 else %s end - value.min) / (value.max -
value.min) * %d as INTEGER )`, jm, jm, bins)
value.min) * %v as INTEGER )`, jm, jm, *bins)
mainQuery := sq.Select(
fmt.Sprintf(`%s + 1 as bin`, binQuery),
fmt.Sprintf(`count(%s) as count`, jm),
fmt.Sprintf(`CAST(((value.max / %d) * (%s )) as INTEGER ) as min`, bins, binQuery),
fmt.Sprintf(`CAST(((value.max / %d) * (%s + 1 )) as INTEGER ) as max`, bins, binQuery),
fmt.Sprintf(`CAST(((value.max / %d) * (%v )) as INTEGER ) as min`, *bins, binQuery),
fmt.Sprintf(`CAST(((value.max / %d) * (%v + 1 )) as INTEGER ) as max`, *bins, binQuery),
).From("job").CrossJoin(
fmt.Sprintf(`(%s) as value`, crossJoinQuerySql), crossJoinQueryArgs...,
).Where(fmt.Sprintf(`%s is not null and %s <= %f`, jm, jm, peak))
@ -729,7 +726,15 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
return nil, err
}
// Setup Array
points := make([]*model.MetricHistoPoint, 0)
for i := 1; i <= *bins; i++ {
binMax := ((int(peak) / *bins) * i)
binMin := ((int(peak) / *bins) * (i - 1))
point := model.MetricHistoPoint{Bin: &i, Count: 0, Min: &binMin, Max: &binMax}
points = append(points, &point)
}
for rows.Next() {
point := model.MetricHistoPoint{}
if err := rows.Scan(&point.Bin, &point.Count, &point.Min, &point.Max); err != nil {
@ -737,7 +742,20 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
return nil, err // Totally bricks cc-backend if returned and if all metrics requested?
}
points = append(points, &point)
for _, e := range points {
if e.Bin != nil && point.Bin != nil {
if *e.Bin == *point.Bin {
e.Count = point.Count
if point.Min != nil {
e.Min = point.Min
}
if point.Max != nil {
e.Max = point.Max
}
break
}
}
}
}
result := model.MetricHistoPoints{Metric: metric, Unit: unit, Stat: &footprintStat, Data: points}
@ -752,6 +770,7 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
filters []*model.JobFilter,
bins *int,
) []*model.MetricHistoPoints {
// Get Jobs
jobs, err := r.QueryJobs(ctx, filters, &model.PageRequest{Page: 1, ItemsPerPage: 500 + 1}, nil)
if err != nil {
@ -793,7 +812,6 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric)
peak = metricConfig.Peak
unit = metricConfig.Unit.Prefix + metricConfig.Unit.Base
log.Debugf("Cluster %s filter found with peak %f for %s", *f.Cluster.Eq, peak, metric)
}
}
@ -813,27 +831,24 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
}
// Make and fill bins
peakBin := peak / float64(*bins)
peakBin := int(peak) / *bins
points := make([]*model.MetricHistoPoint, 0)
for b := 0; b < 10; b++ {
for b := 0; b < *bins; b++ {
count := 0
bindex := b + 1
bmin := math.Round(peakBin * float64(b))
bmax := math.Round(peakBin * (float64(b) + 1.0))
bmin := peakBin * b
bmax := peakBin * (b + 1)
// Iterate AVG values for indexed metric and count for bins
for _, val := range avgs[idx] {
if float64(val) >= bmin && float64(val) < bmax {
if int(val) >= bmin && int(val) < bmax {
count += 1
}
}
bminint := int(bmin)
bmaxint := int(bmax)
// Append Bin to Metric Result Array
point := model.MetricHistoPoint{Bin: &bindex, Count: count, Min: &bminint, Max: &bmaxint}
point := model.MetricHistoPoint{Bin: &bindex, Count: count, Min: &bmin, Max: &bmax}
points = append(points, &point)
}

View File

@ -41,7 +41,7 @@
points: 2,
};
const binCounts = xtime ? [24, 48, 96, 144, 288, 720, 1440] : [10, 20, 50, 100, 200, 500, 1000];
const binCounts = xtime ? [24, 48, 96, 144, 288, 720, 1440] : [10, 20, 50, 100, 200]; // , 500, 1000
function formatTime(t) {
if (t !== null) {
@ -142,7 +142,7 @@
(u) => {
if (zoomableHistogram) {
u.over.addEventListener("dblclick", (e) => {
// console.log('Dispatch Reset')
console.log('Dispatch Reset')
dispatch('zoom', {
lastZoomState: {
x: { time: false },
@ -159,17 +159,17 @@
if (zoomableHistogram) {
const numX = (u.series[0].idxs[1] - u.series[0].idxs[0])
if (xtime && numX <= 12 && lastBinCount !== 1440) {
console.log("Dispatch for Duration: ", numX, lastBinCount, binCounts[binCounts.indexOf(lastBinCount) + 1])
// console.log("Dispatch for Duration: ", numX, lastBinCount, binCounts[binCounts.indexOf(lastBinCount) + 1])
dispatch('zoom', {
durationBinCount: binCounts[binCounts.indexOf(lastBinCount) + 1],
lastZoomState: u?.scales,
});
} else if (!xtime && numX <= 5 && lastBinCount !== 1000) {
} else if (!xtime && numX <= 6 && lastBinCount !== 200) {
// console.log("Dispatch for Metrics: ", numX, lastBinCount, binCounts[binCounts.indexOf(lastBinCount) + 1])
// dispatch('zoom', {
// metricBinCount: binCounts[binCounts.indexOf(lastBinCount) + 1],
// lastZoomState: u?.scales,
// });
dispatch('zoom', {
metricBinCount: binCounts[binCounts.indexOf(lastBinCount) + 1],
lastZoomState: u?.scales,
});
};
}
};
@ -212,7 +212,6 @@
stroke: "#000000",
},
values: (_, t) => t.map((v) => {
// if (!usesBins) console.log("X Scale Val", xlabel, v)
if (xtime) {
return formatTime(v);
} else {
@ -236,7 +235,6 @@
stroke: "#000000",
},
values: (_, t) => t.map((v) => {
// if (!usesBins) console.log("Y Scale Val", ylabel, v)
return formatNumber(v)
}),
},
@ -275,6 +273,7 @@
};
if (zoomableHistogram && zoomState) {
console.log("Apply ZoomState ...", zoomState)
opts.scales = {...zoomState}
}

View File

@ -413,7 +413,7 @@ export function convert2uplot(canvasData, secondsToMinutes = false, secondsToHou
canvasData.forEach( cd => {
if (Object.keys(cd).length == 4) { // MetricHisto Datafromat
uplotData[0].push(cd?.max ? cd.max : 0)
uplotData[1].push(cd.count)
uplotData[1].push(cd?.count ? cd.count : 0)
} else { // Default -> Fill Histodata with zero values on unused value placing -> maybe allows zoom trigger as known
if (secondsToHours) {
let hours = cd.value / 3600