Mirror of https://github.com/ClusterCockpit/cc-backend
add zoom in metric histograms for running and completed states
- keeping last zoomstate does not work
Commit a3e5c424fd (parent 6683a350aa)
@@ -361,6 +361,10 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
     var err error
     var stats []*model.JobsStatistics
 
+    // Top Level Defaults
+    var defaultDurationBins int = 24
+    var defaultMetricBins int = 10
+
     if requireField(ctx, "totalJobs") || requireField(ctx, "totalWalltime") || requireField(ctx, "totalNodes") || requireField(ctx, "totalCores") ||
         requireField(ctx, "totalAccs") || requireField(ctx, "totalNodeHours") || requireField(ctx, "totalCoreHours") || requireField(ctx, "totalAccHours") {
         if groupBy == nil {
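The two defaults above replace the bin counts that were previously hard-coded inside each histogram branch (see the next two hunks). A minimal Go sketch of the nil-pointer default pattern this introduces; resolveBins is a hypothetical helper for illustration, not cc-backend code:

package main

import "fmt"

// Top-level defaults, mirroring the two variables added in the hunk above.
var defaultDurationBins int = 24
var defaultMetricBins int = 10

// resolveBins shows the pattern: optional GraphQL Int arguments arrive as
// *int, and the shared defaults are substituted only when the caller
// sent nothing (hypothetical helper, assumed signature).
func resolveBins(numDurationBins, numMetricBins *int) (int, int) {
    if numDurationBins == nil {
        numDurationBins = &defaultDurationBins
    }
    if numMetricBins == nil {
        numMetricBins = &defaultMetricBins
    }
    return *numDurationBins, *numMetricBins
}

func main() {
    requested := 48
    d, m := resolveBins(&requested, nil) // caller omitted the metric bin count
    fmt.Println(d, m)                    // 48 10
}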
@@ -396,8 +400,7 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
     if requireField(ctx, "histDuration") || requireField(ctx, "histNumNodes") || requireField(ctx, "histNumCores") || requireField(ctx, "histNumAccs") {
 
         if numDurationBins == nil {
-            binCount := 24
-            numDurationBins = &binCount
+            numDurationBins = &defaultDurationBins
         }
 
         if groupBy == nil {
@@ -413,8 +416,7 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
     if requireField(ctx, "histMetrics") {
 
         if numMetricBins == nil {
-            binCount := 10
-            numMetricBins = &binCount
+            numMetricBins = &defaultMetricBins
         }
 
         if groupBy == nil {
@@ -8,7 +8,6 @@ import (
     "context"
     "database/sql"
     "fmt"
-    "math"
     "time"
 
     "github.com/ClusterCockpit/cc-backend/internal/config"
@@ -504,8 +503,6 @@ func (r *JobRepository) AddMetricHistograms(
 ) (*model.JobsStatistics, error) {
     start := time.Now()
 
-    log.Debugf(">>> HELLO ADD HISTO Metrics: Target %d", *targetBinCount)
-
     // Running Jobs Only: First query jobdata from sqlite, then query data and make bins
     for _, f := range filter {
         if f.State != nil {
@@ -699,13 +696,13 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
 
     binQuery := fmt.Sprintf(`CAST( (case when %s = value.max
         then value.max*0.999999999 else %s end - value.min) / (value.max -
-        value.min) * %d as INTEGER )`, jm, jm, bins)
+        value.min) * %v as INTEGER )`, jm, jm, *bins)
 
     mainQuery := sq.Select(
         fmt.Sprintf(`%s + 1 as bin`, binQuery),
         fmt.Sprintf(`count(%s) as count`, jm),
-        fmt.Sprintf(`CAST(((value.max / %d) * (%s )) as INTEGER ) as min`, bins, binQuery),
-        fmt.Sprintf(`CAST(((value.max / %d) * (%s + 1 )) as INTEGER ) as max`, bins, binQuery),
+        fmt.Sprintf(`CAST(((value.max / %d) * (%v )) as INTEGER ) as min`, *bins, binQuery),
+        fmt.Sprintf(`CAST(((value.max / %d) * (%v + 1 )) as INTEGER ) as max`, *bins, binQuery),
     ).From("job").CrossJoin(
         fmt.Sprintf(`(%s) as value`, crossJoinQuerySql), crossJoinQueryArgs...,
     ).Where(fmt.Sprintf(`%s is not null and %s <= %f`, jm, jm, peak))
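The binQuery expression maps each metric value into a zero-based bin index by scaling (value - min) / (max - min) into [0, bins); the *0.999999999 factor nudges the exact maximum down so it lands in the last bin instead of opening a bin bins+1, and the `%s + 1 as bin` in mainQuery then shifts to 1-based labels. A minimal Go re-implementation of the same arithmetic, illustrative only and not cc-backend code:

package main

import "fmt"

// binIndex mirrors the SQL expression in plain Go: values are scaled
// into [0, bins), and the exact maximum is nudged down so it falls into
// the last bin rather than producing an out-of-range index.
func binIndex(value, min, max float64, bins int) int {
    if value == max {
        value = max * 0.999999999
    }
    return int((value - min) / (max - min) * float64(bins))
}

func main() {
    // 10 bins over [0, 100]: 99.9 lands in bin 9, and so does exactly 100.
    fmt.Println(binIndex(99.9, 0, 100, 10)) // 9
    fmt.Println(binIndex(100, 0, 100, 10))  // 9
    fmt.Println(binIndex(0, 0, 100, 10))    // 0
}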
@@ -729,7 +726,15 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
         return nil, err
     }
 
+    // Setup Array
     points := make([]*model.MetricHistoPoint, 0)
+    for i := 1; i <= *bins; i++ {
+        binMax := ((int(peak) / *bins) * i)
+        binMin := ((int(peak) / *bins) * (i - 1))
+        point := model.MetricHistoPoint{Bin: &i, Count: 0, Min: &binMin, Max: &binMax}
+        points = append(points, &point)
+    }
+
     for rows.Next() {
         point := model.MetricHistoPoint{}
         if err := rows.Scan(&point.Bin, &point.Count, &point.Min, &point.Max); err != nil {
@@ -737,7 +742,20 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
             return nil, err // Totally bricks cc-backend if returned and if all metrics requested?
         }
 
-        points = append(points, &point)
+        for _, e := range points {
+            if e.Bin != nil && point.Bin != nil {
+                if *e.Bin == *point.Bin {
+                    e.Count = point.Count
+                    if point.Min != nil {
+                        e.Min = point.Min
+                    }
+                    if point.Max != nil {
+                        e.Max = point.Max
+                    }
+                    break
+                }
+            }
+        }
     }
 
     result := model.MetricHistoPoints{Metric: metric, Unit: unit, Stat: &footprintStat, Data: points}
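Together, these two hunks change how the result set is assembled: instead of appending only the bins the query happens to return, all *bins bins are pre-filled with Count 0 and the scanned rows merely overwrite matching entries, so empty bins still appear in the histogram. A condensed Go sketch of that pre-fill-then-merge flow, using a simplified stand-in for model.MetricHistoPoint:

package main

import "fmt"

// histoPoint is a simplified stand-in for model.MetricHistoPoint.
type histoPoint struct {
    bin, count, min, max int
}

// prefillBins mirrors the "Setup Array" loop: every bin exists up front
// with count 0, so bins the SQL query returns no rows for still appear.
func prefillBins(peak, bins int) []*histoPoint {
    points := make([]*histoPoint, 0, bins)
    for i := 1; i <= bins; i++ {
        points = append(points, &histoPoint{
            bin: i,
            min: (peak / bins) * (i - 1),
            max: (peak / bins) * i,
        })
    }
    return points
}

// merge copies a queried count onto the matching pre-filled bin, like the
// loop that replaced the plain append above.
func merge(points []*histoPoint, queried histoPoint) {
    for _, e := range points {
        if e.bin == queried.bin {
            e.count = queried.count
            break
        }
    }
}

func main() {
    points := prefillBins(100, 10)
    merge(points, histoPoint{bin: 3, count: 7})
    fmt.Println(*points[2]) // {3 7 20 30}
}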
@@ -752,6 +770,7 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
     filters []*model.JobFilter,
+    bins *int,
 ) []*model.MetricHistoPoints {
 
     // Get Jobs
     jobs, err := r.QueryJobs(ctx, filters, &model.PageRequest{Page: 1, ItemsPerPage: 500 + 1}, nil)
     if err != nil {
@@ -793,7 +812,6 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
                 metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric)
                 peak = metricConfig.Peak
                 unit = metricConfig.Unit.Prefix + metricConfig.Unit.Base
-                log.Debugf("Cluster %s filter found with peak %f for %s", *f.Cluster.Eq, peak, metric)
             }
         }
 
@@ -813,27 +831,24 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
     }
 
     // Make and fill bins
-    peakBin := peak / float64(*bins)
+    peakBin := int(peak) / *bins
 
     points := make([]*model.MetricHistoPoint, 0)
-    for b := 0; b < 10; b++ {
+    for b := 0; b < *bins; b++ {
         count := 0
         bindex := b + 1
-        bmin := math.Round(peakBin * float64(b))
-        bmax := math.Round(peakBin * (float64(b) + 1.0))
+        bmin := peakBin * b
+        bmax := peakBin * (b + 1)
 
         // Iterate AVG values for indexed metric and count for bins
         for _, val := range avgs[idx] {
-            if float64(val) >= bmin && float64(val) < bmax {
+            if int(val) >= bmin && int(val) < bmax {
                 count += 1
             }
         }
 
-        bminint := int(bmin)
-        bmaxint := int(bmax)
-
         // Append Bin to Metric Result Array
-        point := model.MetricHistoPoint{Bin: &bindex, Count: count, Min: &bminint, Max: &bmaxint}
+        point := model.MetricHistoPoint{Bin: &bindex, Count: count, Min: &bmin, Max: &bmax}
         points = append(points, &point)
     }
 
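For running jobs the bin boundaries are now pure integer arithmetic on the metric's peak (which is why the math import was dropped above), and the loop runs over the requested *bins instead of a hard-coded 10. A self-contained Go sketch of this counting scheme; avgs and peak stand in for the per-metric data the repository collects:

package main

import "fmt"

// countIntoBins sketches the rewritten running-jobs path: integer bin
// boundaries derived from the metric peak, with each averaged job value
// counted into whichever half-open [bmin, bmax) range it falls in.
func countIntoBins(avgs []float64, peak float64, bins int) []int {
    peakBin := int(peak) / bins
    counts := make([]int, bins)
    for b := 0; b < bins; b++ {
        bmin := peakBin * b
        bmax := peakBin * (b + 1)
        for _, val := range avgs {
            if int(val) >= bmin && int(val) < bmax {
                counts[b]++
            }
        }
    }
    return counts
}

func main() {
    fmt.Println(countIntoBins([]float64{5, 12, 95}, 100, 10))
    // [1 1 0 0 0 0 0 0 0 1]
}

Note that, as in the diff itself, a value exactly at peak fails the strict < bmax check on the last bin and is not counted.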
@@ -41,7 +41,7 @@
   points: 2,
   };
 
-  const binCounts = xtime ? [24, 48, 96, 144, 288, 720, 1440] : [10, 20, 50, 100, 200, 500, 1000];
+  const binCounts = xtime ? [24, 48, 96, 144, 288, 720, 1440] : [10, 20, 50, 100, 200]; // , 500, 1000
 
   function formatTime(t) {
     if (t !== null) {
@@ -142,7 +142,7 @@
     (u) => {
       if (zoomableHistogram) {
         u.over.addEventListener("dblclick", (e) => {
-          // console.log('Dispatch Reset')
+          console.log('Dispatch Reset')
           dispatch('zoom', {
             lastZoomState: {
               x: { time: false },
@@ -159,17 +159,17 @@
       if (zoomableHistogram) {
         const numX = (u.series[0].idxs[1] - u.series[0].idxs[0])
         if (xtime && numX <= 12 && lastBinCount !== 1440) {
-          console.log("Dispatch for Duration: ", numX, lastBinCount, binCounts[binCounts.indexOf(lastBinCount) + 1])
+          // console.log("Dispatch for Duration: ", numX, lastBinCount, binCounts[binCounts.indexOf(lastBinCount) + 1])
           dispatch('zoom', {
             durationBinCount: binCounts[binCounts.indexOf(lastBinCount) + 1],
             lastZoomState: u?.scales,
           });
-        } else if (!xtime && numX <= 5 && lastBinCount !== 1000) {
+        } else if (!xtime && numX <= 6 && lastBinCount !== 200) {
           // console.log("Dispatch for Metrics: ", numX, lastBinCount, binCounts[binCounts.indexOf(lastBinCount) + 1])
-          // dispatch('zoom', {
-          //   metricBinCount: binCounts[binCounts.indexOf(lastBinCount) + 1],
-          //   lastZoomState: u?.scales,
-          // });
+          dispatch('zoom', {
+            metricBinCount: binCounts[binCounts.indexOf(lastBinCount) + 1],
+            lastZoomState: u?.scales,
+          });
         };
       }
     };
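The zoom handler walks a fixed progression of bin counts: each qualifying zoom-in dispatches the entry after lastBinCount, and the guards (lastBinCount !== 1440 for duration, !== 200 for metrics) stop at the new caps. Sketched here in Go for compactness, since the actual handler is Svelte; nextBinCount is a hypothetical name:

package main

import "fmt"

// nextBinCount advances to the next entry of the fixed bin-count list,
// mirroring the binCounts arrays above; it returns the input unchanged
// once the cap (1440 or 200) is reached.
func nextBinCount(last int, xtime bool) int {
    var binCounts []int
    if xtime {
        binCounts = []int{24, 48, 96, 144, 288, 720, 1440}
    } else {
        binCounts = []int{10, 20, 50, 100, 200} // 500, 1000 disabled in this commit
    }
    for i, c := range binCounts {
        if c == last && i+1 < len(binCounts) {
            return binCounts[i+1]
        }
    }
    return last // already at the cap
}

func main() {
    fmt.Println(nextBinCount(24, true))   // 48
    fmt.Println(nextBinCount(100, false)) // 200
    fmt.Println(nextBinCount(200, false)) // 200
}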
@@ -212,7 +212,6 @@
         stroke: "#000000",
       },
       values: (_, t) => t.map((v) => {
-        // if (!usesBins) console.log("X Scale Val", xlabel, v)
         if (xtime) {
           return formatTime(v);
         } else {
@@ -236,7 +235,6 @@
         stroke: "#000000",
       },
       values: (_, t) => t.map((v) => {
-        // if (!usesBins) console.log("Y Scale Val", ylabel, v)
         return formatNumber(v)
       }),
     },
@@ -275,6 +273,7 @@
   };
 
   if (zoomableHistogram && zoomState) {
+    console.log("Apply ZoomState ...", zoomState)
     opts.scales = {...zoomState}
   }
 
@@ -413,7 +413,7 @@ export function convert2uplot(canvasData, secondsToMinutes = false, secondsToHou
   canvasData.forEach( cd => {
     if (Object.keys(cd).length == 4) { // MetricHisto Datafromat
       uplotData[0].push(cd?.max ? cd.max : 0)
-      uplotData[1].push(cd.count)
+      uplotData[1].push(cd?.count ? cd.count : 0)
     } else { // Default -> Fill Histodata with zero values on unused value placing -> maybe allows zoom trigger as known
       if (secondsToHours) {
         let hours = cd.value / 3600