Merge pull request #323 from ClusterCockpit/add_histogram_bin_select

Add histogram bin select
Jan Eitzinger 2025-01-28 13:38:15 +01:00 committed by GitHub
commit 13c9a12336
9 changed files with 1391 additions and 375 deletions


@@ -236,7 +236,7 @@ type Query {
   jobsFootprints(filter: [JobFilter!], metrics: [String!]!): Footprints
   jobs(filter: [JobFilter!], page: PageRequest, order: OrderByInput): JobResultList!
-  jobsStatistics(filter: [JobFilter!], metrics: [String!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate): [JobsStatistics!]!
+  jobsStatistics(filter: [JobFilter!], metrics: [String!], page: PageRequest, sortBy: SortByAggregate, groupBy: Aggregate, numDurationBins: String, numMetricBins: Int): [JobsStatistics!]!
   rooflineHeatmap(filter: [JobFilter!]!, rows: Int!, cols: Int!, minX: Float!, minY: Float!, maxX: Float!, maxY: Float!): [[Float!]!]!

File diff suppressed because it is too large


@@ -2,7 +2,7 @@ package graph
 // This file will be automatically regenerated based on the schema, any resolver implementations
 // will be copied through when generating and any unknown code will be moved to the end.
-// Code generated by github.com/99designs/gqlgen version v0.17.49
+// Code generated by github.com/99designs/gqlgen version v0.17.57
 import (
     "context"
@@ -354,10 +354,14 @@ func (r *queryResolver) Jobs(ctx context.Context, filter []*model.JobFilter, pag
 }
 // JobsStatistics is the resolver for the jobsStatistics field.
-func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) ([]*model.JobsStatistics, error) {
+func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate, numDurationBins *string, numMetricBins *int) ([]*model.JobsStatistics, error) {
     var err error
     var stats []*model.JobsStatistics
+    // Top Level Defaults
+    var defaultDurationBins string = "1h"
+    var defaultMetricBins int = 10
     if requireField(ctx, "totalJobs") || requireField(ctx, "totalWalltime") || requireField(ctx, "totalNodes") || requireField(ctx, "totalCores") ||
         requireField(ctx, "totalAccs") || requireField(ctx, "totalNodeHours") || requireField(ctx, "totalCoreHours") || requireField(ctx, "totalAccHours") {
         if groupBy == nil {
@@ -391,8 +395,13 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
     }
     if requireField(ctx, "histDuration") || requireField(ctx, "histNumNodes") || requireField(ctx, "histNumCores") || requireField(ctx, "histNumAccs") {
+        if numDurationBins == nil {
+            numDurationBins = &defaultDurationBins
+        }
         if groupBy == nil {
-            stats[0], err = r.Repo.AddHistograms(ctx, filter, stats[0])
+            stats[0], err = r.Repo.AddHistograms(ctx, filter, stats[0], numDurationBins)
             if err != nil {
                 return nil, err
             }
@@ -402,8 +411,13 @@ func (r *queryResolver) JobsStatistics(ctx context.Context, filter []*model.JobF
     }
     if requireField(ctx, "histMetrics") {
+        if numMetricBins == nil {
+            numMetricBins = &defaultMetricBins
+        }
         if groupBy == nil {
-            stats[0], err = r.Repo.AddMetricHistograms(ctx, filter, metrics, stats[0])
+            stats[0], err = r.Repo.AddMetricHistograms(ctx, filter, metrics, stats[0], numMetricBins)
             if err != nil {
                 return nil, err
             }
@@ -490,11 +504,9 @@ func (r *Resolver) Query() generated.QueryResolver { return &queryResolver{r} }
 // SubCluster returns generated.SubClusterResolver implementation.
 func (r *Resolver) SubCluster() generated.SubClusterResolver { return &subClusterResolver{r} }
-type (
-    clusterResolver struct{ *Resolver }
-    jobResolver struct{ *Resolver }
-    metricValueResolver struct{ *Resolver }
-    mutationResolver struct{ *Resolver }
-    queryResolver struct{ *Resolver }
-    subClusterResolver struct{ *Resolver }
-)
+type clusterResolver struct{ *Resolver }
+type jobResolver struct{ *Resolver }
+type metricValueResolver struct{ *Resolver }
+type mutationResolver struct{ *Resolver }
+type queryResolver struct{ *Resolver }
+type subClusterResolver struct{ *Resolver }


@@ -8,7 +8,6 @@ import (
     "context"
     "database/sql"
     "fmt"
-    "math"
     "time"
     "github.com/ClusterCockpit/cc-backend/internal/config"
@@ -447,15 +446,40 @@ func (r *JobRepository) AddHistograms(
     ctx context.Context,
     filter []*model.JobFilter,
     stat *model.JobsStatistics,
+    durationBins *string,
 ) (*model.JobsStatistics, error) {
     start := time.Now()
+    var targetBinCount int
+    var targetBinSize int
+    switch {
+    case *durationBins == "1m": // 1 Minute Bins + Max 60 Bins -> Max 60 Minutes
+        targetBinCount = 60
+        targetBinSize = 60
+    case *durationBins == "10m": // 10 Minute Bins + Max 72 Bins -> Max 12 Hours
+        targetBinCount = 72
+        targetBinSize = 600
+    case *durationBins == "1h": // 1 Hour Bins + Max 48 Bins -> Max 48 Hours
+        targetBinCount = 48
+        targetBinSize = 3600
+    case *durationBins == "6h": // 6 Hour Bins + Max 12 Bins -> Max 3 Days
+        targetBinCount = 12
+        targetBinSize = 21600
+    case *durationBins == "12h": // 12 hour Bins + Max 14 Bins -> Max 7 Days
+        targetBinCount = 14
+        targetBinSize = 43200
+    default: // 24h
+        targetBinCount = 24
+        targetBinSize = 3600
+    }
     castType := r.getCastType()
     var err error
-    value := fmt.Sprintf(`CAST(ROUND((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) / 3600) as %s) as value`, time.Now().Unix(), castType)
-    stat.HistDuration, err = r.jobsStatisticsHistogram(ctx, value, filter)
+    // Return X-Values always as seconds, will be formatted into minutes and hours in frontend
+    value := fmt.Sprintf(`CAST(ROUND(((CASE WHEN job.job_state = "running" THEN %d - job.start_time ELSE job.duration END) / %d) + 1) as %s) as value`, time.Now().Unix(), targetBinSize, castType)
+    stat.HistDuration, err = r.jobsDurationStatisticsHistogram(ctx, value, filter, targetBinSize, &targetBinCount)
     if err != nil {
-        log.Warn("Error while loading job statistics histogram: running jobs")
+        log.Warn("Error while loading job statistics histogram: job duration")
        return nil, err
    }
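The SQL expression built into value above does the actual binning: it integer-divides a job's duration (or, for running jobs, the elapsed time since start) by targetBinSize and adds 1, so the histogram's X values come back as whole multiples of the bin size in seconds. A minimal Go sketch of the same arithmetic, illustrative only and not part of the commit, assuming the integer division SQLite applies here; the function and variable names are mine:

package main

import "fmt"

// durationBin mirrors the SQL expression: divide the job duration by the bin
// size (integer division) and shift by one so bin numbering starts at 1.
func durationBin(durationSeconds, targetBinSize int) int {
    return durationSeconds/targetBinSize + 1
}

func main() {
    targetBinSize := 3600 // "1h" preset: 48 bins of 3600 seconds each
    bin := durationBin(5400, targetBinSize)
    fmt.Println(bin, bin*targetBinSize) // a 90-minute job lands in bin 2, reported as 7200 s
}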
@@ -487,6 +511,7 @@ func (r *JobRepository) AddMetricHistograms(
     filter []*model.JobFilter,
     metrics []string,
     stat *model.JobsStatistics,
+    targetBinCount *int,
 ) (*model.JobsStatistics, error) {
     start := time.Now()
@@ -494,7 +519,7 @@ func (r *JobRepository) AddMetricHistograms(
     for _, f := range filter {
         if f.State != nil {
             if len(f.State) == 1 && f.State[0] == "running" {
-                stat.HistMetrics = r.runningJobsMetricStatisticsHistogram(ctx, metrics, filter)
+                stat.HistMetrics = r.runningJobsMetricStatisticsHistogram(ctx, metrics, filter, targetBinCount)
                 log.Debugf("Timer AddMetricHistograms %s", time.Since(start))
                 return stat, nil
             }
@@ -503,7 +528,7 @@ func (r *JobRepository) AddMetricHistograms(
     // All other cases: Query and make bins in sqlite directly
     for _, m := range metrics {
-        metricHisto, err := r.jobsMetricStatisticsHistogram(ctx, m, filter)
+        metricHisto, err := r.jobsMetricStatisticsHistogram(ctx, m, filter, targetBinCount)
         if err != nil {
             log.Warnf("Error while loading job metric statistics histogram: %s", m)
             continue
@@ -540,6 +565,7 @@ func (r *JobRepository) jobsStatisticsHistogram(
     }
     points := make([]*model.HistoPoint, 0)
+    // is it possible to introduce zero values here? requires info about bincount
     for rows.Next() {
         point := model.HistoPoint{}
         if err := rows.Scan(&point.Value, &point.Count); err != nil {
@@ -553,10 +579,66 @@ func (r *JobRepository) jobsStatisticsHistogram(
     return points, nil
 }
+func (r *JobRepository) jobsDurationStatisticsHistogram(
+    ctx context.Context,
+    value string,
+    filters []*model.JobFilter,
+    binSizeSeconds int,
+    targetBinCount *int,
+) ([]*model.HistoPoint, error) {
+    start := time.Now()
+    query, qerr := SecurityCheck(ctx,
+        sq.Select(value, "COUNT(job.id) AS count").From("job"))
+    if qerr != nil {
+        return nil, qerr
+    }
+    // Setup Array
+    points := make([]*model.HistoPoint, 0)
+    for i := 1; i <= *targetBinCount; i++ {
+        point := model.HistoPoint{Value: i * binSizeSeconds, Count: 0}
+        points = append(points, &point)
+    }
+    for _, f := range filters {
+        query = BuildWhereClause(f, query)
+    }
+    rows, err := query.GroupBy("value").RunWith(r.DB).Query()
+    if err != nil {
+        log.Error("Error while running query")
+        return nil, err
+    }
+    // Fill Array at matching $Value
+    for rows.Next() {
+        point := model.HistoPoint{}
+        if err := rows.Scan(&point.Value, &point.Count); err != nil {
+            log.Warn("Error while scanning rows")
+            return nil, err
+        }
+        for _, e := range points {
+            if e.Value == (point.Value * binSizeSeconds) {
+                // Note:
+                // Matching on unmodified integer value (and multiplying point.Value by binSizeSeconds after match)
+                // causes frontend to loop into highest targetBinCount, due to zoom condition instantly being fullfilled (cause unknown)
+                e.Count = point.Count
+                break
+            }
+        }
+    }
+    log.Debugf("Timer jobsStatisticsHistogram %s", time.Since(start))
+    return points, nil
+}
 func (r *JobRepository) jobsMetricStatisticsHistogram(
     ctx context.Context,
     metric string,
     filters []*model.JobFilter,
+    bins *int,
 ) (*model.MetricHistoPoints, error) {
     // Get specific Peak or largest Peak
     var metricConfig *schema.MetricConfig
@@ -624,16 +706,15 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
         return nil, sqlerr
     }
-    bins := 10
     binQuery := fmt.Sprintf(`CAST( (case when %s = value.max
         then value.max*0.999999999 else %s end - value.min) / (value.max -
-        value.min) * %d as INTEGER )`, jm, jm, bins)
+        value.min) * %v as INTEGER )`, jm, jm, *bins)
     mainQuery := sq.Select(
         fmt.Sprintf(`%s + 1 as bin`, binQuery),
         fmt.Sprintf(`count(%s) as count`, jm),
-        fmt.Sprintf(`CAST(((value.max / %d) * (%s )) as INTEGER ) as min`, bins, binQuery),
-        fmt.Sprintf(`CAST(((value.max / %d) * (%s + 1 )) as INTEGER ) as max`, bins, binQuery),
+        fmt.Sprintf(`CAST(((value.max / %d) * (%v )) as INTEGER ) as min`, *bins, binQuery),
+        fmt.Sprintf(`CAST(((value.max / %d) * (%v + 1 )) as INTEGER ) as max`, *bins, binQuery),
     ).From("job").CrossJoin(
         fmt.Sprintf(`(%s) as value`, crossJoinQuerySql), crossJoinQueryArgs...,
     ).Where(fmt.Sprintf(`%s is not null and %s <= %f`, jm, jm, peak))
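The binQuery above maps each footprint value onto one of *bins equal-width buckets between value.min and value.max; the value.max*0.999999999 guard keeps the maximum itself inside the last bin instead of opening bin *bins+1. A minimal Go sketch of the same formula, illustrative only and not part of the commit (function and parameter names are mine):

package main

import "fmt"

// metricBin mirrors the SQL binQuery: scale the value into [0, bins) and shift by one.
func metricBin(value, min, max float64, bins int) int {
    if value == max {
        value = max * 0.999999999 // same guard as in the SQL above
    }
    return int((value-min)/(max-min)*float64(bins)) + 1
}

func main() {
    // Example with min=0, max=peak=1000 and the default of 10 bins:
    fmt.Println(metricBin(250, 0, 1000, 10))  // 3
    fmt.Println(metricBin(1000, 0, 1000, 10)) // 10, not 11, thanks to the guard
}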
@@ -657,7 +738,15 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
         return nil, err
     }
+    // Setup Array
     points := make([]*model.MetricHistoPoint, 0)
+    for i := 1; i <= *bins; i++ {
+        binMax := ((int(peak) / *bins) * i)
+        binMin := ((int(peak) / *bins) * (i - 1))
+        point := model.MetricHistoPoint{Bin: &i, Count: 0, Min: &binMin, Max: &binMax}
+        points = append(points, &point)
+    }
     for rows.Next() {
         point := model.MetricHistoPoint{}
         if err := rows.Scan(&point.Bin, &point.Count, &point.Min, &point.Max); err != nil {
@@ -665,7 +754,20 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
             return nil, err // Totally bricks cc-backend if returned and if all metrics requested?
         }
-        points = append(points, &point)
+        for _, e := range points {
+            if e.Bin != nil && point.Bin != nil {
+                if *e.Bin == *point.Bin {
+                    e.Count = point.Count
+                    if point.Min != nil {
+                        e.Min = point.Min
+                    }
+                    if point.Max != nil {
+                        e.Max = point.Max
+                    }
+                    break
+                }
+            }
+        }
     }
     result := model.MetricHistoPoints{Metric: metric, Unit: unit, Stat: &footprintStat, Data: points}
@@ -678,7 +780,9 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
     ctx context.Context,
     metrics []string,
     filters []*model.JobFilter,
+    bins *int,
 ) []*model.MetricHistoPoints {
     // Get Jobs
     jobs, err := r.QueryJobs(ctx, filters, &model.PageRequest{Page: 1, ItemsPerPage: 500 + 1}, nil)
     if err != nil {
@@ -720,7 +824,6 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
             metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric)
             peak = metricConfig.Peak
             unit = metricConfig.Unit.Prefix + metricConfig.Unit.Base
-            log.Debugf("Cluster %s filter found with peak %f for %s", *f.Cluster.Eq, peak, metric)
         }
     }
@@ -740,28 +843,24 @@ func (r *JobRepository) runningJobsMetricStatisticsHistogram(
     }
     // Make and fill bins
-    bins := 10.0
-    peakBin := peak / bins
+    peakBin := int(peak) / *bins
     points := make([]*model.MetricHistoPoint, 0)
-    for b := 0; b < 10; b++ {
+    for b := 0; b < *bins; b++ {
         count := 0
         bindex := b + 1
-        bmin := math.Round(peakBin * float64(b))
-        bmax := math.Round(peakBin * (float64(b) + 1.0))
+        bmin := peakBin * b
+        bmax := peakBin * (b + 1)
         // Iterate AVG values for indexed metric and count for bins
         for _, val := range avgs[idx] {
-            if float64(val) >= bmin && float64(val) < bmax {
+            if int(val) >= bmin && int(val) < bmax {
                 count += 1
             }
         }
-        bminint := int(bmin)
-        bmaxint := int(bmax)
         // Append Bin to Metric Result Array
-        point := model.MetricHistoPoint{Bin: &bindex, Count: count, Min: &bminint, Max: &bmaxint}
+        point := model.MetricHistoPoint{Bin: &bindex, Count: count, Min: &bmin, Max: &bmax}
         points = append(points, &point)
     }
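For running jobs the bins are built in Go rather than in SQL: peakBin is the integer bin width int(peak) / *bins, and bin b counts the averaged metric values falling into the half-open interval [peakBin*b, peakBin*(b+1)). A minimal standalone sketch of that bucketing, illustrative only and not part of the commit (names are mine):

package main

import "fmt"

// bucketCounts mirrors the loop above: fixed-width integer bins derived from peak.
func bucketCounts(avgs []float64, peak float64, bins int) []int {
    peakBin := int(peak) / bins
    counts := make([]int, bins)
    for b := 0; b < bins; b++ {
        bmin, bmax := peakBin*b, peakBin*(b+1)
        for _, val := range avgs {
            if int(val) >= bmin && int(val) < bmax {
                counts[b]++
            }
        }
    }
    return counts
}

func main() {
    // peak=1000, 10 bins -> width 100: 12 and 95 land in bin 1, 180 in bin 2, 999.4 in bin 10.
    fmt.Println(bucketCounts([]float64{12, 95, 180, 999.4}, 1000, 10)) // [2 1 0 0 0 0 0 0 0 1]
}

Note that with these half-open intervals a value exactly equal to peak is not counted in any bin; in this example that case simply does not occur.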


@@ -174,6 +174,7 @@
     },
   });
+  // Note: Different footprints than those saved in DB per Job -> Caused by Legacy Naming
   $: footprintsQuery = queryStore({
     client: client,
     query: gql`
@@ -470,10 +471,12 @@
           height={300}
           data={convert2uplot($statsQuery.data.stats[0].histDuration)}
           title="Duration Distribution"
-          xlabel="Current Runtimes"
-          xunit="Hours"
+          xlabel="Current Job Runtimes"
+          xunit="Runtime"
           ylabel="Number of Jobs"
           yunit="Jobs"
+          usesBins
+          xtime
         />
       {/key}
     </Col>


@@ -463,7 +463,7 @@
   <hr />
-  <!-- Usage Stats as Histograms -->
+  <!-- User and Project Stats as Pie-Charts -->
   <Row cols={{ lg: 4, md: 2, sm: 1 }}>
     <Col class="p-2">
@@ -587,17 +587,23 @@
       {/key}
     </Col>
   </Row>
   <hr class="my-2" />
+  <!-- Static Stats as Histograms : Running Duration && Allocated Hardware Counts-->
   <Row cols={{ lg: 2, md: 1 }}>
     <Col class="p-2">
       {#key $mainQuery.data.stats}
         <Histogram
           data={convert2uplot($mainQuery.data.stats[0].histDuration)}
           title="Duration Distribution"
-          xlabel="Current Runtimes"
-          xunit="Hours"
+          xlabel="Current Job Runtimes"
+          xunit="Runtime"
           ylabel="Number of Jobs"
           yunit="Jobs"
+          usesBins
+          xtime
         />
       {/key}
     </Col>
@@ -640,7 +646,11 @@
       {/key}
     </Col>
   </Row>
   <hr class="my-2" />
+  <!-- Selectable Stats as Histograms : Average Values of Running Jobs -->
   {#if metricsInHistograms}
     {#key $mainQuery.data.stats[0].histMetrics}
       <PlotGrid


@@ -17,6 +17,9 @@
     Icon,
     Card,
     Spinner,
+    Input,
+    InputGroup,
+    InputGroupText
   } from "@sveltestrap/sveltestrap";
   import {
     queryStore,
@@ -59,6 +62,11 @@
   let showFootprint = filterPresets.cluster
     ? !!ccconfig[`plot_list_showFootprint:${filterPresets.cluster}`]
     : !!ccconfig.plot_list_showFootprint;
+  let numDurationBins = "1h";
+  let numMetricBins = 10;
+  let durationBinOptions = ["1m","10m","1h","6h","12h"];
+  let metricBinOptions = [10, 20, 50, 100];
   $: metricsInHistograms = selectedCluster
     ? ccconfig[`user_view_histogramMetrics:${selectedCluster}`] || []
@@ -68,8 +76,8 @@
   $: stats = queryStore({
     client: client,
     query: gql`
-      query ($jobFilters: [JobFilter!]!, $metricsInHistograms: [String!]) {
-        jobsStatistics(filter: $jobFilters, metrics: $metricsInHistograms) {
+      query ($jobFilters: [JobFilter!]!, $metricsInHistograms: [String!], $numDurationBins: String, $numMetricBins: Int) {
+        jobsStatistics(filter: $jobFilters, metrics: $metricsInHistograms, numDurationBins: $numDurationBins , numMetricBins: $numMetricBins ) {
           totalJobs
           shortJobs
           totalWalltime
@@ -96,7 +104,7 @@
         }
       }
     `,
-    variables: { jobFilters, metricsInHistograms },
+    variables: { jobFilters, metricsInHistograms, numDurationBins, numMetricBins },
   });
   onMount(() => filterComponent.updateFilters());
@@ -118,8 +126,8 @@
 {/if}
 <!-- ROW2: Tools-->
-<Row cols={{ xs: 1, md: 2, lg: 4}} class="mb-3">
-  <Col lg="2" class="mb-2 mb-lg-0">
+<Row cols={{ xs: 1, md: 2, lg: 6}} class="mb-3">
+  <Col class="mb-2 mb-lg-0">
     <ButtonGroup class="w-100">
       <Button outline color="primary" on:click={() => (isSortingOpen = true)}>
         <Icon name="sort-up" /> Sorting
@@ -133,7 +141,7 @@
       </Button>
     </ButtonGroup>
   </Col>
-  <Col lg="4" xl="6" class="mb-1 mb-lg-0">
+  <Col lg="4" class="mb-1 mb-lg-0">
     <Filters
       {filterPresets}
       {matchedJobs}
@@ -148,12 +156,27 @@
       }}
     />
   </Col>
-  <Col lg="3" xl="2" class="mb-2 mb-lg-0">
+  <Col class="mb-2 mb-lg-0">
+    <InputGroup>
+      <InputGroupText>
+        <Icon name="bar-chart-line-fill" />
+      </InputGroupText>
+      <InputGroupText>
+        Duration Bin Size
+      </InputGroupText>
+      <Input type="select" bind:value={numDurationBins} style="max-width: 120px;">
+        {#each durationBinOptions as dbin}
+          <option value={dbin}>{dbin}</option>
+        {/each}
+      </Input>
+    </InputGroup>
+  </Col>
+  <Col class="mb-2 mb-lg-0">
     <TextFilter
       on:set-filter={({ detail }) => filterComponent.updateFilters(detail)}
     />
   </Col>
-  <Col lg="3" xl="2" class="mb-1 mb-lg-0">
+  <Col class="mb-1 mb-lg-0">
     <Refresher on:refresh={() => {
       jobList.refreshJobs()
       jobList.refreshAllMetrics()
@ -215,10 +238,12 @@
<Histogram <Histogram
data={convert2uplot($stats.data.jobsStatistics[0].histDuration)} data={convert2uplot($stats.data.jobsStatistics[0].histDuration)}
title="Duration Distribution" title="Duration Distribution"
xlabel="Current Runtimes" xlabel="Job Runtimes"
xunit="Hours" xunit="Runtime"
ylabel="Number of Jobs" ylabel="Number of Jobs"
yunit="Jobs" yunit="Jobs"
usesBins
xtime
/> />
{/key} {/key}
</Col> </Col>
@@ -238,16 +263,32 @@
 </Row>
 <!-- ROW4+5: Selectable Histograms -->
-<Row cols={{ xs: 1, md: 5}}>
-  <Col>
+<Row>
+  <Col xs="12" md="3" lg="2" class="mb-2 mb-md-0">
     <Button
       outline
       color="secondary"
+      class="w-100"
       on:click={() => (isHistogramSelectionOpen = true)}
     >
       <Icon name="bar-chart-line" /> Select Histograms
     </Button>
   </Col>
+  <Col xs="12" md="9" lg="10" class="mb-2 mb-md-0">
+    <InputGroup>
+      <InputGroupText>
+        <Icon name="bar-chart-line-fill" />
+      </InputGroupText>
+      <InputGroupText>
+        Metric Bins
+      </InputGroupText>
+      <Input type="select" bind:value={numMetricBins} style="max-width: 120px;">
+        {#each metricBinOptions as mbin}
+          <option value={mbin}>{mbin}</option>
+        {/each}
+      </Input>
+    </InputGroup>
+  </Col>
 </Row>
 {#if metricsInHistograms?.length > 0}
   {#if $stats.error}
@@ -273,12 +314,12 @@
         >
           <Histogram
             data={convert2uplot(item.data)}
-            usesBins={true}
             title="Distribution of '{item.metric} ({item.stat})' footprints"
             xlabel={`${item.metric} bin maximum ${item?.unit ? `[${item.unit}]` : ``}`}
             xunit={item.unit}
             ylabel="Number of Jobs"
             yunit="Jobs"
+            usesBins
           />
         </PlotGrid>
       {/key}


@@ -15,8 +15,8 @@
 <script>
   import uPlot from "uplot";
-  import { formatNumber } from "../units.js";
   import { onMount, onDestroy } from "svelte";
+  import { formatNumber } from "../units.js";
   import { Card } from "@sveltestrap/sveltestrap";
   export let data;
@@ -26,16 +26,31 @@
   export let title = "";
   export let xlabel = "";
   export let xunit = "";
+  export let xtime = false;
   export let ylabel = "";
   export let yunit = "";
   const { bars } = uPlot.paths;
   const drawStyles = {
     bars: 1,
     points: 2,
   };
+  function formatTime(t) {
+    if (t !== null) {
+      if (isNaN(t)) {
+        return t;
+      } else {
+        const tAbs = Math.abs(t);
+        const h = Math.floor(tAbs / 3600);
+        const m = Math.floor((tAbs % 3600) / 60);
+        if (h == 0) return `${m}m`;
+        else if (m == 0) return `${h}h`;
+        else return `${h}:${m}h`;
+      }
+    }
+  }
   function paths(u, seriesIdx, idx0, idx1, extendGap, buildClip) {
     let s = u.series[seriesIdx];
     let style = s.drawStyle;
@@ -139,7 +154,7 @@
         label: xlabel,
         labelGap: 10,
         size: 25,
-        incrs: [1, 2, 5, 6, 10, 12, 50, 100, 500, 1000, 5000, 10000],
+        incrs: xtime ? [60, 120, 300, 600, 1800, 3600, 7200, 14400, 18000, 21600, 43200, 86400] : [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000],
         border: {
           show: true,
           stroke: "#000000",
@@ -149,7 +164,13 @@
           size: 5 / devicePixelRatio,
           stroke: "#000000",
         },
-        values: (_, t) => t.map((v) => formatNumber(v)),
+        values: (_, t) => t.map((v) => {
+          if (xtime) {
+            return formatTime(v);
+          } else {
+            return formatNumber(v)
+          }
+        }),
       },
       {
         stroke: "#000000",
@@ -166,17 +187,25 @@
           size: 5 / devicePixelRatio,
           stroke: "#000000",
         },
-        values: (_, t) => t.map((v) => formatNumber(v)),
+        values: (_, t) => t.map((v) => {
+          return formatNumber(v)
+        }),
       },
     ],
     series: [
       {
         label: xunit !== "" ? xunit : null,
         value: (u, ts, sidx, didx) => {
-          if (usesBins) {
+          if (usesBins && xtime) {
+            const min = u.data[sidx][didx - 1] ? formatTime(u.data[sidx][didx - 1]) : 0;
+            const max = formatTime(u.data[sidx][didx]);
+            ts = min + "-" + max; // narrow spaces
+          } else if (usesBins) {
             const min = u.data[sidx][didx - 1] ? u.data[sidx][didx - 1] : 0;
             const max = u.data[sidx][didx];
             ts = min + "-" + max; // narrow spaces
+          } else if (xtime) {
+            ts = formatTime(ts);
           }
           return ts;
         },
@@ -191,6 +220,7 @@
       },
       {
         drawStyle: drawStyles.bars,
+        width: 1, // 1 / lastBinCount,
         lineInterpolation: null,
         stroke: "#85abce",
         fill: "#85abce", // + "1A", // Transparent Fill


@@ -438,7 +438,7 @@ function getMetricConfigDeep(metric, cluster, subCluster) {
     }
 }
-export function convert2uplot(canvasData) {
+export function convert2uplot(canvasData, secondsToMinutes = false, secondsToHours = false) {
   // Prep: Uplot Data Structure
   let uplotData = [[],[]] // [X, Y1, Y2, ...]
   // Iterate if exists
@@ -446,11 +446,21 @@ export function convert2uplot(canvasData) {
     canvasData.forEach( cd => {
       if (Object.keys(cd).length == 4) { // MetricHisto Datafromat
         uplotData[0].push(cd?.max ? cd.max : 0)
         uplotData[1].push(cd?.count ? cd.count : 0)
-      } else { // Default
-        uplotData[0].push(cd.value)
-        uplotData[1].push(cd.count)
+      } else { // Default -> Fill Histodata with zero values on unused value placing -> maybe allows zoom trigger as known
+        if (secondsToHours) {
+          let hours = cd.value / 3600
+          console.log("x seconds to y hours", cd.value, hours)
+          uplotData[0].push(hours)
+        } else if (secondsToMinutes) {
+          let minutes = cd.value / 60
+          console.log("x seconds to y minutes", cd.value, minutes)
+          uplotData[0].push(minutes)
+        } else {
+          uplotData[0].push(cd.value)
+        }
+        uplotData[1].push(cd.count)
       }
     })
   }
   return uplotData