Refactor svelte frontend

- Adapt to new metricConfig logic
- Footprint-Metrics generalized for bar card
- Footprint-Metrics in stats filter and sorting
- Frontend always uses GQL, except adminOptions
- Job View will load scopes for all metrics on request
This commit is contained in:
Christoph Kluge 2024-07-22 15:41:33 +02:00
parent c4d93e492b
commit 6a1cb51c2f
38 changed files with 627 additions and 810 deletions

View File

@ -272,6 +272,7 @@ input JobFilter {
input OrderByInput { input OrderByInput {
field: String! field: String!
type: String!,
order: SortDirectionEnum! = ASC order: SortDirectionEnum! = ASC
} }
@ -319,6 +320,7 @@ type HistoPoint {
type MetricHistoPoints { type MetricHistoPoints {
metric: String! metric: String!
unit: String! unit: String!
stat: String
data: [MetricHistoPoint!] data: [MetricHistoPoint!]
} }

View File

@ -119,7 +119,6 @@ func (api *RestApi) MountFrontendApiRoutes(r *mux.Router) {
if api.Authentication != nil { if api.Authentication != nil {
r.HandleFunc("/jwt/", api.getJWT).Methods(http.MethodGet) r.HandleFunc("/jwt/", api.getJWT).Methods(http.MethodGet)
r.HandleFunc("/configuration/", api.updateConfiguration).Methods(http.MethodPost) r.HandleFunc("/configuration/", api.updateConfiguration).Methods(http.MethodPost)
r.HandleFunc("/jobs/metrics/{id}", api.getJobMetrics).Methods(http.MethodGet) // Fetched in Job.svelte: Needs All-User-Access-Session-Auth
} }
} }

View File

@ -211,6 +211,7 @@ type ComplexityRoot struct {
MetricHistoPoints struct { MetricHistoPoints struct {
Data func(childComplexity int) int Data func(childComplexity int) int
Metric func(childComplexity int) int Metric func(childComplexity int) int
Stat func(childComplexity int) int
Unit func(childComplexity int) int Unit func(childComplexity int) int
} }
@ -1104,6 +1105,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.MetricHistoPoints.Metric(childComplexity), true return e.complexity.MetricHistoPoints.Metric(childComplexity), true
case "MetricHistoPoints.stat":
if e.complexity.MetricHistoPoints.Stat == nil {
break
}
return e.complexity.MetricHistoPoints.Stat(childComplexity), true
case "MetricHistoPoints.unit": case "MetricHistoPoints.unit":
if e.complexity.MetricHistoPoints.Unit == nil { if e.complexity.MetricHistoPoints.Unit == nil {
break break
@ -2100,6 +2108,7 @@ input JobFilter {
input OrderByInput { input OrderByInput {
field: String! field: String!
type: String!,
order: SortDirectionEnum! = ASC order: SortDirectionEnum! = ASC
} }
@ -2147,6 +2156,7 @@ type HistoPoint {
type MetricHistoPoints { type MetricHistoPoints {
metric: String! metric: String!
unit: String! unit: String!
stat: String
data: [MetricHistoPoint!] data: [MetricHistoPoint!]
} }
@ -6445,6 +6455,8 @@ func (ec *executionContext) fieldContext_JobsStatistics_histMetrics(_ context.Co
return ec.fieldContext_MetricHistoPoints_metric(ctx, field) return ec.fieldContext_MetricHistoPoints_metric(ctx, field)
case "unit": case "unit":
return ec.fieldContext_MetricHistoPoints_unit(ctx, field) return ec.fieldContext_MetricHistoPoints_unit(ctx, field)
case "stat":
return ec.fieldContext_MetricHistoPoints_stat(ctx, field)
case "data": case "data":
return ec.fieldContext_MetricHistoPoints_data(ctx, field) return ec.fieldContext_MetricHistoPoints_data(ctx, field)
} }
@ -7295,6 +7307,47 @@ func (ec *executionContext) fieldContext_MetricHistoPoints_unit(_ context.Contex
return fc, nil return fc, nil
} }
// _MetricHistoPoints_stat resolves the optional "stat" field of the
// MetricHistoPoints GraphQL type. The value comes straight from the model
// struct (obj.Stat, a *string) — no user resolver is involved.
// Returns graphql.Null when field-context lookup fails, the middleware
// errors, or the value is nil (field is nullable String in the schema).
// NOTE(review): generated by gqlgen — do not edit by hand, regenerate instead.
func (ec *executionContext) _MetricHistoPoints_stat(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoints) (ret graphql.Marshaler) {
	fc, err := ec.fieldContext_MetricHistoPoints_stat(ctx, field)
	if err != nil {
		return graphql.Null
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	// Recover from panics in resolver middleware and report them as GraphQL
	// errors instead of crashing the request.
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Stat, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	// nil is legal here: "stat" is declared as nullable String.
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(*string)
	fc.Result = res
	return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
// fieldContext_MetricHistoPoints_stat builds the static field context for
// MetricHistoPoints.stat: a plain struct field (IsMethod/IsResolver false)
// of scalar type String, which therefore has no child selections.
// NOTE(review): generated by gqlgen — do not edit by hand, regenerate instead.
func (ec *executionContext) fieldContext_MetricHistoPoints_stat(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
	fc = &graphql.FieldContext{
		Object: "MetricHistoPoints",
		Field:  field,
		IsMethod:   false,
		IsResolver: false,
		// Scalars cannot be selected into; any sub-selection is a query error.
		Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
			return nil, errors.New("field of type String does not have child fields")
		},
	}
	return fc, nil
}
func (ec *executionContext) _MetricHistoPoints_data(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoints) (ret graphql.Marshaler) { func (ec *executionContext) _MetricHistoPoints_data(ctx context.Context, field graphql.CollectedField, obj *model.MetricHistoPoints) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_MetricHistoPoints_data(ctx, field) fc, err := ec.fieldContext_MetricHistoPoints_data(ctx, field)
if err != nil { if err != nil {
@ -13217,7 +13270,7 @@ func (ec *executionContext) unmarshalInputOrderByInput(ctx context.Context, obj
asMap["order"] = "ASC" asMap["order"] = "ASC"
} }
fieldsInOrder := [...]string{"field", "order"} fieldsInOrder := [...]string{"field", "type", "order"}
for _, k := range fieldsInOrder { for _, k := range fieldsInOrder {
v, ok := asMap[k] v, ok := asMap[k]
if !ok { if !ok {
@ -13231,6 +13284,13 @@ func (ec *executionContext) unmarshalInputOrderByInput(ctx context.Context, obj
return it, err return it, err
} }
it.Field = data it.Field = data
case "type":
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type"))
data, err := ec.unmarshalNString2string(ctx, v)
if err != nil {
return it, err
}
it.Type = data
case "order": case "order":
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("order")) ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("order"))
data, err := ec.unmarshalNSortDirectionEnum2githubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐSortDirectionEnum(ctx, v) data, err := ec.unmarshalNSortDirectionEnum2githubᚗcomᚋClusterCockpitᚋccᚑbackendᚋinternalᚋgraphᚋmodelᚐSortDirectionEnum(ctx, v)
@ -14673,6 +14733,8 @@ func (ec *executionContext) _MetricHistoPoints(ctx context.Context, sel ast.Sele
if out.Values[i] == graphql.Null { if out.Values[i] == graphql.Null {
out.Invalids++ out.Invalids++
} }
case "stat":
out.Values[i] = ec._MetricHistoPoints_stat(ctx, field, obj)
case "data": case "data":
out.Values[i] = ec._MetricHistoPoints_data(ctx, field, obj) out.Values[i] = ec._MetricHistoPoints_data(ctx, field, obj)
default: default:

View File

@ -123,6 +123,7 @@ type MetricHistoPoint struct {
type MetricHistoPoints struct { type MetricHistoPoints struct {
Metric string `json:"metric"` Metric string `json:"metric"`
Unit string `json:"unit"` Unit string `json:"unit"`
Stat *string `json:"stat,omitempty"`
Data []*MetricHistoPoint `json:"data,omitempty"` Data []*MetricHistoPoint `json:"data,omitempty"`
} }
@ -142,6 +143,7 @@ type NodeMetrics struct {
type OrderByInput struct { type OrderByInput struct {
Field string `json:"field"` Field string `json:"field"`
Type string `json:"type"`
Order SortDirectionEnum `json:"order"` Order SortDirectionEnum `json:"order"`
} }

View File

@ -31,14 +31,28 @@ func (r *JobRepository) QueryJobs(
if order != nil { if order != nil {
field := toSnakeCase(order.Field) field := toSnakeCase(order.Field)
if order.Type == "col" {
// "col": Fixed column name query
switch order.Order { switch order.Order {
case model.SortDirectionEnumAsc: case model.SortDirectionEnumAsc:
query = query.OrderBy(fmt.Sprintf("job.%s ASC", field)) query = query.OrderBy(fmt.Sprintf("job.%s ASC", field))
case model.SortDirectionEnumDesc: case model.SortDirectionEnumDesc:
query = query.OrderBy(fmt.Sprintf("job.%s DESC", field)) query = query.OrderBy(fmt.Sprintf("job.%s DESC", field))
default: default:
return nil, errors.New("REPOSITORY/QUERY > invalid sorting order") return nil, errors.New("REPOSITORY/QUERY > invalid sorting order for column")
}
} else {
// "foot": Order by footprint JSON field values
// Verify and Search Only in Valid Jsons
query = query.Where("JSON_VALID(meta_data)")
switch order.Order {
case model.SortDirectionEnumAsc:
query = query.OrderBy(fmt.Sprintf("JSON_EXTRACT(footprint, \"$.%s\") ASC", field))
case model.SortDirectionEnumDesc:
query = query.OrderBy(fmt.Sprintf("JSON_EXTRACT(footprint, \"$.%s\") DESC", field))
default:
return nil, errors.New("REPOSITORY/QUERY > invalid sorting order for footprint")
}
} }
} }
@ -177,8 +191,8 @@ func BuildWhereClause(filter *model.JobFilter, query sq.SelectBuilder) sq.Select
query = buildStringCondition("job.resources", filter.Node, query) query = buildStringCondition("job.resources", filter.Node, query)
} }
if filter.MetricStats != nil { if filter.MetricStats != nil {
for _, m := range filter.MetricStats { for _, ms := range filter.MetricStats {
query = buildFloatJsonCondition("job.metric_stats", m.Range, query) query = buildFloatJsonCondition(ms.MetricName, ms.Range, query)
} }
} }
return query return query
@ -200,8 +214,10 @@ func buildTimeCondition(field string, cond *schema.TimeRange, query sq.SelectBui
} }
} }
func buildFloatJsonCondition(field string, cond *model.FloatRange, query sq.SelectBuilder) sq.SelectBuilder { func buildFloatJsonCondition(condName string, condRange *model.FloatRange, query sq.SelectBuilder) sq.SelectBuilder {
return query.Where("JSON_EXTRACT(footprint, '$."+field+"') BETWEEN ? AND ?", cond.From, cond.To) // Verify and Search Only in Valid Jsons
query = query.Where("JSON_VALID(footprint)")
return query.Where("JSON_EXTRACT(footprint, \"$."+condName+"\") BETWEEN ? AND ?", condRange.From, condRange.To)
} }
func buildStringCondition(field string, cond *model.StringInput, query sq.SelectBuilder) sq.SelectBuilder { func buildStringCondition(field string, cond *model.StringInput, query sq.SelectBuilder) sq.SelectBuilder {

View File

@ -552,12 +552,14 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
var metricConfig *schema.MetricConfig var metricConfig *schema.MetricConfig
var peak float64 = 0.0 var peak float64 = 0.0
var unit string = "" var unit string = ""
var footprintStat string = ""
for _, f := range filters { for _, f := range filters {
if f.Cluster != nil { if f.Cluster != nil {
metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric) metricConfig = archive.GetMetricConfig(*f.Cluster.Eq, metric)
peak = metricConfig.Peak peak = metricConfig.Peak
unit = metricConfig.Unit.Prefix + metricConfig.Unit.Base unit = metricConfig.Unit.Prefix + metricConfig.Unit.Base
footprintStat = metricConfig.Footprint
log.Debugf("Cluster %s filter found with peak %f for %s", *f.Cluster.Eq, peak, metric) log.Debugf("Cluster %s filter found with peak %f for %s", *f.Cluster.Eq, peak, metric)
} }
} }
@ -572,21 +574,26 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
if unit == "" { if unit == "" {
unit = m.Unit.Prefix + m.Unit.Base unit = m.Unit.Prefix + m.Unit.Base
} }
if footprintStat == "" {
footprintStat = m.Footprint
}
} }
} }
} }
} }
// log.Debugf("Metric %s: DB %s, Peak %f, Unit %s", metric, dbMetric, peak, unit) // log.Debugf("Metric %s, Peak %f, Unit %s, Aggregation %s", metric, peak, unit, aggreg)
// Make bins, see https://jereze.com/code/sql-histogram/ // Make bins, see https://jereze.com/code/sql-histogram/
start := time.Now() start := time.Now()
jm := fmt.Sprintf(`json_extract(footprint, "$.%s")`, metric) jm := fmt.Sprintf(`json_extract(footprint, "$.%s")`, (metric + "_" + footprintStat))
crossJoinQuery := sq.Select( crossJoinQuery := sq.Select(
fmt.Sprintf(`max(%s) as max`, jm), fmt.Sprintf(`max(%s) as max`, jm),
fmt.Sprintf(`min(%s) as min`, jm), fmt.Sprintf(`min(%s) as min`, jm),
).From("job").Where( ).From("job").Where(
"JSON_VALID(footprint)",
).Where(
fmt.Sprintf(`%s is not null`, jm), fmt.Sprintf(`%s is not null`, jm),
).Where( ).Where(
fmt.Sprintf(`%s <= %f`, jm, peak), fmt.Sprintf(`%s <= %f`, jm, peak),
@ -651,7 +658,7 @@ func (r *JobRepository) jobsMetricStatisticsHistogram(
points = append(points, &point) points = append(points, &point)
} }
result := model.MetricHistoPoints{Metric: metric, Unit: unit, Data: points} result := model.MetricHistoPoints{Metric: metric, Unit: unit, Stat: &footprintStat, Data: points}
log.Debugf("Timer jobsStatisticsHistogram %s", time.Since(start)) log.Debugf("Timer jobsStatisticsHistogram %s", time.Since(start))
return &result, nil return &result, nil

View File

@ -48,8 +48,10 @@
let colWidth1, colWidth2, colWidth3, colWidth4; let colWidth1, colWidth2, colWidth3, colWidth4;
let numBins = 50; let numBins = 50;
let maxY = -1; let maxY = -1;
const initialized = getContext("initialized");
const globalMetrics = getContext("globalMetrics");
const ccconfig = getContext("cc-config"); const ccconfig = getContext("cc-config");
const metricConfig = getContext("metrics");
let metricsInHistograms = ccconfig.analysis_view_histogramMetrics, let metricsInHistograms = ccconfig.analysis_view_histogramMetrics,
metricsInScatterplots = ccconfig.analysis_view_scatterPlotMetrics; metricsInScatterplots = ccconfig.analysis_view_scatterPlotMetrics;
@ -268,6 +270,19 @@
} }
} }
// Per-cluster metric metadata, filled once the global metric context is ready.
let availableMetrics = [];
let metricUnits = {};
let metricScopes = {};

// Derive the metrics available on the current cluster and cache each
// metric's display unit (prefix + base) and native scope by name.
function loadMetrics(isInitialized) {
  if (!isInitialized) return
  availableMetrics = globalMetrics.filter(
    (gm) => gm?.availability.find((av) => av.cluster == cluster.name)
  );
  for (const metric of availableMetrics) {
    const unitPrefix = metric?.unit?.prefix ? metric.unit.prefix : "";
    const unitBase = metric?.unit?.base ? metric.unit.base : "";
    metricUnits[metric.name] = unitPrefix + unitBase;
    metricScopes[metric.name] = metric?.scope;
  }
}
$: loadMetrics($initialized)
$: updateEntityConfiguration(groupSelection.key); $: updateEntityConfiguration(groupSelection.key);
$: updateCategoryConfiguration(sortSelection.key); $: updateCategoryConfiguration(sortSelection.key);
@ -285,7 +300,7 @@
<Card body color="danger">{$initq.error.message}</Card> <Card body color="danger">{$initq.error.message}</Card>
{:else if cluster} {:else if cluster}
<PlotSelection <PlotSelection
availableMetrics={cluster.metricConfig.map((mc) => mc.name)} availableMetrics={availableMetrics.map((av) => av.name)}
bind:metricsInHistograms bind:metricsInHistograms
bind:metricsInScatterplots bind:metricsInScatterplots
/> />
@ -506,7 +521,7 @@
metric, metric,
...binsFromFootprint( ...binsFromFootprint(
$footprintsQuery.data.footprints.timeWeights, $footprintsQuery.data.footprints.timeWeights,
metricConfig(cluster.name, metric)?.scope, metricScopes[metric],
$footprintsQuery.data.footprints.metrics.find( $footprintsQuery.data.footprints.metrics.find(
(f) => f.metric == metric, (f) => f.metric == metric,
).data, ).data,
@ -521,22 +536,8 @@
height={250} height={250}
usesBins={true} usesBins={true}
title="Average Distribution of '{item.metric}'" title="Average Distribution of '{item.metric}'"
xlabel={`${item.metric} bin maximum ${ xlabel={`${item.metric} bin maximum [${metricUnits[item.metric]}]`}
(metricConfig(cluster.name, item.metric)?.unit?.prefix xunit={`${metricUnits[item.metric]}`}
? "[" + metricConfig(cluster.name, item.metric)?.unit?.prefix
: "") +
(metricConfig(cluster.name, item.metric)?.unit?.base
? metricConfig(cluster.name, item.metric)?.unit?.base + "]"
: "")
}`}
xunit={`${
(metricConfig(cluster.name, item.metric)?.unit?.prefix
? metricConfig(cluster.name, item.metric)?.unit?.prefix
: "") +
(metricConfig(cluster.name, item.metric)?.unit?.base
? metricConfig(cluster.name, item.metric)?.unit?.base
: "")
}`}
ylabel="Normalized Hours" ylabel="Normalized Hours"
yunit="Hours" yunit="Hours"
/> />
@ -578,22 +579,8 @@
{width} {width}
height={250} height={250}
color={"rgba(0, 102, 204, 0.33)"} color={"rgba(0, 102, 204, 0.33)"}
xLabel={`${item.m1} [${ xLabel={`${item.m1} [${metricUnits[item.m1]}]`}
(metricConfig(cluster.name, item.m1)?.unit?.prefix yLabel={`${item.m2} [${metricUnits[item.m2]}]`}
? metricConfig(cluster.name, item.m1)?.unit?.prefix
: "") +
(metricConfig(cluster.name, item.m1)?.unit?.base
? metricConfig(cluster.name, item.m1)?.unit?.base
: "")
}]`}
yLabel={`${item.m2} [${
(metricConfig(cluster.name, item.m2)?.unit?.prefix
? metricConfig(cluster.name, item.m2)?.unit?.prefix
: "") +
(metricConfig(cluster.name, item.m2)?.unit?.base
? metricConfig(cluster.name, item.m2)?.unit?.base
: "")
}]`}
X={item.f1} X={item.f1}
Y={item.f2} Y={item.f2}
S={$footprintsQuery.data.footprints.timeWeights.nodeHours} S={$footprintsQuery.data.footprints.timeWeights.nodeHours}

View File

@ -1,12 +1,9 @@
<script> <script>
// import { init } from "./utils.js";
import { Card, CardHeader, CardTitle } from "@sveltestrap/sveltestrap"; import { Card, CardHeader, CardTitle } from "@sveltestrap/sveltestrap";
import UserSettings from "./config/UserSettings.svelte"; import UserSettings from "./config/UserSettings.svelte";
import AdminSettings from "./config/AdminSettings.svelte"; import AdminSettings from "./config/AdminSettings.svelte";
// const { query: initq } = init();
export let isAdmin; export let isAdmin;
export let isApi; export let isApi;
export let username; export let username;

View File

@ -8,15 +8,25 @@
ListGroup, ListGroup,
ListGroupItem, ListGroupItem,
} from "@sveltestrap/sveltestrap"; } from "@sveltestrap/sveltestrap";
import { getContext } from "svelte";
import { gql, getContextClient, mutationStore } from "@urql/svelte"; import { gql, getContextClient, mutationStore } from "@urql/svelte";
export let cluster; export let cluster;
export let metricsInHistograms; export let metricsInHistograms;
export let isOpen; export let isOpen;
let availableMetrics = ["cpu_load", "flops_any", "mem_used", "mem_bw"]; // 'net_bw', 'file_bw'
let pendingMetrics = [...metricsInHistograms]; // Copy
const client = getContextClient(); const client = getContextClient();
const initialized = getContext("initialized");
let availableMetrics = []
// Collect the names of all globally known metrics that are flagged as
// footprint metrics; these become the selectable histogram metrics.
// NOTE(review): getContext() is invoked inside this function, which is run
// reactively ($: loadHistoMetrics($initialized)) — Svelte requires
// getContext during component initialisation. This presumably only works
// because the first reactive run happens at init time; confirm before
// calling this from any later code path.
function loadHistoMetrics(isInitialized) {
if (!isInitialized) return;
const rawAvailableMetrics = getContext("globalMetrics").filter((gm) => gm?.footprint).map((fgm) => { return fgm.name })
availableMetrics = [...rawAvailableMetrics]
}
let pendingMetrics = [...metricsInHistograms]; // Copy
const updateConfigurationMutation = ({ name, value }) => { const updateConfigurationMutation = ({ name, value }) => {
return mutationStore({ return mutationStore({
@ -37,7 +47,6 @@
}).subscribe((res) => { }).subscribe((res) => {
if (res.fetching === false && res.error) { if (res.fetching === false && res.error) {
throw res.error; throw res.error;
// console.log('Error on subscription: ' + res.error)
} }
}); });
} }
@ -52,6 +61,9 @@
value: metricsInHistograms, value: metricsInHistograms,
}); });
} }
$: loadHistoMetrics($initialized);
</script> </script>
<Modal {isOpen} toggle={() => (isOpen = !isOpen)}> <Modal {isOpen} toggle={() => (isOpen = !isOpen)}>

View File

@ -2,10 +2,14 @@
import { import {
init, init,
groupByScope, groupByScope,
fetchMetricsStore,
checkMetricDisabled, checkMetricDisabled,
transformDataForRoofline, transformDataForRoofline,
} from "./utils.js"; } from "./utils.js";
import {
queryStore,
gql,
getContextClient
} from "@urql/svelte";
import { import {
Row, Row,
Col, Col,
@ -34,15 +38,27 @@
export let authlevel; export let authlevel;
export let roles; export let roles;
const accMetrics = [ // Setup General
"acc_utilization",
"acc_mem_used", const ccconfig = getContext("cc-config")
"acc_power",
"nv_mem_util", let isMetricsSelectionOpen = false,
"nv_sm_clock", showFootprint = !!ccconfig[`job_view_showFootprint`],
"nv_temp", selectedMetrics = [],
]; selectedScopes = [];
let accNodeOnly;
let plots = {},
jobTags,
statsTable,
jobFootprint;
let missingMetrics = [],
missingHosts = [],
somethingMissing = false;
// Setup GQL
// First: Add Job Query to init function -> Only requires DBID as argument, received via URL-ID
// Second: Trigger jobMetrics query with now received jobInfos (scopes: from job metadata, selectedMetrics: from config or all, job: from url-id)
const { query: initq } = init(` const { query: initq } = init(`
job(id: "${dbid}") { job(id: "${dbid}") {
@ -55,99 +71,100 @@
metaData, metaData,
userData { name, email }, userData { name, email },
concurrentJobs { items { id, jobId }, count, listQuery }, concurrentJobs { items { id, jobId }, count, listQuery },
flopsAnyAvg, memBwAvg, loadAvg footprint { name, stat, value }
} }
`); `);
const ccconfig = getContext("cc-config"), const client = getContextClient();
clusters = getContext("clusters"), const query = gql`
metrics = getContext("metrics"); query ($dbid: ID!, $selectedMetrics: [String!]!, $selectedScopes: [MetricScope!]!) {
jobMetrics(id: $dbid, metrics: $selectedMetrics, scopes: $selectedScopes) {
name
scope
metric {
unit {
prefix
base
}
timestep
statisticsSeries {
min
median
max
}
series {
hostname
id
data
statistics {
min
avg
max
}
}
}
}
}
`;
let isMetricsSelectionOpen = false, $: jobMetrics = queryStore({
selectedMetrics = [], client: client,
showFootprint = true, query: query,
isFetched = new Set(); variables: { dbid, selectedMetrics, selectedScopes },
const [jobMetrics, startFetching] = fetchMetricsStore(); });
// Widen the requested scopes by "socket" and "core" and re-issue the
// jobMetrics GraphQL query so fine-grained data is fetched on demand.
function loadAllScopes() {
  selectedScopes = selectedScopes.concat(["socket", "core"]);
  jobMetrics = queryStore({
    client,
    query,
    variables: { dbid, selectedMetrics, selectedScopes },
  });
}
// Handle Job Query on Init -> is not executed anymore
getContext("on-init")(() => { getContext("on-init")(() => {
let job = $initq.data.job; let job = $initq.data.job;
if (!job) return; if (!job) return;
selectedMetrics = const pendingMetrics = [
ccconfig[`job_view_selectedMetrics:${job.cluster}`] ||
clusters
.find((c) => c.name == job.cluster)
.metricConfig.map((mc) => mc.name);
showFootprint =
ccconfig[`job_view_showFootprint`]
let toFetch = new Set([
"flops_any", "flops_any",
"mem_bw", "mem_bw",
...selectedMetrics, ...(ccconfig[`job_view_selectedMetrics:${job.cluster}`] ||
$initq.data.globalMetrics.reduce((names, gm) => {
if (gm.availability.find((av) => av.cluster === job.cluster)) {
names.push(gm.name);
}
return names;
}, [])
),
...(ccconfig[`job_view_polarPlotMetrics:${job.cluster}`] || ...(ccconfig[`job_view_polarPlotMetrics:${job.cluster}`] ||
ccconfig[`job_view_polarPlotMetrics`]), ccconfig[`job_view_polarPlotMetrics`]
),
...(ccconfig[`job_view_nodestats_selectedMetrics:${job.cluster}`] || ...(ccconfig[`job_view_nodestats_selectedMetrics:${job.cluster}`] ||
ccconfig[`job_view_nodestats_selectedMetrics`]), ccconfig[`job_view_nodestats_selectedMetrics`]
]); ),
];
// Select default Scopes to load: Check before if accelerator metrics are not on accelerator scope by default // Select default Scopes to load: Check before if any metric has accelerator scope by default
accNodeOnly = [...toFetch].some(function (m) { const accScopeDefault = [...pendingMetrics].some(function (m) {
if (accMetrics.includes(m)) { const cluster = $initq.data.clusters.find((c) => c.name == job.cluster);
const mc = metrics(job.cluster, m); const subCluster = cluster.subClusters.find((sc) => sc.name == job.subCluster);
return mc.scope !== "accelerator"; return subCluster.metricConfig.find((smc) => smc.name == m)?.scope === "accelerator";
} else {
return false;
}
}); });
if (job.numAcc === 0 || accNodeOnly === true) { const pendingScopes = ["node"]
// No Accels or Accels on Node Scope if (accScopeDefault) pendingScopes.push("accelerator")
startFetching( if (job.numNodes === 1) {
job, pendingScopes.push("socket")
[...toFetch], pendingScopes.push("core")
job.numNodes > 2 ? ["node"] : ["node", "socket", "core"],
);
} else {
// Accels and not on node scope
startFetching(
job,
[...toFetch],
job.numNodes > 2
? ["node", "accelerator"]
: ["node", "accelerator", "socket", "core"],
);
} }
isFetched = toFetch; selectedMetrics = [...new Set(pendingMetrics)];
selectedScopes = [...new Set(pendingScopes)];
}); });
const lazyFetchMoreMetrics = () => { // Interactive Document Title
let notYetFetched = new Set();
for (let m of selectedMetrics) {
if (!isFetched.has(m)) {
notYetFetched.add(m);
isFetched.add(m);
}
}
if (notYetFetched.size > 0)
startFetching(
$initq.data.job,
[...notYetFetched],
$initq.data.job.numNodes > 2 ? ["node"] : ["node", "core"],
);
};
// Fetch more data once required:
$: if ($initq.data && $jobMetrics.data && selectedMetrics)
lazyFetchMoreMetrics();
let plots = {},
jobTags,
statsTable,
jobFootprint;
$: document.title = $initq.fetching $: document.title = $initq.fetching
? "Loading..." ? "Loading..."
: $initq.error : $initq.error
@ -155,15 +172,15 @@
: `Job ${$initq.data.job.jobId} - ClusterCockpit`; : `Job ${$initq.data.job.jobId} - ClusterCockpit`;
// Find out what metrics or hosts are missing: // Find out what metrics or hosts are missing:
let missingMetrics = [], $: if ($initq?.data && $jobMetrics?.data?.jobMetrics) {
missingHosts = [],
somethingMissing = false;
$: if ($initq.data && $jobMetrics.data) {
let job = $initq.data.job, let job = $initq.data.job,
metrics = $jobMetrics.data.jobMetrics, metrics = $jobMetrics.data.jobMetrics,
metricNames = clusters metricNames = $initq.data.globalMetrics.reduce((names, gm) => {
.find((c) => c.name == job.cluster) if (gm.availability.find((av) => av.cluster === job.cluster)) {
.metricConfig.map((mc) => mc.name); names.push(gm.name);
}
return names;
}, []);
// Metric not found in JobMetrics && Metric not explicitly disabled in config or deselected: Was expected, but is Missing // Metric not found in JobMetrics && Metric not explicitly disabled in config or deselected: Was expected, but is Missing
missingMetrics = metricNames.filter( missingMetrics = metricNames.filter(
@ -192,6 +209,7 @@
somethingMissing = missingMetrics.length > 0 || missingHosts.length > 0; somethingMissing = missingMetrics.length > 0 || missingHosts.length > 0;
} }
// Helper
const orderAndMap = (grouped, selectedMetrics) => const orderAndMap = (grouped, selectedMetrics) =>
selectedMetrics.map((metric) => ({ selectedMetrics.map((metric) => ({
metric: metric, metric: metric,
@ -214,18 +232,15 @@
<Spinner secondary /> <Spinner secondary />
{/if} {/if}
</Col> </Col>
{#if $jobMetrics.data && showFootprint} {#if $initq.data && showFootprint}
{#key $jobMetrics.data}
<Col> <Col>
<JobFootprint <JobFootprint
bind:this={jobFootprint} bind:this={jobFootprint}
job={$initq.data.job} job={$initq.data.job}
jobMetrics={$jobMetrics.data.jobMetrics}
/> />
</Col> </Col>
{/key}
{/if} {/if}
{#if $jobMetrics.data && $initq.data} {#if $initq?.data && $jobMetrics?.data?.jobMetrics}
{#if $initq.data.job.concurrentJobs != null && $initq.data.job.concurrentJobs.items.length != 0} {#if $initq.data.job.concurrentJobs != null && $initq.data.job.concurrentJobs.items.length != 0}
{#if authlevel > roles.manager} {#if authlevel > roles.manager}
<Col> <Col>
@ -270,27 +285,29 @@
`job_view_polarPlotMetrics:${$initq.data.job.cluster}` `job_view_polarPlotMetrics:${$initq.data.job.cluster}`
] || ccconfig[`job_view_polarPlotMetrics`]} ] || ccconfig[`job_view_polarPlotMetrics`]}
cluster={$initq.data.job.cluster} cluster={$initq.data.job.cluster}
subCluster={$initq.data.job.subCluster}
jobMetrics={$jobMetrics.data.jobMetrics} jobMetrics={$jobMetrics.data.jobMetrics}
/> />
</Col> </Col>
<Col> <Col>
<Roofline <Roofline
renderTime={true} renderTime={true}
cluster={clusters cluster={$initq.data.clusters
.find((c) => c.name == $initq.data.job.cluster) .find((c) => c.name == $initq.data.job.cluster)
.subClusters.find((sc) => sc.name == $initq.data.job.subCluster)} .subClusters.find((sc) => sc.name == $initq.data.job.subCluster)}
data={transformDataForRoofline( data={transformDataForRoofline(
$jobMetrics.data.jobMetrics.find( $jobMetrics.data.jobMetrics.find(
(m) => m.name == "flops_any" && m.scope == "node", (m) => m.name == "flops_any" && m.scope == "node",
).metric, )?.metric,
$jobMetrics.data.jobMetrics.find( $jobMetrics.data.jobMetrics.find(
(m) => m.name == "mem_bw" && m.scope == "node", (m) => m.name == "mem_bw" && m.scope == "node",
).metric, )?.metric,
)} )}
/> />
</Col> </Col>
{:else} {:else}
<Col /> <Col />
<Spinner secondary />
<Col /> <Col />
{/if} {/if}
</Row> </Row>
@ -318,7 +335,7 @@
<Card body color="danger">{$jobMetrics.error.message}</Card> <Card body color="danger">{$jobMetrics.error.message}</Card>
{:else if $jobMetrics.fetching} {:else if $jobMetrics.fetching}
<Spinner secondary /> <Spinner secondary />
{:else if $jobMetrics.data && $initq.data} {:else if $initq?.data && $jobMetrics?.data?.jobMetrics}
<PlotTable <PlotTable
let:item let:item
let:width let:width
@ -332,9 +349,11 @@
{#if item.data} {#if item.data}
<Metric <Metric
bind:this={plots[item.metric]} bind:this={plots[item.metric]}
on:more-loaded={({ detail }) => statsTable.moreLoaded(detail)} on:load-all={loadAllScopes}
job={$initq.data.job} job={$initq.data.job}
metricName={item.metric} metricName={item.metric}
metricUnit={$initq.data.globalMetrics.find((gm) => gm.name == item.metric)?.unit}
nativeScope={$initq.data.globalMetrics.find((gm) => gm.name == item.metric)?.scope}
rawData={item.data.map((x) => x.metric)} rawData={item.data.map((x) => x.metric)}
scopes={item.data.map((x) => x.scope)} scopes={item.data.map((x) => x.scope)}
{width} {width}
@ -388,8 +407,8 @@
tab="Statistics Table" tab="Statistics Table"
active={!somethingMissing} active={!somethingMissing}
> >
{#if $jobMetrics.data} {#if $jobMetrics?.data?.jobMetrics}
{#key $jobMetrics.data} {#key $jobMetrics.data.jobMetrics}
<StatsTable <StatsTable
bind:this={statsTable} bind:this={statsTable}
job={$initq.data.job} job={$initq.data.job}

View File

@ -1,34 +1,25 @@
<script context="module"> <script context="module">
export function findJobThresholds(job, metricConfig, subClusterConfig) { export function findJobThresholds(job, metricConfig) {
if (!job || !metricConfig || !subClusterConfig) { if (!job || !metricConfig) {
console.warn("Argument missing for findJobThresholds!"); console.warn("Argument missing for findJobThresholds!");
return null; return null;
} }
const subclusterThresholds = metricConfig.subClusters.find( // metricConfig is on subCluster-Level
(sc) => sc.name == subClusterConfig.name,
);
const defaultThresholds = { const defaultThresholds = {
peak: subclusterThresholds peak: metricConfig.peak,
? subclusterThresholds.peak normal: metricConfig.normal,
: metricConfig.peak, caution: metricConfig.caution,
normal: subclusterThresholds alert: metricConfig.alert
? subclusterThresholds.normal
: metricConfig.normal,
caution: subclusterThresholds
? subclusterThresholds.caution
: metricConfig.caution,
alert: subclusterThresholds
? subclusterThresholds.alert
: metricConfig.alert,
}; };
// Job_Exclusivity does not matter, only aggregation // Job_Exclusivity does not matter, only aggregation
if (metricConfig.aggregation === "avg") { if (metricConfig.aggregation === "avg") {
return defaultThresholds; return defaultThresholds;
} else if (metricConfig.aggregation === "sum") { } else if (metricConfig.aggregation === "sum") {
const jobFraction = const topol = getContext("getHardwareTopology")(job.cluster, job.subCluster)
job.numHWThreads / subClusterConfig.topology.node.length; const jobFraction = job.numHWThreads / topol.node.length;
return { return {
peak: round(defaultThresholds.peak * jobFraction, 0), peak: round(defaultThresholds.peak * jobFraction, 0),
normal: round(defaultThresholds.normal * jobFraction, 0), normal: round(defaultThresholds.normal * jobFraction, 0),
@ -55,109 +46,56 @@
Progress, Progress,
Icon, Icon,
Tooltip, Tooltip,
Row,
Col
} from "@sveltestrap/sveltestrap"; } from "@sveltestrap/sveltestrap";
import { mean, round } from "mathjs"; import { round } from "mathjs";
export let job; export let job;
export let jobMetrics;
export let view = "job"; export let view = "job";
export let width = "auto"; export let width = "auto";
export let height = "310px";
const clusters = getContext("clusters"); const footprintData = job?.footprint?.map((jf) => {
const subclusterConfig = clusters
.find((c) => c.name == job.cluster)
.subClusters.find((sc) => sc.name == job.subCluster);
const footprintMetrics =
job.numAcc !== 0
? job.exclusive !== 1 // GPU
? ["acc_utilization", "acc_mem_used", "nv_sm_clock", "nv_mem_util"] // Shared
: ["acc_utilization", "acc_mem_used", "nv_sm_clock", "nv_mem_util"] // Exclusive
: (job.exclusive !== 1) // CPU Only
? ["flops_any", "mem_used"] // Shared
: ["cpu_load", "flops_any", "mem_used", "mem_bw"]; // Exclusive
const footprintData = footprintMetrics.map((fm) => {
// Unit // Unit
const fmc = getContext("metrics")(job.cluster, fm); const fmc = getContext("getMetricConfig")(job.cluster, job.subCluster, jf.name);
let unit = ""; const unit = (fmc?.unit?.prefix ? fmc.unit.prefix : "") + (fmc?.unit?.base ? fmc.unit.base : "")
if (fmc?.unit?.base) unit = fmc.unit.prefix + fmc.unit.base;
// Threshold / -Differences // Threshold / -Differences
const fmt = findJobThresholds(job, fmc, subclusterConfig); const fmt = findJobThresholds(job, fmc);
if (fm === "flops_any") fmt.peak = round(fmt.peak * 0.85, 0); if (jf.name === "flops_any") fmt.peak = round(fmt.peak * 0.85, 0);
// Value: Primarily use backend sourced avgs from job.*, secondarily calculate/read from metricdata // Define basic data -> Value: Use as Provided
// Exclusivity does not matter
let mv = 0.0;
if (fmc.aggregation === "avg") {
if (fm === "cpu_load" && job.loadAvg !== 0) {
mv = round(job.loadAvg, 2);
} else if (fm === "flops_any" && job.flopsAnyAvg !== 0) {
mv = round(job.flopsAnyAvg, 2);
} else if (fm === "mem_bw" && job.memBwAvg !== 0) {
mv = round(job.memBwAvg, 2);
} else {
// Calculate Avg from jobMetrics
const jm = jobMetrics.find((jm) => jm.name === fm && jm.scope === "node");
if (jm?.metric?.statisticsSeries) {
const noNan = jm.metric.statisticsSeries.median.filter(function (val) {
return val != null;
});
mv = round(mean(noNan), 2);
} else if (jm?.metric?.series?.length > 1) {
const avgs = jm.metric.series.map((jms) => jms.statistics.avg);
mv = round(mean(avgs), 2);
} else if (jm?.metric?.series) {
mv = round(jm.metric.series[0].statistics.avg, 2);
}
}
} else if (fmc.aggregation === "sum") {
// Calculate Sum from jobMetrics: Sum all node averages
const jm = jobMetrics.find((jm) => jm.name === fm && jm.scope === "node");
if (jm?.metric?.series?.length > 1) { // More than 1 node
const avgs = jm.metric.series.map((jms) => jms.statistics.avg);
mv = round(avgs.reduce((a, b) => a + b, 0));
} else if (jm?.metric?.series) {
mv = round(jm.metric.series[0].statistics.avg, 2);
}
} else {
console.warn(
"Missing or unkown aggregation mode (sum/avg) for metric:",
metricConfig,
);
}
// Define basic data
const fmBase = { const fmBase = {
name: fm, name: jf.name + ' (' + jf.stat + ')',
avg: jf.value,
unit: unit, unit: unit,
avg: mv,
max: fmt.peak, max: fmt.peak,
dir: fmc.lowerIsBetter
}; };
if (evalFootprint(fm, mv, fmt, "alert")) { if (evalFootprint(jf.value, fmt, fmc.lowerIsBetter, "alert")) {
return { return {
...fmBase, ...fmBase,
color: "danger", color: "danger",
message: `Metric average way ${fm === "mem_used" ? "above" : "below"} expected normal thresholds.`, message: `Metric average way ${fmc.lowerIsBetter ? "above" : "below"} expected normal thresholds.`,
impact: 3, impact: 3
}; };
} else if (evalFootprint(fm, mv, fmt, "caution")) { } else if (evalFootprint(jf.value, fmt, fmc.lowerIsBetter, "caution")) {
return { return {
...fmBase, ...fmBase,
color: "warning", color: "warning",
message: `Metric average ${fm === "mem_used" ? "above" : "below"} expected normal thresholds.`, message: `Metric average ${fmc.lowerIsBetter ? "above" : "below"} expected normal thresholds.`,
impact: 2, impact: 2,
}; };
} else if (evalFootprint(fm, mv, fmt, "normal")) { } else if (evalFootprint(jf.value, fmt, fmc.lowerIsBetter, "normal")) {
return { return {
...fmBase, ...fmBase,
color: "success", color: "success",
message: "Metric average within expected thresholds.", message: "Metric average within expected thresholds.",
impact: 1, impact: 1,
}; };
} else if (evalFootprint(fm, mv, fmt, "peak")) { } else if (evalFootprint(jf.value, fmt, fmc.lowerIsBetter, "peak")) {
return { return {
...fmBase, ...fmBase,
color: "info", color: "info",
@ -176,23 +114,23 @@
} }
}); });
function evalFootprint(metric, mean, thresholds, level) { function evalFootprint(mean, thresholds, lowerIsBetter, level) {
// mem_used has inverse logic regarding threshold levels, notify levels triggered if mean > threshold // Handle Metrics in which less value is better
switch (level) { switch (level) {
case "peak": case "peak":
if (metric === "mem_used") if (lowerIsBetter)
return false; // mem_used over peak -> return false to trigger impact -1 return false; // metric over peak -> return false to trigger impact -1
else return mean <= thresholds.peak && mean > thresholds.normal; else return mean <= thresholds.peak && mean > thresholds.normal;
case "alert": case "alert":
if (metric === "mem_used") if (lowerIsBetter)
return mean <= thresholds.peak && mean >= thresholds.alert; return mean <= thresholds.peak && mean >= thresholds.alert;
else return mean <= thresholds.alert && mean >= 0; else return mean <= thresholds.alert && mean >= 0;
case "caution": case "caution":
if (metric === "mem_used") if (lowerIsBetter)
return mean < thresholds.alert && mean >= thresholds.caution; return mean < thresholds.alert && mean >= thresholds.caution;
else return mean <= thresholds.caution && mean > thresholds.alert; else return mean <= thresholds.caution && mean > thresholds.alert;
case "normal": case "normal":
if (metric === "mem_used") if (lowerIsBetter)
return mean < thresholds.caution && mean >= 0; return mean < thresholds.caution && mean >= 0;
else return mean <= thresholds.normal && mean > thresholds.caution; else return mean <= thresholds.normal && mean > thresholds.caution;
default: default:
@ -201,7 +139,7 @@
} }
</script> </script>
<Card class="h-auto mt-1" style="width: {width}px;"> <Card class="mt-1 overflow-auto" style="width: {width}; height: {height}">
{#if view === "job"} {#if view === "job"}
<CardHeader> <CardHeader>
<CardTitle class="mb-0 d-flex justify-content-center"> <CardTitle class="mb-0 d-flex justify-content-center">
@ -250,9 +188,21 @@
offset={[0, 20]}>{fpd.message}</Tooltip offset={[0, 20]}>{fpd.message}</Tooltip
> >
</div> </div>
<div class="mb-2"> <Row cols={12} class="{(footprintData.length == (index + 1)) ? 'mb-0' : 'mb-2'}">
{#if fpd.dir}
<Col xs="1">
<Icon name="caret-left-fill" />
</Col>
{/if}
<Col xs="11" class="align-content-center">
<Progress value={fpd.avg} max={fpd.max} color={fpd.color} /> <Progress value={fpd.avg} max={fpd.max} color={fpd.color} />
</div> </Col>
{#if !fpd.dir}
<Col xs="1">
<Icon name="caret-right-fill" />
</Col>
{/if}
</Row>
{/each} {/each}
{#if job?.metaData?.message} {#if job?.metaData?.message}
<hr class="mt-1 mb-2" /> <hr class="mt-1 mb-2" />

View File

@ -27,7 +27,7 @@
let filterComponent; // see why here: https://stackoverflow.com/questions/58287729/how-can-i-export-a-function-from-a-svelte-component-that-changes-a-value-in-the let filterComponent; // see why here: https://stackoverflow.com/questions/58287729/how-can-i-export-a-function-from-a-svelte-component-that-changes-a-value-in-the
let jobList, let jobList,
matchedJobs = null; matchedJobs = null;
let sorting = { field: "startTime", order: "DESC" }, let sorting = { field: "startTime", type: "col", order: "DESC" },
isSortingOpen = false, isSortingOpen = false,
isMetricsSelectionOpen = false; isMetricsSelectionOpen = false;
let metrics = filterPresets.cluster let metrics = filterPresets.cluster

View File

@ -1,5 +1,5 @@
<script> <script>
import { getContext, createEventDispatcher } from "svelte"; import { createEventDispatcher } from "svelte";
import Timeseries from "./plots/MetricPlot.svelte"; import Timeseries from "./plots/MetricPlot.svelte";
import { import {
InputGroup, InputGroup,
@ -7,25 +7,19 @@
Spinner, Spinner,
Card, Card,
} from "@sveltestrap/sveltestrap"; } from "@sveltestrap/sveltestrap";
import { fetchMetrics, minScope } from "./utils"; import { minScope } from "./utils";
export let job; export let job;
export let metricName; export let metricName;
export let metricUnit;
export let nativeScope;
export let scopes; export let scopes;
export let width; export let width;
export let rawData; export let rawData;
export let isShared = false; export let isShared = false;
const dispatch = createEventDispatcher(); const dispatch = createEventDispatcher();
const cluster = getContext("clusters").find( const unit = (metricUnit?.prefix ? metricUnit.prefix : "") + (metricUnit?.base ? metricUnit.base : "")
(cluster) => cluster.name == job.cluster,
);
const subCluster = cluster.subClusters.find(
(subCluster) => subCluster.name == job.subCluster,
);
const metricConfig = cluster.metricConfig.find(
(metricConfig) => metricConfig.name == metricName,
);
let selectedHost = null, let selectedHost = null,
plot, plot,
@ -56,37 +50,12 @@
} }
$: if (plot != null) plot.setTimeRange(from, to); $: if (plot != null) plot.setTimeRange(from, to);
$: if (selectedScope == "load-all") dispatch("load-all");
export async function loadMore() {
fetching = true;
let response = await fetchMetrics(job, [metricName], ["core"]);
fetching = false;
if (response.error) {
error = response.error;
return;
}
for (let jm of response.data.jobMetrics) {
if (jm.scope != "node") {
scopes = [...scopes, jm.scope];
rawData.push(jm.metric);
statsSeries = rawData.map((data) => data?.statisticsSeries ? data.statisticsSeries : null)
selectedScope = jm.scope;
selectedScopeIndex = scopes.findIndex((s) => s == jm.scope);
dispatch("more-loaded", jm);
}
}
}
$: if (selectedScope == "load-more") loadMore();
</script> </script>
<InputGroup> <InputGroup>
<InputGroupText style="min-width: 150px;"> <InputGroupText style="min-width: 150px;">
{metricName} ({(metricConfig?.unit?.prefix {metricName} ({unit})
? metricConfig.unit.prefix
: "") + (metricConfig?.unit?.base ? metricConfig.unit.base : "")})
</InputGroupText> </InputGroupText>
<select class="form-select" bind:value={selectedScope}> <select class="form-select" bind:value={selectedScope}>
{#each availableScopes as scope, index} {#each availableScopes as scope, index}
@ -95,8 +64,8 @@
<option value={scope + '-stat'}>stats series ({scope})</option> <option value={scope + '-stat'}>stats series ({scope})</option>
{/if} {/if}
{/each} {/each}
{#if availableScopes.length == 1 && metricConfig?.scope != "node"} {#if availableScopes.length == 1 && nativeScope != "node"}
<option value={"load-more"}>Load more...</option> <option value={"load-all"}>Load all...</option>
{/if} {/if}
</select> </select>
{#if job.resources.length > 1} {#if job.resources.length > 1}
@ -118,8 +87,8 @@
bind:this={plot} bind:this={plot}
{width} {width}
height={300} height={300}
{cluster} cluster={job.cluster}
{subCluster} subCluster={job.subCluster}
timestep={data.timestep} timestep={data.timestep}
scope={selectedScope} scope={selectedScope}
metric={metricName} metric={metricName}
@ -132,8 +101,8 @@
bind:this={plot} bind:this={plot}
{width} {width}
height={300} height={300}
{cluster} cluster={job.cluster}
{subCluster} subCluster={job.subCluster}
timestep={data.timestep} timestep={data.timestep}
scope={selectedScope} scope={selectedScope}
metric={metricName} metric={metricName}

View File

@ -27,8 +27,8 @@
export let showFootprint = false; export let showFootprint = false;
export let view = "job"; export let view = "job";
const clusters = getContext("clusters"), const onInit = getContext("on-init")
onInit = getContext("on-init"); const globalMetrics = getContext("globalMetrics")
let newMetricsOrder = []; let newMetricsOrder = [];
let unorderedMetrics = [...metrics]; let unorderedMetrics = [...metrics];
@ -36,30 +36,34 @@
onInit(() => { onInit(() => {
if (allMetrics == null) allMetrics = new Set(); if (allMetrics == null) allMetrics = new Set();
for (let c of clusters) for (let metric of globalMetrics) allMetrics.add(metric.name);
for (let metric of c.metricConfig) allMetrics.add(metric.name);
}); });
$: { $: {
if (allMetrics != null) { if (allMetrics != null) {
if (cluster == null) { if (cluster == null) {
// console.log('Reset to full metric list') for (let metric of globalMetrics) allMetrics.add(metric.name);
for (let c of clusters)
for (let metric of c.metricConfig) allMetrics.add(metric.name);
} else { } else {
// console.log('Recalculate available metrics for ' + cluster)
allMetrics.clear(); allMetrics.clear();
for (let c of clusters) for (let gm of globalMetrics) {
if (c.name == cluster) if (gm.availability.find((av) => av.cluster === cluster)) allMetrics.add(gm.name);
for (let metric of c.metricConfig) allMetrics.add(metric.name); }
} }
newMetricsOrder = [...allMetrics].filter((m) => !metrics.includes(m)); newMetricsOrder = [...allMetrics].filter((m) => !metrics.includes(m));
newMetricsOrder.unshift(...metrics.filter((m) => allMetrics.has(m))); newMetricsOrder.unshift(...metrics.filter((m) => allMetrics.has(m)));
unorderedMetrics = unorderedMetrics.filter((m) => allMetrics.has(m)); unorderedMetrics = unorderedMetrics.filter((m) => allMetrics.has(m));
} }
} }
function printAvailability(metric, cluster) {
const avail = globalMetrics.find((gm) => gm.name === metric)?.availability
if (cluster == null) {
return avail.map((av) => av.cluster).join(',')
} else {
return avail.find((av) => av.cluster === cluster).subClusters.join(',')
}
}
const client = getContextClient(); const client = getContextClient();
const updateConfigurationMutation = ({ name, value }) => { const updateConfigurationMutation = ({ name, value }) => {
return mutationStore({ return mutationStore({
@ -106,7 +110,6 @@
}).subscribe((res) => { }).subscribe((res) => {
if (res.fetching === false && res.error) { if (res.fetching === false && res.error) {
throw res.error; throw res.error;
// console.log('Error on subscription: ' + res.error)
} }
}); });
@ -118,7 +121,6 @@
value: JSON.stringify(showFootprint), value: JSON.stringify(showFootprint),
}).subscribe((res) => { }).subscribe((res) => {
if (res.fetching === false && res.error) { if (res.fetching === false && res.error) {
console.log("Error on footprint subscription: " + res.error);
throw res.error; throw res.error;
} }
}); });
@ -161,34 +163,7 @@
{/if} {/if}
{metric} {metric}
<span style="float: right;"> <span style="float: right;">
{cluster == null {printAvailability(metric, cluster)}
? clusters // No single cluster specified: List Clusters with Metric
.filter(
(c) => c.metricConfig.find((m) => m.name == metric) != null,
)
.map((c) => c.name)
.join(", ")
: clusters // Single cluster requested: List Subclusters with do not have metric remove flag
.filter((c) => c.name == cluster)
.filter(
(c) => c.metricConfig.find((m) => m.name == metric) != null,
)
.map(function (c) {
let scNames = c.subClusters.map((sc) => sc.name);
scNames.forEach(function (scName) {
let met = c.metricConfig.find((m) => m.name == metric);
let msc = met.subClusters.find(
(msc) => msc.name == scName,
);
if (msc != null) {
if (msc.remove == true) {
scNames = scNames.filter((scn) => scn != msc.name);
}
}
});
return scNames;
})
.join(", ")}
</span> </span>
</li> </li>
{/each} {/each}

View File

@ -29,6 +29,8 @@
from.setMinutes(from.getMinutes() - 30); from.setMinutes(from.getMinutes() - 30);
} }
const initialized = getContext("initialized")
const globalMetrics = getContext("globalMetrics")
const ccconfig = getContext("cc-config"); const ccconfig = getContext("cc-config");
const clusters = getContext("clusters"); const clusters = getContext("clusters");
const client = getContextClient(); const client = getContextClient();
@ -74,15 +76,11 @@
let itemsPerPage = ccconfig.plot_list_jobsPerPage; let itemsPerPage = ccconfig.plot_list_jobsPerPage;
let page = 1; let page = 1;
let paging = { itemsPerPage, page }; let paging = { itemsPerPage, page };
let sorting = { field: "startTime", order: "DESC" }; let sorting = { field: "startTime", type: "col", order: "DESC" };
$: filter = [ $: filter = [
{ cluster: { eq: cluster } }, { cluster: { eq: cluster } },
{ node: { contains: hostname } }, { node: { contains: hostname } },
{ state: ["running"] }, { state: ["running"] },
// {startTime: {
// from: from.toISOString(),
// to: to.toISOString()
// }}
]; ];
const nodeJobsQuery = gql` const nodeJobsQuery = gql`
@ -92,10 +90,6 @@
$paging: PageRequest! $paging: PageRequest!
) { ) {
jobs(filter: $filter, order: $sorting, page: $paging) { jobs(filter: $filter, order: $sorting, page: $paging) {
# items {
# id
# jobId
# }
count count
} }
} }
@ -107,26 +101,16 @@
variables: { paging, sorting, filter }, variables: { paging, sorting, filter },
}); });
let metricUnits = {}; let systemUnits = {};
$: if ($nodeMetricsData.data) { function loadUnits(isInitialized) {
let thisCluster = clusters.find((c) => c.name == cluster); if (!isInitialized) return
if (thisCluster) { const systemMetrics = [...globalMetrics.filter((gm) => gm?.availability.find((av) => av.cluster == cluster))]
for (let metric of thisCluster.metricConfig) { for (let sm of systemMetrics) {
if (metric.unit.prefix || metric.unit.base) { systemUnits[sm.name] = (sm?.unit?.prefix ? sm.unit.prefix : "") + (sm?.unit?.base ? sm.unit.base : "")
metricUnits[metric.name] =
"(" +
(metric.unit.prefix ? metric.unit.prefix : "") +
(metric.unit.base ? metric.unit.base : "") +
")";
} else {
// If no unit defined: Omit Unit Display
metricUnits[metric.name] = "";
}
}
} }
} }
const dateToUnixEpoch = (rfc3339) => Math.floor(Date.parse(rfc3339) / 1000); $: loadUnits($initialized)
</script> </script>
<Row> <Row>
@ -195,7 +179,7 @@
> >
<h4 style="text-align: center; padding-top:15px;"> <h4 style="text-align: center; padding-top:15px;">
{item.name} {item.name}
{metricUnits[item.name]} {systemUnits[item.name] ? "(" + systemUnits[item.name] + ")" : ""}
</h4> </h4>
{#if item.disabled === false && item.metric} {#if item.disabled === false && item.metric}
<MetricPlot <MetricPlot

View File

@ -41,7 +41,6 @@
}).subscribe((res) => { }).subscribe((res) => {
if (res.fetching === false && res.error) { if (res.fetching === false && res.error) {
throw res.error; throw res.error;
// console.log('Error on subscription: ' + res.error)
} }
}); });
} }

View File

@ -74,10 +74,6 @@
return s.dir != "up" ? s1[stat] - s2[stat] : s2[stat] - s1[stat]; return s.dir != "up" ? s1[stat] - s2[stat] : s2[stat] - s1[stat];
}); });
} }
export function moreLoaded(jobMetric) {
jobMetrics = [...jobMetrics, jobMetric];
}
</script> </script>
<Table> <Table>
@ -85,7 +81,6 @@
<tr> <tr>
<th> <th>
<Button outline on:click={() => (isMetricSelectionOpen = true)}> <Button outline on:click={() => (isMetricSelectionOpen = true)}>
<!-- log to click ', console.log(isMetricSelectionOpen)' -->
Metrics Metrics
</Button> </Button>
</th> </th>

View File

@ -146,7 +146,7 @@
`, `,
variables: { variables: {
cluster: cluster, cluster: cluster,
metrics: ["flops_any", "mem_bw"], metrics: ["flops_any", "mem_bw"], // Fixed names for roofline and status bars
from: from.toISOString(), from: from.toISOString(),
to: to.toISOString(), to: to.toISOString(),
filter: [{ state: ["running"] }, { cluster: { eq: cluster } }], filter: [{ state: ["running"] }, { cluster: { eq: cluster } }],

View File

@ -29,9 +29,10 @@
from.setMinutes(from.getMinutes() - 30); from.setMinutes(from.getMinutes() - 30);
} }
const clusters = getContext("clusters"); const initialized = getContext("initialized");
const ccconfig = getContext("cc-config"); const ccconfig = getContext("cc-config");
const metricConfig = getContext("metrics"); const clusters = getContext("clusters");
const globalMetrics = getContext("globalMetrics");
let plotHeight = 300; let plotHeight = 300;
let hostnameFilter = ""; let hostnameFilter = "";
@ -80,24 +81,18 @@
}, },
}); });
let metricUnits = {}; let systemMetrics = [];
$: if ($nodesQuery.data) { let systemUnits = {};
let thisCluster = clusters.find((c) => c.name == cluster); function loadMetrics(isInitialized) {
if (thisCluster) { if (!isInitialized) return
for (let metric of thisCluster.metricConfig) { systemMetrics = [...globalMetrics.filter((gm) => gm?.availability.find((av) => av.cluster == cluster))]
if (metric.unit.prefix || metric.unit.base) { for (let sm of systemMetrics) {
metricUnits[metric.name] = systemUnits[sm.name] = (sm?.unit?.prefix ? sm.unit.prefix : "") + (sm?.unit?.base ? sm.unit.base : "")
"(" +
(metric.unit.prefix ? metric.unit.prefix : "") +
(metric.unit.base ? metric.unit.base : "") +
")";
} else {
// If no unit defined: Omit Unit Display
metricUnits[metric.name] = "";
}
}
} }
} }
$: loadMetrics($initialized)
</script> </script>
<Row> <Row>
@ -123,9 +118,9 @@
<InputGroupText><Icon name="graph-up" /></InputGroupText> <InputGroupText><Icon name="graph-up" /></InputGroupText>
<InputGroupText>Metric</InputGroupText> <InputGroupText>Metric</InputGroupText>
<select class="form-select" bind:value={selectedMetric}> <select class="form-select" bind:value={selectedMetric}>
{#each clusters.find((c) => c.name == cluster).metricConfig as metric} {#each systemMetrics as metric}
<option value={metric.name} <option value={metric.name}
>{metric.name} {metricUnits[metric.name]}</option >{metric.name} {systemUnits[metric.name] ? "("+systemUnits[metric.name]+")" : ""}</option
> >
{/each} {/each}
</select> </select>

View File

@ -107,7 +107,6 @@
addTagToJob(res.data.createTag); addTagToJob(res.data.createTag);
} else if (res.fetching === false && res.error) { } else if (res.fetching === false && res.error) {
throw res.error; throw res.error;
// console.log('Error on subscription: ' + res.error)
} }
}); });
} }
@ -120,7 +119,6 @@
pendingChange = false; pendingChange = false;
} else if (res.fetching === false && res.error) { } else if (res.fetching === false && res.error) {
throw res.error; throw res.error;
// console.log('Error on subscription: ' + res.error)
} }
}); });
} }
@ -134,7 +132,6 @@
pendingChange = false; pendingChange = false;
} else if (res.fetching === false && res.error) { } else if (res.fetching === false && res.error) {
throw res.error; throw res.error;
// console.log('Error on subscription: ' + res.error)
} }
}, },
); );

View File

@ -32,7 +32,7 @@
let filterComponent; // see why here: https://stackoverflow.com/questions/58287729/how-can-i-export-a-function-from-a-svelte-component-that-changes-a-value-in-the let filterComponent; // see why here: https://stackoverflow.com/questions/58287729/how-can-i-export-a-function-from-a-svelte-component-that-changes-a-value-in-the
let jobList; let jobList;
let jobFilters = []; let jobFilters = [];
let sorting = { field: "startTime", order: "DESC" }, let sorting = { field: "startTime", type: "col", order: "DESC" },
isSortingOpen = false; isSortingOpen = false;
let metrics = ccconfig.plot_list_selectedMetrics, let metrics = ccconfig.plot_list_selectedMetrics,
isMetricsSelectionOpen = false; isMetricsSelectionOpen = false;
@ -70,6 +70,7 @@
histMetrics { histMetrics {
metric metric
unit unit
stat
data { data {
min min
max max
@ -245,7 +246,7 @@
usesBins={true} usesBins={true}
{width} {width}
height={250} height={250}
title="Distribution of '{item.metric}' averages" title="Distribution of '{item.metric} ({item.stat})' footprints"
xlabel={`${item.metric} bin maximum ${item?.unit ? `[${item.unit}]` : ``}`} xlabel={`${item.metric} bin maximum ${item?.unit ? `[${item.unit}]` : ``}`}
xunit={item.unit} xunit={item.unit}
ylabel="Number of Jobs" ylabel="Number of Jobs"

View File

@ -1,65 +0,0 @@
<script>
import { Icon, InputGroup, InputGroupText } from "@sveltestrap/sveltestrap";
export let timeseriesPlots;
let windowSize = 100; // Goes from 0 to 100
let windowPosition = 50; // Goes from 0 to 100
function updatePlots() {
let ws = windowSize / (100 * 2),
wp = windowPosition / 100;
let from = wp - ws,
to = wp + ws;
Object.values(timeseriesPlots).forEach((plot) =>
plot.setTimeRange(from, to),
);
}
// Rendering a big job can take a long time, so we
// throttle the rerenders to every 100ms here.
let timeoutId = null;
function requestUpdatePlots() {
if (timeoutId != null) window.cancelAnimationFrame(timeoutId);
timeoutId = window.requestAnimationFrame(() => {
updatePlots();
timeoutId = null;
}, 100);
}
$: requestUpdatePlots(windowSize, windowPosition);
</script>
<div>
<InputGroup>
<InputGroupText>
<Icon name="zoom-in" />
</InputGroupText>
<InputGroupText>
Window Size:
<input
style="margin: 0em 0em 0em 1em"
type="range"
bind:value={windowSize}
min="1"
max="100"
step="1"
/>
<span style="width: 5em;">
({windowSize}%)
</span>
</InputGroupText>
<InputGroupText>
Window Position:
<input
style="margin: 0em 0em 0em 1em"
type="range"
bind:value={windowPosition}
min="0"
max="100"
step="1"
/>
</InputGroupText>
</InputGroup>
</div>

View File

@ -23,7 +23,6 @@
popMessage(text, target, "#048109"); popMessage(text, target, "#048109");
} else { } else {
let text = await res.text(); let text = await res.text();
// console.log(res.statusText)
throw new Error("Response Code " + res.status + "-> " + text); throw new Error("Response Code " + res.status + "-> " + text);
} }
} catch (err) { } catch (err) {

View File

@ -23,7 +23,6 @@
form.reset(); form.reset();
} else { } else {
let text = await res.text(); let text = await res.text();
// console.log(res.statusText)
throw new Error("Response Code " + res.status + "-> " + text); throw new Error("Response Code " + res.status + "-> " + text);
} }
} catch (err) { } catch (err) {

View File

@ -32,7 +32,6 @@
reloadUserList(); reloadUserList();
} else { } else {
let text = await res.text(); let text = await res.text();
// console.log(res.statusText)
throw new Error("Response Code " + res.status + "-> " + text); throw new Error("Response Code " + res.status + "-> " + text);
} }
} catch (err) { } catch (err) {
@ -64,7 +63,6 @@
reloadUserList(); reloadUserList();
} else { } else {
let text = await res.text(); let text = await res.text();
// console.log(res.statusText)
throw new Error("Response Code " + res.status + "-> " + text); throw new Error("Response Code " + res.status + "-> " + text);
} }
} catch (err) { } catch (err) {

View File

@ -34,7 +34,6 @@
reloadUserList(); reloadUserList();
} else { } else {
let text = await res.text(); let text = await res.text();
// console.log(res.statusText)
throw new Error("Response Code " + res.status + "-> " + text); throw new Error("Response Code " + res.status + "-> " + text);
} }
} catch (err) { } catch (err) {
@ -66,7 +65,6 @@
reloadUserList(); reloadUserList();
} else { } else {
let text = await res.text(); let text = await res.text();
// console.log(res.statusText)
throw new Error("Response Code " + res.status + "-> " + text); throw new Error("Response Code " + res.status + "-> " + text);
} }
} catch (err) { } catch (err) {

View File

@ -136,8 +136,8 @@
if (filters.project) if (filters.project)
items.push({ project: { [filters.projectMatch]: filters.project } }); items.push({ project: { [filters.projectMatch]: filters.project } });
if (filters.jobName) items.push({ jobName: { contains: filters.jobName } }); if (filters.jobName) items.push({ jobName: { contains: filters.jobName } });
for (let stat of filters.stats) if (filters.stats.length != 0)
items.push({ [stat.field]: { from: stat.from, to: stat.to } }); items.push({ metricStats: filters.stats.map((st) => { return { metricName: st.field, range: { from: st.from, to: st.to }} }) });
dispatch("update", { filters: items }); dispatch("update", { filters: items });
changeURL(); changeURL();
@ -412,7 +412,6 @@
/> />
<Statistics <Statistics
cluster={filters.cluster}
bind:isOpen={isStatsOpen} bind:isOpen={isStatsOpen}
bind:stats={filters.stats} bind:stats={filters.stats}
on:update={() => update()} on:update={() => update()}

View File

@ -59,7 +59,6 @@
0, 0,
); );
// console.log(header)
let minNumNodes = 1, let minNumNodes = 1,
maxNumNodes = 0, maxNumNodes = 0,
minNumHWThreads = 1, minNumHWThreads = 1,

View File

@ -1,5 +1,6 @@
<script> <script>
import { createEventDispatcher, getContext } from "svelte"; import { createEventDispatcher, getContext } from "svelte";
import { getStatsItems } from "../utils.js";
import { import {
Button, Button,
Modal, Modal,
@ -9,53 +10,26 @@
} from "@sveltestrap/sveltestrap"; } from "@sveltestrap/sveltestrap";
import DoubleRangeSlider from "./DoubleRangeSlider.svelte"; import DoubleRangeSlider from "./DoubleRangeSlider.svelte";
const clusters = getContext("clusters"), const initialized = getContext("initialized"),
initialized = getContext("initialized"),
dispatch = createEventDispatcher(); dispatch = createEventDispatcher();
export let cluster = null;
export let isModified = false; export let isModified = false;
export let isOpen = false; export let isOpen = false;
export let stats = []; export let stats = [];
let statistics = [ let statistics = []
{ function loadRanges(isInitialized) {
field: "flopsAnyAvg", if (!isInitialized) return;
text: "FLOPs (Avg.)", statistics = getStatsItems();
metric: "flops_any", }
from: 0,
to: 0, function resetRanges() {
peak: 0, for (let st of statistics) {
enabled: false, st.enabled = false
}, st.from = 0
{ st.to = st.peak
field: "memBwAvg", }
text: "Mem. Bw. (Avg.)", }
metric: "mem_bw",
from: 0,
to: 0,
peak: 0,
enabled: false,
},
{
field: "loadAvg",
text: "Load (Avg.)",
metric: "cpu_load",
from: 0,
to: 0,
peak: 0,
enabled: false,
},
{
field: "memUsedMax",
text: "Mem. used (Max.)",
metric: "mem_used",
from: 0,
to: 0,
peak: 0,
enabled: false,
},
];
$: isModified = !statistics.every((a) => { $: isModified = !statistics.every((a) => {
let b = stats.find((s) => s.field == a.field); let b = stats.find((s) => s.field == a.field);
@ -64,35 +38,8 @@
return a.from == b.from && a.to == b.to; return a.from == b.from && a.to == b.to;
}); });
function getPeak(cluster, metric) { $: loadRanges($initialized);
const mc = cluster.metricConfig.find((mc) => mc.name == metric);
return mc ? mc.peak : 0;
}
function resetRange(isInitialized, cluster) {
if (!isInitialized) return;
if (cluster != null) {
let c = clusters.find((c) => c.name == cluster);
for (let stat of statistics) {
stat.peak = getPeak(c, stat.metric);
stat.from = 0;
stat.to = stat.peak;
}
} else {
for (let stat of statistics) {
for (let c of clusters) {
stat.peak = Math.max(stat.peak, getPeak(c, stat.metric));
}
stat.from = 0;
stat.to = stat.peak;
}
}
statistics = [...statistics];
}
$: resetRange($initialized, cluster);
</script> </script>
<Modal {isOpen} toggle={() => (isOpen = !isOpen)}> <Modal {isOpen} toggle={() => (isOpen = !isOpen)}>
@ -126,8 +73,7 @@
color="danger" color="danger"
on:click={() => { on:click={() => {
isOpen = false; isOpen = false;
resetRange($initialized, cluster); resetRanges();
statistics.forEach((stat) => (stat.enabled = false));
stats = []; stats = [];
dispatch("update", { stats }); dispatch("update", { stats });
}}>Reset</Button }}>Reset</Button

View File

@ -3,7 +3,7 @@
Properties: Properties:
- metrics: [String] (can change from outside) - metrics: [String] (can change from outside)
- sorting: { field: String, order: "DESC" | "ASC" } (can change from outside) - sorting: { field: String, type: String, order: "DESC" | "ASC" } (can change from outside)
- matchedJobs: Number (changes from inside) - matchedJobs: Number (changes from inside)
Functions: Functions:
- update(filters?: [JobFilter]) - update(filters?: [JobFilter])
@ -22,10 +22,10 @@
import { stickyHeader } from "../utils.js"; import { stickyHeader } from "../utils.js";
const ccconfig = getContext("cc-config"), const ccconfig = getContext("cc-config"),
clusters = getContext("clusters"), initialized = getContext("initialized"),
initialized = getContext("initialized"); globalMetrics = getContext("globalMetrics");
export let sorting = { field: "startTime", order: "DESC" }; export let sorting = { field: "startTime", type: "col", order: "DESC" };
export let matchedJobs = 0; export let matchedJobs = 0;
export let metrics = ccconfig.plot_list_selectedMetrics; export let metrics = ccconfig.plot_list_selectedMetrics;
export let showFootprint; export let showFootprint;
@ -36,6 +36,11 @@
let paging = { itemsPerPage, page }; let paging = { itemsPerPage, page };
let filter = []; let filter = [];
function getUnit(m) {
const rawUnit = globalMetrics.find((gm) => gm.name === m)?.unit
return (rawUnit?.prefix ? rawUnit.prefix : "") + (rawUnit?.base ? rawUnit.base : "")
}
const client = getContextClient(); const client = getContextClient();
const query = gql` const query = gql`
query ( query (
@ -75,7 +80,11 @@
name name
} }
metaData metaData
footprint footprint {
name
stat
value
}
} }
count count
hasNextPage hasNextPage
@ -141,7 +150,6 @@
paging = { itemsPerPage: value, page: page }; // Trigger reload of jobList paging = { itemsPerPage: value, page: page }; // Trigger reload of jobList
} else if (res.fetching === false && res.error) { } else if (res.fetching === false && res.error) {
throw res.error; throw res.error;
// console.log('Error on subscription: ' + res.error)
} }
}); });
} }
@ -215,22 +223,7 @@
> >
{metric} {metric}
{#if $initialized} {#if $initialized}
({clusters ({getUnit(metric)})
.map((cluster) =>
cluster.metricConfig.find((m) => m.name == metric),
)
.filter((m) => m != null)
.map(
(m) =>
(m.unit?.prefix ? m.unit?.prefix : "") +
(m.unit?.base ? m.unit?.base : ""),
) // Build unitStr
.reduce(
(arr, unitStr) =>
arr.includes(unitStr) ? arr : [...arr, unitStr],
[],
) // w/o this, output would be [unitStr, unitStr]
.join(", ")})
{/if} {/if}
</th> </th>
{/each} {/each}

View File

@ -30,16 +30,11 @@
: ["core"] : ["core"]
: ["node"]; : ["node"];
function distinct(value, index, array) {
return array.indexOf(value) === index;
}
const cluster = getContext("clusters").find((c) => c.name == job.cluster); const cluster = getContext("clusters").find((c) => c.name == job.cluster);
const metricConfig = getContext("metrics"); // Get all MetricConfs which include subCluster-specific settings for this job
const client = getContextClient(); const client = getContextClient();
const query = gql` const query = gql`
query ($id: ID!, $queryMetrics: [String!]!, $scopes: [MetricScope!]!) { query ($id: ID!, $metrics: [String!]!, $scopes: [MetricScope!]!) {
jobMetrics(id: $id, metrics: $queryMetrics, scopes: $scopes) { jobMetrics(id: $id, metrics: $metrics, scopes: $scopes) {
name name
scope scope
metric { metric {
@ -71,34 +66,14 @@
$: metricsQuery = queryStore({ $: metricsQuery = queryStore({
client: client, client: client,
query: query, query: query,
variables: { id, queryMetrics, scopes }, variables: { id, metrics, scopes },
}); });
let queryMetrics = null;
$: if (showFootprint) {
queryMetrics = [
"cpu_load",
"flops_any",
"mem_used",
"mem_bw",
"acc_utilization",
...metrics,
].filter(distinct);
scopes = ["node"];
} else {
queryMetrics = [...metrics];
scopes = job.numNodes == 1
? job.numAcc >= 1
? ["core", "accelerator"]
: ["core"]
: ["node"];
}
export function refresh() { export function refresh() {
metricsQuery = queryStore({ metricsQuery = queryStore({
client: client, client: client,
query: query, query: query,
variables: { id, queryMetrics, scopes }, variables: { id, metrics, scopes },
// requestPolicy: 'network-only' // use default cache-first for refresh // requestPolicy: 'network-only' // use default cache-first for refresh
}); });
} }
@ -166,8 +141,8 @@
<td> <td>
<JobFootprint <JobFootprint
{job} {job}
jobMetrics={$metricsQuery.data.jobMetrics}
width={plotWidth} width={plotWidth}
height="{plotHeight}px"
view="list" view="list"
/> />
</td> </td>

View File

@ -17,24 +17,39 @@
ModalHeader, ModalHeader,
ModalFooter, ModalFooter,
} from "@sveltestrap/sveltestrap"; } from "@sveltestrap/sveltestrap";
import { getContext } from "svelte";
import { getSortItems } from "../utils.js";
export let isOpen = false; export let isOpen = false;
export let sorting = { field: "startTime", order: "DESC" }; export let sorting = { field: "startTime", type: "col", order: "DESC" };
let sortableColumns = [ let sortableColumns = [];
{ field: "startTime", text: "Start Time", order: "DESC" }, let activeColumnIdx;
{ field: "duration", text: "Duration", order: "DESC" },
{ field: "numNodes", text: "Number of Nodes", order: "DESC" },
{ field: "memUsedMax", text: "Max. Memory Used", order: "DESC" },
{ field: "flopsAnyAvg", text: "Avg. FLOPs", order: "DESC" },
{ field: "memBwAvg", text: "Avg. Memory Bandwidth", order: "DESC" },
{ field: "netBwAvg", text: "Avg. Network Bandwidth", order: "DESC" },
];
let activeColumnIdx = sortableColumns.findIndex( const initialized = getContext("initialized");
function loadSortables(isInitialized) {
if (!isInitialized) return;
sortableColumns = [
{ field: "startTime", type: "col", text: "Start Time", order: "DESC" },
{ field: "duration", type: "col", text: "Duration", order: "DESC" },
{ field: "numNodes", type: "col", text: "Number of Nodes", order: "DESC" },
{ field: "numHwthreads", type: "col", text: "Number of HWThreads", order: "DESC" },
{ field: "numAcc", type: "col", text: "Number of Accelerators", order: "DESC" },
...getSortItems()
]
}
function loadActiveIndex(isInitialized) {
if (!isInitialized) return;
activeColumnIdx = sortableColumns.findIndex(
(col) => col.field == sorting.field, (col) => col.field == sorting.field,
); );
sortableColumns[activeColumnIdx].order = sorting.order; sortableColumns[activeColumnIdx].order = sorting.order;
}
$: loadSortables($initialized);
$: loadActiveIndex($initialized)
</script> </script>
<Modal <Modal
@ -62,7 +77,7 @@
sortableColumns[i] = { ...sortableColumns[i] }; sortableColumns[i] = { ...sortableColumns[i] };
activeColumnIdx = i; activeColumnIdx = i;
sortableColumns = [...sortableColumns]; sortableColumns = [...sortableColumns];
sorting = { field: col.field, order: col.order }; sorting = { field: col.field, type: col.type, order: col.order };
}} }}
> >
<Icon <Icon

View File

@ -1,5 +1,5 @@
<script context="module"> <script context="module">
export function formatTime(t, forNode = false) { function formatTime(t, forNode = false) {
if (t !== null) { if (t !== null) {
if (isNaN(t)) { if (isNaN(t)) {
return t; return t;
@ -15,7 +15,7 @@
} }
} }
export function timeIncrs(timestep, maxX, forNode) { function timeIncrs(timestep, maxX, forNode) {
if (forNode === true) { if (forNode === true) {
return [60, 300, 900, 1800, 3600, 7200, 14400, 21600]; // forNode fixed increments return [60, 300, 900, 1800, 3600, 7200, 14400, 21600]; // forNode fixed increments
} else { } else {
@ -27,94 +27,64 @@
} }
} }
export function findThresholds( // removed arg "subcluster": input metricconfig and topology now directly derived from subcluster
function findThresholds(
subClusterTopology,
metricConfig, metricConfig,
scope, scope,
subCluster,
isShared, isShared,
numhwthreads, numhwthreads,
numaccs numaccs
) { ) {
// console.log('NAME ' + metricConfig.name + ' / SCOPE ' + scope + ' / SUBCLUSTER ' + subCluster.name)
if (!metricConfig || !scope || !subCluster) { if (!subClusterTopology || !metricConfig || !scope) {
console.warn("Argument missing for findThresholds!"); console.warn("Argument missing for findThresholds!");
return null; return null;
} }
if ( if (
(scope == "node" && isShared == false) || (scope == "node" && isShared == false) ||
metricConfig.aggregation == "avg" metricConfig?.aggregation == "avg"
) { ) {
if (metricConfig.subClusters && metricConfig.subClusters.length === 0) {
// console.log('subClusterConfigs array empty, use metricConfig defaults')
return { return {
normal: metricConfig.normal, normal: metricConfig.normal,
caution: metricConfig.caution, caution: metricConfig.caution,
alert: metricConfig.alert, alert: metricConfig.alert,
peak: metricConfig.peak, peak: metricConfig.peak,
}; };
} else if (
metricConfig.subClusters &&
metricConfig.subClusters.length > 0
) {
// console.log('subClusterConfigs found, use subCluster Settings if matching jobs subcluster:')
let forSubCluster = metricConfig.subClusters.find(
(sc) => sc.name == subCluster.name,
);
if (
forSubCluster &&
forSubCluster.normal &&
forSubCluster.caution &&
forSubCluster.alert &&
forSubCluster.peak
)
return forSubCluster;
else
return {
normal: metricConfig.normal,
caution: metricConfig.caution,
alert: metricConfig.alert,
peak: metricConfig.peak,
};
} else {
console.warn("metricConfig.subClusters not found!");
return null;
}
} }
if (metricConfig.aggregation != "sum") {
console.warn(
"Missing or unkown aggregation mode (sum/avg) for metric:",
metricConfig,
);
return null;
}
if (metricConfig?.aggregation == "sum") {
let divisor = 1 let divisor = 1
if (isShared == true) { // Shared if (isShared == true) { // Shared
if (numaccs > 0) divisor = subCluster.topology.accelerators.length / numaccs; if (numaccs > 0) divisor = subClusterTopology.accelerators.length / numaccs;
else if (numhwthreads > 0) divisor = subCluster.topology.node.length / numhwthreads; else if (numhwthreads > 0) divisor = subClusterTopology.node.length / numhwthreads;
} }
else if (scope == 'socket') divisor = subCluster.topology.socket.length; else if (scope == 'socket') divisor = subClusterTopology.socket.length;
else if (scope == "core") divisor = subCluster.topology.core.length; else if (scope == "core") divisor = subClusterTopology.core.length;
else if (scope == "accelerator") else if (scope == "accelerator")
divisor = subCluster.topology.accelerators.length; divisor = subClusterTopology.accelerators.length;
else if (scope == "hwthread") divisor = subCluster.topology.node.length; else if (scope == "hwthread") divisor = subClusterTopology.node.length;
else { else {
// console.log('TODO: how to calc thresholds for ', scope) // console.log('TODO: how to calc thresholds for ', scope)
return null; return null;
} }
let mc =
metricConfig?.subClusters?.find((sc) => sc.name == subCluster.name) ||
metricConfig;
return { return {
peak: mc.peak / divisor, peak: metricConfig.peak / divisor,
normal: mc.normal / divisor, normal: metricConfig.normal / divisor,
caution: mc.caution / divisor, caution: metricConfig.caution / divisor,
alert: mc.alert / divisor, alert: metricConfig.alert / divisor,
}; };
} }
console.warn(
"Missing or unkown aggregation mode (sum/avg) for metric:",
metricConfig,
);
return null;
}
</script> </script>
<!-- <!--
@ -165,7 +135,8 @@
if (useStatsSeries == false && series == null) useStatsSeries = true; if (useStatsSeries == false && series == null) useStatsSeries = true;
const metricConfig = getContext("metrics")(cluster, metric); const subClusterTopology = getContext("getHardwareTopology")(cluster, subCluster);
const metricConfig = getContext("getMetricConfig")(cluster, subCluster, metric);
const clusterCockpitConfig = getContext("cc-config"); const clusterCockpitConfig = getContext("cc-config");
const resizeSleepTime = 250; const resizeSleepTime = 250;
const normalLineColor = "#000000"; const normalLineColor = "#000000";
@ -178,11 +149,9 @@
alert: "rgba(255, 0, 0, 0.3)", alert: "rgba(255, 0, 0, 0.3)",
}; };
const thresholds = findThresholds( const thresholds = findThresholds(
subClusterTopology,
metricConfig, metricConfig,
scope, scope,
typeof subCluster == "string"
? cluster.subClusters.find((sc) => sc.name == subCluster)
: subCluster,
isShared, isShared,
numhwthreads, numhwthreads,
numaccs numaccs
@ -479,8 +448,6 @@
cursor: { drag: { x: true, y: true } }, cursor: { drag: { x: true, y: true } },
}; };
// console.log(opts)
let plotWrapper = null; let plotWrapper = null;
let uplot = null; let uplot = null;
let timeoutId = null; let timeoutId = null;

View File

@ -24,10 +24,11 @@
export let metrics export let metrics
export let cluster export let cluster
export let subCluster
export let jobMetrics export let jobMetrics
export let height = 365 export let height = 365
const metricConfig = getContext('metrics') const getMetricConfig = getContext("getMetricConfig")
const labels = metrics.filter(name => { const labels = metrics.filter(name => {
if (!jobMetrics.find(m => m.name == name && m.scope == "node")) { if (!jobMetrics.find(m => m.name == name && m.scope == "node")) {
@ -38,7 +39,7 @@
}) })
const getValuesForStat = (getStat) => labels.map(name => { const getValuesForStat = (getStat) => labels.map(name => {
const peak = metricConfig(cluster, name).peak const peak = getMetricConfig(cluster, subCluster, name).peak
const metric = jobMetrics.find(m => m.name == name && m.scope == "node") const metric = jobMetrics.find(m => m.name == name && m.scope == "node")
const value = getStat(metric.metric) / peak const value = getStat(metric.metric) / peak
return value <= 1. ? value : 1. return value <= 1. ? value : 1.

View File

@ -209,7 +209,6 @@
draw: [ draw: [
(u) => { (u) => {
// draw roofs when cluster set // draw roofs when cluster set
// console.log(u)
if (cluster != null) { if (cluster != null) {
const padding = u._padding; // [top, right, bottom, left] const padding = u._padding; // [top, right, bottom, left]
@ -237,9 +236,6 @@
true, true,
); );
// Debug get zoomLevel from browser
// console.log("Zoom", Math.round(window.devicePixelRatio * 100))
if ( if (
scalarKneeX < scalarKneeX <
width * window.devicePixelRatio - width * window.devicePixelRatio -
@ -323,7 +319,7 @@
}; };
uplot = new uPlot(opts, plotData, plotWrapper); uplot = new uPlot(opts, plotData, plotWrapper);
} else { } else {
console.log("No data for roofline!"); // console.log("No data for roofline!");
} }
} }

View File

@ -31,3 +31,4 @@ export function scaleNumbers(x, y , p = '') {
return Math.abs(rawYValue) >= 1000 ? `${rawXValue.toExponential()} / ${rawYValue.toExponential()}` : `${rawYValue.toString()} / ${rawYValue.toString()}` return Math.abs(rawYValue) >= 1000 ? `${rawXValue.toExponential()} / ${rawYValue.toExponential()}` : `${rawYValue.toString()} / ${rawYValue.toString()}`
} }
// export const dateToUnixEpoch = (rfc3339) => Math.floor(Date.parse(rfc3339) / 1000);

View File

@ -6,7 +6,6 @@ import {
} from "@urql/svelte"; } from "@urql/svelte";
import { setContext, getContext, hasContext, onDestroy, tick } from "svelte"; import { setContext, getContext, hasContext, onDestroy, tick } from "svelte";
import { readable } from "svelte/store"; import { readable } from "svelte/store";
// import { formatNumber } from './units.js'
/* /*
* Call this function only at component initialization time! * Call this function only at component initialization time!
@ -16,7 +15,9 @@ import { readable } from "svelte/store";
* - Creates a readable store 'initialization' which indicates when the values below can be used. * - Creates a readable store 'initialization' which indicates when the values below can be used.
* - Adds 'tags' to the context (list of all tags) * - Adds 'tags' to the context (list of all tags)
* - Adds 'clusters' to the context (object with cluster names as keys) * - Adds 'clusters' to the context (object with cluster names as keys)
* - Adds 'metrics' to the context, a function that takes a cluster and metric name and returns the MetricConfig (or undefined) * - Adds 'globalMetrics' to the context (list of globally available metric infos)
* - Adds 'getMetricConfig' to the context, a function that takes a cluster, subCluster and metric name and returns the MetricConfig (or undefined)
* - Adds 'getHardwareTopology' to the context, a function that takes a cluster nad subCluster and returns the subCluster topology (or undefined)
*/ */
export function init(extraInitQuery = "") { export function init(extraInitQuery = "") {
const jwt = hasContext("jwt") const jwt = hasContext("jwt")
@ -71,11 +72,19 @@ export function init(extraInitQuery = "") {
normal normal
caution caution
alert alert
lowerIsBetter
} }
footprint footprint
} }
} }
tags { id, name, type } tags { id, name, type }
globalMetrics {
name
scope
footprint
unit { base, prefix }
availability { cluster, subClusters }
}
${extraInitQuery} ${extraInitQuery}
}` }`
) )
@ -91,12 +100,13 @@ export function init(extraInitQuery = "") {
}; };
}; };
const tags = [], const tags = []
clusters = []; const clusters = []
const allMetrics = []; const globalMetrics = []
setContext("tags", tags); setContext("tags", tags);
setContext("clusters", clusters); setContext("clusters", clusters);
setContext("allmetrics", allMetrics); setContext("globalMetrics", globalMetrics);
setContext("getMetricConfig", (cluster, subCluster, metric) => { setContext("getMetricConfig", (cluster, subCluster, metric) => {
if (typeof cluster !== "object") if (typeof cluster !== "object")
cluster = clusters.find((c) => c.name == cluster); cluster = clusters.find((c) => c.name == cluster);
@ -106,6 +116,15 @@ export function init(extraInitQuery = "") {
return subCluster.metricConfig.find((m) => m.name == metric); return subCluster.metricConfig.find((m) => m.name == metric);
}); });
setContext("getHardwareTopology", (cluster, subCluster) => {
if (typeof cluster !== "object")
cluster = clusters.find((c) => c.name == cluster);
if (typeof subCluster !== "object")
subCluster = cluster.subClusters.find((sc) => sc.name == subCluster);
return subCluster?.topology;
});
setContext("on-init", (callback) => setContext("on-init", (callback) =>
state.fetching ? subscribers.push(callback) : callback(state) state.fetching ? subscribers.push(callback) : callback(state)
); );
@ -124,32 +143,11 @@ export function init(extraInitQuery = "") {
} }
for (let tag of data.tags) tags.push(tag); for (let tag of data.tags) tags.push(tag);
for (let cluster of data.clusters) clusters.push(cluster);
for (let gm of data.globalMetrics) globalMetrics.push(gm);
let globalmetrics = []; // Unified Sort
for (let cluster of data.clusters) { globalMetrics.sort((a, b) => a.name.localeCompare(b.name))
// Add full info to context object
clusters.push(cluster);
// Build global metric list with availability for joblist metricselect
for (let subcluster of cluster.subClusters) {
for (let scm of subcluster.metricConfig) {
let match = globalmetrics.find((gm) => gm.name == scm.name);
if (match) {
let submatch = match.availability.find((av) => av.cluster == cluster.name);
if (submatch) {
submatch.subclusters.push(subcluster.name)
} else {
match.availability.push({cluster: cluster.name, subclusters: [subcluster.name]})
}
} else {
globalmetrics.push({name: scm.name, availability: [{cluster: cluster.name, subclusters: [subcluster.name]}]});
}
}
}
}
// Add to ctx object
for (let gm of globalmetrics) allMetrics.push(gm);
console.log('All Metrics List', allMetrics);
state.data = data; state.data = data;
tick().then(() => subscribers.forEach((cb) => cb(state))); tick().then(() => subscribers.forEach((cb) => cb(state)));
@ -159,6 +157,7 @@ export function init(extraInitQuery = "") {
query: { subscribe }, query: { subscribe },
tags, tags,
clusters, clusters,
globalMetrics
}; };
} }
@ -171,6 +170,11 @@ function fuzzyMatch(term, string) {
return string.toLowerCase().includes(term); return string.toLowerCase().includes(term);
} }
// Use in filter() function to return only unique values
export function distinct(value, index, array) {
return array.indexOf(value) === index;
}
export function fuzzySearchTags(term, tags) { export function fuzzySearchTags(term, tags) {
if (!tags) return []; if (!tags) return [];
@ -260,56 +264,6 @@ export function minScope(scopes) {
return sm; return sm;
} }
export async function fetchMetrics(job, metrics, scopes) {
if (job.monitoringStatus == 0) return null;
let query = [];
if (metrics != null) {
for (let metric of metrics) {
query.push(`metric=${metric}`);
}
}
if (scopes != null) {
for (let scope of scopes) {
query.push(`scope=${scope}`);
}
}
try {
let res = await fetch(
`/frontend/jobs/metrics/${job.id}${query.length > 0 ? "?" : ""}${query.join(
"&"
)}`
);
if (res.status != 200) {
return { error: { status: res.status, message: await res.text() } };
}
return await res.json();
} catch (e) {
return { error: e };
}
}
export function fetchMetricsStore() {
let set = null;
let prev = { fetching: true, error: null, data: null };
return [
readable(prev, (_set) => {
set = _set;
}),
(job, metrics, scopes) =>
fetchMetrics(job, metrics, scopes).then((res) => {
let next = { fetching: false, error: res.error, data: res.data };
if (prev.data && next.data)
next.data.jobMetrics.push(...prev.data.jobMetrics);
prev = next;
set(next);
}),
];
}
export function stickyHeader(datatableHeaderSelector, updatePading) { export function stickyHeader(datatableHeaderSelector, updatePading) {
const header = document.querySelector("header > nav.navbar"); const header = document.querySelector("header > nav.navbar");
if (!header) return; if (!header) return;
@ -336,22 +290,98 @@ export function stickyHeader(datatableHeaderSelector, updatePading) {
onDestroy(() => document.removeEventListener("scroll", onscroll)); onDestroy(() => document.removeEventListener("scroll", onscroll));
} }
// Outdated: Frontend Will Now Receive final MetricList from backend
export function checkMetricDisabled(m, c, s) { //[m]etric, [c]luster, [s]ubcluster export function checkMetricDisabled(m, c, s) { //[m]etric, [c]luster, [s]ubcluster
const mc = getContext("metrics"); const metrics = getContext("globalMetrics");
const thisConfig = mc(c, m); const result = metrics?.find((gm) => gm.name === m)?.availability?.find((av) => av.cluster === c)?.subClusters?.includes(s)
let thisSCIndex = -1; return !result
if (thisConfig) { }
thisSCIndex = thisConfig.subClusters.findIndex(
(subcluster) => subcluster.name == s export function getStatsItems() {
); // console.time('stats')
// console.log('getStatsItems ...')
const globalMetrics = getContext("globalMetrics")
const result = globalMetrics.map((gm) => {
if (gm?.footprint) {
// Footprint contains suffix naming the used stat-type
// console.time('deep')
// console.log('Deep Config for', gm.name)
const mc = getMetricConfigDeep(gm.name, null, null)
// console.timeEnd('deep')
return {
field: gm.name + '_' + gm.footprint,
text: gm.name + ' (' + gm.footprint + ')',
metric: gm.name,
from: 0,
to: mc.peak,
peak: mc.peak,
enabled: false
}
}
return null
}).filter((r) => r != null)
// console.timeEnd('stats')
return [...result];
}; };
if (thisSCIndex >= 0) {
if (thisConfig.subClusters[thisSCIndex].remove == true) { export function getSortItems() {
return true; //console.time('sort')
//console.log('getSortItems ...')
const globalMetrics = getContext("globalMetrics")
const result = globalMetrics.map((gm) => {
if (gm?.footprint) {
// Footprint contains suffix naming the used stat-type
return {
field: gm.name + '_' + gm.footprint,
type: 'foot',
text: gm.name + ' (' + gm.footprint + ')',
order: 'DESC'
} }
} }
return false; return null
}).filter((r) => r != null)
//console.timeEnd('sort')
return [...result];
};
function getMetricConfigDeep(metric, cluster, subCluster) {
const clusters = getContext("clusters");
if (cluster != null) {
let c = clusters.find((c) => c.name == cluster);
if (subCluster != null) {
let sc = c.subClusters.find((sc) => sc.name == subCluster);
return sc.metricConfig.find((mc) => mc.name == metric)
} else {
let result;
for (let sc of c.subClusters) {
const mc = sc.metricConfig.find((mc) => mc.name == metric)
if (result) { // If lowerIsBetter: Peak is still maximum value, no special case required
result.alert = (mc.alert > result.alert) ? mc.alert : result.alert
result.caution = (mc.caution > result.caution) ? mc.caution : result.caution
result.normal = (mc.normal > result.normal) ? mc.normal : result.normal
result.peak = (mc.peak > result.peak) ? mc.peak : result.peak
} else {
if (mc) result = {...mc};
}
}
return result
}
} else {
let result;
for (let c of clusters) {
for (let sc of c.subClusters) {
const mc = sc.metricConfig.find((mc) => mc.name == metric)
if (result) { // If lowerIsBetter: Peak is still maximum value, no special case required
result.alert = (mc.alert > result.alert) ? mc.alert : result.alert
result.caution = (mc.caution > result.caution) ? mc.caution : result.caution
result.normal = (mc.normal > result.normal) ? mc.normal : result.normal
result.peak = (mc.peak > result.peak) ? mc.peak : result.peak
} else {
if (mc) result = {...mc};
}
}
}
return result
}
} }
export function convert2uplot(canvasData) { export function convert2uplot(canvasData) {
@ -413,14 +443,14 @@ export function binsFromFootprint(weights, scope, values, numBins) {
} }
export function transformDataForRoofline(flopsAny, memBw) { // Uses Metric Objects: {series:[{},{},...], timestep:60, name:$NAME} export function transformDataForRoofline(flopsAny, memBw) { // Uses Metric Objects: {series:[{},{},...], timestep:60, name:$NAME}
const nodes = flopsAny.series.length
const timesteps = flopsAny.series[0].data.length
/* c will contain values from 0 to 1 representing the time */ /* c will contain values from 0 to 1 representing the time */
let data = null let data = null
const x = [], y = [], c = [] const x = [], y = [], c = []
if (flopsAny && memBw) { if (flopsAny && memBw) {
const nodes = flopsAny.series.length
const timesteps = flopsAny.series[0].data.length
for (let i = 0; i < nodes; i++) { for (let i = 0; i < nodes; i++) {
const flopsData = flopsAny.series[i].data const flopsData = flopsAny.series[i].data
const memBwData = memBw.series[i].data const memBwData = memBw.series[i].data
@ -446,7 +476,7 @@ export function transformDataForRoofline(flopsAny, memBw) { // Uses Metric Objec
// Return something to be plotted. The argument shall be the result of the // Return something to be plotted. The argument shall be the result of the
// `nodeMetrics` GraphQL query. // `nodeMetrics` GraphQL query.
// Remove "hardcoded" here or deemed necessary? // Hardcoded metric names required for correct render
export function transformPerNodeDataForRoofline(nodes) { export function transformPerNodeDataForRoofline(nodes) {
let data = null let data = null
const x = [], y = [] const x = [], y = []

View File

@ -9,8 +9,6 @@
<script> <script>
const jobInfos = { const jobInfos = {
id: "{{ .Infos.id }}", id: "{{ .Infos.id }}",
jobId: "{{ .Infos.jobId }}",
clusterId: "{{ .Infos.clusterId }}"
}; };
const clusterCockpitConfig = {{ .Config }}; const clusterCockpitConfig = {{ .Config }};
const authlevel = {{ .User.GetAuthLevel }}; const authlevel = {{ .User.GetAuthLevel }};