From 0c1b66aad9c1c2acc79cc6e31ccc46ccba267f1b Mon Sep 17 00:00:00 2001 From: Christoph Kluge Date: Thu, 30 Mar 2023 15:21:35 +0200 Subject: [PATCH] Adapt svelte to new schema, add removed metric box - Moved 'scope' field to parent jobMetric - Implemented unit { prefix, base } where necessary - SubCluster Metric Config 'remove' option implemented in Joblists --- internal/graph/schema.resolvers.go | 12 +++- internal/metricdata/metricdata.go | 11 ++-- pkg/archive/nodelist.go | 69 ++++++++++++++++++++++ web/frontend/src/Analysis.root.svelte | 43 +++++++++----- web/frontend/src/Job.root.svelte | 11 ++-- web/frontend/src/Metric.svelte | 38 ++++++++---- web/frontend/src/Node.root.svelte | 24 ++++++-- web/frontend/src/StatsTable.svelte | 4 +- web/frontend/src/StatsTableEntry.svelte | 2 +- web/frontend/src/Status.root.svelte | 37 +++++++----- web/frontend/src/Systems.root.svelte | 27 ++++++--- web/frontend/src/User.root.svelte | 12 ++-- web/frontend/src/joblist/JobList.svelte | 8 ++- web/frontend/src/joblist/Row.svelte | 78 +++++++++++++++++++++---- web/frontend/src/plots/Histogram.svelte | 8 +-- web/frontend/src/plots/Polar.svelte | 4 +- web/frontend/src/plots/Roofline.svelte | 20 +++---- web/frontend/src/utils.js | 10 ++-- 18 files changed, 313 insertions(+), 105 deletions(-) diff --git a/internal/graph/schema.resolvers.go b/internal/graph/schema.resolvers.go index 9d25045..e5bfc77 100644 --- a/internal/graph/schema.resolvers.go +++ b/internal/graph/schema.resolvers.go @@ -269,6 +269,7 @@ func (r *queryResolver) NodeMetrics(ctx context.Context, cluster string, nodes [ for _, scopedMetric := range scopedMetrics { host.Metrics = append(host.Metrics, &model.JobMetricWithName{ Name: metric, + Scope: schema.MetricScopeNode, // NodeMetrics allow fixed scope? Metric: scopedMetric, }) } @@ -282,7 +283,16 @@ func (r *queryResolver) NodeMetrics(ctx context.Context, cluster string, nodes [ // NumberOfNodes is the resolver for the numberOfNodes field. func (r *subClusterResolver) NumberOfNodes(ctx context.Context, obj *schema.SubCluster) (int, error) { - panic(fmt.Errorf("not implemented: NumberOfNodes - numberOfNodes")) + nodeList, err := archive.ParseNodeList(obj.Nodes) + if err != nil { + return 0, err + } + // log.Debugf(">>>> See raw list definition here: %v", nodeList) + stringList := nodeList.PrintList() + // log.Debugf(">>>> See parsed list here: %v", stringList) + numOfNodes := len(stringList) + // log.Debugf(">>>> See numOfNodes here: %v", len(stringList)) + return numOfNodes, nil } // Cluster returns generated.ClusterResolver implementation. 
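Note on the NumberOfNodes resolver above: it relies on the node-list helpers (ParseNodeList, PrintList) added to pkg/archive/nodelist.go further down in this patch. A minimal, self-contained usage sketch follows; the bracketed range syntax in the example string and the cc-backend module import path are assumptions for illustration, not taken from this patch.

    package main

    import (
        "fmt"

        "github.com/ClusterCockpit/cc-backend/pkg/archive"
    )

    func main() {
        // Parse a subcluster node-list definition into [prefix, ranges] terms.
        nodeList, err := archive.ParseNodeList("node[01-03]")
        if err != nil {
            panic(err)
        }

        // PrintList expands every term into concrete hostnames; with the
        // two-digit padding used by PrintList this yields [node01 node02 node03].
        hostnames := nodeList.PrintList()

        // NumberOfNodes is then simply the length of the expanded list.
        fmt.Println(len(hostnames), hostnames)
    }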
diff --git a/internal/metricdata/metricdata.go b/internal/metricdata/metricdata.go index 5b618a5..bbad606 100644 --- a/internal/metricdata/metricdata.go +++ b/internal/metricdata/metricdata.go @@ -324,10 +324,13 @@ func ArchiveJob(job *schema.Job, ctx context.Context) (*schema.JobMeta, error) { } jobMeta.Statistics[metric] = schema.JobStatistics{ - Unit: archive.GetMetricConfig(job.Cluster, metric).Unit, - Avg: avg / float64(job.NumNodes), - Min: min, - Max: max, + Unit: schema.Unit{ + Prefix: archive.GetMetricConfig(job.Cluster, metric).Unit.Prefix, + Base: archive.GetMetricConfig(job.Cluster, metric).Unit.Base, + }, + Avg: avg / float64(job.NumNodes), + Min: min, + Max: max, } } diff --git a/pkg/archive/nodelist.go b/pkg/archive/nodelist.go index f621942..81d8a52 100644 --- a/pkg/archive/nodelist.go +++ b/pkg/archive/nodelist.go @@ -14,6 +14,8 @@ import ( type NodeList [][]interface { consume(input string) (next string, ok bool) + limits() []map[string]int64 + prefix() string } func (nl *NodeList) Contains(name string) bool { @@ -35,6 +37,29 @@ func (nl *NodeList) Contains(name string) bool { return false } +func (nl *NodeList) PrintList() []string { + var out []string + for _, term := range *nl { + // log.Debugf("Term: %v", term) + + prefix := term[0].prefix() + // log.Debugf("Prefix as String: %s", prefix) + + limitArr := term[1].limits() + for _, inner := range limitArr { + for i := inner["start"]; i < inner["end"]+1; i++ { + node := fmt.Sprintf("%s%02d", prefix, i) + out = append(out, node) + } + // log.Debugf("Inner Map @ %d: %#v", indx, inner) + // log.Debugf("Start: %#v", inner["start"]) + // log.Debugf("End: %#v", inner["end"]) + } + } + // log.Debugf("Node List as Strings: %#v", out) + return out +} + type NLExprString string func (nle NLExprString) consume(input string) (next string, ok bool) { @@ -45,6 +70,16 @@ func (nle NLExprString) consume(input string) (next string, ok bool) { return "", false } +func (nle NLExprString) limits() []map[string]int64 { + // Null implementation to fulfill interface requirement + l := make([]map[string]int64, 0) + return l +} + +func (nle NLExprString) prefix() string { + return string(nle) +} + type NLExprIntRanges []NLExprIntRange func (nles NLExprIntRanges) consume(input string) (next string, ok bool) { @@ -56,6 +91,22 @@ func (nles NLExprIntRanges) consume(input string) (next string, ok bool) { return "", false } +func (nles NLExprIntRanges) limits() []map[string]int64 { + l := make([]map[string]int64, 0) + for _, nle := range nles { + inner := nle.limits() + // log.Debugf("limits @ nles: %#v", inner) + l = append(l, inner[0]) + } + return l +} + +func (nles NLExprIntRanges) prefix() string { + // Null implementation to fulfill interface requirement + var s string + return s +} + type NLExprIntRange struct { start, end int64 zeroPadded bool @@ -89,6 +140,22 @@ func (nle NLExprIntRange) consume(input string) (next string, ok bool) { return "", false } +func (nle NLExprIntRange) limits() []map[string]int64 { + l := make([]map[string]int64, 0) + m := make(map[string]int64) + m["start"] = nle.start + m["end"] = nle.end + l = append(l, m) + // log.Debugf("limits @ nle: %#v", l) + return l +} + +func (nle NLExprIntRange) prefix() string { + // Null implementation to fulfill interface requirement + var s string + return s +} + func ParseNodeList(raw string) (NodeList, error) { isLetter := func(r byte) bool { return ('a' <= r && r <= 'z') || ('A' <= r && r <= 'Z') } isDigit := func(r byte) bool { return '0' <= r && r <= '9' } @@ -116,6 +183,8 
@@ func ParseNodeList(raw string) (NodeList, error) { for _, rawterm := range rawterms { exprs := []interface { consume(input string) (next string, ok bool) + limits() []map[string]int64 + prefix() string }{} for i := 0; i < len(rawterm); i++ { c := rawterm[i] diff --git a/web/frontend/src/Analysis.root.svelte b/web/frontend/src/Analysis.root.svelte index a92aea7..4d446a5 100644 --- a/web/frontend/src/Analysis.root.svelte +++ b/web/frontend/src/Analysis.root.svelte @@ -30,8 +30,8 @@ let rooflineMaxY let colWidth let numBins = 50 - const ccconfig = getContext('cc-config'), - metricConfig = getContext('metrics') + const ccconfig = getContext('cc-config') + const metricConfig = getContext('metrics') let metricsInHistograms = ccconfig.analysis_view_histogramMetrics, metricsInScatterplots = ccconfig.analysis_view_scatterPlotMetrics @@ -161,24 +161,29 @@ b.count - a.count).map(({ count }, idx) => ({ count, value: idx }))} - label={(x) => x < $statsQuery.data.topUsers.length ? $statsQuery.data.topUsers[Math.floor(x)].name : '0'} /> + label={(x) => x < $statsQuery.data.topUsers.length ? $statsQuery.data.topUsers[Math.floor(x)].name : 'No Users'} + ylabel="Node Hours [h]"/> {/key}
{#key $statsQuery.data.stats[0].histDuration} -

Walltime Distribution

+

Duration Distribution

+ width={colWidth - 25} + data={$statsQuery.data.stats[0].histDuration} + xlabel="Current Runtimes [h]" + ylabel="Number of Jobs"/> {/key}
{#key $statsQuery.data.stats[0].histNumNodes}

Number of Nodes Distribution

+ width={colWidth - 25} + data={$statsQuery.data.stats[0].histNumNodes} + xlabel="Allocated Nodes [#]" + ylabel="Number of Jobs" /> {/key}
@@ -189,7 +194,7 @@ {:else if $rooflineQuery.data && cluster} {#key $rooflineQuery.data} @@ -224,12 +229,16 @@ $footprintsQuery.data.footprints.nodehours, $footprintsQuery.data.footprints.metrics.find(f => f.metric == metric).data, numBins) }))} itemsPerRow={ccconfig.plot_view_plotsPerRow}> -

{item.metric} [{metricConfig(cluster.name, item.metric)?.unit}]

+

Average Distribution of '{item.metric}'

+ data={item.bins} + label={item.label} + xlabel={`${item.metric} Average [${(metricConfig(cluster.name, item.metric)?.unit?.prefix ? metricConfig(cluster.name, item.metric)?.unit?.prefix : '') + + (metricConfig(cluster.name, item.metric)?.unit?.base ? metricConfig(cluster.name, item.metric)?.unit?.base : '')}]`} + ylabel="Node Hours [h]" /> @@ -254,12 +263,18 @@ {/if} - + diff --git a/web/frontend/src/Job.root.svelte b/web/frontend/src/Job.root.svelte index 9ff68ba..efccadf 100644 --- a/web/frontend/src/Job.root.svelte +++ b/web/frontend/src/Job.root.svelte @@ -81,7 +81,7 @@ missingMetrics = metricNames.filter(metric => !metrics.some(jm => jm.name == metric)) missingHosts = job.resources.map(({ hostname }) => ({ hostname: hostname, - metrics: metricNames.filter(metric => !metrics.some(jm => jm.metric.scope == 'node' && jm.metric.series.some(series => series.hostname == hostname))) + metrics: metricNames.filter(metric => !metrics.some(jm => jm.scope == 'node' && jm.metric.series.some(series => series.hostname == hostname))) })).filter(({ metrics }) => metrics.length > 0) somethingMissing = missingMetrics.length > 0 || missingHosts.length > 0 } @@ -114,8 +114,8 @@ cluster={clusters .find(c => c.name == $initq.data.job.cluster).subClusters .find(sc => sc.name == $initq.data.job.subCluster)} - flopsAny={$jobMetrics.data.jobMetrics.find(m => m.name == 'flops_any' && m.metric.scope == 'node').metric} - memBw={$jobMetrics.data.jobMetrics.find(m => m.name == 'mem_bw' && m.metric.scope == 'node').metric} /> + flopsAny={$jobMetrics.data.jobMetrics.find(m => m.name == 'flops_any' && m.scope == 'node').metric} + memBw={$jobMetrics.data.jobMetrics.find(m => m.name == 'mem_bw' && m.scope == 'node').metric} /> {:else} @@ -163,8 +163,9 @@ bind:this={plots[item.metric]} on:more-loaded={({ detail }) => statsTable.moreLoaded(detail)} job={$initq.data.job} - metric={item.metric} - scopes={item.data.map(x => x.metric)} + metricName={item.metric} + rawData={item.data.map(x => x.metric)} + scopes={item.data.map(x => x.scope)} width={width}/> {:else} No data for {item.metric} diff --git a/web/frontend/src/Metric.svelte b/web/frontend/src/Metric.svelte index f414827..5b437f7 100644 --- a/web/frontend/src/Metric.svelte +++ b/web/frontend/src/Metric.svelte @@ -5,21 +5,36 @@ import { fetchMetrics, minScope } from './utils' export let job - export let metric + export let metricName export let scopes export let width + export let rawData const dispatch = createEventDispatcher() const cluster = getContext('clusters').find(cluster => cluster.name == job.cluster) const subCluster = cluster.subClusters.find(subCluster => subCluster.name == job.subCluster) - const metricConfig = cluster.metricConfig.find(metricConfig => metricConfig.name == metric) + const metricConfig = cluster.metricConfig.find(metricConfig => metricConfig.name == metricName) + + let selectedHost = null, plot, fetching = false, error = null + let selectedScope = minScope(scopes) + let selectedScopeIndex = scopes.findIndex(s => s == selectedScope) + + // console.log('- Inputs -') + // console.log(metricName) + // console.log(scopes) + // console.log(rawData) + // console.log('- Prep Scopes -') + // console.log(selectedScope) + // console.log(selectedScopeIndex) - let selectedScope = minScope(scopes.map(s => s.scope)), selectedHost = null, plot, fetching = false, error = null - - $: avaliableScopes = scopes.map(metric => metric.scope) - $: data = scopes.find(metric => metric.scope == selectedScope) + $: avaliableScopes = scopes + $: data = 
rawData[selectedScopeIndex] $: series = data?.series.filter(series => selectedHost == null || series.hostname == selectedHost) + // console.log('- Prep Data -') + // console.log(rawData[selectedScopeIndex]) + // console.log(rawData[selectedScopeIndex].series.filter(series => selectedHost == null || series.hostname == selectedHost)) + let from = null, to = null export function setTimeRange(f, t) { from = f, to = t @@ -29,7 +44,7 @@ export async function loadMore() { fetching = true - let response = await fetchMetrics(job, [metric], ["core"]) + let response = await fetchMetrics(job, [metricName], ["core"]) fetching = false if (response.error) { @@ -38,9 +53,9 @@ } for (let jm of response.data.jobMetrics) { - if (jm.metric.scope != "node") { + if (jm.scope != "node") { scopes.push(jm.metric) - selectedScope = jm.metric.scope + selectedScope = jm.scope dispatch('more-loaded', jm) if (!avaliableScopes.includes(selectedScope)) avaliableScopes = [...avaliableScopes, selectedScope] @@ -52,7 +67,8 @@ - {metric} ({metricConfig?.unit}) + {metricName} ({(metricConfig?.unit?.prefix ? metricConfig.unit.prefix : '') + + (metricConfig?.unit?.base ? metricConfig.unit.base : '')}) {#each clusters.find(c => c.name == cluster).metricConfig as metric} - + {/each} @@ -98,8 +111,8 @@ let:width itemsPerRow={ccconfig.plot_view_plotsPerRow} items={$nodesQuery.data.nodeMetrics - .filter(h => h.host.includes(hostnameFilter) && h.metrics.some(m => m.name == selectedMetric && m.metric.scope == 'node')) - .map(h => ({ host: h.host, subCluster: h.subCluster, data: h.metrics.find(m => m.name == selectedMetric && m.metric.scope == 'node') })) + .filter(h => h.host.includes(hostnameFilter) && h.metrics.some(m => m.name == selectedMetric && m.scope == 'node')) + .map(h => ({ host: h.host, subCluster: h.subCluster, data: h.metrics.find(m => m.name == selectedMetric && m.scope == 'node') })) .sort((a, b) => a.host.localeCompare(b.host))}>

{item.host} ({item.subCluster})

diff --git a/web/frontend/src/User.root.svelte b/web/frontend/src/User.root.svelte index 5a8d14d..652db6d 100644 --- a/web/frontend/src/User.root.svelte +++ b/web/frontend/src/User.root.svelte @@ -136,19 +136,23 @@
- Walltime + Duration Distribution {#key $stats.data.jobsStatistics[0].histDuration} + width={w1 - 25} height={histogramHeight} + xlabel="Current Runtimes [h]" + ylabel="Number of Jobs"/> {/key}
- Number of Nodes + Number of Nodes Distribution {#key $stats.data.jobsStatistics[0].histNumNodes} + width={w2 - 25} height={histogramHeight} + xlabel="Allocated Nodes [#]" + ylabel="Number of Jobs" /> {/key}
{/if} diff --git a/web/frontend/src/joblist/JobList.svelte b/web/frontend/src/joblist/JobList.svelte index 8cdca26..587562c 100644 --- a/web/frontend/src/joblist/JobList.svelte +++ b/web/frontend/src/joblist/JobList.svelte @@ -101,9 +101,11 @@ {#if $initialized} ({clusters .map(cluster => cluster.metricConfig.find(m => m.name == metric)) - .filter(m => m != null).map(m => m.unit) - .reduce((arr, unit) => arr.includes(unit) ? arr : [...arr, unit], []) - .join(', ')}) + .filter(m => m != null) + .map(m => (m.unit?.prefix?m.unit?.prefix:'') + (m.unit?.base?m.unit?.base:'')) // Build unitStr + .reduce((arr, unitStr) => arr.includes(unitStr) ? arr : [...arr, unitStr], []) // w/o this, output would be [unitStr, unitStr] + .join(', ') + }) {/if} {/each} diff --git a/web/frontend/src/joblist/Row.svelte b/web/frontend/src/joblist/Row.svelte index b3a3655..eec056d 100644 --- a/web/frontend/src/joblist/Row.svelte +++ b/web/frontend/src/joblist/Row.svelte @@ -24,12 +24,14 @@ let scopes = [job.numNodes == 1 ? 'core' : 'node'] const cluster = getContext('clusters').find(c => c.name == job.cluster) - + // Get all MetricConfs which include subCluster-specific settings for this job + const metricConfig = getContext('metrics') const metricsQuery = operationStore(`query($id: ID!, $metrics: [String!]!, $scopes: [MetricScope!]!) { jobMetrics(id: $id, metrics: $metrics, scopes: $scopes) { name + scope metric { - unit, scope, timestep + unit { prefix, base }, timestep statisticsSeries { min, mean, max } series { hostname, id, data @@ -44,13 +46,64 @@ }) const selectScope = (jobMetrics) => jobMetrics.reduce( - (a, b) => maxScope([a.metric.scope, b.metric.scope]) == a.metric.scope + (a, b) => maxScope([a.scope, b.scope]) == a.scope ? (job.numNodes > 1 ? a : b) : (job.numNodes > 1 ? b : a), jobMetrics[0]) const sortAndSelectScope = (jobMetrics) => metrics - .map(name => jobMetrics.filter(jobMetric => jobMetric.name == name)) - .map(jobMetrics => jobMetrics.length > 0 ? 
selectScope(jobMetrics) : null) + .map(function(name) { + // Get MetricConf for this selected/requested metric + let thisConfig = metricConfig(cluster, name) + let thisSCIndex = thisConfig.subClusters.findIndex(sc => sc.name == job.subCluster) + // Check if Subcluster has MetricConf: If not found (index == -1), no further remove flag check required + if (thisSCIndex >= 0) { + // SubCluster Config present: Check if remove flag is set + if (thisConfig.subClusters[thisSCIndex].remove == true) { + // Return null data and informational flag + // console.log('Case 1.1 -> Returned') + // console.log({removed: true, data: null}) + return {removed: true, data: null} + } else { + // load and return metric, if data available + let thisMetric = jobMetrics.filter(jobMetric => jobMetric.name == name) // Returns Array + if (thisMetric.length > 0) { + // console.log('Case 1.2.1 -> Returned') + // console.log({removed: false, data: thisMetric}) + return {removed: false, data: thisMetric} + } else { + // console.log('Case 1.2.2 -> Returned:') + // console.log({removed: false, data: null}) + return {removed: false, data: null} + } + } + } else { + // No specific subCluster config: 'remove' flag not set, deemed false -> load and return metric, if data available + let thisMetric = jobMetrics.filter(jobMetric => jobMetric.name == name) // Returns Array + if (thisMetric.length > 0) { + // console.log('Case 2.1 -> Returned') + // console.log({removed: false, data: thisMetric}) + return {removed: false, data: thisMetric} + } else { + // console.log('Case 2.2 -> Returned') + // console.log({removed: false, data: null}) + return {removed: false, data: null} + } + } + }) + .map(function(jobMetrics) { + if (jobMetrics.data != null && jobMetrics.data.length > 0) { + // console.log('Before') + // console.log(jobMetrics.data) + // console.log('After') + // console.log(selectScope(jobMetrics.data)) + let res = {removed: jobMetrics.removed, data: selectScope(jobMetrics.data)} + // console.log('Packed') + // console.log(res) + return res + } else { + return jobMetrics + } + }) $: metricsQuery.variables = { id: job.id, metrics, scopes } @@ -81,17 +134,20 @@ {:else} {#each sortAndSelectScope($metricsQuery.data.jobMetrics) as metric, i (metric || i)} - {#if metric != null} + + {#if metric.removed == false && metric.data != null} + {:else if metric.removed == true && metric.data == null} + Metric disabled for subcluster '{ job.subCluster }' {:else} Missing Data {/if} diff --git a/web/frontend/src/plots/Histogram.svelte b/web/frontend/src/plots/Histogram.svelte index b114f07..a9c0bd1 100644 --- a/web/frontend/src/plots/Histogram.svelte +++ b/web/frontend/src/plots/Histogram.svelte @@ -18,10 +18,10 @@ import { onMount } from 'svelte' export let data - export let width - export let height - export let xlabel - export let ylabel + export let width = 500 + export let height = 300 + export let xlabel = '' + export let ylabel = '' export let min = null export let max = null export let label = formatNumber diff --git a/web/frontend/src/plots/Polar.svelte b/web/frontend/src/plots/Polar.svelte index 6731d8a..6a013dc 100644 --- a/web/frontend/src/plots/Polar.svelte +++ b/web/frontend/src/plots/Polar.svelte @@ -18,7 +18,7 @@ let ctx, canvasElement const labels = metrics.filter(name => { - if (!jobMetrics.find(m => m.name == name && m.metric.scope == "node")) { + if (!jobMetrics.find(m => m.name == name && m.scope == "node")) { console.warn(`PolarPlot: No metric data for '${name}'`) return false } @@ -27,7 +27,7 @@ const getValuesForStat = 
(getStat) => labels.map(name => { const peak = metricConfig(cluster, name).peak - const metric = jobMetrics.find(m => m.name == name && m.metric.scope == "node") + const metric = jobMetrics.find(m => m.name == name && m.scope == "node") const value = getStat(metric.metric) / peak return value <= 1. ? value : 1. }) diff --git a/web/frontend/src/plots/Roofline.svelte b/web/frontend/src/plots/Roofline.svelte index dbf0431..e1da9f3 100644 --- a/web/frontend/src/plots/Roofline.svelte +++ b/web/frontend/src/plots/Roofline.svelte @@ -71,7 +71,7 @@ if (width <= 0) return - const [minX, maxX, minY, maxY] = [0.01, 1000, 1., cluster?.flopRateSimd || defaultMaxY] + const [minX, maxX, minY, maxY] = [0.01, 1000, 1., cluster?.flopRateSimd?.value || defaultMaxY] const w = width - paddingLeft - paddingRight const h = height - paddingTop - paddingBottom @@ -185,13 +185,13 @@ ctx.lineWidth = 2 ctx.beginPath() if (cluster != null) { - const ycut = 0.01 * cluster.memoryBandwidth - const scalarKnee = (cluster.flopRateScalar - ycut) / cluster.memoryBandwidth - const simdKnee = (cluster.flopRateSimd - ycut) / cluster.memoryBandwidth + const ycut = 0.01 * cluster.memoryBandwidth.value + const scalarKnee = (cluster.flopRateScalar.value - ycut) / cluster.memoryBandwidth.value + const simdKnee = (cluster.flopRateSimd.value - ycut) / cluster.memoryBandwidth.value const scalarKneeX = getCanvasX(scalarKnee), simdKneeX = getCanvasX(simdKnee), - flopRateScalarY = getCanvasY(cluster.flopRateScalar), - flopRateSimdY = getCanvasY(cluster.flopRateSimd) + flopRateScalarY = getCanvasY(cluster.flopRateScalar.value), + flopRateSimdY = getCanvasY(cluster.flopRateSimd.value) if (scalarKneeX < width - paddingRight) { ctx.moveTo(scalarKneeX, flopRateScalarY) @@ -270,8 +270,8 @@ export function transformPerNodeData(nodes) { const x = [], y = [], c = [] for (let node of nodes) { - let flopsAny = node.metrics.find(m => m.name == 'flops_any' && m.metric.scope == 'node')?.metric - let memBw = node.metrics.find(m => m.name == 'mem_bw' && m.metric.scope == 'node')?.metric + let flopsAny = node.metrics.find(m => m.name == 'flops_any' && m.scope == 'node')?.metric + let memBw = node.metrics.find(m => m.name == 'mem_bw' && m.scope == 'node')?.metric if (!flopsAny || !memBw) continue @@ -301,8 +301,8 @@ export let memBw = null export let cluster = null export let maxY = null - export let width - export let height + export let width = 500 + export let height = 300 export let tiles = null export let colorDots = true export let showTime = true diff --git a/web/frontend/src/utils.js b/web/frontend/src/utils.js index 25212f3..a954fea 100644 --- a/web/frontend/src/utils.js +++ b/web/frontend/src/utils.js @@ -37,11 +37,11 @@ export function init(extraInitQuery = '') { clusters { name, metricConfig { - name, unit {base, prefix}, peak, + name, unit { base, prefix }, peak, normal, caution, alert, timestep, scope, aggregation, - subClusters { name, peak, normal, caution, alert } + subClusters { name, peak, normal, caution, alert, remove } } partitions subClusters { @@ -49,9 +49,9 @@ export function init(extraInitQuery = '') { socketsPerNode coresPerSocket threadsPerCore - flopRateScalar - flopRateSimd - memoryBandwidth + flopRateScalar { unit { base, prefix }, value } + flopRateSimd { unit { base, prefix }, value } + memoryBandwidth { unit { base, prefix }, value } numberOfNodes topology { node, socket, core
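The utils.js selections above (unit { base, prefix }, flopRateScalar { unit { base, prefix }, value }, and so on) all build on the same unit shape that metricdata.go now fills in. As a compact reference, a sketch of that shape assuming only the fields used in this patch; the actual schema.Unit declaration in pkg/schema may carry different struct tags or additional fields, and the example values are illustrative only.

    package schema

    // Unit as consumed in this patch: an optional metric prefix plus a base unit.
    type Unit struct {
        Prefix string `json:"prefix,omitempty"` // e.g. "G" (assumed example)
        Base   string `json:"base"`             // e.g. "F/s" or "B/s" (assumed examples)
    }

    // The Svelte views compose the display string as (prefix or '') + (base or '');
    // the same composition expressed as a Go helper:
    func (u Unit) String() string {
        return u.Prefix + u.Base // e.g. "GF/s"
    }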